rdkafka 0.15.1 → 0.16.0.rc1

Files changed (42)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +2 -5
  4. data/.gitignore +2 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +16 -1
  7. data/README.md +19 -9
  8. data/docker-compose.yml +1 -1
  9. data/ext/Rakefile +8 -0
  10. data/lib/rdkafka/abstract_handle.rb +44 -20
  11. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  12. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  13. data/lib/rdkafka/admin/create_topic_report.rb +1 -1
  14. data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
  15. data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
  16. data/lib/rdkafka/admin/describe_acl_report.rb +1 -0
  17. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  18. data/lib/rdkafka/admin/describe_configs_report.rb +54 -0
  19. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  20. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +54 -0
  21. data/lib/rdkafka/admin.rb +219 -0
  22. data/lib/rdkafka/bindings.rb +86 -3
  23. data/lib/rdkafka/callbacks.rb +103 -19
  24. data/lib/rdkafka/config.rb +69 -15
  25. data/lib/rdkafka/consumer.rb +7 -0
  26. data/lib/rdkafka/helpers/oauth.rb +58 -0
  27. data/lib/rdkafka/native_kafka.rb +32 -19
  28. data/lib/rdkafka/producer.rb +101 -4
  29. data/lib/rdkafka/version.rb +1 -1
  30. data/lib/rdkafka.rb +7 -0
  31. data/rdkafka.gemspec +1 -1
  32. data/spec/rdkafka/abstract_handle_spec.rb +34 -21
  33. data/spec/rdkafka/admin_spec.rb +336 -3
  34. data/spec/rdkafka/bindings_spec.rb +97 -0
  35. data/spec/rdkafka/config_spec.rb +53 -0
  36. data/spec/rdkafka/consumer_spec.rb +54 -0
  37. data/spec/rdkafka/native_kafka_spec.rb +8 -1
  38. data/spec/rdkafka/producer_spec.rb +85 -0
  39. data/spec/spec_helper.rb +16 -1
  40. data.tar.gz.sig +0 -0
  41. metadata +11 -4
  42. metadata.gz.sig +0 -0
data/lib/rdkafka/config.rb CHANGED
@@ -15,13 +15,13 @@ module Rdkafka
     @@opaques = ObjectSpace::WeakMap.new
     # @private
     @@log_queue = Queue.new
-
-    Thread.start do
-      loop do
-        severity, msg = @@log_queue.pop
-        @@logger.add(severity, msg)
-      end
-    end
+    # We memoize thread on the first log flush
+    # This allows us also to restart logger thread on forks
+    @@log_thread = nil
+    # @private
+    @@log_mutex = Mutex.new
+    # @private
+    @@oauthbearer_token_refresh_callback = nil

     # Returns the current logger, by default this is a logger to stdout.
     #
@@ -30,6 +30,24 @@ module Rdkafka
       @@logger
     end

+    # Makes sure that there is a thread for consuming logs
+    # We do not spawn thread immediately and we need to check if it operates to support forking
+    def self.ensure_log_thread
+      return if @@log_thread && @@log_thread.alive?
+
+      @@log_mutex.synchronize do
+        # Restart if dead (fork, crash)
+        @@log_thread = nil if @@log_thread && !@@log_thread.alive?
+
+        @@log_thread ||= Thread.start do
+          loop do
+            severity, msg = @@log_queue.pop
+            @@logger.add(severity, msg)
+          end
+        end
+      end
+    end
+
     # Returns a queue whose contents will be passed to the configured logger. Each entry
     # should follow the format [Logger::Severity, String]. The benefit over calling the
     # logger directly is that this is safe to use from trap contexts.
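Note on the hunk above: `ensure_log_thread` does a cheap liveness check before taking the mutex, so callers can invoke it before every log flush. A minimal sketch of the fork-recovery behavior it enables, using the `log_queue` accessor documented in the surrounding context (the direct `log_queue <<` pushes are illustrative; normally librdkafka's log callback does the enqueueing):

require "rdkafka"

Rdkafka::Config.ensure_log_thread
Rdkafka::Config.log_queue << [Logger::INFO, "rdkafka: before fork"]

fork do
  # The copied logger thread is not alive in the child, so this call
  # replaces it instead of leaving entries stuck in @@log_queue
  Rdkafka::Config.ensure_log_thread
  Rdkafka::Config.log_queue << [Logger::INFO, "rdkafka: after fork"]
end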
@@ -87,6 +105,24 @@ module Rdkafka
       @@error_callback
     end

+    # Sets the SASL/OAUTHBEARER token refresh callback.
+    # This callback will be triggered when it is time to refresh the client's OAUTHBEARER token
+    #
+    # @param callback [Proc, #call] The callback
+    #
+    # @return [nil]
+    def self.oauthbearer_token_refresh_callback=(callback)
+      raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
+      @@oauthbearer_token_refresh_callback = callback
+    end
+
+    # Returns the current oauthbearer_token_refresh_callback callback, by default this is nil.
+    #
+    # @return [Proc, nil]
+    def self.oauthbearer_token_refresh_callback
+      @@oauthbearer_token_refresh_callback
+    end
+
     # @private
     def self.opaques
       @@opaques
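The setter above accepts anything that responds to `call`, or `nil` to clear it. A registration sketch; the two-argument signature is an assumption about how the bindings dispatch the librdkafka callback, so verify it against `bindings.rb` before relying on it:

Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |bearer_config, client_name|
  # bearer_config: the sasl.oauthbearer.config string (assumed)
  # client_name: identifies which client needs a fresh token (assumed)
  puts "OAuth token refresh requested for #{client_name}"
end

# Non-callables raise TypeError; nil clears the callback
Rdkafka::Config.oauthbearer_token_refresh_callback = nil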
@@ -159,11 +195,13 @@ module Rdkafka

     # Creates a consumer with this configuration.
     #
+    # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
+    #   automatically. Defaults to true. Set to false only when doing complex initialization.
     # @return [Consumer] The created consumer
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    def consumer
+    def consumer(native_kafka_auto_start: true)
       opaque = Opaque.new
       config = native_config(opaque)

@@ -183,18 +221,21 @@ module Rdkafka
       Rdkafka::NativeKafka.new(
         kafka,
         run_polling_thread: false,
-        opaque: opaque
+        opaque: opaque,
+        auto_start: native_kafka_auto_start
       )
     )
   end

   # Create a producer with this configuration.
   #
+  # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
+  #   automatically. Defaults to true. Set to false only when doing complex initialization.
   # @return [Producer] The created producer
   #
   # @raise [ConfigError] When the configuration contains invalid options
   # @raise [ClientCreationError] When the native client cannot be created
-  def producer
+  def producer(native_kafka_auto_start: true)
     # Create opaque
     opaque = Opaque.new
     # Create Kafka config
@@ -203,11 +244,15 @@ module Rdkafka
     Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
     # Return producer with Kafka client
     partitioner_name = self[:partitioner] || self["partitioner"]
+
+    kafka = native_kafka(config, :rd_kafka_producer)
+
     Rdkafka::Producer.new(
       Rdkafka::NativeKafka.new(
-        native_kafka(config, :rd_kafka_producer),
+        kafka,
         run_polling_thread: true,
-        opaque: opaque
+        opaque: opaque,
+        auto_start: native_kafka_auto_start
       ),
       partitioner_name
     ).tap do |producer|
@@ -217,19 +262,25 @@ module Rdkafka

   # Creates an admin instance with this configuration.
   #
+  # @param native_kafka_auto_start [Boolean] should the native kafka operations be started
+  #   automatically. Defaults to true. Set to false only when doing complex initialization.
   # @return [Admin] The created admin instance
   #
   # @raise [ConfigError] When the configuration contains invalid options
   # @raise [ClientCreationError] When the native client cannot be created
-  def admin
+  def admin(native_kafka_auto_start: true)
     opaque = Opaque.new
     config = native_config(opaque)
     Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
+
+    kafka = native_kafka(config, :rd_kafka_producer)
+
     Rdkafka::Admin.new(
       Rdkafka::NativeKafka.new(
-        native_kafka(config, :rd_kafka_producer),
+        kafka,
         run_polling_thread: true,
-        opaque: opaque
+        opaque: opaque,
+        auto_start: native_kafka_auto_start
       )
     )
   end
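All three factory methods gain the same `native_kafka_auto_start` flag. A minimal deferred-start sketch for the producer (the broker address is a placeholder):

config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")

# Skip the initial rd_kafka_poll and the background polling thread for now
producer = config.producer(native_kafka_auto_start: false)

# ... complex initialization (callbacks, instrumentation, tokens) goes here ...

# Runs NativeKafka#start: the initial poll plus the delivery-callback polling thread
producer.start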
@@ -283,6 +334,9 @@ module Rdkafka

       # Set error callback
       Rdkafka::Bindings.rd_kafka_conf_set_error_cb(config, Rdkafka::Bindings::ErrorCallback)
+
+      # Set oauth callback
+      Rdkafka::Bindings.rd_kafka_conf_set_oauthbearer_token_refresh_cb(config, Rdkafka::Bindings::OAuthbearerTokenRefreshCallback)
     end
   end

data/lib/rdkafka/consumer.rb CHANGED
@@ -13,12 +13,19 @@ module Rdkafka
   class Consumer
     include Enumerable
     include Helpers::Time
+    include Helpers::OAuth

     # @private
     def initialize(native_kafka)
       @native_kafka = native_kafka
     end

+    # Starts the native Kafka polling thread and kicks off the init polling
+    # @note Not needed to run unless explicit start was disabled
+    def start
+      @native_kafka.start
+    end
+
     # @return [String] consumer name
     def name
       @name ||= @native_kafka.with_inner do |inner|
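With SASL/OAUTHBEARER, deferred start pairs naturally with the `Helpers::OAuth` include added above: a token can be seeded before the first poll ever runs. A hedged sketch; the token values are placeholders obtained out of band:

consumer = config.consumer(native_kafka_auto_start: false)

consumer.oauthbearer_set_token(
  token: initial_token,          # placeholder: a token string from your identity provider
  lifetime_ms: expiry_epoch_ms,  # placeholder: expiry in milliseconds since the epoch
  principal_name: "kafka-client"
)

consumer.start
consumer.subscribe("events")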
data/lib/rdkafka/helpers/oauth.rb ADDED
@@ -0,0 +1,58 @@
+module Rdkafka
+  module Helpers
+
+    module OAuth
+
+      # Set the OAuthBearer token
+      #
+      # @param token [String] the mandatory token value to set, often (but not necessarily) a JWS compact serialization as per https://tools.ietf.org/html/rfc7515#section-3.1.
+      # @param lifetime_ms [Integer] when the token expires, in terms of the number of milliseconds since the epoch. See https://currentmillis.com/.
+      # @param principal_name [String] the mandatory Kafka principal name associated with the token.
+      # @param extensions [Hash] optional SASL extensions key-value pairs to be communicated to the broker as additional key-value pairs during the initial client response as per https://tools.ietf.org/html/rfc7628#section-3.1.
+      # @return [Integer] 0 on success
+      def oauthbearer_set_token(token:, lifetime_ms:, principal_name:, extensions: nil)
+        error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+
+        response = @native_kafka.with_inner do |inner|
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(
+            inner, token, lifetime_ms, principal_name,
+            flatten_extensions(extensions), extension_size(extensions), error_buffer, 256
+          )
+        end
+
+        return response if response.zero?
+
+        oauthbearer_set_token_failure("Failed to set token: #{error_buffer.read_string}")
+
+        response
+      end
+
+      # Marks failed oauth token acquire in librdkafka
+      #
+      # @param reason [String] human readable error reason for failing to acquire token
+      def oauthbearer_set_token_failure(reason)
+        @native_kafka.with_inner do |inner|
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(
+            inner,
+            reason
+          )
+        end
+      end
+
+      private
+
+      # Flatten the extensions hash into a string according to the spec, https://datatracker.ietf.org/doc/html/rfc7628#section-3.1
+      def flatten_extensions(extensions)
+        return nil unless extensions
+        "\x01#{extensions.map { |e| e.join("=") }.join("\x01")}"
+      end
+
+      # extension_size is the number of keys + values which should be a non-negative even number
+      # https://github.com/confluentinc/librdkafka/blob/master/src/rdkafka_sasl_oauthbearer.c#L327-L347
+      def extension_size(extensions)
+        return 0 unless extensions
+        extensions.size * 2
+      end
+    end
+  end
+end
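Putting the helper to work: per RFC 7628 the extensions hash is flattened into \x01-separated key=value pairs, and `extension_size` reports keys plus values. A usage sketch with placeholder values:

producer.oauthbearer_set_token(
  token: jwt_string,                           # placeholder: from your identity provider
  lifetime_ms: (Time.now.to_i + 3600) * 1000,  # expires in one hour, as epoch milliseconds
  principal_name: "kafka-client",
  # Flattened to "\x01logicalCluster=lkc-1\x01identityPoolId=pool-1", extension_size 4
  extensions: { "logicalCluster" => "lkc-1", "identityPoolId" => "pool-1" }
)

# On acquisition failure, report it so librdkafka can back off and retry
producer.oauthbearer_set_token_failure("identity provider unreachable")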
data/lib/rdkafka/native_kafka.rb CHANGED
@@ -4,7 +4,7 @@ module Rdkafka
   # @private
   # A wrapper around a native kafka that polls and cleanly exits
   class NativeKafka
-    def initialize(inner, run_polling_thread:, opaque:)
+    def initialize(inner, run_polling_thread:, opaque:, auto_start: true)
       @inner = inner
       @opaque = opaque
       # Lock around external access
@@ -28,30 +28,43 @@ module Rdkafka
       # counter for operations in progress using inner
       @operations_in_progress = 0

-      # Trigger initial poll to make sure oauthbearer cb and other initial cb are handled
-      Rdkafka::Bindings.rd_kafka_poll(inner, 0)
+      @run_polling_thread = run_polling_thread

-      if run_polling_thread
-        # Start thread to poll client for delivery callbacks,
-        # not used in consumer.
-        @polling_thread = Thread.new do
-          loop do
-            @poll_mutex.synchronize do
-              Rdkafka::Bindings.rd_kafka_poll(inner, 100)
-            end
+      start if auto_start

-            # Exit thread if closing and the poll queue is empty
-            if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(inner) == 0
-              break
+      @closing = false
+    end
+
+    def start
+      synchronize do
+        return if @started
+
+        @started = true
+
+        # Trigger initial poll to make sure oauthbearer cb and other initial cb are handled
+        Rdkafka::Bindings.rd_kafka_poll(@inner, 0)
+
+        if @run_polling_thread
+          # Start thread to poll client for delivery callbacks,
+          # not used in consumer.
+          @polling_thread = Thread.new do
+            loop do
+              @poll_mutex.synchronize do
+                Rdkafka::Bindings.rd_kafka_poll(@inner, 100)
+              end
+
+              # Exit thread if closing and the poll queue is empty
+              if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(@inner) == 0
+                break
+              end
             end
           end
-        end

-        @polling_thread.abort_on_exception = true
-        @polling_thread[:closing] = false
+          @polling_thread.name = "rdkafka.native_kafka##{Rdkafka::Bindings.rd_kafka_name(@inner).gsub('rdkafka', '')}"
+          @polling_thread.abort_on_exception = true
+          @polling_thread[:closing] = false
+        end
       end
-
-      @closing = false
     end

     def with_inner
data/lib/rdkafka/producer.rb CHANGED
@@ -4,11 +4,20 @@ module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
     include Helpers::Time
+    include Helpers::OAuth

     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30

-    private_constant :PARTITIONS_COUNT_TTL
+    # Empty hash used as a default
+    EMPTY_HASH = {}.freeze
+
+    private_constant :PARTITIONS_COUNT_TTL, :EMPTY_HASH
+
+    # Raised when there was a critical issue when invoking rd_kafka_topic_new
+    # This is a temporary solution until https://github.com/karafka/rdkafka-ruby/issues/451 is
+    # resolved and this is normalized in all the places
+    class TopicHandleCreationError < RuntimeError; end

     # @private
     # Returns the current delivery callback, by default this is nil.
@@ -27,6 +36,8 @@ module Rdkafka
    # @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
    #   the "consistent_random" default
    def initialize(native_kafka, partitioner_name)
+     @topics_refs_map = {}
+     @topics_configs = {}
      @native_kafka = native_kafka
      @partitioner_name = partitioner_name || "consistent_random"

@@ -53,6 +64,58 @@ module Rdkafka
      end
    end

+    # Sets alternative set of configuration details that can be set per topic
+    # @note It is not allowed to re-set the same topic config twice because of the underlying
+    #   librdkafka caching
+    # @param topic [String] The topic name
+    # @param config [Hash] config we want to use per topic basis
+    # @param config_hash [Integer] hash of the config. We expect it here instead of computing it,
+    #   because it is already computed during the retrieval attempt in the `#produce` flow.
+    def set_topic_config(topic, config, config_hash)
+      # Ensure lock on topic reference just in case
+      @native_kafka.with_inner do |inner|
+        @topics_refs_map[topic] ||= {}
+        @topics_configs[topic] ||= {}
+
+        return if @topics_configs[topic].key?(config_hash)
+
+        # If config is empty, we create an empty reference that will be used with defaults
+        rd_topic_config = if config.empty?
+          nil
+        else
+          Rdkafka::Bindings.rd_kafka_topic_conf_new.tap do |topic_config|
+            config.each do |key, value|
+              error_buffer = FFI::MemoryPointer.new(:char, 256)
+              result = Rdkafka::Bindings.rd_kafka_topic_conf_set(
+                topic_config,
+                key.to_s,
+                value.to_s,
+                error_buffer,
+                256
+              )
+
+              unless result == :config_ok
+                raise Config::ConfigError.new(error_buffer.read_string)
+              end
+            end
+          end
+        end
+
+        topic_handle = Bindings.rd_kafka_topic_new(inner, topic, rd_topic_config)
+
+        raise TopicHandleCreationError.new("Error creating topic handle for topic #{topic}") if topic_handle.null?
+
+        @topics_configs[topic][config_hash] = config
+        @topics_refs_map[topic][config_hash] = topic_handle
+      end
+    end
+
+    # Starts the native Kafka polling thread and kicks off the init polling
+    # @note Not needed to run unless explicit start was disabled
+    def start
+      @native_kafka.start
+    end
+
    # @return [String] producer name
    def name
      @name ||= @native_kafka.with_inner do |inner|
@@ -76,7 +139,18 @@ module Rdkafka
    def close
      return if closed?
      ObjectSpace.undefine_finalizer(self)
-     @native_kafka.close
+
+     @native_kafka.close do
+       # We need to remove the topics references objects before we destroy the producer,
+       # otherwise they would leak out
+       @topics_refs_map.each_value do |refs|
+         refs.each_value do |ref|
+           Rdkafka::Bindings.rd_kafka_topic_destroy(ref)
+         end
+       end
+     end
+
+     @topics_refs_map.clear
    end

    # Whether this producer has closed
@@ -175,11 +249,22 @@ module Rdkafka
    # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
    # @param headers [Hash<String,String>] Optional message headers
    # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
+    # @param topic_config [Hash] topic config for given message dispatch. Allows to send messages to topics with different configuration
    #
    # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
    #
    # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
-    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil, label: nil)
+    def produce(
+      topic:,
+      payload: nil,
+      key: nil,
+      partition: nil,
+      partition_key: nil,
+      timestamp: nil,
+      headers: nil,
+      label: nil,
+      topic_config: EMPTY_HASH
+    )
      closed_producer_check(__method__)

      # Start by checking and converting the input
@@ -198,8 +283,20 @@ module Rdkafka
        key.bytesize
      end

+      topic_config_hash = topic_config.hash
+
+      # Checks if we have the rdkafka topic reference object ready. It saves us on object
+      # allocation and allows to use custom config on demand.
+      set_topic_config(topic, topic_config, topic_config_hash) unless @topics_refs_map.dig(topic, topic_config_hash)
+      topic_ref = @topics_refs_map.dig(topic, topic_config_hash)
+
      if partition_key
        partition_count = partition_count(topic)
+
+        # Check if there are no overrides for the partitioner and use the default one only when
+        # no per-topic is present.
+        partitioner_name = @topics_configs.dig(topic, topic_config_hash, :partitioner) || @partitioner_name
+
        # If the topic is not present, set to -1
        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count.positive?
      end
@@ -229,7 +326,7 @@ module Rdkafka
      DeliveryHandle.register(delivery_handle)

      args = [
-        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_TOPIC, :string, topic,
+        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_RKT, :pointer, topic_ref,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_MSGFLAGS, :int, Rdkafka::Bindings::RD_KAFKA_MSG_F_COPY,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_VALUE, :buffer_in, payload, :size_t, payload_size,
        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_KEY, :buffer_in, key, :size_t, key_size,
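End to end, `produce` now resolves a cached native topic handle per (topic, topic_config.hash) pair and passes it via RD_KAFKA_VTYPE_RKT instead of the topic name. A sketch using standard librdkafka topic-level properties; re-sending with the same `topic_config` hash reuses the handle created by `set_topic_config`:

handle = producer.produce(
  topic: "events",
  payload: "critical payload",
  topic_config: {
    "acks" => "all",
    "message.timeout.ms" => 30_000
  }
)
handle.wait(max_wait_timeout: 60)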
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,7 @@
 # frozen_string_literal: true

 module Rdkafka
-  VERSION = "0.15.1"
+  VERSION = "0.16.0.rc1"
   LIBRDKAFKA_VERSION = "2.3.0"
   LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
 end
data/lib/rdkafka.rb CHANGED
@@ -7,6 +7,7 @@ require "json"
7
7
 
8
8
  require "rdkafka/version"
9
9
  require "rdkafka/helpers/time"
10
+ require "rdkafka/helpers/oauth"
10
11
  require "rdkafka/abstract_handle"
11
12
  require "rdkafka/admin"
12
13
  require "rdkafka/admin/create_topic_handle"
@@ -23,7 +24,13 @@ require "rdkafka/admin/delete_acl_handle"
23
24
  require "rdkafka/admin/delete_acl_report"
24
25
  require "rdkafka/admin/describe_acl_handle"
25
26
  require "rdkafka/admin/describe_acl_report"
27
+ require "rdkafka/admin/describe_configs_handle"
28
+ require "rdkafka/admin/describe_configs_report"
29
+ require "rdkafka/admin/incremental_alter_configs_handle"
30
+ require "rdkafka/admin/incremental_alter_configs_report"
26
31
  require "rdkafka/admin/acl_binding_result"
32
+ require "rdkafka/admin/config_binding_result"
33
+ require "rdkafka/admin/config_resource_binding_result"
27
34
  require "rdkafka/bindings"
28
35
  require "rdkafka/callbacks"
29
36
  require "rdkafka/config"
data/rdkafka.gemspec CHANGED
@@ -15,7 +15,7 @@ Gem::Specification.new do |gem|
   gem.name = 'rdkafka'
   gem.require_paths = ['lib']
   gem.version = Rdkafka::VERSION
-  gem.required_ruby_version = '>= 2.7'
+  gem.required_ruby_version = '>= 3.0'
   gem.extensions = %w(ext/Rakefile)
   gem.cert_chain = %w[certs/cert_chain.pem]

data/spec/rdkafka/abstract_handle_spec.rb CHANGED
@@ -76,37 +76,50 @@ describe Rdkafka::AbstractHandle do
   end

   describe "#wait" do
-    let(:pending_handle) { true }
+    context 'when pending_handle true' do
+      let(:pending_handle) { true }

-    it "should wait until the timeout and then raise an error" do
-      expect {
-        subject.wait(max_wait_timeout: 0.1)
-      }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
+      it "should wait until the timeout and then raise an error" do
+        expect(Kernel).not_to receive(:warn)
+        expect {
+          subject.wait(max_wait_timeout: 0.1)
+        }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
+      end
     end

-    context "when not pending anymore and no error" do
+    context 'when pending_handle false' do
       let(:pending_handle) { false }
-      let(:result) { 1 }

-      it "should return a result" do
-        wait_result = subject.wait
-        expect(wait_result).to eq(result)
+      it 'should show a deprecation warning when wait_timeout is set' do
+        expect(Kernel).to receive(:warn).with(Rdkafka::AbstractHandle::WAIT_TIMEOUT_DEPRECATION_MESSAGE)
+        subject.wait(wait_timeout: 0.1)
       end

-      it "should wait without a timeout" do
-        wait_result = subject.wait(max_wait_timeout: nil)
-        expect(wait_result).to eq(result)
+      context "without error" do
+        let(:result) { 1 }
+
+        it "should return a result" do
+          expect(Kernel).not_to receive(:warn)
+          wait_result = subject.wait
+          expect(wait_result).to eq(result)
+        end
+
+        it "should wait without a timeout" do
+          expect(Kernel).not_to receive(:warn)
+          wait_result = subject.wait(max_wait_timeout: nil)
+          expect(wait_result).to eq(result)
+        end
       end
-    end

-    context "when not pending anymore and there was an error" do
-      let(:pending_handle) { false }
-      let(:response) { 20 }
+      context "with error" do
+        let(:response) { 20 }

-      it "should raise an rdkafka error" do
-        expect {
-          subject.wait
-        }.to raise_error Rdkafka::RdkafkaError
+        it "should raise an rdkafka error" do
+          expect(Kernel).not_to receive(:warn)
+          expect {
+            subject.wait
+          }.to raise_error Rdkafka::RdkafkaError
+        end
       end
     end
   end
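These specs pin down the new deprecation path: `wait(wait_timeout: ...)` now emits `WAIT_TIMEOUT_DEPRECATION_MESSAGE` via `Kernel.warn` instead of silently accepting the argument. Migration sketch for callers holding a delivery or admin handle:

# Before: warns with Rdkafka::AbstractHandle::WAIT_TIMEOUT_DEPRECATION_MESSAGE
handle.wait(wait_timeout: 0.1)

# After: use max_wait_timeout alone (nil waits indefinitely)
handle.wait(max_wait_timeout: 60)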