karafka-rdkafka 0.14.10 → 0.15.0.alpha2

Sign up to get free protection for your applications and to get access to all the features.
@@ -26,6 +26,7 @@ module Rdkafka
26
26
 
27
27
  RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS = -175
28
28
  RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS = -174
29
+ RD_KAFKA_RESP_ERR__STATE = -172
29
30
  RD_KAFKA_RESP_ERR__NOENT = -156
30
31
  RD_KAFKA_RESP_ERR_NO_ERROR = 0
31
32
 
@@ -97,6 +98,48 @@ module Rdkafka
97
98
  attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
98
99
  attach_function :rd_kafka_topic_partition_list_copy, [:pointer], :pointer
99
100
 
101
+ # Configs management
102
+ #
103
+ # Structs for management of configurations
104
+ # Each configuration is attached to a resource and one resource can have many configuration
105
 + # details. Each resource will also have separate error results if obtaining configuration
106
+ # was not possible for any reason
107
+ class ConfigResource < FFI::Struct
108
+ layout :type, :int,
109
+ :name, :string
110
+ end
111
+
112
+ attach_function :rd_kafka_DescribeConfigs, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
113
+ attach_function :rd_kafka_ConfigResource_new, [:int32, :pointer], :pointer
114
+ attach_function :rd_kafka_ConfigResource_destroy_array, [:pointer, :int32], :void
115
+ attach_function :rd_kafka_event_DescribeConfigs_result, [:pointer], :pointer
116
+ attach_function :rd_kafka_DescribeConfigs_result_resources, [:pointer, :pointer], :pointer
117
+ attach_function :rd_kafka_ConfigResource_configs, [:pointer, :pointer], :pointer
118
+ attach_function :rd_kafka_ConfigEntry_name, [:pointer], :string
119
+ attach_function :rd_kafka_ConfigEntry_value, [:pointer], :string
120
+ attach_function :rd_kafka_ConfigEntry_is_read_only, [:pointer], :int
121
+ attach_function :rd_kafka_ConfigEntry_is_default, [:pointer], :int
122
+ attach_function :rd_kafka_ConfigEntry_is_sensitive, [:pointer], :int
123
+ attach_function :rd_kafka_ConfigEntry_is_synonym, [:pointer], :int
124
+ attach_function :rd_kafka_ConfigEntry_synonyms, [:pointer, :pointer], :pointer
125
+ attach_function :rd_kafka_ConfigResource_error, [:pointer], :int
126
+ attach_function :rd_kafka_ConfigResource_error_string, [:pointer], :string
127
+ attach_function :rd_kafka_IncrementalAlterConfigs, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
128
+ attach_function :rd_kafka_IncrementalAlterConfigs_result_resources, [:pointer, :pointer], :pointer
129
+ attach_function :rd_kafka_ConfigResource_add_incremental_config, [:pointer, :string, :int32, :string], :pointer
130
+ attach_function :rd_kafka_event_IncrementalAlterConfigs_result, [:pointer], :pointer
131
+
132
+ RD_KAFKA_ADMIN_OP_DESCRIBECONFIGS = 5
133
+ RD_KAFKA_EVENT_DESCRIBECONFIGS_RESULT = 104
134
+
135
+ RD_KAFKA_ADMIN_OP_INCREMENTALALTERCONFIGS = 16
136
+ RD_KAFKA_EVENT_INCREMENTALALTERCONFIGS_RESULT = 131072
137
+
138
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_SET = 0
139
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_DELETE = 1
140
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_APPEND = 2
141
+ RD_KAFKA_ALTER_CONFIG_OP_TYPE_SUBTRACT = 3
142
+
100
143
  # Errors
101
144
 
102
145
  attach_function :rd_kafka_err2name, [:int], :string
@@ -125,7 +168,10 @@ module Rdkafka
125
168
  callback :error_cb, [:pointer, :int, :string, :pointer], :void
126
169
  attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void
127
170
  attach_function :rd_kafka_rebalance_protocol, [:pointer], :string
128
-
171
+ callback :oauthbearer_token_refresh_cb, [:pointer, :string, :pointer], :void
172
+ attach_function :rd_kafka_conf_set_oauthbearer_token_refresh_cb, [:pointer, :oauthbearer_token_refresh_cb], :void
173
+ attach_function :rd_kafka_oauthbearer_set_token, [:pointer, :string, :int64, :pointer, :pointer, :int, :pointer, :int], :int
174
+ attach_function :rd_kafka_oauthbearer_set_token_failure, [:pointer, :string], :int
129
175
  # Log queue
130
176
  attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
131
177
  attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
@@ -175,6 +221,32 @@ module Rdkafka
175
221
  end
176
222
  end
177
223
 
224
+ # The OAuth callback is currently global and contextless.
225
 + # This means that the callback will be called for all instances, and the callback must be able to determine which instance it is associated with.
226
+ # The instance name will be provided in the callback, allowing the callback to reference the correct instance.
227
+ #
228
+ # An example of how to use the instance name in the callback is given below.
229
+ # The `refresh_token` is configured as the `oauthbearer_token_refresh_callback`.
230
+ # `instances` is a map of client names to client instances, maintained by the user.
231
+ #
232
+ # ```
233
+ # def refresh_token(config, client_name)
234
+ # client = instances[client_name]
235
+ # client.oauthbearer_set_token(
236
+ # token: 'new-token-value',
237
 + # lifetime_ms: token_lifetime_ms,
238
+ # principal_name: 'principal-name'
239
+ # )
240
+ # end
241
+ # ```
242
+ OAuthbearerTokenRefreshCallback = FFI::Function.new(
243
+ :void, [:pointer, :string, :pointer]
244
+ ) do |client_ptr, config, _opaque|
245
+ if Rdkafka::Config.oauthbearer_token_refresh_callback
246
+ Rdkafka::Config.oauthbearer_token_refresh_callback.call(config, Rdkafka::Bindings.rd_kafka_name(client_ptr))
247
+ end
248
+ end
249
+
178
250
  # Handle
179
251
 
180
252
  enum :kafka_type, [
@@ -113,6 +113,42 @@ module Rdkafka
113
113
  end
114
114
  end
115
115
 
116
+ class DescribeConfigsResult
117
+ attr_reader :result_error, :error_string, :results, :results_count
118
+
119
+ def initialize(event_ptr)
120
+ @results=[]
121
+ @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
122
+ @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
123
+
124
+ if @result_error == 0
125
+ configs_describe_result = Rdkafka::Bindings.rd_kafka_event_DescribeConfigs_result(event_ptr)
126
 + # Get the number of config resources in the result
127
+ pointer_to_size_t = FFI::MemoryPointer.new(:int32)
128
+ @results = Rdkafka::Bindings.rd_kafka_DescribeConfigs_result_resources(configs_describe_result, pointer_to_size_t)
129
+ @results_count = pointer_to_size_t.read_int
130
+ end
131
+ end
132
+ end
133
+
134
+ class IncrementalAlterConfigsResult
135
+ attr_reader :result_error, :error_string, :results, :results_count
136
+
137
+ def initialize(event_ptr)
138
+ @results=[]
139
+ @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
140
+ @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
141
+
142
+ if @result_error == 0
143
+ incremental_alter_result = Rdkafka::Bindings.rd_kafka_event_IncrementalAlterConfigs_result(event_ptr)
144
 + # Get the number of config resources in the result
145
+ pointer_to_size_t = FFI::MemoryPointer.new(:int32)
146
+ @results = Rdkafka::Bindings.rd_kafka_IncrementalAlterConfigs_result_resources(incremental_alter_result, pointer_to_size_t)
147
+ @results_count = pointer_to_size_t.read_int
148
+ end
149
+ end
150
+ end
151
+
116
152
  # FFI Function used for Create Topic and Delete Topic callbacks
117
153
  BackgroundEventCallbackFunction = FFI::Function.new(
118
154
  :void, [:pointer, :pointer, :pointer]
@@ -123,20 +159,24 @@ module Rdkafka
123
159
  # @private
124
160
  class BackgroundEventCallback
125
161
  def self.call(_, event_ptr, _)
126
- event_type = Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
127
- if event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
162
+ case Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
163
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
128
164
  process_create_topic(event_ptr)
129
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
165
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBECONFIGS_RESULT
166
+ process_describe_configs(event_ptr)
167
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_INCREMENTALALTERCONFIGS_RESULT
168
+ process_incremental_alter_configs(event_ptr)
169
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
130
170
  process_delete_topic(event_ptr)
131
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
171
+ when Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
132
172
  process_create_partitions(event_ptr)
133
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
173
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
134
174
  process_create_acl(event_ptr)
135
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
175
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
136
176
  process_delete_acl(event_ptr)
137
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
177
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
138
178
  process_describe_acl(event_ptr)
139
- elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
179
+ when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
140
180
  process_delete_groups(event_ptr)
141
181
  end
142
182
  end
@@ -156,7 +196,44 @@ module Rdkafka
156
196
  create_topic_handle[:response] = create_topic_results[0].result_error
157
197
  create_topic_handle[:error_string] = create_topic_results[0].error_string
158
198
  create_topic_handle[:result_name] = create_topic_results[0].result_name
159
- create_topic_handle[:pending] = false
199
+
200
+ create_topic_handle.unlock
201
+ end
202
+ end
203
+
204
+ def self.process_describe_configs(event_ptr)
205
+ describe_configs = DescribeConfigsResult.new(event_ptr)
206
+ describe_configs_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
207
+
208
+ if describe_configs_handle = Rdkafka::Admin::DescribeConfigsHandle.remove(describe_configs_handle_ptr.address)
209
+ describe_configs_handle[:response] = describe_configs.result_error
210
+ describe_configs_handle[:response_string] = describe_configs.error_string
211
+ describe_configs_handle[:pending] = false
212
+
213
+ if describe_configs.result_error == 0
214
+ describe_configs_handle[:config_entries] = describe_configs.results
215
+ describe_configs_handle[:entry_count] = describe_configs.results_count
216
+ end
217
+
218
+ describe_configs_handle.unlock
219
+ end
220
+ end
221
+
222
+ def self.process_incremental_alter_configs(event_ptr)
223
+ incremental_alter = IncrementalAlterConfigsResult.new(event_ptr)
224
+ incremental_alter_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
225
+
226
+ if incremental_alter_handle = Rdkafka::Admin::IncrementalAlterConfigsHandle.remove(incremental_alter_handle_ptr.address)
227
+ incremental_alter_handle[:response] = incremental_alter.result_error
228
+ incremental_alter_handle[:response_string] = incremental_alter.error_string
229
+ incremental_alter_handle[:pending] = false
230
+
231
+ if incremental_alter.result_error == 0
232
+ incremental_alter_handle[:config_entries] = incremental_alter.results
233
+ incremental_alter_handle[:entry_count] = incremental_alter.results_count
234
+ end
235
+
236
+ incremental_alter_handle.unlock
160
237
  end
161
238
  end
162
239
 
@@ -173,7 +250,8 @@ module Rdkafka
173
250
  delete_group_handle[:response] = delete_group_results[0].result_error
174
251
  delete_group_handle[:error_string] = delete_group_results[0].error_string
175
252
  delete_group_handle[:result_name] = delete_group_results[0].result_name
176
- delete_group_handle[:pending] = false
253
+
254
+ delete_group_handle.unlock
177
255
  end
178
256
  end
179
257
 
@@ -190,7 +268,8 @@ module Rdkafka
190
268
  delete_topic_handle[:response] = delete_topic_results[0].result_error
191
269
  delete_topic_handle[:error_string] = delete_topic_results[0].error_string
192
270
  delete_topic_handle[:result_name] = delete_topic_results[0].result_name
193
- delete_topic_handle[:pending] = false
271
+
272
+ delete_topic_handle.unlock
194
273
  end
195
274
  end
196
275
 
@@ -207,7 +286,8 @@ module Rdkafka
207
286
  create_partitions_handle[:response] = create_partitions_results[0].result_error
208
287
  create_partitions_handle[:error_string] = create_partitions_results[0].error_string
209
288
  create_partitions_handle[:result_name] = create_partitions_results[0].result_name
210
- create_partitions_handle[:pending] = false
289
+
290
+ create_partitions_handle.unlock
211
291
  end
212
292
  end
213
293
 
@@ -223,7 +303,8 @@ module Rdkafka
223
303
  if create_acl_handle = Rdkafka::Admin::CreateAclHandle.remove(create_acl_handle_ptr.address)
224
304
  create_acl_handle[:response] = create_acl_results[0].result_error
225
305
  create_acl_handle[:response_string] = create_acl_results[0].error_string
226
- create_acl_handle[:pending] = false
306
+
307
+ create_acl_handle.unlock
227
308
  end
228
309
  end
229
310
 
@@ -239,11 +320,13 @@ module Rdkafka
239
320
  if delete_acl_handle = Rdkafka::Admin::DeleteAclHandle.remove(delete_acl_handle_ptr.address)
240
321
  delete_acl_handle[:response] = delete_acl_results[0].result_error
241
322
  delete_acl_handle[:response_string] = delete_acl_results[0].error_string
242
- delete_acl_handle[:pending] = false
323
+
243
324
  if delete_acl_results[0].result_error == 0
244
325
  delete_acl_handle[:matching_acls] = delete_acl_results[0].matching_acls
245
326
  delete_acl_handle[:matching_acls_count] = delete_acl_results[0].matching_acls_count
246
327
  end
328
+
329
+ delete_acl_handle.unlock
247
330
  end
248
331
  end
249
332
 
@@ -254,17 +337,18 @@ module Rdkafka
254
337
  if describe_acl_handle = Rdkafka::Admin::DescribeAclHandle.remove(describe_acl_handle_ptr.address)
255
338
  describe_acl_handle[:response] = describe_acl.result_error
256
339
  describe_acl_handle[:response_string] = describe_acl.error_string
257
- describe_acl_handle[:pending] = false
340
+
258
341
  if describe_acl.result_error == 0
259
- describe_acl_handle[:acls] = describe_acl.matching_acls
342
+ describe_acl_handle[:acls] = describe_acl.matching_acls
260
343
  describe_acl_handle[:acls_count] = describe_acl.matching_acls_count
261
344
  end
345
+
346
+ describe_acl_handle.unlock
262
347
  end
263
348
  end
264
349
  end
265
350
 
266
351
  # FFI Function used for Message Delivery callbacks
267
-
268
352
  DeliveryCallbackFunction = FFI::Function.new(
269
353
  :void, [:pointer, :pointer, :pointer]
270
354
  ) do |client_ptr, message_ptr, opaque_ptr|
@@ -284,7 +368,6 @@ module Rdkafka
284
368
  delivery_handle[:partition] = message[:partition]
285
369
  delivery_handle[:offset] = message[:offset]
286
370
  delivery_handle[:topic_name] = FFI::MemoryPointer.from_string(topic_name)
287
- delivery_handle[:pending] = false
288
371
 
289
372
  # Call delivery callback on opaque
290
373
  if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
@@ -299,9 +382,10 @@ module Rdkafka
299
382
  delivery_handle
300
383
  )
301
384
  end
385
+
386
+ delivery_handle.unlock
302
387
  end
303
388
  end
304
389
  end
305
-
306
390
  end
307
391
  end
@@ -15,12 +15,13 @@ module Rdkafka
15
15
  @@opaques = ObjectSpace::WeakMap.new
16
16
  # @private
17
17
  @@log_queue = Queue.new
18
- # @private
19
18
  # We memoize thread on the first log flush
20
19
  # This allows us also to restart logger thread on forks
21
20
  @@log_thread = nil
22
21
  # @private
23
22
  @@log_mutex = Mutex.new
23
+ # @private
24
+ @@oauthbearer_token_refresh_callback = nil
24
25
 
25
26
  # Returns the current logger, by default this is a logger to stdout.
26
27
  #
@@ -104,6 +105,24 @@ module Rdkafka
104
105
  @@error_callback
105
106
  end
106
107
 
108
+ # Sets the SASL/OAUTHBEARER token refresh callback.
109
+ # This callback will be triggered when it is time to refresh the client's OAUTHBEARER token
110
+ #
111
+ # @param callback [Proc, #call] The callback
112
+ #
113
+ # @return [nil]
114
+ def self.oauthbearer_token_refresh_callback=(callback)
115
+ raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
116
+ @@oauthbearer_token_refresh_callback = callback
117
+ end
118
+
119
+ # Returns the current oauthbearer_token_refresh_callback callback, by default this is nil.
120
+ #
121
+ # @return [Proc, nil]
122
+ def self.oauthbearer_token_refresh_callback
123
+ @@oauthbearer_token_refresh_callback
124
+ end
125
+
107
126
  # @private
108
127
  def self.opaques
109
128
  @@opaques
@@ -300,6 +319,9 @@ module Rdkafka
300
319
 
301
320
  # Set error callback
302
321
  Rdkafka::Bindings.rd_kafka_conf_set_error_cb(config, Rdkafka::Bindings::ErrorCallback)
322
+
323
+ # Set oauth callback
324
+ Rdkafka::Bindings.rd_kafka_conf_set_oauthbearer_token_refresh_cb(config, Rdkafka::Bindings::OAuthbearerTokenRefreshCallback)
303
325
  end
304
326
  end
305
327
 
@@ -13,6 +13,7 @@ module Rdkafka
13
13
  class Consumer
14
14
  include Enumerable
15
15
  include Helpers::Time
16
+ include Helpers::OAuth
16
17
 
17
18
  # @private
18
19
  def initialize(native_kafka)
@@ -0,0 +1,47 @@
1
+ module Rdkafka
2
+ module Helpers
3
+
4
+ module OAuth
5
+
6
+ # Set the OAuthBearer token
7
+ #
8
+ # @param token [String] the mandatory token value to set, often (but not necessarily) a JWS compact serialization as per https://tools.ietf.org/html/rfc7515#section-3.1.
9
+ # @param lifetime_ms [Integer] when the token expires, in terms of the number of milliseconds since the epoch. See https://currentmillis.com/.
10
+ # @param principal_name [String] the mandatory Kafka principal name associated with the token.
11
+ # @param extensions [Hash] optional SASL extensions key-value pairs to be communicated to the broker as additional key-value pairs during the initial client response as per https://tools.ietf.org/html/rfc7628#section-3.1.
12
+ # @return [Integer] 0 on success
13
+ def oauthbearer_set_token(token:, lifetime_ms:, principal_name:, extensions: nil)
14
+ error_buffer = FFI::MemoryPointer.from_string(" " * 256)
15
+ @native_kafka.with_inner do |inner|
16
+ response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(
17
+ inner, token, lifetime_ms, principal_name,
18
+ flatten_extensions(extensions), extension_size(extensions), error_buffer, 256
19
+ )
20
+ if response != 0
21
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(
22
+ inner,
23
+ "Failed to set token: #{error_buffer.read_string}"
24
+ )
25
+ end
26
+
27
+ response
28
+ end
29
+ end
30
+
31
+ private
32
+
33
+ # Flatten the extensions hash into a string according to the spec, https://datatracker.ietf.org/doc/html/rfc7628#section-3.1
34
+ def flatten_extensions(extensions)
35
+ return nil unless extensions
36
+ "\x01#{extensions.map { |e| e.join("=") }.join("\x01")}"
37
+ end
38
+
39
+ # extension_size is the number of keys + values which should be a non-negative even number
40
+ # https://github.com/confluentinc/librdkafka/blob/master/src/rdkafka_sasl_oauthbearer.c#L327-L347
41
+ def extension_size(extensions)
42
+ return 0 unless extensions
43
+ extensions.size * 2
44
+ end
45
+ end
46
+ end
47
+ end
@@ -4,6 +4,7 @@ module Rdkafka
4
4
  # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
5
5
  class Producer
6
6
  include Helpers::Time
7
+ include Helpers::OAuth
7
8
 
8
9
  # Cache partitions count for 30 seconds
9
10
  PARTITIONS_COUNT_TTL = 30
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Rdkafka
4
- VERSION = "0.14.10"
4
+ VERSION = "0.15.0.alpha2"
5
5
  LIBRDKAFKA_VERSION = "2.3.0"
6
6
  LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
7
7
  end
data/lib/rdkafka.rb CHANGED
@@ -7,6 +7,7 @@ require "json"
7
7
 
8
8
  require "rdkafka/version"
9
9
  require "rdkafka/helpers/time"
10
+ require "rdkafka/helpers/oauth"
10
11
  require "rdkafka/abstract_handle"
11
12
  require "rdkafka/admin"
12
13
  require "rdkafka/admin/create_topic_handle"
@@ -23,7 +24,13 @@ require "rdkafka/admin/delete_acl_handle"
23
24
  require "rdkafka/admin/delete_acl_report"
24
25
  require "rdkafka/admin/describe_acl_handle"
25
26
  require "rdkafka/admin/describe_acl_report"
27
+ require "rdkafka/admin/describe_configs_handle"
28
+ require "rdkafka/admin/describe_configs_report"
29
+ require "rdkafka/admin/incremental_alter_configs_handle"
30
+ require "rdkafka/admin/incremental_alter_configs_report"
26
31
  require "rdkafka/admin/acl_binding_result"
32
+ require "rdkafka/admin/config_binding_result"
33
+ require "rdkafka/admin/config_resource_binding_result"
27
34
  require "rdkafka/bindings"
28
35
  require "rdkafka/callbacks"
29
36
  require "rdkafka/config"
@@ -76,37 +76,50 @@ describe Rdkafka::AbstractHandle do
76
76
  end
77
77
 
78
78
  describe "#wait" do
79
- let(:pending_handle) { true }
79
+ context 'when pending_handle true' do
80
+ let(:pending_handle) { true }
80
81
 
81
- it "should wait until the timeout and then raise an error" do
82
- expect {
83
- subject.wait(max_wait_timeout: 0.1)
84
- }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
82
+ it "should wait until the timeout and then raise an error" do
83
+ expect(Kernel).not_to receive(:warn)
84
+ expect {
85
+ subject.wait(max_wait_timeout: 0.1)
86
+ }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
87
+ end
85
88
  end
86
89
 
87
- context "when not pending anymore and no error" do
90
+ context 'when pending_handle false' do
88
91
  let(:pending_handle) { false }
89
- let(:result) { 1 }
90
92
 
91
- it "should return a result" do
92
- wait_result = subject.wait
93
- expect(wait_result).to eq(result)
93
+ it 'should show a deprecation warning when wait_timeout is set' do
94
+ expect(Kernel).to receive(:warn).with(Rdkafka::AbstractHandle::WAIT_TIMEOUT_DEPRECATION_MESSAGE)
95
+ subject.wait(wait_timeout: 0.1)
94
96
  end
95
97
 
96
- it "should wait without a timeout" do
97
- wait_result = subject.wait(max_wait_timeout: nil)
98
- expect(wait_result).to eq(result)
98
+ context "without error" do
99
+ let(:result) { 1 }
100
+
101
+ it "should return a result" do
102
+ expect(Kernel).not_to receive(:warn)
103
+ wait_result = subject.wait
104
+ expect(wait_result).to eq(result)
105
+ end
106
+
107
+ it "should wait without a timeout" do
108
+ expect(Kernel).not_to receive(:warn)
109
+ wait_result = subject.wait(max_wait_timeout: nil)
110
+ expect(wait_result).to eq(result)
111
+ end
99
112
  end
100
- end
101
113
 
102
- context "when not pending anymore and there was an error" do
103
- let(:pending_handle) { false }
104
- let(:response) { 20 }
114
+ context "with error" do
115
+ let(:response) { 20 }
105
116
 
106
- it "should raise an rdkafka error" do
107
- expect {
108
- subject.wait
109
- }.to raise_error Rdkafka::RdkafkaError
117
+ it "should raise an rdkafka error" do
118
+ expect(Kernel).not_to receive(:warn)
119
+ expect {
120
+ subject.wait
121
+ }.to raise_error Rdkafka::RdkafkaError
122
+ end
110
123
  end
111
124
  end
112
125
  end