karafka-rdkafka gem: diff of changes from version 0.14.10 to 0.15.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (41)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +2 -4
  4. data/.gitignore +2 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +11 -0
  7. data/README.md +19 -9
  8. data/docker-compose.yml +1 -1
  9. data/ext/Rakefile +8 -0
  10. data/lib/rdkafka/abstract_handle.rb +44 -20
  11. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  12. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  13. data/lib/rdkafka/admin/create_topic_report.rb +1 -1
  14. data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
  15. data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
  16. data/lib/rdkafka/admin/describe_acl_report.rb +1 -0
  17. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  18. data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
  19. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  20. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
  21. data/lib/rdkafka/admin.rb +174 -0
  22. data/lib/rdkafka/bindings.rb +75 -3
  23. data/lib/rdkafka/callbacks.rb +103 -19
  24. data/lib/rdkafka/config.rb +46 -9
  25. data/lib/rdkafka/consumer.rb +7 -0
  26. data/lib/rdkafka/helpers/oauth.rb +58 -0
  27. data/lib/rdkafka/native_kafka.rb +32 -19
  28. data/lib/rdkafka/producer.rb +7 -0
  29. data/lib/rdkafka/version.rb +1 -1
  30. data/lib/rdkafka.rb +7 -0
  31. data/spec/rdkafka/abstract_handle_spec.rb +34 -21
  32. data/spec/rdkafka/admin_spec.rb +328 -3
  33. data/spec/rdkafka/bindings_spec.rb +97 -0
  34. data/spec/rdkafka/config_spec.rb +33 -0
  35. data/spec/rdkafka/consumer_spec.rb +50 -1
  36. data/spec/rdkafka/native_kafka_spec.rb +8 -1
  37. data/spec/rdkafka/producer_spec.rb +43 -0
  38. data/spec/spec_helper.rb +16 -1
  39. data.tar.gz.sig +0 -0
  40. metadata +10 -3
  41. metadata.gz.sig +0 -0
module Rdkafka
  module Helpers
    # Shared OAuthBearer token management, mixed into clients that hold a
    # +@native_kafka+ wrapper (producer, consumer, admin), so SASL/OAUTHBEARER
    # credentials can be refreshed at runtime.
    module OAuth
      # Set the OAuthBearer token
      #
      # @param token [String] the mandatory token value to set, often (but not necessarily) a JWS compact serialization as per https://tools.ietf.org/html/rfc7515#section-3.1.
      # @param lifetime_ms [Integer] when the token expires, in terms of the number of milliseconds since the epoch. See https://currentmillis.com/.
      # @param principal_name [String] the mandatory Kafka principal name associated with the token.
      # @param extensions [Hash] optional SASL extensions key-value pairs to be communicated to the broker as additional key-value pairs during the initial client response as per https://tools.ietf.org/html/rfc7628#section-3.1.
      # @return [Integer] 0 on success, a librdkafka error code otherwise
      def oauthbearer_set_token(token:, lifetime_ms:, principal_name:, extensions: nil)
        # librdkafka writes a human-readable error description into this buffer on failure
        err_buf = FFI::MemoryPointer.from_string(" " * 256)

        response = @native_kafka.with_inner do |inner|
          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(
            inner, token, lifetime_ms, principal_name,
            flatten_extensions(extensions), extension_size(extensions), err_buf, 256
          )
        end

        unless response.zero?
          # Tell librdkafka the token could not be set so it does not keep waiting
          oauthbearer_set_token_failure("Failed to set token: #{err_buf.read_string}")
        end

        response
      end

      # Marks failed oauth token acquire in librdkafka
      #
      # @param reason [String] human readable error reason for failing to acquire token
      def oauthbearer_set_token_failure(reason)
        @native_kafka.with_inner do |inner|
          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(inner, reason)
        end
      end

      private

      # Serializes the extensions hash into the SASL/OAUTHBEARER wire format
      # (\x01-separated key=value pairs), https://datatracker.ietf.org/doc/html/rfc7628#section-3.1
      def flatten_extensions(extensions)
        return nil unless extensions

        serialized = extensions.map { |pair| pair.join("=") }
        "\x01#{serialized.join("\x01")}"
      end

      # extension_size is the number of keys + values which should be a non-negative even number
      # https://github.com/confluentinc/librdkafka/blob/master/src/rdkafka_sasl_oauthbearer.c#L327-L347
      def extension_size(extensions)
        extensions ? extensions.size * 2 : 0
      end
    end
  end
end
@@ -4,7 +4,7 @@ module Rdkafka
4
4
  # @private
5
5
  # A wrapper around a native kafka that polls and cleanly exits
6
6
  class NativeKafka
7
- def initialize(inner, run_polling_thread:, opaque:)
7
+ def initialize(inner, run_polling_thread:, opaque:, auto_start: true)
8
8
  @inner = inner
9
9
  @opaque = opaque
10
10
  # Lock around external access
@@ -28,30 +28,43 @@ module Rdkafka
28
28
  # counter for operations in progress using inner
29
29
  @operations_in_progress = 0
30
30
 
31
- # Trigger initial poll to make sure oauthbearer cb and other initial cb are handled
32
- Rdkafka::Bindings.rd_kafka_poll(inner, 0)
31
+ @run_polling_thread = run_polling_thread
33
32
 
34
- if run_polling_thread
35
- # Start thread to poll client for delivery callbacks,
36
- # not used in consumer.
37
- @polling_thread = Thread.new do
38
- loop do
39
- @poll_mutex.synchronize do
40
- Rdkafka::Bindings.rd_kafka_poll(inner, 100)
41
- end
33
+ start if auto_start
42
34
 
43
- # Exit thread if closing and the poll queue is empty
44
- if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(inner) == 0
45
- break
35
+ @closing = false
36
+ end
37
+
38
+ def start
39
+ synchronize do
40
+ return if @started
41
+
42
+ @started = true
43
+
44
+ # Trigger initial poll to make sure oauthbearer cb and other initial cb are handled
45
+ Rdkafka::Bindings.rd_kafka_poll(@inner, 0)
46
+
47
+ if @run_polling_thread
48
+ # Start thread to poll client for delivery callbacks,
49
+ # not used in consumer.
50
+ @polling_thread = Thread.new do
51
+ loop do
52
+ @poll_mutex.synchronize do
53
+ Rdkafka::Bindings.rd_kafka_poll(@inner, 100)
54
+ end
55
+
56
+ # Exit thread if closing and the poll queue is empty
57
+ if Thread.current[:closing] && Rdkafka::Bindings.rd_kafka_outq_len(@inner) == 0
58
+ break
59
+ end
46
60
  end
47
61
  end
48
- end
49
62
 
50
- @polling_thread.abort_on_exception = true
51
- @polling_thread[:closing] = false
63
+ @polling_thread.name = "rdkafka.native_kafka##{Rdkafka::Bindings.rd_kafka_name(@inner).gsub('rdkafka', '')}"
64
+ @polling_thread.abort_on_exception = true
65
+ @polling_thread[:closing] = false
66
+ end
52
67
  end
53
-
54
- @closing = false
55
68
  end
56
69
 
57
70
  def with_inner
@@ -4,6 +4,7 @@ module Rdkafka
4
4
  # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
5
5
  class Producer
6
6
  include Helpers::Time
7
+ include Helpers::OAuth
7
8
 
8
9
  # Cache partitions count for 30 seconds
9
10
  PARTITIONS_COUNT_TTL = 30
@@ -53,6 +54,12 @@ module Rdkafka
53
54
  end
54
55
  end
55
56
 
57
+ # Starts the native Kafka polling thread and kicks off the init polling
58
+ # @note Not needed to run unless explicit start was disabled
59
+ def start
60
+ @native_kafka.start
61
+ end
62
+
56
63
  # @return [String] producer name
57
64
  def name
58
65
  @name ||= @native_kafka.with_inner do |inner|
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Rdkafka
4
- VERSION = "0.14.10"
4
+ VERSION = "0.15.0"
5
5
  LIBRDKAFKA_VERSION = "2.3.0"
6
6
  LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
7
7
  end
data/lib/rdkafka.rb CHANGED
@@ -7,6 +7,7 @@ require "json"
7
7
 
8
8
  require "rdkafka/version"
9
9
  require "rdkafka/helpers/time"
10
+ require "rdkafka/helpers/oauth"
10
11
  require "rdkafka/abstract_handle"
11
12
  require "rdkafka/admin"
12
13
  require "rdkafka/admin/create_topic_handle"
@@ -23,7 +24,13 @@ require "rdkafka/admin/delete_acl_handle"
23
24
  require "rdkafka/admin/delete_acl_report"
24
25
  require "rdkafka/admin/describe_acl_handle"
25
26
  require "rdkafka/admin/describe_acl_report"
27
+ require "rdkafka/admin/describe_configs_handle"
28
+ require "rdkafka/admin/describe_configs_report"
29
+ require "rdkafka/admin/incremental_alter_configs_handle"
30
+ require "rdkafka/admin/incremental_alter_configs_report"
26
31
  require "rdkafka/admin/acl_binding_result"
32
+ require "rdkafka/admin/config_binding_result"
33
+ require "rdkafka/admin/config_resource_binding_result"
27
34
  require "rdkafka/bindings"
28
35
  require "rdkafka/callbacks"
29
36
  require "rdkafka/config"
@@ -76,37 +76,50 @@ describe Rdkafka::AbstractHandle do
76
76
  end
77
77
 
78
78
  describe "#wait" do
79
- let(:pending_handle) { true }
79
+ context 'when pending_handle true' do
80
+ let(:pending_handle) { true }
80
81
 
81
- it "should wait until the timeout and then raise an error" do
82
- expect {
83
- subject.wait(max_wait_timeout: 0.1)
84
- }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
82
+ it "should wait until the timeout and then raise an error" do
83
+ expect(Kernel).not_to receive(:warn)
84
+ expect {
85
+ subject.wait(max_wait_timeout: 0.1)
86
+ }.to raise_error Rdkafka::AbstractHandle::WaitTimeoutError, /test_operation/
87
+ end
85
88
  end
86
89
 
87
- context "when not pending anymore and no error" do
90
+ context 'when pending_handle false' do
88
91
  let(:pending_handle) { false }
89
- let(:result) { 1 }
90
92
 
91
- it "should return a result" do
92
- wait_result = subject.wait
93
- expect(wait_result).to eq(result)
93
+ it 'should show a deprecation warning when wait_timeout is set' do
94
+ expect(Kernel).to receive(:warn).with(Rdkafka::AbstractHandle::WAIT_TIMEOUT_DEPRECATION_MESSAGE)
95
+ subject.wait(wait_timeout: 0.1)
94
96
  end
95
97
 
96
- it "should wait without a timeout" do
97
- wait_result = subject.wait(max_wait_timeout: nil)
98
- expect(wait_result).to eq(result)
98
+ context "without error" do
99
+ let(:result) { 1 }
100
+
101
+ it "should return a result" do
102
+ expect(Kernel).not_to receive(:warn)
103
+ wait_result = subject.wait
104
+ expect(wait_result).to eq(result)
105
+ end
106
+
107
+ it "should wait without a timeout" do
108
+ expect(Kernel).not_to receive(:warn)
109
+ wait_result = subject.wait(max_wait_timeout: nil)
110
+ expect(wait_result).to eq(result)
111
+ end
99
112
  end
100
- end
101
113
 
102
- context "when not pending anymore and there was an error" do
103
- let(:pending_handle) { false }
104
- let(:response) { 20 }
114
+ context "with error" do
115
+ let(:response) { 20 }
105
116
 
106
- it "should raise an rdkafka error" do
107
- expect {
108
- subject.wait
109
- }.to raise_error Rdkafka::RdkafkaError
117
+ it "should raise an rdkafka error" do
118
+ expect(Kernel).not_to receive(:warn)
119
+ expect {
120
+ subject.wait
121
+ }.to raise_error Rdkafka::RdkafkaError
122
+ end
110
123
  end
111
124
  end
112
125
  end
@@ -16,12 +16,12 @@ describe Rdkafka::Admin do
16
16
  admin.close
17
17
  end
18
18
 
19
- let(:topic_name) { "test-topic-#{Random.new.rand(0..1_000_000)}" }
19
+ let(:topic_name) { "test-topic-#{SecureRandom.uuid}" }
20
20
  let(:topic_partition_count) { 3 }
21
21
  let(:topic_replication_factor) { 1 }
22
22
  let(:topic_config) { {"cleanup.policy" => "compact", "min.cleanable.dirty.ratio" => 0.8} }
23
23
  let(:invalid_topic_config) { {"cleeeeenup.policee" => "campact"} }
24
- let(:group_name) { "test-group-#{Random.new.rand(0..1_000_000)}" }
24
+ let(:group_name) { "test-group-#{SecureRandom.uuid}" }
25
25
 
26
26
  let(:resource_name) {"acl-test-topic"}
27
27
  let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
@@ -31,6 +31,19 @@ describe Rdkafka::Admin do
31
31
  let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
32
32
  let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
33
33
 
34
+ describe 'admin without auto-start' do
35
+ let(:admin) { config.admin(native_kafka_auto_start: false) }
36
+
37
+ it 'expect to be able to start it later and close' do
38
+ admin.start
39
+ admin.close
40
+ end
41
+
42
+ it 'expect to be able to close it without starting' do
43
+ admin.close
44
+ end
45
+ end
46
+
34
47
  describe "#create_topic" do
35
48
  describe "called with invalid input" do
36
49
  describe "with an invalid topic name" do
@@ -129,6 +142,275 @@ describe Rdkafka::Admin do
129
142
  end
130
143
  end
131
144
 
145
+ describe "describe_configs" do
146
+ subject(:resources_results) { admin.describe_configs(resources).wait.resources }
147
+
148
+ before do
149
+ admin.create_topic(topic_name, 2, 1).wait
150
+ sleep(1)
151
+ end
152
+
153
+ context 'when describing config of an existing topic' do
154
+ let(:resources) { [{ resource_type: 2, resource_name: topic_name }] }
155
+
156
+ it do
157
+ expect(resources_results.size).to eq(1)
158
+ expect(resources_results.first.type).to eq(2)
159
+ expect(resources_results.first.name).to eq(topic_name)
160
+ expect(resources_results.first.configs.size).to be > 25
161
+ expect(resources_results.first.configs.first.name).to eq('compression.type')
162
+ expect(resources_results.first.configs.first.value).to eq('producer')
163
+ expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
164
+ end
165
+ end
166
+
167
+ context 'when describing config of a non-existing topic' do
168
+ let(:resources) { [{ resource_type: 2, resource_name: SecureRandom.uuid }] }
169
+
170
+ it 'expect to raise error' do
171
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
172
+ end
173
+ end
174
+
175
+ context 'when describing both existing and non-existing topics' do
176
+ let(:resources) do
177
+ [
178
+ { resource_type: 2, resource_name: topic_name },
179
+ { resource_type: 2, resource_name: SecureRandom.uuid }
180
+ ]
181
+ end
182
+
183
+ it 'expect to raise error' do
184
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
185
+ end
186
+ end
187
+
188
+ context 'when describing multiple existing topics' do
189
+ let(:resources) do
190
+ [
191
+ { resource_type: 2, resource_name: 'example_topic' },
192
+ { resource_type: 2, resource_name: topic_name }
193
+ ]
194
+ end
195
+
196
+ it do
197
+ expect(resources_results.size).to eq(2)
198
+ expect(resources_results.first.type).to eq(2)
199
+ expect(resources_results.first.name).to eq('example_topic')
200
+ expect(resources_results.last.type).to eq(2)
201
+ expect(resources_results.last.name).to eq(topic_name)
202
+ end
203
+ end
204
+
205
+ context 'when trying to describe invalid resource type' do
206
+ let(:resources) { [{ resource_type: 0, resource_name: SecureRandom.uuid }] }
207
+
208
+ it 'expect to raise error' do
209
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_request/)
210
+ end
211
+ end
212
+
213
+ context 'when trying to describe invalid broker' do
214
+ let(:resources) { [{ resource_type: 4, resource_name: 'non-existing' }] }
215
+
216
+ it 'expect to raise error' do
217
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_arg/)
218
+ end
219
+ end
220
+
221
+ context 'when trying to describe valid broker' do
222
+ let(:resources) { [{ resource_type: 4, resource_name: '1' }] }
223
+
224
+ it do
225
+ expect(resources_results.size).to eq(1)
226
+ expect(resources_results.first.type).to eq(4)
227
+ expect(resources_results.first.name).to eq('1')
228
+ expect(resources_results.first.configs.size).to be > 230
229
+ expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
230
+ expect(resources_results.first.configs.first.value).to eq('0')
231
+ expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
232
+ end
233
+ end
234
+
235
+ context 'when describing valid broker with topics in one request' do
236
+ let(:resources) do
237
+ [
238
+ { resource_type: 4, resource_name: '1' },
239
+ { resource_type: 2, resource_name: topic_name }
240
+ ]
241
+ end
242
+
243
+ it do
244
+ expect(resources_results.size).to eq(2)
245
+ expect(resources_results.first.type).to eq(4)
246
+ expect(resources_results.first.name).to eq('1')
247
+ expect(resources_results.first.configs.size).to be > 230
248
+ expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
249
+ expect(resources_results.first.configs.first.value).to eq('0')
250
+ expect(resources_results.last.type).to eq(2)
251
+ expect(resources_results.last.name).to eq(topic_name)
252
+ expect(resources_results.last.configs.size).to be > 25
253
+ expect(resources_results.last.configs.first.name).to eq('compression.type')
254
+ expect(resources_results.last.configs.first.value).to eq('producer')
255
+ end
256
+ end
257
+ end
258
+
259
+ describe "incremental_alter_configs" do
260
+ subject(:resources_results) { admin.incremental_alter_configs(resources_with_configs).wait.resources }
261
+
262
+ before do
263
+ admin.create_topic(topic_name, 2, 1).wait
264
+ sleep(1)
265
+ end
266
+
267
+ context 'when altering one topic with one valid config via set' do
268
+ let(:target_retention) { (86400002 + rand(10_000)).to_s }
269
+ let(:resources_with_configs) do
270
+ [
271
+ {
272
+ resource_type: 2,
273
+ resource_name: topic_name,
274
+ configs: [
275
+ {
276
+ name: 'delete.retention.ms',
277
+ value: target_retention,
278
+ op_type: 0
279
+ }
280
+ ]
281
+ }
282
+ ]
283
+ end
284
+
285
+ it do
286
+ expect(resources_results.size).to eq(1)
287
+ expect(resources_results.first.type).to eq(2)
288
+ expect(resources_results.first.name).to eq(topic_name)
289
+
290
+ ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
291
+ config.name == 'delete.retention.ms'
292
+ end
293
+
294
+ expect(ret_config.value).to eq(target_retention)
295
+ end
296
+ end
297
+
298
+ context 'when altering one topic with one valid config via delete' do
299
+ let(:target_retention) { (8640002 + rand(10_000)).to_s }
300
+ let(:resources_with_configs) do
301
+ [
302
+ {
303
+ resource_type: 2,
304
+ resource_name: topic_name,
305
+ configs: [
306
+ {
307
+ name: 'delete.retention.ms',
308
+ value: target_retention,
309
+ op_type: 1
310
+ }
311
+ ]
312
+ }
313
+ ]
314
+ end
315
+
316
+ it do
317
+ expect(resources_results.size).to eq(1)
318
+ expect(resources_results.first.type).to eq(2)
319
+ expect(resources_results.first.name).to eq(topic_name)
320
+ ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
321
+ config.name == 'delete.retention.ms'
322
+ end
323
+
324
+ expect(ret_config.value).to eq('86400000')
325
+ end
326
+ end
327
+
328
+ context 'when altering one topic with one valid config via append' do
329
+ let(:target_policy) { 'compact' }
330
+ let(:resources_with_configs) do
331
+ [
332
+ {
333
+ resource_type: 2,
334
+ resource_name: topic_name,
335
+ configs: [
336
+ {
337
+ name: 'cleanup.policy',
338
+ value: target_policy,
339
+ op_type: 2
340
+ }
341
+ ]
342
+ }
343
+ ]
344
+ end
345
+
346
+ it do
347
+ expect(resources_results.size).to eq(1)
348
+ expect(resources_results.first.type).to eq(2)
349
+ expect(resources_results.first.name).to eq(topic_name)
350
+
351
+ ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
352
+ config.name == 'cleanup.policy'
353
+ end
354
+
355
+ expect(ret_config.value).to eq("delete,#{target_policy}")
356
+ end
357
+ end
358
+
359
+ context 'when altering one topic with one valid config via subtrack' do
360
+ let(:target_policy) { 'delete' }
361
+ let(:resources_with_configs) do
362
+ [
363
+ {
364
+ resource_type: 2,
365
+ resource_name: topic_name,
366
+ configs: [
367
+ {
368
+ name: 'cleanup.policy',
369
+ value: target_policy,
370
+ op_type: 3
371
+ }
372
+ ]
373
+ }
374
+ ]
375
+ end
376
+
377
+ it do
378
+ expect(resources_results.size).to eq(1)
379
+ expect(resources_results.first.type).to eq(2)
380
+ expect(resources_results.first.name).to eq(topic_name)
381
+
382
+ ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
383
+ config.name == 'cleanup.policy'
384
+ end
385
+
386
+ expect(ret_config.value).to eq('')
387
+ end
388
+ end
389
+
390
+ context 'when altering one topic with invalid config' do
391
+ let(:target_retention) { '-10' }
392
+ let(:resources_with_configs) do
393
+ [
394
+ {
395
+ resource_type: 2,
396
+ resource_name: topic_name,
397
+ configs: [
398
+ {
399
+ name: 'delete.retention.ms',
400
+ value: target_retention,
401
+ op_type: 0
402
+ }
403
+ ]
404
+ }
405
+ ]
406
+ end
407
+
408
+ it 'expect to raise error' do
409
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_config/)
410
+ end
411
+ end
412
+ end
413
+
132
414
  describe "#delete_topic" do
133
415
  describe "called with invalid input" do
134
416
  # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
@@ -275,6 +557,9 @@ describe Rdkafka::Admin do
275
557
  expect(create_acl_report.rdkafka_response).to eq(0)
276
558
  expect(create_acl_report.rdkafka_response_string).to eq("")
277
559
 
560
+ # Since we create and immediately check, this is slow on loaded CIs, hence we wait
561
+ sleep(2)
562
+
278
563
  #describe_acl
279
564
  describe_acl_handle = admin.describe_acl(resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_ANY, resource_name: nil, resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_ANY, principal: nil, host: nil, operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY, permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY)
280
565
  describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
@@ -396,7 +681,10 @@ describe Rdkafka::Admin do
396
681
  end
397
682
 
398
683
  context 'when topic has less then desired number of partitions' do
399
- before { admin.create_topic(topic_name, 1, 1).wait }
684
+ before do
685
+ admin.create_topic(topic_name, 1, 1).wait
686
+ sleep(1)
687
+ end
400
688
 
401
689
  it 'expect to change number of partitions' do
402
690
  admin.create_partitions(topic_name, 10).wait
@@ -404,4 +692,41 @@ describe Rdkafka::Admin do
404
692
  end
405
693
  end
406
694
  end
695
+
696
+ describe '#oauthbearer_set_token' do
697
+ context 'when sasl not configured' do
698
+ it 'should return RD_KAFKA_RESP_ERR__STATE' do
699
+ response = admin.oauthbearer_set_token(
700
+ token: "foo",
701
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
702
+ principal_name: "kafka-cluster"
703
+ )
704
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
705
+ end
706
+ end
707
+
708
+ context 'when sasl configured' do
709
+ before do
710
+ config_sasl = rdkafka_config(
711
+ "security.protocol": "sasl_ssl",
712
+ "sasl.mechanisms": 'OAUTHBEARER'
713
+ )
714
+ $admin_sasl = config_sasl.admin
715
+ end
716
+
717
+ after do
718
+ $admin_sasl.close
719
+ end
720
+
721
+ it 'should succeed' do
722
+
723
+ response = $admin_sasl.oauthbearer_set_token(
724
+ token: "foo",
725
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
726
+ principal_name: "kafka-cluster"
727
+ )
728
+ expect(response).to eq(0)
729
+ end
730
+ end
731
+ end
407
732
  end