rdkafka 0.15.1 → 0.16.0.rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +2 -5
  4. data/.gitignore +2 -0
  5. data/.ruby-version +1 -1
  6. data/CHANGELOG.md +16 -1
  7. data/README.md +19 -9
  8. data/docker-compose.yml +1 -1
  9. data/ext/Rakefile +8 -0
  10. data/lib/rdkafka/abstract_handle.rb +44 -20
  11. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  12. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  13. data/lib/rdkafka/admin/create_topic_report.rb +1 -1
  14. data/lib/rdkafka/admin/delete_groups_report.rb +1 -1
  15. data/lib/rdkafka/admin/delete_topic_report.rb +1 -1
  16. data/lib/rdkafka/admin/describe_acl_report.rb +1 -0
  17. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  18. data/lib/rdkafka/admin/describe_configs_report.rb +54 -0
  19. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  20. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +54 -0
  21. data/lib/rdkafka/admin.rb +219 -0
  22. data/lib/rdkafka/bindings.rb +86 -3
  23. data/lib/rdkafka/callbacks.rb +103 -19
  24. data/lib/rdkafka/config.rb +69 -15
  25. data/lib/rdkafka/consumer.rb +7 -0
  26. data/lib/rdkafka/helpers/oauth.rb +58 -0
  27. data/lib/rdkafka/native_kafka.rb +32 -19
  28. data/lib/rdkafka/producer.rb +101 -4
  29. data/lib/rdkafka/version.rb +1 -1
  30. data/lib/rdkafka.rb +7 -0
  31. data/rdkafka.gemspec +1 -1
  32. data/spec/rdkafka/abstract_handle_spec.rb +34 -21
  33. data/spec/rdkafka/admin_spec.rb +336 -3
  34. data/spec/rdkafka/bindings_spec.rb +97 -0
  35. data/spec/rdkafka/config_spec.rb +53 -0
  36. data/spec/rdkafka/consumer_spec.rb +54 -0
  37. data/spec/rdkafka/native_kafka_spec.rb +8 -1
  38. data/spec/rdkafka/producer_spec.rb +85 -0
  39. data/spec/spec_helper.rb +16 -1
  40. data.tar.gz.sig +0 -0
  41. metadata +11 -4
  42. metadata.gz.sig +0 -0
@@ -16,12 +16,12 @@ describe Rdkafka::Admin do
16
16
  admin.close
17
17
  end
18
18
 
19
- let(:topic_name) { "test-topic-#{Random.new.rand(0..1_000_000)}" }
19
+ let(:topic_name) { "test-topic-#{SecureRandom.uuid}" }
20
20
  let(:topic_partition_count) { 3 }
21
21
  let(:topic_replication_factor) { 1 }
22
22
  let(:topic_config) { {"cleanup.policy" => "compact", "min.cleanable.dirty.ratio" => 0.8} }
23
23
  let(:invalid_topic_config) { {"cleeeeenup.policee" => "campact"} }
24
- let(:group_name) { "test-group-#{Random.new.rand(0..1_000_000)}" }
24
+ let(:group_name) { "test-group-#{SecureRandom.uuid}" }
25
25
 
26
26
  let(:resource_name) {"acl-test-topic"}
27
27
  let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
@@ -31,6 +31,27 @@ describe Rdkafka::Admin do
31
31
  let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
32
32
  let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
33
33
 
34
+ describe '#describe_errors' do
35
+ let(:errors) { admin.class.describe_errors }
36
+
37
+ it { expect(errors.size).to eq(162) }
38
+ it { expect(errors[-184]).to eq(code: -184, description: 'Local: Queue full', name: '_QUEUE_FULL') }
39
+ it { expect(errors[21]).to eq(code: 21, description: 'Broker: Invalid required acks value', name: 'INVALID_REQUIRED_ACKS') }
40
+ end
41
+
42
+ describe 'admin without auto-start' do
43
+ let(:admin) { config.admin(native_kafka_auto_start: false) }
44
+
45
+ it 'expect to be able to start it later and close' do
46
+ admin.start
47
+ admin.close
48
+ end
49
+
50
+ it 'expect to be able to close it without starting' do
51
+ admin.close
52
+ end
53
+ end
54
+
34
55
  describe "#create_topic" do
35
56
  describe "called with invalid input" do
36
57
  describe "with an invalid topic name" do
@@ -129,6 +150,275 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
129
150
  end
130
151
  end
131
152
 
153
+ describe "describe_configs" do
154
+ subject(:resources_results) { admin.describe_configs(resources).wait.resources }
155
+
156
+ before do
157
+ admin.create_topic(topic_name, 2, 1).wait
158
+ sleep(1)
159
+ end
160
+
161
+ context 'when describing config of an existing topic' do
162
+ let(:resources) { [{ resource_type: 2, resource_name: topic_name }] }
163
+
164
+ it do
165
+ expect(resources_results.size).to eq(1)
166
+ expect(resources_results.first.type).to eq(2)
167
+ expect(resources_results.first.name).to eq(topic_name)
168
+ expect(resources_results.first.configs.size).to be > 25
169
+ expect(resources_results.first.configs.first.name).to eq('compression.type')
170
+ expect(resources_results.first.configs.first.value).to eq('producer')
171
+ expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
172
+ end
173
+ end
174
+
175
+ context 'when describing config of a non-existing topic' do
176
+ let(:resources) { [{ resource_type: 2, resource_name: SecureRandom.uuid }] }
177
+
178
+ it 'expect to raise error' do
179
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
180
+ end
181
+ end
182
+
183
+ context 'when describing both existing and non-existing topics' do
184
+ let(:resources) do
185
+ [
186
+ { resource_type: 2, resource_name: topic_name },
187
+ { resource_type: 2, resource_name: SecureRandom.uuid }
188
+ ]
189
+ end
190
+
191
+ it 'expect to raise error' do
192
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
193
+ end
194
+ end
195
+
196
+ context 'when describing multiple existing topics' do
197
+ let(:resources) do
198
+ [
199
+ { resource_type: 2, resource_name: 'example_topic' },
200
+ { resource_type: 2, resource_name: topic_name }
201
+ ]
202
+ end
203
+
204
+ it do
205
+ expect(resources_results.size).to eq(2)
206
+ expect(resources_results.first.type).to eq(2)
207
+ expect(resources_results.first.name).to eq('example_topic')
208
+ expect(resources_results.last.type).to eq(2)
209
+ expect(resources_results.last.name).to eq(topic_name)
210
+ end
211
+ end
212
+
213
+ context 'when trying to describe invalid resource type' do
214
+ let(:resources) { [{ resource_type: 0, resource_name: SecureRandom.uuid }] }
215
+
216
+ it 'expect to raise error' do
217
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_request/)
218
+ end
219
+ end
220
+
221
+ context 'when trying to describe invalid broker' do
222
+ let(:resources) { [{ resource_type: 4, resource_name: 'non-existing' }] }
223
+
224
+ it 'expect to raise error' do
225
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_arg/)
226
+ end
227
+ end
228
+
229
+ context 'when trying to describe valid broker' do
230
+ let(:resources) { [{ resource_type: 4, resource_name: '1' }] }
231
+
232
+ it do
233
+ expect(resources_results.size).to eq(1)
234
+ expect(resources_results.first.type).to eq(4)
235
+ expect(resources_results.first.name).to eq('1')
236
+ expect(resources_results.first.configs.size).to be > 230
237
+ expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
238
+ expect(resources_results.first.configs.first.value).to eq('0')
239
+ expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
240
+ end
241
+ end
242
+
243
+ context 'when describing valid broker with topics in one request' do
244
+ let(:resources) do
245
+ [
246
+ { resource_type: 4, resource_name: '1' },
247
+ { resource_type: 2, resource_name: topic_name }
248
+ ]
249
+ end
250
+
251
+ it do
252
+ expect(resources_results.size).to eq(2)
253
+ expect(resources_results.first.type).to eq(4)
254
+ expect(resources_results.first.name).to eq('1')
255
+ expect(resources_results.first.configs.size).to be > 230
256
+ expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
257
+ expect(resources_results.first.configs.first.value).to eq('0')
258
+ expect(resources_results.last.type).to eq(2)
259
+ expect(resources_results.last.name).to eq(topic_name)
260
+ expect(resources_results.last.configs.size).to be > 25
261
+ expect(resources_results.last.configs.first.name).to eq('compression.type')
262
+ expect(resources_results.last.configs.first.value).to eq('producer')
263
+ end
264
+ end
265
+ end
266
+
267
+ describe "incremental_alter_configs" do
268
+ subject(:resources_results) { admin.incremental_alter_configs(resources_with_configs).wait.resources }
269
+
270
+ before do
271
+ admin.create_topic(topic_name, 2, 1).wait
272
+ sleep(1)
273
+ end
274
+
275
+ context 'when altering one topic with one valid config via set' do
276
+ let(:target_retention) { (86400002 + rand(10_000)).to_s }
277
+ let(:resources_with_configs) do
278
+ [
279
+ {
280
+ resource_type: 2,
281
+ resource_name: topic_name,
282
+ configs: [
283
+ {
284
+ name: 'delete.retention.ms',
285
+ value: target_retention,
286
+ op_type: 0
287
+ }
288
+ ]
289
+ }
290
+ ]
291
+ end
292
+
293
+ it do
294
+ expect(resources_results.size).to eq(1)
295
+ expect(resources_results.first.type).to eq(2)
296
+ expect(resources_results.first.name).to eq(topic_name)
297
+
298
+ ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
299
+ config.name == 'delete.retention.ms'
300
+ end
301
+
302
+ expect(ret_config.value).to eq(target_retention)
303
+ end
304
+ end
305
+
306
+ context 'when altering one topic with one valid config via delete' do
307
+ let(:target_retention) { (8640002 + rand(10_000)).to_s }
308
+ let(:resources_with_configs) do
309
+ [
310
+ {
311
+ resource_type: 2,
312
+ resource_name: topic_name,
313
+ configs: [
314
+ {
315
+ name: 'delete.retention.ms',
316
+ value: target_retention,
317
+ op_type: 1
318
+ }
319
+ ]
320
+ }
321
+ ]
322
+ end
323
+
324
+ it do
325
+ expect(resources_results.size).to eq(1)
326
+ expect(resources_results.first.type).to eq(2)
327
+ expect(resources_results.first.name).to eq(topic_name)
328
+ ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
329
+ config.name == 'delete.retention.ms'
330
+ end
331
+
332
+ expect(ret_config.value).to eq('86400000')
333
+ end
334
+ end
335
+
336
+ context 'when altering one topic with one valid config via append' do
337
+ let(:target_policy) { 'compact' }
338
+ let(:resources_with_configs) do
339
+ [
340
+ {
341
+ resource_type: 2,
342
+ resource_name: topic_name,
343
+ configs: [
344
+ {
345
+ name: 'cleanup.policy',
346
+ value: target_policy,
347
+ op_type: 2
348
+ }
349
+ ]
350
+ }
351
+ ]
352
+ end
353
+
354
+ it do
355
+ expect(resources_results.size).to eq(1)
356
+ expect(resources_results.first.type).to eq(2)
357
+ expect(resources_results.first.name).to eq(topic_name)
358
+
359
+ ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
360
+ config.name == 'cleanup.policy'
361
+ end
362
+
363
+ expect(ret_config.value).to eq("delete,#{target_policy}")
364
+ end
365
+ end
366
+
367
+ context 'when altering one topic with one valid config via subtrack' do
368
+ let(:target_policy) { 'delete' }
369
+ let(:resources_with_configs) do
370
+ [
371
+ {
372
+ resource_type: 2,
373
+ resource_name: topic_name,
374
+ configs: [
375
+ {
376
+ name: 'cleanup.policy',
377
+ value: target_policy,
378
+ op_type: 3
379
+ }
380
+ ]
381
+ }
382
+ ]
383
+ end
384
+
385
+ it do
386
+ expect(resources_results.size).to eq(1)
387
+ expect(resources_results.first.type).to eq(2)
388
+ expect(resources_results.first.name).to eq(topic_name)
389
+
390
+ ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
391
+ config.name == 'cleanup.policy'
392
+ end
393
+
394
+ expect(ret_config.value).to eq('')
395
+ end
396
+ end
397
+
398
+ context 'when altering one topic with invalid config' do
399
+ let(:target_retention) { '-10' }
400
+ let(:resources_with_configs) do
401
+ [
402
+ {
403
+ resource_type: 2,
404
+ resource_name: topic_name,
405
+ configs: [
406
+ {
407
+ name: 'delete.retention.ms',
408
+ value: target_retention,
409
+ op_type: 0
410
+ }
411
+ ]
412
+ }
413
+ ]
414
+ end
415
+
416
+ it 'expect to raise error' do
417
+ expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_config/)
418
+ end
419
+ end
420
+ end
421
+
132
422
  describe "#delete_topic" do
133
423
  describe "called with invalid input" do
134
424
  # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
@@ -275,6 +565,9 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
275
565
  expect(create_acl_report.rdkafka_response).to eq(0)
276
566
  expect(create_acl_report.rdkafka_response_string).to eq("")
277
567
 
568
+ # Since we create and immediately check, this is slow on loaded CIs, hence we wait
569
+ sleep(2)
570
+
278
571
  #describe_acl
279
572
  describe_acl_handle = admin.describe_acl(resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_ANY, resource_name: nil, resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_ANY, principal: nil, host: nil, operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY, permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY)
280
573
  describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
@@ -396,7 +689,10 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
396
689
  end
397
690
 
398
691
  context 'when topic has less then desired number of partitions' do
399
- before { admin.create_topic(topic_name, 1, 1).wait }
692
+ before do
693
+ admin.create_topic(topic_name, 1, 1).wait
694
+ sleep(1)
695
+ end
400
696
 
401
697
  it 'expect to change number of partitions' do
402
698
  admin.create_partitions(topic_name, 10).wait
@@ -404,4 +700,41 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
404
700
  end
405
701
  end
406
702
  end
703
+
704
+ describe '#oauthbearer_set_token' do
705
+ context 'when sasl not configured' do
706
+ it 'should return RD_KAFKA_RESP_ERR__STATE' do
707
+ response = admin.oauthbearer_set_token(
708
+ token: "foo",
709
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
710
+ principal_name: "kafka-cluster"
711
+ )
712
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
713
+ end
714
+ end
715
+
716
+ context 'when sasl configured' do
717
+ before do
718
+ config_sasl = rdkafka_config(
719
+ "security.protocol": "sasl_ssl",
720
+ "sasl.mechanisms": 'OAUTHBEARER'
721
+ )
722
+ $admin_sasl = config_sasl.admin
723
+ end
724
+
725
+ after do
726
+ $admin_sasl.close
727
+ end
728
+
729
+ it 'should succeed' do
730
+
731
+ response = $admin_sasl.oauthbearer_set_token(
732
+ token: "foo",
733
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
734
+ principal_name: "kafka-cluster"
735
+ )
736
+ expect(response).to eq(0)
737
+ end
738
+ end
739
+ end
407
740
  end
@@ -36,6 +36,16 @@ describe Rdkafka::Bindings do
36
36
  expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
37
37
  end
38
38
 
39
+ it "should log fatal messages" do
40
+ Rdkafka::Bindings::LogCallback.call(nil, 1, nil, "log line")
41
+ expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
42
+ end
43
+
44
+ it "should log fatal messages" do
45
+ Rdkafka::Bindings::LogCallback.call(nil, 2, nil, "log line")
46
+ expect(log_queue).to have_received(:<<).with([Logger::FATAL, "rdkafka: log line"])
47
+ end
48
+
39
49
  it "should log error messages" do
40
50
  Rdkafka::Bindings::LogCallback.call(nil, 3, nil, "log line")
41
51
  expect(log_queue).to have_received(:<<).with([Logger::ERROR, "rdkafka: log line"])
@@ -51,6 +61,11 @@ describe Rdkafka::Bindings do
51
61
  expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
52
62
  end
53
63
 
64
+ it "should log info messages" do
65
+ Rdkafka::Bindings::LogCallback.call(nil, 6, nil, "log line")
66
+ expect(log_queue).to have_received(:<<).with([Logger::INFO, "rdkafka: log line"])
67
+ end
68
+
54
69
  it "should log debug messages" do
55
70
  Rdkafka::Bindings::LogCallback.call(nil, 7, nil, "log line")
56
71
  expect(log_queue).to have_received(:<<).with([Logger::DEBUG, "rdkafka: log line"])
@@ -132,4 +147,86 @@ describe Rdkafka::Bindings do
132
147
  end
133
148
  end
134
149
  end
150
+
151
+ describe "oauthbearer set token" do
152
+
153
+ context "without args" do
154
+ it "should raise argument error" do
155
+ expect {
156
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token
157
+ }.to raise_error(ArgumentError)
158
+ end
159
+ end
160
+
161
+ context "with args" do
162
+ before do
163
+ DEFAULT_TOKEN_EXPIRY_SECONDS = 900
164
+ $token_value = "token"
165
+ $md_lifetime_ms = Time.now.to_i*1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
166
+ $md_principal_name = "kafka-cluster"
167
+ $extensions = nil
168
+ $extension_size = 0
169
+ $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
170
+ end
171
+
172
+ it "should set token or capture failure" do
173
+ RdKafkaTestConsumer.with do |consumer_ptr|
174
+ response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
175
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
176
+ expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
177
+ end
178
+ end
179
+ end
180
+ end
181
+
182
+ describe "oauthbearer set token failure" do
183
+
184
+ context "without args" do
185
+
186
+ it "should fail" do
187
+ expect {
188
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
189
+ }.to raise_error(ArgumentError)
190
+ end
191
+ end
192
+
193
+ context "with args" do
194
+ it "should succeed" do
195
+ expect {
196
+ errstr = "error"
197
+ RdKafkaTestConsumer.with do |consumer_ptr|
198
+ Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
199
+ end
200
+ }.to_not raise_error
201
+ end
202
+ end
203
+ end
204
+
205
+ describe "oauthbearer callback" do
206
+
207
+ context "without an oauthbearer callback" do
208
+ it "should do nothing" do
209
+ expect {
210
+ Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
211
+ }.not_to raise_error
212
+ end
213
+ end
214
+
215
+ context "with an oauthbearer callback" do
216
+ before do
217
+ Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
218
+ $received_config = config
219
+ $received_client_name = client_name
220
+ end
221
+ end
222
+
223
+ it "should call the oauth bearer callback and receive config and client name" do
224
+ RdKafkaTestConsumer.with do |consumer_ptr|
225
+ Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
226
+ expect($received_config).to eq("{}")
227
+ expect($received_client_name).to match(/consumer/)
228
+ end
229
+ end
230
+ end
231
+ end
135
232
  end
@@ -22,6 +22,7 @@ describe Rdkafka::Config do
22
22
  it "supports logging queue" do
23
23
  log = StringIO.new
24
24
  Rdkafka::Config.logger = Logger.new(log)
25
+ Rdkafka::Config.ensure_log_thread
25
26
 
26
27
  Rdkafka::Config.log_queue << [Logger::FATAL, "I love testing"]
27
28
  20.times do
@@ -31,6 +32,25 @@ describe Rdkafka::Config do
31
32
 
32
33
  expect(log.string).to include "FATAL -- : I love testing"
33
34
  end
35
+
36
+ it "expect to start new logger thread after fork and work" do
37
+ reader, writer = IO.pipe
38
+
39
+ pid = fork do
40
+ $stdout.reopen(writer)
41
+ Rdkafka::Config.logger = Logger.new($stdout)
42
+ reader.close
43
+ producer = rdkafka_producer_config(debug: 'all').producer
44
+ producer.close
45
+ writer.close
46
+ sleep(1)
47
+ end
48
+
49
+ writer.close
50
+ Process.wait(pid)
51
+ output = reader.read
52
+ expect(output.split("\n").size).to be >= 20
53
+ end
34
54
  end
35
55
 
36
56
  context "statistics callback" do
@@ -95,6 +115,39 @@ describe Rdkafka::Config do
95
115
  end
96
116
  end
97
117
 
118
+ context "oauthbearer calllback" do
119
+ context "with a proc/lambda" do
120
+ it "should set the callback" do
121
+ expect {
122
+ Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
123
+ puts config
124
+ puts client_name
125
+ end
126
+ }.not_to raise_error
127
+ expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
128
+ end
129
+ end
130
+
131
+ context "with a callable object" do
132
+ it "should set the callback" do
133
+ callback = Class.new do
134
+ def call(config, client_name); end
135
+ end
136
+
137
+ expect {
138
+ Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
139
+ }.not_to raise_error
140
+ expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
141
+ end
142
+ end
143
+
144
+ it "should not accept a callback that's not callable" do
145
+ expect {
146
+ Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
147
+ }.to raise_error(TypeError)
148
+ end
149
+ end
150
+
98
151
  context "configuration" do
99
152
  it "should store configuration" do
100
153
  config = Rdkafka::Config.new
@@ -14,6 +14,19 @@ describe Rdkafka::Consumer do
14
14
  it { expect(consumer.name).to include('rdkafka#consumer-') }
15
15
  end
16
16
 
17
+ describe 'consumer without auto-start' do
18
+ let(:consumer) { rdkafka_consumer_config.consumer(native_kafka_auto_start: false) }
19
+
20
+ it 'expect to be able to start it later and close' do
21
+ consumer.start
22
+ consumer.close
23
+ end
24
+
25
+ it 'expect to be able to close it without starting' do
26
+ consumer.close
27
+ end
28
+ end
29
+
17
30
  describe "#subscribe, #unsubscribe and #subscription" do
18
31
  it "should subscribe, unsubscribe and return the subscription" do
19
32
  expect(consumer.subscription).to be_empty
@@ -211,6 +224,11 @@ describe Rdkafka::Consumer do
211
224
 
212
225
  # 7. ensure same message is read again
213
226
  message2 = consumer.poll(timeout)
227
+
228
+ # This is needed because `enable.auto.offset.store` is true but when running in CI that
229
+ # is overloaded, offset store lags
230
+ sleep(2)
231
+
214
232
  consumer.commit
215
233
  expect(message1.offset).to eq message2.offset
216
234
  expect(message1.payload).to eq message2.payload
@@ -1296,4 +1314,40 @@ describe Rdkafka::Consumer do
1296
1314
  ])
1297
1315
  end
1298
1316
  end
1317
+
1318
+ describe '#oauthbearer_set_token' do
1319
+ context 'when sasl not configured' do
1320
+ it 'should return RD_KAFKA_RESP_ERR__STATE' do
1321
+ response = consumer.oauthbearer_set_token(
1322
+ token: "foo",
1323
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
1324
+ principal_name: "kafka-cluster"
1325
+ )
1326
+ expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
1327
+ end
1328
+ end
1329
+
1330
+ context 'when sasl configured' do
1331
+ before do
1332
+ $consumer_sasl = rdkafka_producer_config(
1333
+ "security.protocol": "sasl_ssl",
1334
+ "sasl.mechanisms": 'OAUTHBEARER'
1335
+ ).consumer
1336
+ end
1337
+
1338
+ after do
1339
+ $consumer_sasl.close
1340
+ end
1341
+
1342
+ it 'should succeed' do
1343
+
1344
+ response = $consumer_sasl.oauthbearer_set_token(
1345
+ token: "foo",
1346
+ lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
1347
+ principal_name: "kafka-cluster"
1348
+ )
1349
+ expect(response).to eq(0)
1350
+ end
1351
+ end
1352
+ end
1299
1353
  end
@@ -10,8 +10,9 @@ describe Rdkafka::NativeKafka do
10
10
  subject(:client) { described_class.new(native, run_polling_thread: true, opaque: opaque) }
11
11
 
12
12
  before do
13
+ allow(Rdkafka::Bindings).to receive(:rd_kafka_name).and_return('producer-1')
13
14
  allow(Thread).to receive(:new).and_return(thread)
14
-
15
+ allow(thread).to receive(:name=).with("rdkafka.native_kafka#producer-1")
15
16
  allow(thread).to receive(:[]=).with(:closing, anything)
16
17
  allow(thread).to receive(:join)
17
18
  allow(thread).to receive(:abort_on_exception=).with(anything)
@@ -20,6 +21,12 @@ describe Rdkafka::NativeKafka do
20
21
  after { client.close }
21
22
 
22
23
  context "defaults" do
24
+ it "sets the thread name" do
25
+ expect(thread).to receive(:name=).with("rdkafka.native_kafka#producer-1")
26
+
27
+ client
28
+ end
29
+
23
30
  it "sets the thread to abort on exception" do
24
31
  expect(thread).to receive(:abort_on_exception=).with(true)
25
32