rdkafka 0.14.0 → 0.15.1
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci.yml +2 -3
- data/.ruby-version +1 -1
- data/CHANGELOG.md +25 -0
- data/README.md +44 -22
- data/docker-compose.yml +3 -1
- data/ext/Rakefile +43 -26
- data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
- data/lib/rdkafka/admin/create_acl_report.rb +24 -0
- data/lib/rdkafka/admin/create_partitions_handle.rb +27 -0
- data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
- data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
- data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +23 -0
- data/lib/rdkafka/admin.rb +443 -0
- data/lib/rdkafka/bindings.rb +125 -2
- data/lib/rdkafka/callbacks.rb +196 -1
- data/lib/rdkafka/config.rb +24 -3
- data/lib/rdkafka/consumer/headers.rb +1 -1
- data/lib/rdkafka/consumer/topic_partition_list.rb +8 -7
- data/lib/rdkafka/consumer.rb +80 -29
- data/lib/rdkafka/producer/delivery_handle.rb +12 -1
- data/lib/rdkafka/producer/delivery_report.rb +16 -3
- data/lib/rdkafka/producer.rb +42 -12
- data/lib/rdkafka/version.rb +3 -3
- data/lib/rdkafka.rb +11 -0
- data/rdkafka.gemspec +2 -2
- data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
- data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
- data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
- data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
- data/spec/rdkafka/admin_spec.rb +204 -0
- data/spec/rdkafka/config_spec.rb +8 -0
- data/spec/rdkafka/consumer_spec.rb +89 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +4 -0
- data/spec/rdkafka/producer_spec.rb +26 -2
- data/spec/spec_helper.rb +3 -1
- data.tar.gz.sig +0 -0
- metadata +29 -4
- metadata.gz.sig +0 -0
data/spec/rdkafka/admin_spec.rb
CHANGED
@@ -9,6 +9,10 @@ describe Rdkafka::Admin do
   after do
     # Registry should always end up being empty
     expect(Rdkafka::Admin::CreateTopicHandle::REGISTRY).to be_empty
+    expect(Rdkafka::Admin::CreatePartitionsHandle::REGISTRY).to be_empty
+    expect(Rdkafka::Admin::DescribeAclHandle::REGISTRY).to be_empty
+    expect(Rdkafka::Admin::CreateAclHandle::REGISTRY).to be_empty
+    expect(Rdkafka::Admin::DeleteAclHandle::REGISTRY).to be_empty
     admin.close
   end

@@ -17,6 +21,15 @@ describe Rdkafka::Admin do
   let(:topic_replication_factor) { 1 }
   let(:topic_config) { {"cleanup.policy" => "compact", "min.cleanable.dirty.ratio" => 0.8} }
   let(:invalid_topic_config) { {"cleeeeenup.policee" => "campact"} }
+  let(:group_name) { "test-group-#{Random.new.rand(0..1_000_000)}" }
+
+  let(:resource_name) {"acl-test-topic"}
+  let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
+  let(:resource_pattern_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL}
+  let(:principal) {"User:anonymous"}
+  let(:host) {"*"}
+  let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
+  let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}

   describe "#create_topic" do
     describe "called with invalid input" do
@@ -200,4 +213,195 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
       expect(delete_topic_report.result_name).to eq(topic_name)
     end
   end
+
+  describe "#ACL tests" do
+    let(:non_existing_resource_name) {"non-existing-topic"}
+    before do
+      #create topic for testing acl
+      create_topic_handle = admin.create_topic(resource_name, topic_partition_count, topic_replication_factor)
+      create_topic_report = create_topic_handle.wait(max_wait_timeout: 15.0)
+    end
+
+    after do
+      #delete acl
+      delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+      delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)
+
+      #delete topic that was created for testing acl
+      delete_topic_handle = admin.delete_topic(resource_name)
+      delete_topic_report = delete_topic_handle.wait(max_wait_timeout: 15.0)
+    end
+
+    describe "#create_acl" do
+      it "create acl for a topic that does not exist" do
+        # acl creation for resources that does not exist will still get created successfully.
+        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: non_existing_resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(create_acl_report.rdkafka_response).to eq(0)
+        expect(create_acl_report.rdkafka_response_string).to eq("")
+
+        # delete the acl that was created for a non existing topic"
+        delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: non_existing_resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(delete_acl_handle[:response]).to eq(0)
+        expect(delete_acl_report.deleted_acls.size).to eq(1)
+      end
+
+      it "creates a acl for topic that was newly created" do
+        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(create_acl_report.rdkafka_response).to eq(0)
+        expect(create_acl_report.rdkafka_response_string).to eq("")
+      end
+    end
+
+    describe "#describe_acl" do
+      it "describe acl of a topic that does not exist" do
+        describe_acl_handle = admin.describe_acl(resource_type: resource_type, resource_name: non_existing_resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(describe_acl_handle[:response]).to eq(0)
+        expect(describe_acl_report.acls.size).to eq(0)
+      end
+
+      it "create acls and describe the newly created acls" do
+        #create_acl
+        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: "test_acl_topic_1", resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(create_acl_report.rdkafka_response).to eq(0)
+        expect(create_acl_report.rdkafka_response_string).to eq("")
+
+        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: "test_acl_topic_2", resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(create_acl_report.rdkafka_response).to eq(0)
+        expect(create_acl_report.rdkafka_response_string).to eq("")
+
+        #describe_acl
+        describe_acl_handle = admin.describe_acl(resource_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_ANY, resource_name: nil, resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_ANY, principal: nil, host: nil, operation: Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY, permission_type: Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY)
+        describe_acl_report = describe_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(describe_acl_handle[:response]).to eq(0)
+        expect(describe_acl_report.acls.length).to eq(2)
+      end
+    end
+
+    describe "#delete_acl" do
+      it "delete acl of a topic that does not exist" do
+        delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: non_existing_resource_name, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(delete_acl_handle[:response]).to eq(0)
+        expect(delete_acl_report.deleted_acls.size).to eq(0)
+      end
+
+      it "create an acl and delete the newly created acl" do
+        #create_acl
+        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: "test_acl_topic_1", resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(create_acl_report.rdkafka_response).to eq(0)
+        expect(create_acl_report.rdkafka_response_string).to eq("")
+
+        create_acl_handle = admin.create_acl(resource_type: resource_type, resource_name: "test_acl_topic_2", resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        create_acl_report = create_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(create_acl_report.rdkafka_response).to eq(0)
+        expect(create_acl_report.rdkafka_response_string).to eq("")
+
+        #delete_acl - resource_name nil - to delete all acls with any resource name and matching all other filters.
+        delete_acl_handle = admin.delete_acl(resource_type: resource_type, resource_name: nil, resource_pattern_type: resource_pattern_type, principal: principal, host: host, operation: operation, permission_type: permission_type)
+        delete_acl_report = delete_acl_handle.wait(max_wait_timeout: 15.0)
+        expect(delete_acl_handle[:response]).to eq(0)
+        expect(delete_acl_report.deleted_acls.length).to eq(2)
+
+      end
+    end
+  end
+
+  describe('Group tests') do
+    describe "#delete_group" do
+      describe("with an existing group") do
+        let(:consumer_config) { rdkafka_consumer_config('group.id': group_name) }
+        let(:producer_config) { rdkafka_producer_config }
+        let(:producer) { producer_config.producer }
+        let(:consumer) { consumer_config.consumer }
+
+        before do
+          # Create a topic, post a message to it, consume it and commit offsets, this will create a group that we can then delete.
+          admin.create_topic(topic_name, topic_partition_count, topic_replication_factor).wait(max_wait_timeout: 15.0)
+
+          producer.produce(topic: topic_name, payload: "test", key: "test").wait(max_wait_timeout: 15.0)
+
+          consumer.subscribe(topic_name)
+          wait_for_assignment(consumer)
+          message = consumer.poll(100)
+
+          expect(message).to_not be_nil
+
+          consumer.commit
+          consumer.close
+        end
+
+        after do
+          producer.close
+          consumer.close
+        end
+
+        it "deletes the group" do
+          delete_group_handle = admin.delete_group(group_name)
+          report = delete_group_handle.wait(max_wait_timeout: 15.0)
+
+          expect(report.result_name).to eql(group_name)
+        end
+      end
+
+      describe "called with invalid input" do
+        describe "with the name of a group that does not exist" do
+          it "raises an exception" do
+            delete_group_handle = admin.delete_group(group_name)
+
+            expect {
+              delete_group_handle.wait(max_wait_timeout: 15.0)
+            }.to raise_exception { |ex|
+              expect(ex).to be_a(Rdkafka::RdkafkaError)
+              expect(ex.message).to match(/Broker: The group id does not exist \(group_id_not_found\)/)
+            }
+          end
+        end
+      end
+
+    end
+  end
+
+  describe '#create_partitions' do
+    let(:metadata) { admin.metadata(topic_name).topics.first }
+
+    context 'when topic does not exist' do
+      it 'expect to fail due to unknown partition' do
+        expect { admin.create_partitions(topic_name, 10).wait }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
+      end
+    end
+
+    context 'when topic already has the desired number of partitions' do
+      before { admin.create_topic(topic_name, 2, 1).wait }
+
+      it 'expect not to change number of partitions' do
+        expect { admin.create_partitions(topic_name, 2).wait }.to raise_error(Rdkafka::RdkafkaError, /invalid_partitions/)
+        expect(metadata[:partition_count]).to eq(2)
+      end
+    end
+
+    context 'when topic has more than the requested number of partitions' do
+      before { admin.create_topic(topic_name, 5, 1).wait }
+
+      it 'expect not to change number of partitions' do
+        expect { admin.create_partitions(topic_name, 2).wait }.to raise_error(Rdkafka::RdkafkaError, /invalid_partitions/)
+        expect(metadata[:partition_count]).to eq(5)
+      end
+    end
+
+    context 'when topic has less then desired number of partitions' do
+      before { admin.create_topic(topic_name, 1, 1).wait }
+
+      it 'expect to change number of partitions' do
+        admin.create_partitions(topic_name, 10).wait
+        expect(metadata[:partition_count]).to eq(10)
+      end
+    end
+  end
 end
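Taken together, the spec additions above exercise the new admin surface end to end: ACL creation, lookup and deletion, consumer-group deletion, and partition creation. Below is a minimal usage sketch distilled from those specs. It assumes a reachable local broker and that the `Admin` instance comes from `Rdkafka::Config#admin` (the specs build theirs through a test helper), so treat it as illustrative rather than canonical; the group and topic names are placeholders.

```ruby
require "rdkafka"

# Assumption: Admin instance obtained from a Config; broker address is a placeholder.
admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

# ACLs: create, describe, delete. Arguments mirror the spec's let-bindings.
create_report = admin.create_acl(
  resource_type:         Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
  resource_name:         "acl-test-topic",
  resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
  principal:             "User:anonymous",
  host:                  "*",
  operation:             Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
  permission_type:       Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
).wait(max_wait_timeout: 15.0)
puts create_report.rdkafka_response # 0 on success

# Describe with ANY filters to list matching ACL bindings.
describe_report = admin.describe_acl(
  resource_type:         Rdkafka::Bindings::RD_KAFKA_RESOURCE_ANY,
  resource_name:         nil,
  resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_ANY,
  principal:             nil,
  host:                  nil,
  operation:             Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_ANY,
  permission_type:       Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ANY
).wait(max_wait_timeout: 15.0)
puts describe_report.acls.length

# Delete; resource_name: nil matches any resource name for the other filters.
admin.delete_acl(
  resource_type:         Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC,
  resource_name:         nil,
  resource_pattern_type: Rdkafka::Bindings::RD_KAFKA_RESOURCE_PATTERN_LITERAL,
  principal:             "User:anonymous",
  host:                  "*",
  operation:             Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ,
  permission_type:       Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW
).wait(max_wait_timeout: 15.0)

# Consumer groups and partitions (both raise Rdkafka::RdkafkaError if the
# group/topic does not exist, as the specs assert).
admin.delete_group("some-consumer-group").wait(max_wait_timeout: 15.0)
admin.create_partitions("some-topic", 10).wait

admin.close
```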
data/spec/rdkafka/config_spec.rb
CHANGED
@@ -113,6 +113,14 @@ describe Rdkafka::Config do
     consumer.close
   end

+  it "should create a consumer with consumer_poll_set set to false" do
+    config = rdkafka_consumer_config
+    config.consumer_poll_set = false
+    consumer = config.consumer
+    expect(consumer).to be_a Rdkafka::Consumer
+    consumer.close
+  end
+
   it "should raise an error when creating a consumer with invalid config" do
     config = Rdkafka::Config.new('invalid.key' => 'value')
     expect {
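The new config spec above covers the `consumer_poll_set` flag introduced in this release. A minimal sketch of building a consumer with the flag disabled follows; the broker address and group id are placeholder assumptions, and the calls mirror those shown in the spec.

```ruby
require "rdkafka"

# Build a consumer whose main queue is not part of the poll set
# (consumer_poll_set = false), mirroring the spec above.
config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",  # placeholder
  "group.id"          => "poll-set-demo"    # placeholder
)
config.consumer_poll_set = false

consumer = config.consumer
consumer.subscribe("consume_test_topic")
# ... poll messages as usual; see the #events_poll example after the
# consumer_spec.rb diff for how non-message events are served in this mode.
consumer.close
```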
data/spec/rdkafka/consumer_spec.rb
CHANGED
@@ -54,6 +54,30 @@ describe Rdkafka::Consumer do
         consumer.subscription
       }.to raise_error(Rdkafka::RdkafkaError)
     end
+
+    context "when using consumer without the poll set" do
+      let(:consumer) do
+        config = rdkafka_consumer_config
+        config.consumer_poll_set = false
+        config.consumer
+      end
+
+      it "should subscribe, unsubscribe and return the subscription" do
+        expect(consumer.subscription).to be_empty
+
+        consumer.subscribe("consume_test_topic")
+
+        expect(consumer.subscription).not_to be_empty
+        expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+          list.add_topic("consume_test_topic")
+        end
+        expect(consumer.subscription).to eq expected_subscription
+
+        consumer.unsubscribe
+
+        expect(consumer.subscription).to be_empty
+      end
+    end
   end

   describe "#pause and #resume" do
@@ -273,6 +297,28 @@ describe Rdkafka::Consumer do
     end
   end

+  describe '#assignment_lost?' do
+    it "should not return true as we do have an assignment" do
+      consumer.subscribe("consume_test_topic")
+      expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+        list.add_topic("consume_test_topic")
+      end
+
+      expect(consumer.assignment_lost?).to eq false
+      consumer.unsubscribe
+    end
+
+    it "should not return true after voluntary unsubscribing" do
+      consumer.subscribe("consume_test_topic")
+      expected_subscription = Rdkafka::Consumer::TopicPartitionList.new.tap do |list|
+        list.add_topic("consume_test_topic")
+      end
+
+      consumer.unsubscribe
+      expect(consumer.assignment_lost?).to eq false
+    end
+  end
+
   describe "#close" do
     it "should close a consumer" do
       consumer.subscribe("consume_test_topic")
@@ -434,6 +480,8 @@ describe Rdkafka::Consumer do
   end

   describe "#store_offset" do
+    let(:consumer) { rdkafka_consumer_config('enable.auto.offset.store': false).consumer }
+
     before do
       config = {}
       config[:'enable.auto.offset.store'] = false
@@ -496,6 +544,14 @@ describe Rdkafka::Consumer do
          }.to raise_error(Rdkafka::RdkafkaError)
        end
      end
+
+     context "when trying to use with enable.auto.offset.store set to true" do
+       let(:consumer) { rdkafka_consumer_config('enable.auto.offset.store': true).consumer }
+
+       it "expect to raise invalid configuration error" do
+         expect { consumer.store_offset(message) }.to raise_error(Rdkafka::RdkafkaError, /invalid_arg/)
+       end
+     end
     end
   end
 end
@@ -1054,6 +1110,39 @@ describe Rdkafka::Consumer do
     end
   end

+  # Only relevant in case of a consumer with separate queues
+  describe '#events_poll' do
+    let(:stats) { [] }
+
+    before { Rdkafka::Config.statistics_callback = ->(published) { stats << published } }
+
+    after { Rdkafka::Config.statistics_callback = nil }
+
+    let(:consumer) do
+      config = rdkafka_consumer_config('statistics.interval.ms': 100)
+      config.consumer_poll_set = false
+      config.consumer
+    end
+
+    it "expect to run events_poll, operate and propagate stats on events_poll and not poll" do
+      consumer.subscribe("consume_test_topic")
+      consumer.poll(1_000)
+      expect(stats).to be_empty
+      consumer.events_poll(-1)
+      expect(stats).not_to be_empty
+    end
+  end
+
+  describe '#consumer_group_metadata_pointer' do
+    let(:pointer) { consumer.consumer_group_metadata_pointer }
+
+    after { Rdkafka::Bindings.rd_kafka_consumer_group_metadata_destroy(pointer) }
+
+    it 'expect to return a pointer' do
+      expect(pointer).to be_a(FFI::Pointer)
+    end
+  end
+
   describe "a rebalance listener" do
     let(:consumer) do
       config = rdkafka_consumer_config
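The `#events_poll` spec above shows what `consumer_poll_set = false` is for: with the main queue kept separate, `#poll` serves messages while statistics and other non-message events are delivered only when `#events_poll` runs. A hedged sketch of that flow, with placeholder broker, group, and topic names:

```ruby
require "rdkafka"

stats = []
# Collect emitted statistics payloads, as in the spec above.
Rdkafka::Config.statistics_callback = ->(published) { stats << published }

config = Rdkafka::Config.new(
  "bootstrap.servers"      => "localhost:9092",    # placeholder
  "group.id"               => "events-poll-demo",  # placeholder
  "statistics.interval.ms" => 100
)
config.consumer_poll_set = false
consumer = config.consumer

consumer.subscribe("consume_test_topic")
consumer.poll(1_000)     # serves messages; does not run the stats callback here
consumer.events_poll(-1) # serves queued events, which fills `stats`

puts stats.size
consumer.close
Rdkafka::Config.statistics_callback = nil
```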
data/spec/rdkafka/producer/delivery_report_spec.rb
CHANGED
@@ -15,6 +15,10 @@ describe Rdkafka::Producer::DeliveryReport do
     expect(subject.topic_name).to eq "topic"
   end

+  it "should get the same topic name under topic alias" do
+    expect(subject.topic).to eq "topic"
+  end
+
   it "should get the error" do
     expect(subject.error).to eq -1
   end
data/spec/rdkafka/producer_spec.rb
CHANGED
@@ -34,6 +34,7 @@ describe Rdkafka::Producer do

     producer.delivery_callback = lambda do |report|
       expect(report).not_to be_nil
+      expect(report.label).to eq "label"
       expect(report.partition).to eq 1
       expect(report.offset).to be >= 0
       expect(report.topic_name).to eq "produce_test_topic"
@@ -44,9 +45,12 @@ describe Rdkafka::Producer do
     handle = producer.produce(
       topic: "produce_test_topic",
       payload: "payload",
-      key: "key"
+      key: "key",
+      label: "label"
     )

+    expect(handle.label).to eq "label"
+
     # Wait for it to be delivered
     handle.wait(max_wait_timeout: 15)

@@ -175,11 +179,13 @@ describe Rdkafka::Producer do
     handle = producer.produce(
       topic: "produce_test_topic",
       payload: "payload",
-      key: "key"
+      key: "key",
+      label: "label"
     )

     # Should be pending at first
     expect(handle.pending?).to be true
+    expect(handle.label).to eq "label"

     # Check delivery handle and report
     report = handle.wait(max_wait_timeout: 5)
@@ -187,6 +193,7 @@ describe Rdkafka::Producer do
     expect(report).not_to be_nil
     expect(report.partition).to eq 1
     expect(report.offset).to be >= 0
+    expect(report.label).to eq "label"

     # Flush and close producer
     producer.flush
@@ -558,6 +565,23 @@ describe Rdkafka::Producer do
     end
   end

+  context "when not being able to deliver the message" do
+    let(:producer) do
+      rdkafka_producer_config(
+        "bootstrap.servers": "localhost:9093",
+        "message.timeout.ms": 100
+      ).producer
+    end
+
+    it "should contain the error in the response when not deliverable" do
+      handler = producer.produce(topic: 'produce_test_topic', payload: nil, label: 'na')
+      # Wait for the async callbacks and delivery registry to update
+      sleep(2)
+      expect(handler.create_result.error).to be_a(Rdkafka::RdkafkaError)
+      expect(handler.create_result.label).to eq('na')
+    end
+  end
+
   describe '#partition_count' do
     it { expect(producer.partition_count('consume_test_topic')).to eq(3) }

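The producer specs above introduce a per-message `label:` that travels with the delivery handle and shows up again on the delivery report (and, on failure, via `create_result`). A short sketch of that round trip, with a placeholder broker address; the topic is assumed to exist:

```ruby
require "rdkafka"

producer = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092" # placeholder
).producer

handle = producer.produce(
  topic:   "produce_test_topic", # assumed to exist
  payload: "payload",
  key:     "key",
  label:   "label"
)

puts handle.label               # available immediately, before delivery
report = handle.wait(max_wait_timeout: 15)
puts report.label               # the same label on the delivery report
puts report.topic_name

producer.flush
producer.close
```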
data/spec/spec_helper.rb
CHANGED
@@ -11,6 +11,7 @@ require "pry"
 require "rspec"
 require "rdkafka"
 require "timeout"
+require "securerandom"

 def rdkafka_base_config
   {
@@ -35,7 +36,7 @@ def rdkafka_consumer_config(config_overrides={})
   # Add consumer specific fields to it
   config[:"auto.offset.reset"] = "earliest"
   config[:"enable.partition.eof"] = false
-  config[:"group.id"] = "ruby-test-#{
+  config[:"group.id"] = "ruby-test-#{SecureRandom.uuid}"
   # Enable debug mode if required
   if ENV["DEBUG_CONSUMER"]
     config[:debug] = "cgrp,topic,fetch"
@@ -134,6 +135,7 @@ RSpec.configure do |config|
       rake_test_topic: 3,
       watermarks_test_topic: 3,
       partitioner_test_topic: 25,
+      example_topic: 1
     }.each do |topic, partitions|
       create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
       begin
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,10 +1,11 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.15.1
 platform: ruby
 authors:
 - Thijs Cadier
+- Maciej Mensfeld
 autorequire:
 bindir: bin
 cert_chain:
@@ -35,7 +36,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date:
+date: 2024-01-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -171,6 +172,7 @@ extensions:
 - ext/Rakefile
 extra_rdoc_files: []
 files:
+- ".github/FUNDING.yml"
 - ".github/workflows/ci.yml"
 - ".gitignore"
 - ".rspec"
@@ -190,10 +192,21 @@ files:
 - lib/rdkafka.rb
 - lib/rdkafka/abstract_handle.rb
 - lib/rdkafka/admin.rb
+- lib/rdkafka/admin/acl_binding_result.rb
+- lib/rdkafka/admin/create_acl_handle.rb
+- lib/rdkafka/admin/create_acl_report.rb
+- lib/rdkafka/admin/create_partitions_handle.rb
+- lib/rdkafka/admin/create_partitions_report.rb
 - lib/rdkafka/admin/create_topic_handle.rb
 - lib/rdkafka/admin/create_topic_report.rb
+- lib/rdkafka/admin/delete_acl_handle.rb
+- lib/rdkafka/admin/delete_acl_report.rb
+- lib/rdkafka/admin/delete_groups_handle.rb
+- lib/rdkafka/admin/delete_groups_report.rb
 - lib/rdkafka/admin/delete_topic_handle.rb
 - lib/rdkafka/admin/delete_topic_report.rb
+- lib/rdkafka/admin/describe_acl_handle.rb
+- lib/rdkafka/admin/describe_acl_report.rb
 - lib/rdkafka/bindings.rb
 - lib/rdkafka/callbacks.rb
 - lib/rdkafka/config.rb
@@ -213,10 +226,16 @@ files:
 - rdkafka.gemspec
 - renovate.json
 - spec/rdkafka/abstract_handle_spec.rb
+- spec/rdkafka/admin/create_acl_handle_spec.rb
+- spec/rdkafka/admin/create_acl_report_spec.rb
 - spec/rdkafka/admin/create_topic_handle_spec.rb
 - spec/rdkafka/admin/create_topic_report_spec.rb
+- spec/rdkafka/admin/delete_acl_handle_spec.rb
+- spec/rdkafka/admin/delete_acl_report_spec.rb
 - spec/rdkafka/admin/delete_topic_handle_spec.rb
 - spec/rdkafka/admin/delete_topic_report_spec.rb
+- spec/rdkafka/admin/describe_acl_handle_spec.rb
+- spec/rdkafka/admin/describe_acl_report_spec.rb
 - spec/rdkafka/admin_spec.rb
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
@@ -259,18 +278,24 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.5.3
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
   It wraps the production-ready C client using the ffi gem and targets Kafka 1.0+
-  and Ruby 2.
+  and Ruby 2.7+.
 test_files:
 - spec/rdkafka/abstract_handle_spec.rb
+- spec/rdkafka/admin/create_acl_handle_spec.rb
+- spec/rdkafka/admin/create_acl_report_spec.rb
 - spec/rdkafka/admin/create_topic_handle_spec.rb
 - spec/rdkafka/admin/create_topic_report_spec.rb
+- spec/rdkafka/admin/delete_acl_handle_spec.rb
+- spec/rdkafka/admin/delete_acl_report_spec.rb
 - spec/rdkafka/admin/delete_topic_handle_spec.rb
 - spec/rdkafka/admin/delete_topic_report_spec.rb
+- spec/rdkafka/admin/describe_acl_handle_spec.rb
+- spec/rdkafka/admin/describe_acl_report_spec.rb
 - spec/rdkafka/admin_spec.rb
 - spec/rdkafka/bindings_spec.rb
 - spec/rdkafka/callbacks_spec.rb
metadata.gz.sig
CHANGED
Binary file