rdkafka 0.16.0.beta1 → 0.16.0
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +1 -1
- data/.ruby-version +1 -1
- data/CHANGELOG.md +9 -1
- data/README.md +1 -0
- data/lib/rdkafka/admin/config_binding_result.rb +30 -0
- data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +1 -0
- data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/describe_configs_report.rb +54 -0
- data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/incremental_alter_configs_report.rb +54 -0
- data/lib/rdkafka/admin.rb +204 -0
- data/lib/rdkafka/bindings.rb +51 -0
- data/lib/rdkafka/callbacks.rb +85 -9
- data/lib/rdkafka/consumer/headers.rb +3 -13
- data/lib/rdkafka/producer/delivery_handle.rb +5 -1
- data/lib/rdkafka/producer.rb +96 -5
- data/lib/rdkafka/version.rb +1 -1
- data/lib/rdkafka.rb +6 -0
- data/rdkafka.gemspec +1 -1
- data/spec/rdkafka/admin_spec.rb +283 -3
- data/spec/rdkafka/consumer/headers_spec.rb +2 -5
- data/spec/rdkafka/producer/delivery_handle_spec.rb +1 -1
- data/spec/rdkafka/producer_spec.rb +42 -0
- data.tar.gz.sig +0 -0
- metadata +10 -4
- metadata.gz.sig +0 -0
data/spec/rdkafka/admin_spec.rb
CHANGED
@@ -16,12 +16,12 @@ describe Rdkafka::Admin do
     admin.close
   end
 
-  let(:topic_name) { "test-topic-#{
+  let(:topic_name) { "test-topic-#{SecureRandom.uuid}" }
   let(:topic_partition_count) { 3 }
   let(:topic_replication_factor) { 1 }
   let(:topic_config) { {"cleanup.policy" => "compact", "min.cleanable.dirty.ratio" => 0.8} }
   let(:invalid_topic_config) { {"cleeeeenup.policee" => "campact"} }
-  let(:group_name) { "test-group-#{
+  let(:group_name) { "test-group-#{SecureRandom.uuid}" }
 
   let(:resource_name) {"acl-test-topic"}
   let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
@@ -31,6 +31,14 @@ describe Rdkafka::Admin do
   let(:operation) {Rdkafka::Bindings::RD_KAFKA_ACL_OPERATION_READ}
   let(:permission_type) {Rdkafka::Bindings::RD_KAFKA_ACL_PERMISSION_TYPE_ALLOW}
 
+  describe '#describe_errors' do
+    let(:errors) { admin.class.describe_errors }
+
+    it { expect(errors.size).to eq(162) }
+    it { expect(errors[-184]).to eq(code: -184, description: 'Local: Queue full', name: '_QUEUE_FULL') }
+    it { expect(errors[21]).to eq(code: 21, description: 'Broker: Invalid required acks value', name: 'INVALID_REQUIRED_ACKS') }
+  end
+
   describe 'admin without auto-start' do
     let(:admin) { config.admin(native_kafka_auto_start: false) }
 
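For quick reference, the new error catalogue exercised above can serve as a plain lookup table. A minimal sketch, relying only on what the spec shows (a Hash keyed by integer librdkafka error code):

# Class-level catalogue of librdkafka errors, keyed by integer error code.
errors = Rdkafka::Admin.describe_errors
errors[-184] #=> { code: -184, description: 'Local: Queue full', name: '_QUEUE_FULL' }
errors[21]   #=> { code: 21, description: 'Broker: Invalid required acks value', name: 'INVALID_REQUIRED_ACKS' }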
@@ -142,6 +150,275 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
     end
   end
 
+  describe "describe_configs" do
+    subject(:resources_results) { admin.describe_configs(resources).wait.resources }
+
+    before do
+      admin.create_topic(topic_name, 2, 1).wait
+      sleep(1)
+    end
+
+    context 'when describing config of an existing topic' do
+      let(:resources) { [{ resource_type: 2, resource_name: topic_name }] }
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+        expect(resources_results.first.configs.size).to be > 25
+        expect(resources_results.first.configs.first.name).to eq('compression.type')
+        expect(resources_results.first.configs.first.value).to eq('producer')
+        expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
+      end
+    end
+
+    context 'when describing config of a non-existing topic' do
+      let(:resources) { [{ resource_type: 2, resource_name: SecureRandom.uuid }] }
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
+      end
+    end
+
+    context 'when describing both existing and non-existing topics' do
+      let(:resources) do
+        [
+          { resource_type: 2, resource_name: topic_name },
+          { resource_type: 2, resource_name: SecureRandom.uuid }
+        ]
+      end
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
+      end
+    end
+
+    context 'when describing multiple existing topics' do
+      let(:resources) do
+        [
+          { resource_type: 2, resource_name: 'example_topic' },
+          { resource_type: 2, resource_name: topic_name }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(2)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq('example_topic')
+        expect(resources_results.last.type).to eq(2)
+        expect(resources_results.last.name).to eq(topic_name)
+      end
+    end
+
+    context 'when trying to describe invalid resource type' do
+      let(:resources) { [{ resource_type: 0, resource_name: SecureRandom.uuid }] }
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_request/)
+      end
+    end
+
+    context 'when trying to describe invalid broker' do
+      let(:resources) { [{ resource_type: 4, resource_name: 'non-existing' }] }
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_arg/)
+      end
+    end
+
+    context 'when trying to describe valid broker' do
+      let(:resources) { [{ resource_type: 4, resource_name: '1' }] }
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(4)
+        expect(resources_results.first.name).to eq('1')
+        expect(resources_results.first.configs.size).to be > 230
+        expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
+        expect(resources_results.first.configs.first.value).to eq('0')
+        expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
+      end
+    end
+
+    context 'when describing valid broker with topics in one request' do
+      let(:resources) do
+        [
+          { resource_type: 4, resource_name: '1' },
+          { resource_type: 2, resource_name: topic_name }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(2)
+        expect(resources_results.first.type).to eq(4)
+        expect(resources_results.first.name).to eq('1')
+        expect(resources_results.first.configs.size).to be > 230
+        expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
+        expect(resources_results.first.configs.first.value).to eq('0')
+        expect(resources_results.last.type).to eq(2)
+        expect(resources_results.last.name).to eq(topic_name)
+        expect(resources_results.last.configs.size).to be > 25
+        expect(resources_results.last.configs.first.name).to eq('compression.type')
+        expect(resources_results.last.configs.first.value).to eq('producer')
+      end
+    end
+  end
+
+  describe "incremental_alter_configs" do
+    subject(:resources_results) { admin.incremental_alter_configs(resources_with_configs).wait.resources }
+
+    before do
+      admin.create_topic(topic_name, 2, 1).wait
+      sleep(1)
+    end
+
+    context 'when altering one topic with one valid config via set' do
+      let(:target_retention) { (86400002 + rand(10_000)).to_s }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'delete.retention.ms',
+                value: target_retention,
+                op_type: 0
+              }
+            ]
+          }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+
+        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
+          config.name == 'delete.retention.ms'
+        end
+
+        expect(ret_config.value).to eq(target_retention)
+      end
+    end
+
+    context 'when altering one topic with one valid config via delete' do
+      let(:target_retention) { (8640002 + rand(10_000)).to_s }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'delete.retention.ms',
+                value: target_retention,
+                op_type: 1
+              }
+            ]
+          }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
+          config.name == 'delete.retention.ms'
+        end
+
+        expect(ret_config.value).to eq('86400000')
+      end
+    end
+
+    context 'when altering one topic with one valid config via append' do
+      let(:target_policy) { 'compact' }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'cleanup.policy',
+                value: target_policy,
+                op_type: 2
+              }
+            ]
+          }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+
+        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
+          config.name == 'cleanup.policy'
+        end
+
+        expect(ret_config.value).to eq("delete,#{target_policy}")
+      end
+    end
+
+    context 'when altering one topic with one valid config via subtrack' do
+      let(:target_policy) { 'delete' }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'cleanup.policy',
+                value: target_policy,
+                op_type: 3
+              }
+            ]
+          }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+
+        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
+          config.name == 'cleanup.policy'
+        end
+
+        expect(ret_config.value).to eq('')
+      end
+    end
+
+    context 'when altering one topic with invalid config' do
+      let(:target_retention) { '-10' }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'delete.retention.ms',
+                value: target_retention,
+                op_type: 0
+              }
+            ]
+          }
+        ]
+      end
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_config/)
+      end
+    end
+  end
+
   describe "#delete_topic" do
     describe "called with invalid input" do
       # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
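Outside the test suite, the two new admin calls exercised above follow the same shapes. A condensed sketch, assuming an existing admin handle and a hypothetical topic named 'events' (resource_type 2 = topic, 4 = broker; op_type 0 = set, 1 = delete, 2 = append, 3 = subtract, matching the integer values used in the specs):

# Describe the effective configuration of a topic (resource_type 2 = topic).
resources = [{ resource_type: 2, resource_name: 'events' }] # 'events' is a hypothetical topic name
admin.describe_configs(resources).wait.resources.first.configs.each do |config|
  puts "#{config.name}=#{config.value}"
end

# Incrementally set a single topic config (op_type 0 = set) without touching other settings.
admin.incremental_alter_configs(
  [
    {
      resource_type: 2,
      resource_name: 'events',
      configs: [{ name: 'delete.retention.ms', value: '86400000', op_type: 0 }]
    }
  ]
).wait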
@@ -412,7 +689,10 @@ expect(ex.broker_message).to match(/Topic name.*is invalid: .* contains one or m
     end
 
     context 'when topic has less then desired number of partitions' do
-      before
+      before do
+        admin.create_topic(topic_name, 1, 1).wait
+        sleep(1)
+      end
 
       it 'expect to change number of partitions' do
         admin.create_partitions(topic_name, 10).wait
data/spec/rdkafka/consumer/headers_spec.rb
CHANGED
@@ -50,11 +50,8 @@ describe Rdkafka::Consumer::Headers do
       expect(subject['version']).to eq("2.1.3")
     end
 
-    it '
-      expect(
-        receive(:warn).with("rdkafka deprecation warning: header access with Symbol key :version treated as a String. " \
-          "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
-      expect(subject[:version]).to eq("2.1.3")
+    it 'does not support symbols mappings' do
+      expect(subject.key?(:version)).to eq(false)
     end
   end
 end
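The upshot of the change above: consumer header hashes are now String-keyed only, with no Symbol fallback and no deprecation warning. A minimal sketch, assuming a consumed message whose producer set a "version" header:

headers = message.headers # e.g. { "version" => "2.1.3" }
headers["version"]        #=> "2.1.3"
headers.key?(:version)    #=> false (Symbol keys are no longer mapped to String keys)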
data/spec/rdkafka/producer_spec.rb
CHANGED
@@ -31,6 +31,48 @@ describe Rdkafka::Producer do
     it { expect(producer.name).to include('rdkafka#producer-') }
   end
 
+  describe '#produce with topic config alterations' do
+    context 'when config is not valid' do
+      it 'expect to raise error' do
+        expect do
+          producer.produce(topic: 'test', payload: '', topic_config: { 'invalid': 'invalid' })
+        end.to raise_error(Rdkafka::Config::ConfigError)
+      end
+    end
+
+    context 'when config is valid' do
+      it 'expect to raise error' do
+        expect do
+          producer.produce(topic: 'test', payload: '', topic_config: { 'acks': 1 }).wait
+        end.not_to raise_error
+      end
+
+      context 'when alteration should change behavior' do
+        # This is set incorrectly for a reason
+        # If alteration would not work, this will hang the spec suite
+        let(:producer) do
+          rdkafka_producer_config(
+            'message.timeout.ms': 1_000_000,
+            :"bootstrap.servers" => "localhost:9094",
+          ).producer
+        end
+
+        it 'expect to give up on delivery fast based on alteration config' do
+          expect do
+            producer.produce(
+              topic: 'produce_config_test',
+              payload: 'test',
+              topic_config: {
+                'compression.type': 'gzip',
+                'message.timeout.ms': 1
+              }
+            ).wait
+          end.to raise_error(Rdkafka::RdkafkaError, /msg_timed_out/)
+        end
+      end
+    end
+  end
+
   context "delivery callback" do
     context "with a proc/lambda" do
       it "should set the callback" do
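As the new specs above show, #produce now accepts a per-call topic_config hash of librdkafka topic-level settings, and an unknown key raises Rdkafka::Config::ConfigError at call time. A short sketch, assuming an existing producer and a hypothetical 'events' topic:

handle = producer.produce(
  topic: 'events',              # hypothetical topic name
  payload: 'hello',
  topic_config: {
    'compression.type': 'gzip', # topic-level librdkafka settings applied for this topic
    'message.timeout.ms': 5_000
  }
)
handle.wait                     # raises Rdkafka::RdkafkaError (e.g. msg_timed_out) on delivery failure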
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.16.0.beta1
+  version: 0.16.0
 platform: ruby
 authors:
 - Thijs Cadier
@@ -36,7 +36,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2024-
+date: 2024-06-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -193,6 +193,8 @@ files:
 - lib/rdkafka/abstract_handle.rb
 - lib/rdkafka/admin.rb
 - lib/rdkafka/admin/acl_binding_result.rb
+- lib/rdkafka/admin/config_binding_result.rb
+- lib/rdkafka/admin/config_resource_binding_result.rb
 - lib/rdkafka/admin/create_acl_handle.rb
 - lib/rdkafka/admin/create_acl_report.rb
 - lib/rdkafka/admin/create_partitions_handle.rb
@@ -207,6 +209,10 @@ files:
 - lib/rdkafka/admin/delete_topic_report.rb
 - lib/rdkafka/admin/describe_acl_handle.rb
 - lib/rdkafka/admin/describe_acl_report.rb
+- lib/rdkafka/admin/describe_configs_handle.rb
+- lib/rdkafka/admin/describe_configs_report.rb
+- lib/rdkafka/admin/incremental_alter_configs_handle.rb
+- lib/rdkafka/admin/incremental_alter_configs_report.rb
 - lib/rdkafka/bindings.rb
 - lib/rdkafka/callbacks.rb
 - lib/rdkafka/config.rb
@@ -272,14 +278,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: '
+      version: '3.0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.11
 signing_key:
 specification_version: 4
 summary: The rdkafka gem is a modern Kafka client library for Ruby based on librdkafka.
metadata.gz.sig
CHANGED
Binary file