karafka-rdkafka 0.14.10 → 0.15.0.alpha2

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the public registries.
data/spec/rdkafka/admin_spec.rb CHANGED
@@ -16,12 +16,12 @@ describe Rdkafka::Admin do
     admin.close
   end
 
-  let(:topic_name) { "test-topic-#{Random.new.rand(0..1_000_000)}" }
+  let(:topic_name) { "test-topic-#{SecureRandom.uuid}" }
   let(:topic_partition_count) { 3 }
   let(:topic_replication_factor) { 1 }
   let(:topic_config) { {"cleanup.policy" => "compact", "min.cleanable.dirty.ratio" => 0.8} }
   let(:invalid_topic_config) { {"cleeeeenup.policee" => "campact"} }
-  let(:group_name) { "test-group-#{Random.new.rand(0..1_000_000)}" }
+  let(:group_name) { "test-group-#{SecureRandom.uuid}" }
 
   let(:resource_name) {"acl-test-topic"}
   let(:resource_type) {Rdkafka::Bindings::RD_KAFKA_RESOURCE_TOPIC}
@@ -129,6 +129,275 @@ describe Rdkafka::Admin do
     end
   end
 
+  describe "describe_configs" do
+    subject(:resources_results) { admin.describe_configs(resources).wait.resources }
+
+    before do
+      admin.create_topic(topic_name, 2, 1).wait
+      sleep(1)
+    end
+
+    context 'when describing config of an existing topic' do
+      let(:resources) { [{ resource_type: 2, resource_name: topic_name }] }
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+        expect(resources_results.first.configs.size).to be > 25
+        expect(resources_results.first.configs.first.name).to eq('compression.type')
+        expect(resources_results.first.configs.first.value).to eq('producer')
+        expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
+      end
+    end
+
+    context 'when describing config of a non-existing topic' do
+      let(:resources) { [{ resource_type: 2, resource_name: SecureRandom.uuid }] }
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
+      end
+    end
+
+    context 'when describing both existing and non-existing topics' do
+      let(:resources) do
+        [
+          { resource_type: 2, resource_name: topic_name },
+          { resource_type: 2, resource_name: SecureRandom.uuid }
+        ]
+      end
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /unknown_topic_or_part/)
+      end
+    end
+
+    context 'when describing multiple existing topics' do
+      let(:resources) do
+        [
+          { resource_type: 2, resource_name: 'example_topic' },
+          { resource_type: 2, resource_name: topic_name }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(2)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq('example_topic')
+        expect(resources_results.last.type).to eq(2)
+        expect(resources_results.last.name).to eq(topic_name)
+      end
+    end
+
+    context 'when trying to describe invalid resource type' do
+      let(:resources) { [{ resource_type: 0, resource_name: SecureRandom.uuid }] }
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_request/)
+      end
+    end
+
+    context 'when trying to describe invalid broker' do
+      let(:resources) { [{ resource_type: 4, resource_name: 'non-existing' }] }
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_arg/)
+      end
+    end
+
+    context 'when trying to describe valid broker' do
+      let(:resources) { [{ resource_type: 4, resource_name: '1' }] }
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(4)
+        expect(resources_results.first.name).to eq('1')
+        expect(resources_results.first.configs.size).to be > 230
+        expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
+        expect(resources_results.first.configs.first.value).to eq('0')
+        expect(resources_results.first.configs.map(&:synonyms)).not_to be_empty
+      end
+    end
+
+    context 'when describing valid broker with topics in one request' do
+      let(:resources) do
+        [
+          { resource_type: 4, resource_name: '1' },
+          { resource_type: 2, resource_name: topic_name }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(2)
+        expect(resources_results.first.type).to eq(4)
+        expect(resources_results.first.name).to eq('1')
+        expect(resources_results.first.configs.size).to be > 230
+        expect(resources_results.first.configs.first.name).to eq('log.cleaner.min.compaction.lag.ms')
+        expect(resources_results.first.configs.first.value).to eq('0')
+        expect(resources_results.last.type).to eq(2)
+        expect(resources_results.last.name).to eq(topic_name)
+        expect(resources_results.last.configs.size).to be > 25
+        expect(resources_results.last.configs.first.name).to eq('compression.type')
+        expect(resources_results.last.configs.first.value).to eq('producer')
+      end
+    end
+  end
+
+  describe "incremental_alter_configs" do
+    subject(:resources_results) { admin.incremental_alter_configs(resources_with_configs).wait.resources }
+
+    before do
+      admin.create_topic(topic_name, 2, 1).wait
+      sleep(1)
+    end
+
+    context 'when altering one topic with one valid config via set' do
+      let(:target_retention) { (86400002 + rand(10_000)).to_s }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'delete.retention.ms',
+                value: target_retention,
+                op_type: 0
+              }
+            ]
+          }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+
+        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
+          config.name == 'delete.retention.ms'
+        end
+
+        expect(ret_config.value).to eq(target_retention)
+      end
+    end
+
+    context 'when altering one topic with one valid config via delete' do
+      let(:target_retention) { (8640002 + rand(10_000)).to_s }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'delete.retention.ms',
+                value: target_retention,
+                op_type: 1
+              }
+            ]
+          }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
+          config.name == 'delete.retention.ms'
+        end
+
+        expect(ret_config.value).to eq('86400000')
+      end
+    end
+
+    context 'when altering one topic with one valid config via append' do
+      let(:target_policy) { 'compact' }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'cleanup.policy',
+                value: target_policy,
+                op_type: 2
+              }
+            ]
+          }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+
+        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
+          config.name == 'cleanup.policy'
+        end
+
+        expect(ret_config.value).to eq("delete,#{target_policy}")
+      end
+    end
+
+    context 'when altering one topic with one valid config via subtract' do
+      let(:target_policy) { 'delete' }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'cleanup.policy',
+                value: target_policy,
+                op_type: 3
+              }
+            ]
+          }
+        ]
+      end
+
+      it do
+        expect(resources_results.size).to eq(1)
+        expect(resources_results.first.type).to eq(2)
+        expect(resources_results.first.name).to eq(topic_name)
+
+        ret_config = admin.describe_configs(resources_with_configs).wait.resources.first.configs.find do |config|
+          config.name == 'cleanup.policy'
+        end
+
+        expect(ret_config.value).to eq('')
+      end
+    end
+
+    context 'when altering one topic with invalid config' do
+      let(:target_retention) { '-10' }
+      let(:resources_with_configs) do
+        [
+          {
+            resource_type: 2,
+            resource_name: topic_name,
+            configs: [
+              {
+                name: 'delete.retention.ms',
+                value: target_retention,
+                op_type: 0
+              }
+            ]
+          }
+        ]
+      end
+
+      it 'expect to raise error' do
+        expect { resources_results }.to raise_error(Rdkafka::RdkafkaError, /invalid_config/)
+      end
+    end
+  end
+
   describe "#delete_topic" do
     describe "called with invalid input" do
       # https://github.com/apache/kafka/blob/trunk/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29
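The hunk above introduces two new Admin APIs, #describe_configs and #incremental_alter_configs, each returning a handle whose .wait exposes a .resources report. A minimal usage sketch, assuming only what the specs exercise (resource_type 2 = topic, 4 = broker; op_type 0 = set, 1 = delete, 2 = append, 3 = subtract; the bootstrap address and topic name are illustrative):

    require "rdkafka"

    admin = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin

    # Read the effective config of a topic (resource_type 2 = topic).
    resources = admin.describe_configs(
      [{ resource_type: 2, resource_name: "example_topic" }]
    ).wait.resources
    resources.first.configs.each { |config| puts "#{config.name}=#{config.value}" }

    # Incrementally set a single topic config (op_type 0 = set).
    admin.incremental_alter_configs(
      [{
        resource_type: 2,
        resource_name: "example_topic",
        configs: [{ name: "delete.retention.ms", value: "86400000", op_type: 0 }]
      }]
    ).wait

    admin.close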
@@ -396,7 +665,10 @@ describe Rdkafka::Admin do
     end
 
     context 'when topic has less then desired number of partitions' do
-      before { admin.create_topic(topic_name, 1, 1).wait }
+      before do
+        admin.create_topic(topic_name, 1, 1).wait
+        sleep(1)
+      end
 
       it 'expect to change number of partitions' do
         admin.create_partitions(topic_name, 10).wait
@@ -404,4 +676,41 @@ describe Rdkafka::Admin do
       end
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = admin.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      before do
+        config_sasl = rdkafka_config(
+          "security.protocol": "sasl_ssl",
+          "sasl.mechanisms": 'OAUTHBEARER'
+        )
+        $admin_sasl = config_sasl.admin
+      end
+
+      after do
+        $admin_sasl.close
+      end
+
+      it 'should succeed' do
+        response = $admin_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
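This hunk also adds #oauthbearer_set_token to the admin client; the same helper appears on consumers and producers later in this diff (via the new lib/rdkafka/helpers/oauth.rb). A minimal sketch of the call shape, assuming a client configured for SASL/OAUTHBEARER; the "foo" token from the specs stands in for a real bearer token:

    # Per the specs above: returns 0 on success, and RD_KAFKA_RESP_ERR__STATE
    # when SASL/OAUTHBEARER is not the configured authentication mechanism.
    response = admin.oauthbearer_set_token(
      token: "foo",                                   # a real token in practice
      lifetime_ms: Time.now.to_i * 1000 + 900 * 1000, # expiry in ms since epoch
      principal_name: "kafka-cluster"
    )
    raise "oauthbearer_set_token failed: #{response}" unless response.zero?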
data/spec/rdkafka/bindings_spec.rb CHANGED
@@ -132,4 +132,86 @@ describe Rdkafka::Bindings do
       end
     end
   end
+
+  describe "oauthbearer set token" do
+    context "without args" do
+      it "should raise argument error" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      before do
+        DEFAULT_TOKEN_EXPIRY_SECONDS = 900
+        $token_value = "token"
+        $md_lifetime_ms = Time.now.to_i*1000 + DEFAULT_TOKEN_EXPIRY_SECONDS * 1000
+        $md_principal_name = "kafka-cluster"
+        $extensions = nil
+        $extension_size = 0
+        $error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+      end
+
+      it "should set token or capture failure" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          response = Rdkafka::Bindings.rd_kafka_oauthbearer_set_token(consumer_ptr, $token_value, $md_lifetime_ms, $md_principal_name, $extensions, $extension_size, $error_buffer, 256)
+          expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+          expect($error_buffer.read_string).to eq("SASL/OAUTHBEARER is not the configured authentication mechanism")
+        end
+      end
+    end
+  end
+
+  describe "oauthbearer set token failure" do
+    context "without args" do
+      it "should fail" do
+        expect {
+          Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure
+        }.to raise_error(ArgumentError)
+      end
+    end
+
+    context "with args" do
+      it "should succeed" do
+        expect {
+          errstr = "error"
+          RdKafkaTestConsumer.with do |consumer_ptr|
+            Rdkafka::Bindings.rd_kafka_oauthbearer_set_token_failure(consumer_ptr, errstr)
+          end
+        }.to_not raise_error
+      end
+    end
+  end
+
+  describe "oauthbearer callback" do
+    context "without an oauthbearer callback" do
+      it "should do nothing" do
+        expect {
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(nil, "", nil)
+        }.not_to raise_error
+      end
+    end
+
+    context "with an oauthbearer callback" do
+      before do
+        Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+          $received_config = config
+          $received_client_name = client_name
+        end
+      end
+
+      it "should call the oauth bearer callback and receive config and client name" do
+        RdKafkaTestConsumer.with do |consumer_ptr|
+          Rdkafka::Bindings::OAuthbearerTokenRefreshCallback.call(consumer_ptr, "{}", nil)
+          expect($received_config).to eq("{}")
+          expect($received_client_name).to match(/consumer/)
+        end
+      end
+    end
+  end
 end
data/spec/rdkafka/config_spec.rb CHANGED
@@ -115,6 +115,39 @@ describe Rdkafka::Config do
     end
   end
 
+  context "oauthbearer callback" do
+    context "with a proc/lambda" do
+      it "should set the callback" do
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
+            puts config
+            puts client_name
+          end
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    context "with a callable object" do
+      it "should set the callback" do
+        callback = Class.new do
+          def call(config, client_name); end
+        end
+
+        expect {
+          Rdkafka::Config.oauthbearer_token_refresh_callback = callback.new
+        }.not_to raise_error
+        expect(Rdkafka::Config.oauthbearer_token_refresh_callback).to respond_to :call
+      end
+    end
+
+    it "should not accept a callback that's not callable" do
+      expect {
+        Rdkafka::Config.oauthbearer_token_refresh_callback = 'not a callback'
+      }.to raise_error(TypeError)
+    end
+  end
+
   context "configuration" do
     it "should store configuration" do
       config = Rdkafka::Config.new
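These config specs cover the new Rdkafka::Config.oauthbearer_token_refresh_callback setter, which accepts any object responding to #call and rejects anything else with a TypeError. A sketch of wiring the callback to the set-token helper; fetch_token is a hypothetical application helper (not part of the gem), and the client to refresh is assumed to be in scope:

    Rdkafka::Config.oauthbearer_token_refresh_callback = lambda do |config, client_name|
      # fetch_token (hypothetical) returns a fresh token and its expiry in ms.
      token, lifetime_ms = fetch_token(client_name)
      producer.oauthbearer_set_token(  # `producer` stands in for whichever
        token: token,                  # producer/consumer/admin needs the token
        lifetime_ms: lifetime_ms,
        principal_name: "kafka-cluster"
      )
    end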
data/spec/rdkafka/consumer_spec.rb CHANGED
@@ -214,7 +214,7 @@ describe Rdkafka::Consumer do
 
       # This is needed because `enable.auto.offset.store` is true but when running in CI that
       # is overloaded, offset store lags
-      sleep(1)
+      sleep(2)
 
       consumer.commit
       expect(message1.offset).to eq message2.offset
@@ -1329,4 +1329,40 @@ describe Rdkafka::Consumer do
       ])
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = consumer.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      before do
+        $consumer_sasl = rdkafka_producer_config(
+          "security.protocol": "sasl_ssl",
+          "sasl.mechanisms": 'OAUTHBEARER'
+        ).consumer
+      end
+
+      after do
+        $consumer_sasl.close
+      end
+
+      it 'should succeed' do
+        response = $consumer_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
data/spec/rdkafka/producer_spec.rb CHANGED
@@ -917,4 +917,34 @@ describe Rdkafka::Producer do
       end
     end
   end
+
+  describe '#oauthbearer_set_token' do
+    context 'when sasl not configured' do
+      it 'should return RD_KAFKA_RESP_ERR__STATE' do
+        response = producer.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(Rdkafka::Bindings::RD_KAFKA_RESP_ERR__STATE)
+      end
+    end
+
+    context 'when sasl configured' do
+      it 'should succeed' do
+        producer_sasl = rdkafka_producer_config(
+          {
+            "security.protocol": "sasl_ssl",
+            "sasl.mechanisms": 'OAUTHBEARER'
+          }
+        ).producer
+        response = producer_sasl.oauthbearer_set_token(
+          token: "foo",
+          lifetime_ms: Time.now.to_i*1000 + 900 * 1000,
+          principal_name: "kafka-cluster"
+        )
+        expect(response).to eq(0)
+      end
+    end
+  end
 end
data/spec/spec_helper.rb CHANGED
@@ -139,7 +139,7 @@ RSpec.configure do |config|
   }.each do |topic, partitions|
     create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
     begin
-      create_topic_handle.wait(max_wait_timeout: 15)
+      create_topic_handle.wait(max_wait_timeout: 1.0)
     rescue Rdkafka::RdkafkaError => ex
       raise unless ex.message.match?(/topic_already_exists/)
     end
@@ -155,3 +155,18 @@ RSpec.configure do |config|
     end
   end
 end
+
+class RdKafkaTestConsumer
+  def self.with
+    consumer = Rdkafka::Bindings.rd_kafka_new(
+      :rd_kafka_consumer,
+      nil,
+      nil,
+      0
+    )
+    yield consumer
+  ensure
+    Rdkafka::Bindings.rd_kafka_consumer_close(consumer)
+    Rdkafka::Bindings.rd_kafka_destroy(consumer)
+  end
+end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.14.10
+  version: 0.15.0.alpha2
 platform: ruby
 authors:
 - Thijs Cadier
@@ -36,7 +36,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2024-02-08 00:00:00.000000000 Z
+date: 2024-03-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
@@ -194,6 +194,8 @@ files:
 - lib/rdkafka/abstract_handle.rb
 - lib/rdkafka/admin.rb
 - lib/rdkafka/admin/acl_binding_result.rb
+- lib/rdkafka/admin/config_binding_result.rb
+- lib/rdkafka/admin/config_resource_binding_result.rb
 - lib/rdkafka/admin/create_acl_handle.rb
 - lib/rdkafka/admin/create_acl_report.rb
 - lib/rdkafka/admin/create_partitions_handle.rb
@@ -208,6 +210,10 @@ files:
 - lib/rdkafka/admin/delete_topic_report.rb
 - lib/rdkafka/admin/describe_acl_handle.rb
 - lib/rdkafka/admin/describe_acl_report.rb
+- lib/rdkafka/admin/describe_configs_handle.rb
+- lib/rdkafka/admin/describe_configs_report.rb
+- lib/rdkafka/admin/incremental_alter_configs_handle.rb
+- lib/rdkafka/admin/incremental_alter_configs_report.rb
 - lib/rdkafka/bindings.rb
 - lib/rdkafka/callbacks.rb
 - lib/rdkafka/config.rb
219
  - lib/rdkafka/config.rb
@@ -217,6 +223,7 @@ files:
217
223
  - lib/rdkafka/consumer/partition.rb
218
224
  - lib/rdkafka/consumer/topic_partition_list.rb
219
225
  - lib/rdkafka/error.rb
226
+ - lib/rdkafka/helpers/oauth.rb
220
227
  - lib/rdkafka/helpers/time.rb
221
228
  - lib/rdkafka/metadata.rb
222
229
  - lib/rdkafka/native_kafka.rb
metadata.gz.sig CHANGED
Binary file