logstash-integration-kafka 10.7.6-java → 10.7.7-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/lib/logstash/inputs/kafka.rb +4 -1
- data/lib/logstash/plugin_mixins/common.rb +1 -2
- data/logstash-integration-kafka.gemspec +1 -1
- data/spec/fixtures/jaas.config +5 -0
- data/spec/fixtures/pwd +5 -0
- data/spec/integration/inputs/kafka_spec.rb +113 -24
- metadata +6 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7a9e872c7a49cd1eb9fc9709c10be72d1c9609b0f19ef5550a2dd7cb317a925e
+  data.tar.gz: b0541ab279e7b7fcea74f1a951e7916abf54adde8bae60e2c4b5897b6f4daadb
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1e53d939b471032107afaeb2b7656bf6c755d803e1effbda487b953ae6cc0b79fd97458834d079168023e0ecfa8aeec67a474dc1a4a99336a7be10af3d1b6412
+  data.tar.gz: bea64e388923bab253278e46eccc0701c2bda8f3067cc1e429223fbc3f9cd3a797ced2e334755bbbe3bbf1cf711237359d85e1c6ec22cde822ced2285840fd9f
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,7 @@
+## 10.7.7
+  - Fix: Correct the settings to allow basic auth to work properly, either by setting `schema_registry_key/secret` or embedding username/password in the
+    url [#94](https://github.com/logstash-plugins/logstash-integration-kafka/pull/94)
+
 ## 10.7.6
   - Test: specify development dependency version [#91](https://github.com/logstash-plugins/logstash-integration-kafka/pull/91)
 
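For reference, the two input configurations this fix repairs look roughly like the following — a minimal sketch in the Ruby-hash style the spec suite uses, with the placeholder credentials borrowed from the test fixtures:

# Option 1: dedicated settings (the plugin now sets BASIC_AUTH_CREDENTIALS_SOURCE to USER_INFO)
config = {
  'schema_registry_url'    => 'http://localhost:8081',
  'schema_registry_key'    => 'barney',
  'schema_registry_secret' => 'changeme'
}

# Option 2: credentials embedded in the URL (the plugin now sets BASIC_AUTH_CREDENTIALS_SOURCE to URL)
config = {
  'schema_registry_url' => 'http://barney:changeme@localhost:8081'
}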
data/lib/logstash/inputs/kafka.rb
CHANGED
@@ -433,13 +433,16 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
     if schema_registry_url
       props.put(kafka::VALUE_DESERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroDeserializer.java_class)
       serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
-      props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, schema_registry_url.to_s)
+      props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, schema_registry_url.uri.to_s)
       if schema_registry_proxy && !schema_registry_proxy.empty?
         props.put(serdes_config::PROXY_HOST, @schema_registry_proxy_host)
         props.put(serdes_config::PROXY_PORT, @schema_registry_proxy_port)
       end
       if schema_registry_key && !schema_registry_key.empty?
+        props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
         props.put(serdes_config::USER_INFO_CONFIG, schema_registry_key + ":" + schema_registry_secret.value)
+      else
+        props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'URL')
       end
     end
     if security_protocol == "SSL"
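The switch from `schema_registry_url.to_s` to `schema_registry_url.uri.to_s` is the crux of the URL-embedded-credentials fix. A minimal sketch of the difference, assuming a Logstash runtime where the `:uri`-validated setting is wrapped in a LogStash::Util::SafeURI, whose string form redacts the password:

require "logstash/util/safe_uri" # available inside a Logstash environment

safe = LogStash::Util::SafeURI.new("http://barney:changeme@localhost:8081") # hypothetical credentials
safe.to_s      # password portion is expected to be masked for safe logging, so handing
               # this string to the registry client would send bogus credentials
safe.uri.to_s  # the underlying URI with credentials intact, which URL-sourced basic auth needs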
data/lib/logstash/plugin_mixins/common.rb
CHANGED
@@ -53,9 +53,8 @@ module LogStash
       options[:auth] = {:user => schema_registry_key, :password => schema_registry_secret.value}
     end
     client = Manticore::Client.new(options)
-
     begin
-      response = client.get(@schema_registry_url.to_s + '/subjects').body
+      response = client.get(@schema_registry_url.uri.to_s + '/subjects').body
     rescue Manticore::ManticoreException => e
       raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
     end
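This validation simply issues a GET against the registry's /subjects endpoint before the pipeline starts. A standalone sketch of the same call, assuming a registry on localhost:8081 and the fixture credentials; the :auth option is only populated when schema_registry_key is configured:

require "manticore"

options = {}
options[:auth] = {:user => "barney", :password => "changeme"} # only when schema_registry_key is set
client = Manticore::Client.new(options)

begin
  response = client.get("http://localhost:8081/subjects").body
  puts response # e.g. '["topic_avro-value"]' on a registry with one registered subject
rescue Manticore::ManticoreException => e
  raise "Schema registry service doesn't respond, error: #{e.message}"
end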
data/logstash-integration-kafka.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-kafka'
-  s.version = '10.7.6'
+  s.version = '10.7.7'
   s.licenses = ['Apache-2.0']
   s.summary = "Integration with Kafka - input and output plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/fixtures/jaas.config
ADDED
data/spec/fixtures/pwd
ADDED
data/spec/integration/inputs/kafka_spec.rb
CHANGED
@@ -206,6 +206,16 @@ end
 
 
 describe "schema registry connection options" do
+  schema_registry = Manticore::Client.new
+  before (:all) do
+    shutdown_schema_registry
+    startup_schema_registry(schema_registry)
+  end
+
+  after(:all) do
+    shutdown_schema_registry
+  end
+
   context "remote endpoint validation" do
     it "should fail if not reachable" do
       config = {'schema_registry_url' => 'http://localnothost:8081'}
@@ -232,8 +242,7 @@ describe "schema registry connection options" do
     end
 
     after(:each) do
-
-      delete_remote_schema(schema_registry_client, SUBJECT_NAME)
+      delete_remote_schema(schema_registry, SUBJECT_NAME)
     end
 
     it "should correctly complete registration phase" do
@@ -264,9 +273,25 @@ end
 
 # AdminClientConfig = org.alpache.kafka.clients.admin.AdminClientConfig
 
+def startup_schema_registry(schema_registry, auth=false)
+  system('./stop_schema_registry.sh')
+  auth ? system('./start_auth_schema_registry.sh') : system('./start_schema_registry.sh')
+  url = auth ? "http://barney:changeme@localhost:8081" : "http://localhost:8081"
+  Stud.try(20.times, [Manticore::SocketException, StandardError, RSpec::Expectations::ExpectationNotMetError]) do
+    expect(schema_registry.get(url).code).to eq(200)
+  end
+end
+
 describe "Schema registry API", :integration => true do
+  schema_registry = Manticore::Client.new
+
+  before(:all) do
+    startup_schema_registry(schema_registry)
+  end
 
-
+  after(:all) do
+    shutdown_schema_registry
+  end
 
   context 'listing subject on clean instance' do
     it "should return an empty set" do
@@ -292,37 +317,58 @@ describe "Schema registry API", :integration => true do
       expect( subjects ).to be_empty
     end
   end
+end
+
+def shutdown_schema_registry
+  system('./stop_schema_registry.sh')
+end
+
+describe "Deserializing with the schema registry", :integration => true do
+  schema_registry = Manticore::Client.new
+
+  shared_examples 'it reads from a topic using a schema registry' do |with_auth|
+
+    before(:all) do
+      shutdown_schema_registry
+      startup_schema_registry(schema_registry, with_auth)
+    end
+
+    after(:all) do
+      shutdown_schema_registry
+    end
 
-  context 'use the schema to serialize' do
     after(:each) do
-      expect( schema_registry.delete(
+      expect( schema_registry.delete("#{subject_url}/#{avro_topic_name}-value").code ).to be(200)
       sleep 1
-      expect( schema_registry.delete(
+      expect( schema_registry.delete("#{subject_url}/#{avro_topic_name}-value?permanent=true").code ).to be(200)
 
       Stud.try(3.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
        wait(10).for do
-        subjects = JSON.parse schema_registry.get(
+        subjects = JSON.parse schema_registry.get(subject_url).body
         subjects.empty?
        end.to be_truthy
       end
     end
 
-    let(:
-
-
-
-
-
-
-      'codec' => 'plain',
-      'group_id' => group_id_1,
-      'auto_offset_reset' => 'earliest' }
+    let(:base_config) do
+      {
+          'topics' => [avro_topic_name],
+          'codec' => 'plain',
+          'group_id' => group_id_1,
+          'auto_offset_reset' => 'earliest'
+      }
     end
 
-
+    let(:group_id_1) {rand(36**8).to_s(36)}
+
+    def delete_topic_if_exists(topic_name, user = nil, password = nil)
      props = java.util.Properties.new
      props.put(Java::org.apache.kafka.clients.admin.AdminClientConfig::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
-
+      serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
+      unless user.nil?
+        props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
+        props.put(serdes_config::USER_INFO_CONFIG, "#{user}:#{password}")
+      end
      admin_client = org.apache.kafka.clients.admin.AdminClient.create(props)
      topics_list = admin_client.listTopics().names().get()
      if topics_list.contains(topic_name)
@@ -331,7 +377,7 @@ describe "Schema registry API", :integration => true do
      end
    end
 
-    def write_some_data_to(topic_name)
+    def write_some_data_to(topic_name, user = nil, password = nil)
      props = java.util.Properties.new
      config = org.apache.kafka.clients.producer.ProducerConfig
 
@@ -339,6 +385,10 @@ describe "Schema registry API", :integration => true do
      props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081")
 
      props.put(config::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
+      unless user.nil?
+        props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
+        props.put(serdes_config::USER_INFO_CONFIG, "#{user}:#{password}")
+      end
      props.put(config::KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.java_class)
      props.put(config::VALUE_SERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroSerializer.java_class)
 
@@ -360,11 +410,11 @@ describe "Schema registry API", :integration => true do
    end
 
    it "stored a new schema using Avro Kafka serdes" do
-      delete_topic_if_exists avro_topic_name
-      write_some_data_to avro_topic_name
+      auth ? delete_topic_if_exists(avro_topic_name, user, password) : delete_topic_if_exists(avro_topic_name)
+      auth ? write_some_data_to(avro_topic_name, user, password) : write_some_data_to(avro_topic_name)
 
-      subjects = JSON.parse schema_registry.get(
-      expect( subjects ).to contain_exactly("
+      subjects = JSON.parse schema_registry.get(subject_url).body
+      expect( subjects ).to contain_exactly("#{avro_topic_name}-value")
 
      num_events = 1
      queue = consume_messages(plain_config, timeout: 30, event_count: num_events)
@@ -375,4 +425,43 @@ describe "Schema registry API", :integration => true do
      expect( elem.get("map_field")["inner_field"] ).to eq("inner value")
    end
  end
+
+  context 'with an unauthed schema registry' do
+    let(:auth) { false }
+    let(:avro_topic_name) { "topic_avro" }
+    let(:subject_url) { "http://localhost:8081/subjects" }
+    let(:plain_config) { base_config.merge!({'schema_registry_url' => "http://localhost:8081"}) }
+
+    it_behaves_like 'it reads from a topic using a schema registry', false
+  end
+
+  context 'with an authed schema registry' do
+    let(:auth) { true }
+    let(:user) { "barney" }
+    let(:password) { "changeme" }
+    let(:avro_topic_name) { "topic_avro_auth" }
+    let(:subject_url) { "http://#{user}:#{password}@localhost:8081/subjects" }
+
+    context 'using schema_registry_key' do
+      let(:plain_config) do
+        base_config.merge!({
+            'schema_registry_url' => "http://localhost:8081",
+            'schema_registry_key' => user,
+            'schema_registry_secret' => password
+        })
+      end
+
+      it_behaves_like 'it reads from a topic using a schema registry', true
+    end
+
+    context 'using schema_registry_url' do
+      let(:plain_config) do
+        base_config.merge!({
+            'schema_registry_url' => "http://#{user}:#{password}@localhost:8081"
+        })
+      end
+
+      it_behaves_like 'it reads from a topic using a schema registry', true
+    end
+  end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 10.7.6
+  version: 10.7.7
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-
+date: 2021-07-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -235,6 +235,8 @@ files:
 - lib/logstash/plugin_mixins/kafka_support.rb
 - logstash-integration-kafka.gemspec
 - spec/check_docs_spec.rb
+- spec/fixtures/jaas.config
+- spec/fixtures/pwd
 - spec/fixtures/trust-store_stub.jks
 - spec/integration/inputs/kafka_spec.rb
 - spec/integration/outputs/kafka_spec.rb
@@ -285,6 +287,8 @@ specification_version: 4
 summary: Integration with Kafka - input and output plugins
 test_files:
 - spec/check_docs_spec.rb
+- spec/fixtures/jaas.config
+- spec/fixtures/pwd
 - spec/fixtures/trust-store_stub.jks
 - spec/integration/inputs/kafka_spec.rb
 - spec/integration/outputs/kafka_spec.rb