logstash-integration-kafka 10.7.5-java → 10.8.1-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5e76697fe666ea555f7256a8a42865a749ee00e05f9e7294d89c481b1ba6a7c8
- data.tar.gz: 5cfffe44f6e36776efb87a77e3a30ac03cdc802ae769aa423abdae4e0a36a6d5
+ metadata.gz: ff8065a6958598d8e33ecd2f78f27d7ec2c0ce1334516966d41a4426d437a4bb
+ data.tar.gz: f5a04ae25fc6394751fd63ff0c76171db82b7fccdd9962d35c94c06f8b516c39
  SHA512:
- metadata.gz: 9052d2ae8570e274840882751383a0cfb3c76e6858ce0519ccfdf2be8ca773b0becc9c27e1ec0f52319004b5ee21e4fdd32d60affed719f5b54b4772b4329541
- data.tar.gz: ff4924b87505befa7f19ddfc2699c109fbdcd53c313b4b7cb5775b7fe6de2853cc98480fae1c9267463e88eac3dbf8c2484178af803505fb820aa0b26af4e5e9
+ metadata.gz: d6a2630bb76e853e185c02933fe065be84e6d0f863e255e611e67b9de772b5661d09cf74fb8ba0e1df035e2742a9fbf73a3b00f2bb87801414e65d5043c9491c
+ data.tar.gz: e6ff413abd74c78f463d95f7c476fdecc2005cee804f426e56056e66300a785cf0789f73b5aa1d5aed4966d42a34a007b52361744612ecb5420891ff493c0df0
data/CHANGELOG.md CHANGED
@@ -1,3 +1,17 @@
+ ## 10.8.1
+ - [DOC] Removed a setting recommendation that is no longer applicable for Kafka 2.0+ [#99](https://github.com/logstash-plugins/logstash-integration-kafka/pull/99)
+
+ ## 10.8.0
+ - Added config setting to enable schema registry validation to be skipped when an authentication scheme unsupported
+   by the validator is used [#97](https://github.com/logstash-plugins/logstash-integration-kafka/pull/97)
+
+ ## 10.7.7
+ - Fix: Correct the settings to allow basic auth to work properly, either by setting `schema_registry_key/secret` or embedding username/password in the
+   url [#94](https://github.com/logstash-plugins/logstash-integration-kafka/pull/94)
+
+ ## 10.7.6
+ - Test: specify development dependency version [#91](https://github.com/logstash-plugins/logstash-integration-kafka/pull/91)
+
  ## 10.7.5
  - Improved error handling in the input plugin to avoid errors 'escaping' from the plugin, and crashing the logstash
    process [#87](https://github.com/logstash-plugins/logstash-integration-kafka/pull/87)
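
The 10.7.7 entry above describes two interchangeable ways of passing basic-auth credentials to the schema registry. A minimal sketch of both, written as the plain config hashes the specs in this diff use (the topic name is a placeholder; the barney/changeme credentials are borrowed from the test fixtures below):

    # Style 1: dedicated settings; the plugin maps these to USER_INFO credentials.
    config_with_key_and_secret = {
      'topics'                 => ['example_topic'],        # placeholder
      'schema_registry_url'    => 'http://localhost:8081',
      'schema_registry_key'    => 'barney',
      'schema_registry_secret' => 'changeme'
    }

    # Style 2: credentials embedded directly in the registry URL.
    config_with_embedded_credentials = {
      'topics'              => ['example_topic'],           # placeholder
      'schema_registry_url' => 'http://barney:changeme@localhost:8081'
    }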
data/docs/input-kafka.asciidoc CHANGED
@@ -128,6 +128,7 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more details
  | <<plugins-{type}s-{plugin}-schema_registry_proxy>> |<<uri,uri>>|No
  | <<plugins-{type}s-{plugin}-schema_registry_secret>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-schema_registry_url>> |<<uri,uri>>|No
+ | <<plugins-{type}s-{plugin}-schema_registry_validation>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-security_protocol>> |<<string,string>>, one of `["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"]`|No
  | <<plugins-{type}s-{plugin}-send_buffer_bytes>> |<<number,number>>|No
  | <<plugins-{type}s-{plugin}-session_timeout_ms>> |<<number,number>>|No
@@ -414,7 +415,6 @@ The maximum delay between invocations of poll() when using consumer group management
  an upper bound on the amount of time that the consumer can be idle before fetching more records.
  If poll() is not called before expiration of this timeout, then the consumer is considered failed and
  the group will rebalance in order to reassign the partitions to another member.
- The value of the configuration `request_timeout_ms` must always be larger than `max_poll_interval_ms`. ???
 
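For orientation, a hedged sketch of how these settings interact in a consumer config; the values are illustrative, not recommendations from this plugin's docs (300000 ms and 500 are the Kafka client defaults):

    # If more than max_poll_interval_ms elapses between poll() calls, the broker
    # treats this consumer as failed and rebalances its partitions away.
    # Lowering max_poll_records shortens each poll loop, which helps slow
    # pipelines stay inside the interval.
    config = {
      'topics'               => ['example_topic'],  # placeholder
      'group_id'             => 'example_group',    # placeholder
      'max_poll_interval_ms' => 300_000,            # Kafka client default: 5 minutes
      'max_poll_records'     => 500                 # Kafka client default
    }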
  [id="plugins-{type}s-{plugin}-max_poll_records"]
  ===== `max_poll_records`
@@ -576,6 +576,18 @@ The schemas must follow a naming convention with the pattern <topic name>-value.
  Use either the Schema Registry config option or the
  <<plugins-{type}s-{plugin}-value_deserializer_class>> config option, but not both.
 
+ [id="plugins-{type}s-{plugin}-schema_registry_validation"]
+ ===== `schema_registry_validation`
+
+ * Value can be either of: `auto`, `skip`
+ * Default value is `"auto"`
+
+ NOTE: Under most circumstances, the default setting of `auto` should not need to be changed.
+
+ When using the schema registry, the plugin by default checks connectivity and validates the schema registry during plugin registration, before events are processed.
+ In some circumstances, this process may fail when it tries to validate an authenticated schema registry, causing the plugin to crash.
+ This setting allows the plugin to skip validation during registration, which allows the plugin to continue and events to be processed. Note that an incorrectly configured schema registry will still stop the plugin from processing events.
+
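
A minimal sketch of a config that opts out of the registration-time check, in the same hash style the specs in this diff use (host and topic are placeholders):

    # Skip the connectivity/subject check normally run at plugin registration.
    # Useful when the registry uses an auth scheme the validator cannot handle;
    # a genuinely misconfigured registry will still fail once events are consumed.
    config = {
      'topics'                     => ['example_topic'],      # placeholder
      'schema_registry_url'        => 'http://localhost:8081',
      'schema_registry_validation' => 'skip'
    }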
  [id="plugins-{type}s-{plugin}-security_protocol"]
  ===== `security_protocol`
 
data/lib/logstash/inputs/kafka.rb CHANGED
@@ -433,13 +433,16 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
    if schema_registry_url
      props.put(kafka::VALUE_DESERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroDeserializer.java_class)
      serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
-     props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, schema_registry_url.to_s)
+     props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, schema_registry_url.uri.to_s)
      if schema_registry_proxy && !schema_registry_proxy.empty?
        props.put(serdes_config::PROXY_HOST, @schema_registry_proxy_host)
        props.put(serdes_config::PROXY_PORT, @schema_registry_proxy_port)
      end
      if schema_registry_key && !schema_registry_key.empty?
+       props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
        props.put(serdes_config::USER_INFO_CONFIG, schema_registry_key + ":" + schema_registry_secret.value)
+     else
+       props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'URL')
      end
    end
    if security_protocol == "SSL"
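
The net effect of the change above is easiest to see in the serdes properties it produces. A sketch of the two outcomes; the property keys are the string values of the Confluent constants used above, and the barney/changeme credentials are placeholders from the test fixtures:

    # schema_registry_key/secret set => credentials come from user info:
    props_from_key_and_secret = {
      'schema.registry.url'           => 'http://localhost:8081',
      'basic.auth.credentials.source' => 'USER_INFO',
      'basic.auth.user.info'          => 'barney:changeme'
    }

    # no key/secret => credentials (if any) are read from the URL itself:
    props_from_url = {
      'schema.registry.url'           => 'http://barney:changeme@localhost:8081',
      'basic.auth.credentials.source' => 'URL'
    }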
@@ -22,6 +22,10 @@ module LogStash
    # Option to set the proxy of the Schema Registry.
    # This option permits defining a proxy to be used to reach the schema registry service instance.
    config :schema_registry_proxy, :validate => :uri
+
+   # Option to skip validating the schema registry during registration. This can be useful when using
+   # certificate-based auth.
+   config :schema_registry_validation, :validate => ['auto', 'skip'], :default => 'auto'
  end
 
  def check_schema_registry_parameters
@@ -29,10 +33,21 @@ module LogStash
      check_for_schema_registry_conflicts
      @schema_registry_proxy_host, @schema_registry_proxy_port = split_proxy_into_host_and_port(schema_registry_proxy)
      check_for_key_and_secret
-     check_for_schema_registry_connectivity_and_subjects
+     check_for_schema_registry_connectivity_and_subjects if schema_registry_validation?
    end
  end
 
+ def schema_registry_validation?
+   return false if schema_registry_validation.to_s == 'skip'
+   return false if using_kerberos? # pre-validation doesn't support kerberos
+
+   true
+ end
+
+ def using_kerberos?
+   security_protocol == "SASL_PLAINTEXT" || security_protocol == "SASL_SSL"
+ end
+
  private
  def check_for_schema_registry_conflicts
    if @value_deserializer_class != LogStash::Inputs::Kafka::DEFAULT_DESERIALIZER_CLASS
@@ -53,9 +68,8 @@ module LogStash
      options[:auth] = {:user => schema_registry_key, :password => schema_registry_secret.value}
    end
    client = Manticore::Client.new(options)
-
    begin
-     response = client.get(@schema_registry_url.to_s + '/subjects').body
+     response = client.get(@schema_registry_url.uri.to_s + '/subjects').body
    rescue Manticore::ManticoreException => e
      raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
    end
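
Taken together, the two predicates above mean the registration-time check runs only when `schema_registry_validation` is `auto` (the default) and no SASL/kerberos protocol is configured. A standalone restatement for illustration (the method name here is hypothetical, not part of the plugin):

    def validate_schema_registry?(validation_setting, security_protocol)
      return false if validation_setting.to_s == 'skip'
      # pre-validation doesn't support kerberos (SASL) authentication
      return false if %w[SASL_PLAINTEXT SASL_SSL].include?(security_protocol)
      true
    end

    validate_schema_registry?('auto', 'PLAINTEXT')  # => true  (default behaviour)
    validate_schema_registry?('auto', 'SASL_SSL')   # => false (kerberos in play)
    validate_schema_registry?('skip', 'SSL')        # => false (explicitly skipped)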
data/logstash-integration-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
    s.name = 'logstash-integration-kafka'
-   s.version = '10.7.5'
+   s.version = '10.8.1'
    s.licenses = ['Apache-2.0']
    s.summary = "Integration with Kafka - input and output plugins"
    s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
@@ -51,6 +51,7 @@ Gem::Specification.new do |s|
 
    s.add_development_dependency 'logstash-devutils'
    s.add_development_dependency 'rspec-wait'
-   s.add_development_dependency 'ruby-kafka'
+   s.add_development_dependency 'digest-crc', '~> 0.5.1' # 0.6.0 started using a C-ext
+   s.add_development_dependency 'ruby-kafka' # depends on digest-crc
    s.add_development_dependency 'snappy'
  end
data/spec/fixtures/jaas.config ADDED
@@ -0,0 +1,5 @@
+ SchemaRegistry-Props {
+   org.eclipse.jetty.jaas.spi.PropertyFileLoginModule required
+   file="build/confluent_platform/etc/schema-registry/pwd"
+   debug="true";
+ };
data/spec/fixtures/pwd ADDED
@@ -0,0 +1,5 @@
+ fred: OBF:1w8t1tvf1w261w8v1w1c1tvn1w8x,user,admin
+ barney: changeme,user,developer
+ admin:admin,admin
+ betty: MD5:164c88b302622e17050af52c89945d44,user
+ wilma: CRYPT:adpexzg3FUZAk,admin,sr-user
data/spec/integration/inputs/kafka_spec.rb CHANGED
@@ -206,6 +206,16 @@ end
 
 
  describe "schema registry connection options" do
+   schema_registry = Manticore::Client.new
+   before(:all) do
+     shutdown_schema_registry
+     startup_schema_registry(schema_registry)
+   end
+
+   after(:all) do
+     shutdown_schema_registry
+   end
+
    context "remote endpoint validation" do
      it "should fail if not reachable" do
        config = {'schema_registry_url' => 'http://localnothost:8081'}
@@ -232,8 +242,7 @@ describe "schema registry connection options" do
    end
 
    after(:each) do
-     schema_registry_client = Manticore::Client.new
-     delete_remote_schema(schema_registry_client, SUBJECT_NAME)
+     delete_remote_schema(schema_registry, SUBJECT_NAME)
    end
 
    it "should correctly complete registration phase" do
@@ -264,9 +273,25 @@ end
 
  # AdminClientConfig = org.apache.kafka.clients.admin.AdminClientConfig
 
+ def startup_schema_registry(schema_registry, auth=false)
+   system('./stop_schema_registry.sh')
+   auth ? system('./start_auth_schema_registry.sh') : system('./start_schema_registry.sh')
+   url = auth ? "http://barney:changeme@localhost:8081" : "http://localhost:8081"
+   Stud.try(20.times, [Manticore::SocketException, StandardError, RSpec::Expectations::ExpectationNotMetError]) do
+     expect(schema_registry.get(url).code).to eq(200)
+   end
+ end
+
  describe "Schema registry API", :integration => true do
+   schema_registry = Manticore::Client.new
+
+   before(:all) do
+     startup_schema_registry(schema_registry)
+   end
 
-   let(:schema_registry) { Manticore::Client.new }
+   after(:all) do
+     shutdown_schema_registry
+   end
 
    context 'listing subject on clean instance' do
      it "should return an empty set" do
@@ -292,37 +317,58 @@ describe "Schema registry API", :integration => true do
      expect( subjects ).to be_empty
    end
  end
+ end
+
+ def shutdown_schema_registry
+   system('./stop_schema_registry.sh')
+ end
+
+ describe "Deserializing with the schema registry", :integration => true do
+   schema_registry = Manticore::Client.new
+
+   shared_examples 'it reads from a topic using a schema registry' do |with_auth|
+
+     before(:all) do
+       shutdown_schema_registry
+       startup_schema_registry(schema_registry, with_auth)
+     end
+
+     after(:all) do
+       shutdown_schema_registry
+     end
 
-   context 'use the schema to serialize' do
      after(:each) do
-       expect( schema_registry.delete('http://localhost:8081/subjects/topic_avro-value').code ).to be(200)
+       expect( schema_registry.delete("#{subject_url}/#{avro_topic_name}-value").code ).to be(200)
        sleep 1
-       expect( schema_registry.delete('http://localhost:8081/subjects/topic_avro-value?permanent=true').code ).to be(200)
+       expect( schema_registry.delete("#{subject_url}/#{avro_topic_name}-value?permanent=true").code ).to be(200)
 
        Stud.try(3.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
          wait(10).for do
-           subjects = JSON.parse schema_registry.get('http://localhost:8081/subjects').body
+           subjects = JSON.parse schema_registry.get(subject_url).body
            subjects.empty?
          end.to be_truthy
        end
      end
 
-     let(:group_id_1) {rand(36**8).to_s(36)}
-
-     let(:avro_topic_name) { "topic_avro" }
-
-     let(:plain_config) do
-       { 'schema_registry_url' => 'http://localhost:8081',
-         'topics' => [avro_topic_name],
-         'codec' => 'plain',
-         'group_id' => group_id_1,
-         'auto_offset_reset' => 'earliest' }
+     let(:base_config) do
+       {
+         'topics' => [avro_topic_name],
+         'codec' => 'plain',
+         'group_id' => group_id_1,
+         'auto_offset_reset' => 'earliest'
+       }
      end
 
-     def delete_topic_if_exists(topic_name)
+     let(:group_id_1) {rand(36**8).to_s(36)}
+
+     def delete_topic_if_exists(topic_name, user = nil, password = nil)
        props = java.util.Properties.new
        props.put(Java::org.apache.kafka.clients.admin.AdminClientConfig::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
-
+       serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
+       unless user.nil?
+         props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
+         props.put(serdes_config::USER_INFO_CONFIG, "#{user}:#{password}")
+       end
        admin_client = org.apache.kafka.clients.admin.AdminClient.create(props)
       topics_list = admin_client.listTopics().names().get()
       if topics_list.contains(topic_name)
@@ -331,7 +377,7 @@ describe "Schema registry API", :integration => true do
      end
    end
 
-   def write_some_data_to(topic_name)
+   def write_some_data_to(topic_name, user = nil, password = nil)
      props = java.util.Properties.new
      config = org.apache.kafka.clients.producer.ProducerConfig
 
@@ -339,6 +385,10 @@ describe "Schema registry API", :integration => true do
      props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081")
 
      props.put(config::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
+     unless user.nil?
+       props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
+       props.put(serdes_config::USER_INFO_CONFIG, "#{user}:#{password}")
+     end
      props.put(config::KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.java_class)
      props.put(config::VALUE_SERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroSerializer.java_class)
 
@@ -360,11 +410,11 @@ describe "Schema registry API", :integration => true do
    end
 
    it "stored a new schema using Avro Kafka serdes" do
-     delete_topic_if_exists avro_topic_name
-     write_some_data_to avro_topic_name
+     auth ? delete_topic_if_exists(avro_topic_name, user, password) : delete_topic_if_exists(avro_topic_name)
+     auth ? write_some_data_to(avro_topic_name, user, password) : write_some_data_to(avro_topic_name)
 
-     subjects = JSON.parse schema_registry.get('http://localhost:8081/subjects').body
-     expect( subjects ).to contain_exactly("topic_avro-value")
+     subjects = JSON.parse schema_registry.get(subject_url).body
+     expect( subjects ).to contain_exactly("#{avro_topic_name}-value")
 
      num_events = 1
      queue = consume_messages(plain_config, timeout: 30, event_count: num_events)
@@ -375,4 +425,43 @@ describe "Schema registry API", :integration => true do
      expect( elem.get("map_field")["inner_field"] ).to eq("inner value")
    end
  end
+
+   context 'with an unauthed schema registry' do
+     let(:auth) { false }
+     let(:avro_topic_name) { "topic_avro" }
+     let(:subject_url) { "http://localhost:8081/subjects" }
+     let(:plain_config) { base_config.merge!({'schema_registry_url' => "http://localhost:8081"}) }
+
+     it_behaves_like 'it reads from a topic using a schema registry', false
+   end
+
+   context 'with an authed schema registry' do
+     let(:auth) { true }
+     let(:user) { "barney" }
+     let(:password) { "changeme" }
+     let(:avro_topic_name) { "topic_avro_auth" }
+     let(:subject_url) { "http://#{user}:#{password}@localhost:8081/subjects" }
+
+     context 'using schema_registry_key' do
+       let(:plain_config) do
+         base_config.merge!({
+           'schema_registry_url' => "http://localhost:8081",
+           'schema_registry_key' => user,
+           'schema_registry_secret' => password
+         })
+       end
+
+       it_behaves_like 'it reads from a topic using a schema registry', true
+     end
+
+     context 'using schema_registry_url' do
+       let(:plain_config) do
+         base_config.merge!({
+           'schema_registry_url' => "http://#{user}:#{password}@localhost:8081"
+         })
+       end
+
+       it_behaves_like 'it reads from a topic using a schema registry', true
+     end
+   end
  end
data/spec/unit/inputs/kafka_spec.rb CHANGED
@@ -177,11 +177,16 @@ describe LogStash::Inputs::Kafka do
    end
  end
 
- context "register parameter verification" do
+ describe "schema registry parameter verification" do
+   let(:base_config) do {
+     'schema_registry_url' => 'http://localhost:8081',
+     'topics' => ['logstash'],
+     'consumer_threads' => 4
+   }
+   end
+
    context "schema_registry_url" do
-     let(:config) do
-       { 'schema_registry_url' => 'http://localhost:8081', 'topics' => ['logstash'], 'consumer_threads' => 4 }
-     end
+     let(:config) { base_config }
 
      it "conflict with value_deserializer_class should fail" do
        config['value_deserializer_class'] = 'my.fantasy.Deserializer'
@@ -194,19 +199,63 @@ describe LogStash::Inputs::Kafka do
      end
    end
 
-   context "decorate_events" do
-     let(:config) { { 'decorate_events' => 'extended'} }
+   context 'when kerberos auth is used' do
+     ['SASL_SSL', 'SASL_PLAINTEXT'].each do |protocol|
+       context "with #{protocol}" do
+         ['auto', 'skip'].each do |vsr|
+           context "when schema_registry_validation is #{vsr}" do
+             let(:config) { base_config.merge({'security_protocol' => protocol,
+                                               'schema_registry_validation' => vsr})
+             }
+             it 'skips verification' do
+               expect(subject).not_to receive(:check_for_schema_registry_connectivity_and_subjects)
+               expect { subject.register }.not_to raise_error
+             end
+           end
+         end
+       end
+     end
+   end
 
-     it "should raise error for invalid value" do
-       config['decorate_events'] = 'avoid'
-       expect { subject.register }.to raise_error LogStash::ConfigurationError, /Something is wrong with your configuration./
+   context 'when kerberos auth is not used' do
+     context "when schema_registry_validation is set to auto" do
+       let(:config) { base_config.merge({'schema_registry_validation' => 'auto'})}
+       it 'performs verification' do
+         expect(subject).to receive(:check_for_schema_registry_connectivity_and_subjects)
+         expect { subject.register }.not_to raise_error
+       end
      end
 
-     it "should map old true boolean value to :record_props mode" do
-       config['decorate_events'] = "true"
-       subject.register
-       expect(subject.metadata_mode).to include(:record_props)
+     context "when schema_registry_validation is set to default" do
+       let(:config) { base_config }
+       it 'performs verification' do
+         expect(subject).to receive(:check_for_schema_registry_connectivity_and_subjects)
+         expect { subject.register }.not_to raise_error
+       end
      end
+
+     context "when schema_registry_validation is set to skip" do
+       let(:config) { base_config.merge({'schema_registry_validation' => 'skip'})}
+       it 'should skip verification' do
+         expect(subject).not_to receive(:check_for_schema_registry_connectivity_and_subjects)
+         expect { subject.register }.not_to raise_error
+       end
+     end
+   end
+ end
+
+ context "decorate_events" do
+   let(:config) { { 'decorate_events' => 'extended'} }
+
+   it "should raise error for invalid value" do
+     config['decorate_events'] = 'avoid'
+     expect { subject.register }.to raise_error LogStash::ConfigurationError, /Something is wrong with your configuration./
+   end
+
+   it "should map old true boolean value to :record_props mode" do
+     config['decorate_events'] = "true"
+     subject.register
+     expect(subject.metadata_mode).to include(:record_props)
    end
  end
 
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-kafka
  version: !ruby/object:Gem::Version
-   version: 10.7.5
+   version: 10.8.1
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-05-26 00:00:00.000000000 Z
+ date: 2021-07-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
@@ -168,6 +168,20 @@ dependencies:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.5.1
+   name: digest-crc
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.5.1
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
@@ -221,6 +235,8 @@ files:
  - lib/logstash/plugin_mixins/kafka_support.rb
  - logstash-integration-kafka.gemspec
  - spec/check_docs_spec.rb
+ - spec/fixtures/jaas.config
+ - spec/fixtures/pwd
  - spec/fixtures/trust-store_stub.jks
  - spec/integration/inputs/kafka_spec.rb
  - spec/integration/outputs/kafka_spec.rb
@@ -271,6 +287,8 @@ specification_version: 4
  summary: Integration with Kafka - input and output plugins
  test_files:
  - spec/check_docs_spec.rb
+ - spec/fixtures/jaas.config
+ - spec/fixtures/pwd
  - spec/fixtures/trust-store_stub.jks
  - spec/integration/inputs/kafka_spec.rb
  - spec/integration/outputs/kafka_spec.rb