logstash-integration-kafka 10.7.6-java → 10.8.2-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: fc9c66beb8b3a0074d0fa5090dc8896eedd796428e4b7644d8d96e159c7bed2f
- data.tar.gz: 780f7aeac690ee7a23e954e94c2359d8e9527bcd1464dde007ce7e55c4a023f8
+ metadata.gz: 2e11499f791fd2bd71102ddb6f8e52fcbd9791feecf4107c7ebd27dc9334cc34
+ data.tar.gz: 779cd381a6e53155f61457229df4fc827ceda02ef4e069d4eab0240dafa4ab83
  SHA512:
- metadata.gz: db2e1a6eefc076887de8b42944fe5e0d4ddf025915faf55a4f150710772b08c170e0b4bd629871ca89f4b45fa36b572eb15191c284dec5ad62409c228343e9c5
- data.tar.gz: b1ae9060e3cf83f6b7c6ca8d949ab2f78696b7a983c7154d8db01e2004875ece63f202ac5778095112adf9597b2d54cedac0ed79682b59bb0bd1b9002265fd36
+ metadata.gz: 5577048fe1adee0c28b012c0bf6eb0ff594b7be4760f7b7ebdf9072473fb0a1806e24997dd7b0659c21f205396a4d7eccb10bc4e789541c35fb7e32610562632
+ data.tar.gz: 7ee0e7428d63957a57e927a3f0408bcdddf94efad944c5cf0acbac3c1031be55b6751ad0290ea134e5201f10f9e26f2804c6d68150e925f651c9db27847bab5e
data/CHANGELOG.md CHANGED
@@ -1,3 +1,19 @@
+ ## 10.8.2
+ - [DOC] Updates description of `enable_auto_commit=false` to clarify that the commit happens after data is fetched AND written to the queue [#90](https://github.com/logstash-plugins/logstash-integration-kafka/pull/90)
+ - Fix: update to Gradle 7 [#104](https://github.com/logstash-plugins/logstash-integration-kafka/pull/104)
+ - [DOC] Clarify Kafka client does not support proxy [#103](https://github.com/logstash-plugins/logstash-integration-kafka/pull/103)
+
+ ## 10.8.1
+ - [DOC] Removed a setting recommendation that is no longer applicable for Kafka 2.0+ [#99](https://github.com/logstash-plugins/logstash-integration-kafka/pull/99)
+
+ ## 10.8.0
+ - Added config setting to enable schema registry validation to be skipped when an authentication scheme unsupported
+   by the validator is used [#97](https://github.com/logstash-plugins/logstash-integration-kafka/pull/97)
+
+ ## 10.7.7
+ - Fix: Correct the settings to allow basic auth to work properly, either by setting `schema_registry_key/secret` or embedding username/password in the
+   url [#94](https://github.com/logstash-plugins/logstash-integration-kafka/pull/94)
+
  ## 10.7.6
  - Test: specify development dependency version [#91](https://github.com/logstash-plugins/logstash-integration-kafka/pull/91)
 
@@ -42,6 +42,13 @@ This input supports connecting to Kafka over:
 
  By default security is disabled but can be turned on as needed.
 
+ [NOTE]
+ =======
+ This plugin does not support using a proxy when communicating to the Kafka broker.
+
+ This plugin does support using a proxy when communicating to the Schema Registry using the <<plugins-{type}s-{plugin}-schema_registry_proxy>> option.
+ =======
+
  The Logstash Kafka consumer handles group management and uses the default offset management
  strategy using Kafka topics.
 
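For context on the `schema_registry_proxy` option referenced in that note: the plugin accepts a proxy URI and splits it into the host and port that the Confluent deserializer config expects (the `split_proxy_into_host_and_port` helper appears in the mixin changes further down this diff). A minimal Ruby sketch of that split; the implementation shown here is an assumption, not the plugin's actual code:

```ruby
require 'uri'

# Hypothetical stand-in for the mixin's split_proxy_into_host_and_port helper:
# takes a proxy URI such as "http://proxy.example.com:3128" and returns the
# host and port handed to the Confluent PROXY_HOST / PROXY_PORT settings.
def split_proxy_into_host_and_port(proxy_uri)
  return nil, nil if proxy_uri.nil? || proxy_uri.to_s.empty?
  uri = URI(proxy_uri.to_s)
  [uri.host, uri.port]
end

host, port = split_proxy_into_host_and_port('http://proxy.example.com:3128')
puts "#{host}:#{port}" # => proxy.example.com:3128
```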
@@ -128,6 +135,7 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
  | <<plugins-{type}s-{plugin}-schema_registry_proxy>> |<<uri,uri>>|No
  | <<plugins-{type}s-{plugin}-schema_registry_secret>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-schema_registry_url>> |<<uri,uri>>|No
+ | <<plugins-{type}s-{plugin}-schema_registry_validation>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-security_protocol>> |<<string,string>>, one of `["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"]`|No
  | <<plugins-{type}s-{plugin}-send_buffer_bytes>> |<<number,number>>|No
  | <<plugins-{type}s-{plugin}-session_timeout_ms>> |<<number,number>>|No
@@ -275,7 +283,7 @@ which the consumption will begin.
 
  If true, periodically commit to Kafka the offsets of messages already returned by
  the consumer. If value is `false` however, the offset is committed every time the
- consumer fetches the data from the topic.
+ consumer writes data fetched from the topic to the in-memory or persistent queue.
 
  [id="plugins-{type}s-{plugin}-exclude_internal_topics"]
  ===== `exclude_internal_topics`
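The doc fix above pins down an ordering guarantee: with `enable_auto_commit => false`, offsets are committed only after the fetched records have been written to Logstash's queue, so a crash between fetch and enqueue cannot silently drop data. A minimal JRuby sketch of that ordering against the Kafka consumer API; the broker address, topic, group id, and the plain `Queue` stand-in are illustrative assumptions, not the plugin's actual implementation:

```ruby
require 'java'
# Assumes the Kafka client jar is on the classpath, as it is inside this plugin.

props = java.util.Properties.new
props.put('bootstrap.servers', 'localhost:9092')  # illustrative broker
props.put('group.id', 'logstash')                 # illustrative group id
props.put('enable.auto.commit', 'false')          # the setting documented above
props.put('key.deserializer', 'org.apache.kafka.common.serialization.StringDeserializer')
props.put('value.deserializer', 'org.apache.kafka.common.serialization.StringDeserializer')

consumer = org.apache.kafka.clients.consumer.KafkaConsumer.new(props)
consumer.subscribe(['logstash'])                  # illustrative topic

queue = Queue.new                                 # stand-in for Logstash's in-memory/persistent queue
records = consumer.poll(java.time.Duration.of_millis(500))
records.each { |record| queue << record.value }   # 1) write fetched data to the queue...
consumer.commit_sync                              # 2) ...then commit, so offsets never outrun queued data
```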
@@ -414,7 +422,6 @@ The maximum delay between invocations of poll() when using consumer group manage
  an upper bound on the amount of time that the consumer can be idle before fetching more records.
  If poll() is not called before expiration of this timeout, then the consumer is considered failed and
  the group will rebalance in order to reassign the partitions to another member.
- The value of the configuration `request_timeout_ms` must always be larger than `max_poll_interval_ms`. ???
 
  [id="plugins-{type}s-{plugin}-max_poll_records"]
  ===== `max_poll_records`
@@ -576,6 +583,18 @@ The schemas must follow a naming convention with the pattern <topic name>-value.
  Use either the Schema Registry config option or the
  <<plugins-{type}s-{plugin}-value_deserializer_class>> config option, but not both.
 
+ [id="plugins-{type}s-{plugin}-schema_registry_validation"]
+ ===== `schema_registry_validation`
+
+ * Value can be either of: `auto`, `skip`
+ * Default value is `"auto"`
+
+ NOTE: Under most circumstances, the default setting of `auto` should not need to be changed.
+
+ When using the schema registry, the plugin by default checks connectivity and validates the schema registry during plugin registration, before events are processed.
+ In some circumstances, this process may fail when it tries to validate an authenticated schema registry, causing the plugin to crash.
+ This setting allows the plugin to skip validation during registration, letting it start up and process events. Note that an incorrectly configured schema registry will still stop the plugin from processing events.
+
  [id="plugins-{type}s-{plugin}-security_protocol"]
  ===== `security_protocol`
 
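Condensed to a predicate, the decision this new option introduces (implemented in the mixin changes further down this diff) looks roughly like the following; the standalone-method form and its parameters are an illustrative restatement, not the plugin's API:

```ruby
# Sketch of the validation gate added in 10.8.0: pre-validation is skipped
# either explicitly (`schema_registry_validation => skip`) or implicitly when
# a kerberos-style security protocol is in use, which the validator cannot handle.
def validate_schema_registry?(schema_registry_validation, security_protocol)
  return false if schema_registry_validation.to_s == 'skip'
  return false if %w[SASL_PLAINTEXT SASL_SSL].include?(security_protocol)
  true
end

validate_schema_registry?('auto', 'PLAINTEXT') # => true  (validate as before)
validate_schema_registry?('skip', 'PLAINTEXT') # => false (explicitly skipped)
validate_schema_registry?('auto', 'SASL_SSL')  # => false (kerberos: skipped)
```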
@@ -64,6 +64,8 @@ https://kafka.apache.org/{kafka_client_doc}/documentation.html#theproducer
  Kafka producer configuration:
  https://kafka.apache.org/{kafka_client_doc}/documentation.html#producerconfigs
 
+ NOTE: This plugin does not support using a proxy when communicating to the Kafka broker.
+
  [id="plugins-{type}s-{plugin}-options"]
  ==== Kafka Output Configuration Options
 
@@ -433,13 +433,16 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
      if schema_registry_url
        props.put(kafka::VALUE_DESERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroDeserializer.java_class)
        serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
-       props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, schema_registry_url.to_s)
+       props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, schema_registry_url.uri.to_s)
        if schema_registry_proxy && !schema_registry_proxy.empty?
          props.put(serdes_config::PROXY_HOST, @schema_registry_proxy_host)
          props.put(serdes_config::PROXY_PORT, @schema_registry_proxy_port)
        end
        if schema_registry_key && !schema_registry_key.empty?
+         props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
          props.put(serdes_config::USER_INFO_CONFIG, schema_registry_key + ":" + schema_registry_secret.value)
+       else
+         props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'URL')
        end
      end
      if security_protocol == "SSL"
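The change above is the basic-auth fix from 10.7.7: explicit `schema_registry_key`/`schema_registry_secret` settings select the `USER_INFO` credentials source, while otherwise the client falls back to `URL`, which reads credentials embedded in `schema_registry_url`. A sketch of the two paths using the Confluent property names those constants resolve to; the plain hash is a stand-in for the Java `Properties` object, and the helper itself is hypothetical:

```ruby
# Illustration of the two basic-auth paths wired up above.
def schema_registry_auth_props(schema_registry_key, schema_registry_secret)
  if schema_registry_key && !schema_registry_key.empty?
    { 'basic.auth.credentials.source' => 'USER_INFO',  # explicit key/secret options
      'basic.auth.user.info' => "#{schema_registry_key}:#{schema_registry_secret}" }
  else
    { 'basic.auth.credentials.source' => 'URL' }       # credentials embedded in schema_registry_url
  end
end

schema_registry_auth_props('barney', 'changeme')
# => {"basic.auth.credentials.source"=>"USER_INFO", "basic.auth.user.info"=>"barney:changeme"}
schema_registry_auth_props(nil, nil)
# => {"basic.auth.credentials.source"=>"URL"}  # e.g. http://user:pass@localhost:8081
```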
@@ -22,6 +22,10 @@ module LogStash
      # Option to set the proxy of the Schema Registry.
      # This option permits to define a proxy to be used to reach the schema registry service instance.
      config :schema_registry_proxy, :validate => :uri
+
+     # Option to skip validating the schema registry during registration. This can be useful when using
+     # certificate based auth
+     config :schema_registry_validation, :validate => ['auto', 'skip'], :default => 'auto'
    end
 
    def check_schema_registry_parameters
@@ -29,10 +33,21 @@ module LogStash
        check_for_schema_registry_conflicts
        @schema_registry_proxy_host, @schema_registry_proxy_port = split_proxy_into_host_and_port(schema_registry_proxy)
        check_for_key_and_secret
-       check_for_schema_registry_connectivity_and_subjects
+       check_for_schema_registry_connectivity_and_subjects if schema_registry_validation?
      end
    end
 
+   def schema_registry_validation?
+     return false if schema_registry_validation.to_s == 'skip'
+     return false if using_kerberos? # pre-validation doesn't support kerberos
+
+     true
+   end
+
+   def using_kerberos?
+     security_protocol == "SASL_PLAINTEXT" || security_protocol == "SASL_SSL"
+   end
+
    private
    def check_for_schema_registry_conflicts
      if @value_deserializer_class != LogStash::Inputs::Kafka::DEFAULT_DESERIALIZER_CLASS
@@ -53,9 +68,8 @@ module LogStash
        options[:auth] = {:user => schema_registry_key, :password => schema_registry_secret.value}
      end
      client = Manticore::Client.new(options)
-
      begin
-       response = client.get(@schema_registry_url.to_s + '/subjects').body
+       response = client.get(@schema_registry_url.uri.to_s + '/subjects').body
      rescue Manticore::ManticoreException => e
        raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
      end
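For reference, the pre-validation that `schema_registry_validation => skip` bypasses boils down to the probe shown above: an HTTP GET of the registry's `/subjects` endpoint via Manticore. A standalone sketch of that probe, with an illustrative URL and credentials:

```ruby
require 'manticore'
require 'json'

# Minimal version of the connectivity check: list the registry's subjects,
# optionally with basic auth (URL and credentials here are illustrative).
options = { auth: { user: 'barney', password: 'changeme' } }
client = Manticore::Client.new(options)

begin
  response = client.get('http://localhost:8081/subjects').body
  puts JSON.parse(response).inspect # => e.g. ["topic_avro-value"]
rescue Manticore::ManticoreException => e
  raise "Schema registry service doesn't respond, error: #{e.message}"
end
```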
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
    s.name = 'logstash-integration-kafka'
-   s.version = '10.7.6'
+   s.version = '10.8.2'
    s.licenses = ['Apache-2.0']
    s.summary = "Integration with Kafka - input and output plugins"
    s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/fixtures/jaas.config ADDED
@@ -0,0 +1,5 @@
+ SchemaRegistry-Props {
+   org.eclipse.jetty.jaas.spi.PropertyFileLoginModule required
+   file="build/confluent_platform/etc/schema-registry/pwd"
+   debug="true";
+ };
data/spec/fixtures/pwd ADDED
@@ -0,0 +1,5 @@
+ fred: OBF:1w8t1tvf1w261w8v1w1c1tvn1w8x,user,admin
+ barney: changeme,user,developer
+ admin:admin,admin
+ betty: MD5:164c88b302622e17050af52c89945d44,user
+ wilma: CRYPT:adpexzg3FUZAk,admin,sr-user
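The `pwd` fixture uses Jetty's property-file credential format, `user: credential[,role...]`, where the credential is plaintext or carries an `OBF:`, `MD5:`, or `CRYPT:` prefix; it backs the `PropertyFileLoginModule` declared in `jaas.config` above. A small Ruby sketch of producing the `MD5:` form; the helper name and password are illustrative, not taken from the fixture:

```ruby
require 'digest'

# Jetty-style MD5 credential: the literal prefix "MD5:" followed by the
# lowercase hex MD5 digest of the password.
def jetty_md5_credential(password)
  "MD5:#{Digest::MD5.hexdigest(password)}"
end

puts jetty_md5_credential('changeme') # => MD5:4cb9c8a8048fd02294477fcb1a41191a
```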
@@ -206,6 +206,16 @@ end
 
 
  describe "schema registry connection options" do
+   schema_registry = Manticore::Client.new
+   before(:all) do
+     shutdown_schema_registry
+     startup_schema_registry(schema_registry)
+   end
+
+   after(:all) do
+     shutdown_schema_registry
+   end
+
    context "remote endpoint validation" do
      it "should fail if not reachable" do
        config = {'schema_registry_url' => 'http://localnothost:8081'}
@@ -232,8 +242,7 @@ describe "schema registry connection options" do
    end
 
    after(:each) do
-     schema_registry_client = Manticore::Client.new
-     delete_remote_schema(schema_registry_client, SUBJECT_NAME)
+     delete_remote_schema(schema_registry, SUBJECT_NAME)
    end
 
    it "should correctly complete registration phase" do
@@ -264,9 +273,25 @@ end
 
  # AdminClientConfig = org.apache.kafka.clients.admin.AdminClientConfig
 
+ def startup_schema_registry(schema_registry, auth=false)
+   system('./stop_schema_registry.sh')
+   auth ? system('./start_auth_schema_registry.sh') : system('./start_schema_registry.sh')
+   url = auth ? "http://barney:changeme@localhost:8081" : "http://localhost:8081"
+   Stud.try(20.times, [Manticore::SocketException, StandardError, RSpec::Expectations::ExpectationNotMetError]) do
+     expect(schema_registry.get(url).code).to eq(200)
+   end
+ end
+
  describe "Schema registry API", :integration => true do
+   schema_registry = Manticore::Client.new
+
+   before(:all) do
+     startup_schema_registry(schema_registry)
+   end
 
-   let(:schema_registry) { Manticore::Client.new }
+   after(:all) do
+     shutdown_schema_registry
+   end
 
    context 'listing subject on clean instance' do
      it "should return an empty set" do
@@ -292,37 +317,58 @@ describe "Schema registry API", :integration => true do
        expect( subjects ).to be_empty
      end
    end
+ end
+
+ def shutdown_schema_registry
+   system('./stop_schema_registry.sh')
+ end
+
+ describe "Deserializing with the schema registry", :integration => true do
+   schema_registry = Manticore::Client.new
+
+   shared_examples 'it reads from a topic using a schema registry' do |with_auth|
+
+     before(:all) do
+       shutdown_schema_registry
+       startup_schema_registry(schema_registry, with_auth)
+     end
+
+     after(:all) do
+       shutdown_schema_registry
+     end
 
-   context 'use the schema to serialize' do
      after(:each) do
-       expect( schema_registry.delete('http://localhost:8081/subjects/topic_avro-value').code ).to be(200)
+       expect( schema_registry.delete("#{subject_url}/#{avro_topic_name}-value").code ).to be(200)
        sleep 1
-       expect( schema_registry.delete('http://localhost:8081/subjects/topic_avro-value?permanent=true').code ).to be(200)
+       expect( schema_registry.delete("#{subject_url}/#{avro_topic_name}-value?permanent=true").code ).to be(200)
 
        Stud.try(3.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
          wait(10).for do
-           subjects = JSON.parse schema_registry.get('http://localhost:8081/subjects').body
+           subjects = JSON.parse schema_registry.get(subject_url).body
            subjects.empty?
          end.to be_truthy
        end
      end
 
-     let(:group_id_1) {rand(36**8).to_s(36)}
-
-     let(:avro_topic_name) { "topic_avro" }
-
-     let(:plain_config) do
-       { 'schema_registry_url' => 'http://localhost:8081',
-         'topics' => [avro_topic_name],
-         'codec' => 'plain',
-         'group_id' => group_id_1,
-         'auto_offset_reset' => 'earliest' }
+     let(:base_config) do
+       {
+         'topics' => [avro_topic_name],
+         'codec' => 'plain',
+         'group_id' => group_id_1,
+         'auto_offset_reset' => 'earliest'
+       }
      end
 
-     def delete_topic_if_exists(topic_name)
+     let(:group_id_1) {rand(36**8).to_s(36)}
+
+     def delete_topic_if_exists(topic_name, user = nil, password = nil)
        props = java.util.Properties.new
        props.put(Java::org.apache.kafka.clients.admin.AdminClientConfig::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
-
+       serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
+       unless user.nil?
+         props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
+         props.put(serdes_config::USER_INFO_CONFIG, "#{user}:#{password}")
+       end
        admin_client = org.apache.kafka.clients.admin.AdminClient.create(props)
        topics_list = admin_client.listTopics().names().get()
        if topics_list.contains(topic_name)
@@ -331,7 +377,7 @@ describe "Schema registry API", :integration => true do
      end
    end
 
-   def write_some_data_to(topic_name)
+   def write_some_data_to(topic_name, user = nil, password = nil)
      props = java.util.Properties.new
      config = org.apache.kafka.clients.producer.ProducerConfig
 
@@ -339,6 +385,10 @@ describe "Schema registry API", :integration => true do
      props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081")
 
      props.put(config::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
+     unless user.nil?
+       props.put(serdes_config::BASIC_AUTH_CREDENTIALS_SOURCE, 'USER_INFO')
+       props.put(serdes_config::USER_INFO_CONFIG, "#{user}:#{password}")
+     end
      props.put(config::KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.java_class)
      props.put(config::VALUE_SERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroSerializer.java_class)
 
@@ -360,11 +410,11 @@ describe "Schema registry API", :integration => true do
    end
 
    it "stored a new schema using Avro Kafka serdes" do
-     delete_topic_if_exists avro_topic_name
-     write_some_data_to avro_topic_name
+     auth ? delete_topic_if_exists(avro_topic_name, user, password) : delete_topic_if_exists(avro_topic_name)
+     auth ? write_some_data_to(avro_topic_name, user, password) : write_some_data_to(avro_topic_name)
 
-     subjects = JSON.parse schema_registry.get('http://localhost:8081/subjects').body
-     expect( subjects ).to contain_exactly("topic_avro-value")
+     subjects = JSON.parse schema_registry.get(subject_url).body
+     expect( subjects ).to contain_exactly("#{avro_topic_name}-value")
 
      num_events = 1
      queue = consume_messages(plain_config, timeout: 30, event_count: num_events)
@@ -375,4 +425,43 @@ describe "Schema registry API", :integration => true do
        expect( elem.get("map_field")["inner_field"] ).to eq("inner value")
      end
    end
+
+   context 'with an unauthed schema registry' do
+     let(:auth) { false }
+     let(:avro_topic_name) { "topic_avro" }
+     let(:subject_url) { "http://localhost:8081/subjects" }
+     let(:plain_config) { base_config.merge!({'schema_registry_url' => "http://localhost:8081"}) }
+
+     it_behaves_like 'it reads from a topic using a schema registry', false
+   end
+
+   context 'with an authed schema registry' do
+     let(:auth) { true }
+     let(:user) { "barney" }
+     let(:password) { "changeme" }
+     let(:avro_topic_name) { "topic_avro_auth" }
+     let(:subject_url) { "http://#{user}:#{password}@localhost:8081/subjects" }
+
+     context 'using schema_registry_key' do
+       let(:plain_config) do
+         base_config.merge!({
+           'schema_registry_url' => "http://localhost:8081",
+           'schema_registry_key' => user,
+           'schema_registry_secret' => password
+         })
+       end
+
+       it_behaves_like 'it reads from a topic using a schema registry', true
+     end
+
+     context 'using schema_registry_url' do
+       let(:plain_config) do
+         base_config.merge!({
+           'schema_registry_url' => "http://#{user}:#{password}@localhost:8081"
+         })
+       end
+
+       it_behaves_like 'it reads from a topic using a schema registry', true
+     end
+   end
  end
@@ -177,11 +177,16 @@ describe LogStash::Inputs::Kafka do
      end
    end
 
- context "register parameter verification" do
+ describe "schema registry parameter verification" do
+   let(:base_config) do {
+       'schema_registry_url' => 'http://localhost:8081',
+       'topics' => ['logstash'],
+       'consumer_threads' => 4
+     }
+   end
+
    context "schema_registry_url" do
-     let(:config) do
-       { 'schema_registry_url' => 'http://localhost:8081', 'topics' => ['logstash'], 'consumer_threads' => 4 }
-     end
+     let(:config) { base_config }
 
      it "conflict with value_deserializer_class should fail" do
        config['value_deserializer_class'] = 'my.fantasy.Deserializer'
@@ -194,19 +199,63 @@ describe LogStash::Inputs::Kafka do
      end
    end
 
-   context "decorate_events" do
-     let(:config) { { 'decorate_events' => 'extended'} }
+   context 'when kerberos auth is used' do
+     ['SASL_SSL', 'SASL_PLAINTEXT'].each do |protocol|
+       context "with #{protocol}" do
+         ['auto', 'skip'].each do |vsr|
+           context "when schema_registry_validation is #{vsr}" do
+             let(:config) { base_config.merge({'security_protocol' => protocol,
+                                               'schema_registry_validation' => vsr})
+             }
+             it 'skips verification' do
+               expect(subject).not_to receive(:check_for_schema_registry_connectivity_and_subjects)
+               expect { subject.register }.not_to raise_error
+             end
+           end
+         end
+       end
+     end
+   end
 
-     it "should raise error for invalid value" do
-       config['decorate_events'] = 'avoid'
-       expect { subject.register }.to raise_error LogStash::ConfigurationError, /Something is wrong with your configuration./
+   context 'when kerberos auth is not used' do
+     context "when schema_registry_validation is set to auto" do
+       let(:config) { base_config.merge({'schema_registry_validation' => 'auto'})}
+       it 'performs verification' do
+         expect(subject).to receive(:check_for_schema_registry_connectivity_and_subjects)
+         expect { subject.register }.not_to raise_error
+       end
      end
 
-     it "should map old true boolean value to :record_props mode" do
-       config['decorate_events'] = "true"
-       subject.register
-       expect(subject.metadata_mode).to include(:record_props)
+     context "when schema_registry_validation is left at its default" do
+       let(:config) { base_config }
+       it 'performs verification' do
+         expect(subject).to receive(:check_for_schema_registry_connectivity_and_subjects)
+         expect { subject.register }.not_to raise_error
+       end
      end
+
+     context "when schema_registry_validation is set to skip" do
+       let(:config) { base_config.merge({'schema_registry_validation' => 'skip'})}
+       it 'should skip verification' do
+         expect(subject).not_to receive(:check_for_schema_registry_connectivity_and_subjects)
+         expect { subject.register }.not_to raise_error
+       end
+     end
+   end
+ end
+
+ context "decorate_events" do
+   let(:config) { { 'decorate_events' => 'extended'} }
+
+   it "should raise error for invalid value" do
+     config['decorate_events'] = 'avoid'
+     expect { subject.register }.to raise_error LogStash::ConfigurationError, /Something is wrong with your configuration./
+   end
+
+   it "should map old true boolean value to :record_props mode" do
+     config['decorate_events'] = "true"
+     subject.register
+     expect(subject.metadata_mode).to include(:record_props)
    end
  end
 
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-kafka
  version: !ruby/object:Gem::Version
-   version: 10.7.6
+   version: 10.8.2
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-05-31 00:00:00.000000000 Z
+ date: 2021-11-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
@@ -235,6 +235,8 @@ files:
  - lib/logstash/plugin_mixins/kafka_support.rb
  - logstash-integration-kafka.gemspec
  - spec/check_docs_spec.rb
+ - spec/fixtures/jaas.config
+ - spec/fixtures/pwd
  - spec/fixtures/trust-store_stub.jks
  - spec/integration/inputs/kafka_spec.rb
  - spec/integration/outputs/kafka_spec.rb
@@ -278,13 +280,14 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
    version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.6.13
+ rubygems_version: 3.1.6
  signing_key:
  specification_version: 4
  summary: Integration with Kafka - input and output plugins
  test_files:
  - spec/check_docs_spec.rb
+ - spec/fixtures/jaas.config
+ - spec/fixtures/pwd
  - spec/fixtures/trust-store_stub.jks
  - spec/integration/inputs/kafka_spec.rb
  - spec/integration/outputs/kafka_spec.rb