logstash-integration-kafka 10.5.2-java → 10.7.2-java

Files changed (32)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +19 -0
  3. data/README.md +1 -1
  4. data/docs/input-kafka.asciidoc +46 -1
  5. data/lib/logstash-integration-kafka_jars.rb +13 -4
  6. data/lib/logstash/inputs/kafka.rb +33 -29
  7. data/lib/logstash/outputs/kafka.rb +3 -32
  8. data/lib/logstash/plugin_mixins/common.rb +93 -0
  9. data/lib/logstash/plugin_mixins/kafka_support.rb +29 -0
  10. data/logstash-integration-kafka.gemspec +2 -1
  11. data/spec/fixtures/trust-store_stub.jks +0 -0
  12. data/spec/integration/inputs/kafka_spec.rb +186 -11
  13. data/spec/unit/inputs/avro_schema_fixture_payment.asvc +8 -0
  14. data/spec/unit/inputs/kafka_spec.rb +21 -5
  15. data/spec/unit/outputs/kafka_spec.rb +18 -7
  16. data/vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar +0 -0
  17. data/vendor/jar-dependencies/io/confluent/common-config/5.5.1/common-config-5.5.1.jar +0 -0
  18. data/vendor/jar-dependencies/io/confluent/common-utils/5.5.1/common-utils-5.5.1.jar +0 -0
  19. data/vendor/jar-dependencies/io/confluent/kafka-avro-serializer/5.5.1/kafka-avro-serializer-5.5.1.jar +0 -0
  20. data/vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/5.5.1/kafka-schema-registry-client-5.5.1.jar +0 -0
  21. data/vendor/jar-dependencies/io/confluent/kafka-schema-serializer/5.5.1/kafka-schema-serializer-5.5.1.jar +0 -0
  22. data/vendor/jar-dependencies/javax/ws/rs/javax.ws.rs-api/2.1.1/javax.ws.rs-api-2.1.1.jar +0 -0
  23. data/vendor/jar-dependencies/org/apache/avro/avro/1.9.2/avro-1.9.2.jar +0 -0
  24. data/vendor/jar-dependencies/org/apache/kafka/kafka-clients/{2.4.1/kafka-clients-2.4.1.jar → 2.5.1/kafka-clients-2.5.1.jar} +0 -0
  25. data/vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.5.1/kafka_2.12-2.5.1.jar +0 -0
  26. data/vendor/jar-dependencies/org/glassfish/jersey/core/jersey-common/2.33/jersey-common-2.33.jar +0 -0
  27. data/vendor/jar-dependencies/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar +0 -0
  28. data/vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.30/slf4j-api-1.7.30.jar +0 -0
  29. metadata +41 -6
  30. data/vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.3-1/zstd-jni-1.4.3-1.jar +0 -0
  31. data/vendor/jar-dependencies/org/lz4/lz4-java/1.6.0/lz4-java-1.6.0.jar +0 -0
  32. data/vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 877f38709d9280199d61c2c7755461223a9695bd966d8d7ad77676b40c071102
- data.tar.gz: 34d9f91fc7ae51cc4419ef19ada731b8a122006338371daf0d5c6b23c692a46c
+ metadata.gz: 3ca014c7463762ed6c2f4be1a9c0dce13356980f7ddf244294dd02837862d54f
+ data.tar.gz: 1d68b07fa127636a6bba867ffa0fff6beefd5b1d0a7ba1419eedcea224a97fe5
  SHA512:
- metadata.gz: e1587ee36b4e8038c17c8ed9182d7e25e040447df017917bc4b8dc69f1ffe0c3ab9c624bccabe6ba4439d9ac7ffa38dfab417ac93ee46e0aef6308a303ba5dee
- data.tar.gz: 45c48cf5e734e86e9454b4084a83289d89ae51613d100c8b02587d7be4ce9914cb7956b0a2fa51bfd1dea2d9929081becf9ebc1c5f90cf4e03d6076bf3ed8f63
+ metadata.gz: 97c32c1cdf91c205a61fefa6ec23544b45cbc66205418df85b2e7bb0c724a2a5672f658b947c086f38808b445d99bc053ee0651eae6b3da0e5cee6065f519c59
+ data.tar.gz: 6f67718001a257aafa339dde326192eef009a1bd74a1c1d4e8b37ca277362f95da3a56100819d9ec972a782e0631a5e4c91013d59efab720ea8e8c39b9d100a4
data/CHANGELOG.md CHANGED
@@ -1,3 +1,22 @@
+ ## 10.7.2
+ - Update Jersey dependency to version 2.33 [#75](https://github.com/logstash-plugins/logstash-integration-kafka/pull/75)
+
+ ## 10.7.1
+ - Fix: dropped usage of SHUTDOWN event deprecated since Logstash 5.0 [#71](https://github.com/logstash-plugins/logstash-integration-kafka/issue/71)
+
+ ## 10.7.0
+ - Switched use from Faraday to Manticore as HTTP client library to access Schema Registry service
+   to fix issue [#63](https://github.com/logstash-plugins/logstash-integration-kafka/pull/63)
+
+ ## 10.6.0
+ - Added functionality to Kafka input to use Avro deserializer in retrieving data from Kafka. The schema is retrieved
+   from an instance of Confluent's Schema Registry service [#51](https://github.com/logstash-plugins/logstash-integration-kafka/pull/51)
+
+ ## 10.5.3
+ - Fix: set (optional) truststore when endpoint id check disabled [#60](https://github.com/logstash-plugins/logstash-integration-kafka/pull/60).
+   Since **10.1.0** disabling server host-name verification (`ssl_endpoint_identification_algorithm => ""`) did not allow
+   the (output) plugin to set `ssl_truststore_location => "..."`.
+
  ## 10.5.2
  - Docs: explain group_id in case of multiple inputs [#59](https://github.com/logstash-plugins/logstash-integration-kafka/pull/59)
data/README.md CHANGED
@@ -1,6 +1,6 @@
  # Logstash Plugin

- [![Travis Build Status](https://travis-ci.org/logstash-plugins/logstash-integration-kafka.svg)](https://travis-ci.org/logstash-plugins/logstash-integration-kafka)
+ [![Travis Build Status](https://travis-ci.com/logstash-plugins/logstash-integration-kafka.svg)](https://travis-ci.com/logstash-plugins/logstash-integration-kafka)

  This is a plugin for [Logstash](https://github.com/elastic/logstash).

data/docs/input-kafka.asciidoc CHANGED
@@ -124,6 +124,10 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
  | <<plugins-{type}s-{plugin}-sasl_jaas_config>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-sasl_kerberos_service_name>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-sasl_mechanism>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-schema_registry_key>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-schema_registry_proxy>> |<<uri,uri>>|No
+ | <<plugins-{type}s-{plugin}-schema_registry_secret>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-schema_registry_url>> |<<uri,uri>>|No
  | <<plugins-{type}s-{plugin}-security_protocol>> |<<string,string>>, one of `["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"]`|No
  | <<plugins-{type}s-{plugin}-send_buffer_bytes>> |<<number,number>>|No
  | <<plugins-{type}s-{plugin}-session_timeout_ms>> |<<number,number>>|No
@@ -528,6 +532,44 @@ http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used fo
  This may be any mechanism for which a security provider is available.
  GSSAPI is the default mechanism.

+ [id="plugins-{type}s-{plugin}-schema_registry_key"]
+ ===== `schema_registry_key`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ Set the username for basic authorization to access remote Schema Registry.
+
+ [id="plugins-{type}s-{plugin}-schema_registry_proxy"]
+ ===== `schema_registry_proxy`
+
+ * Value type is <<uri,uri>>
+ * There is no default value for this setting.
+
+ Set the address of a forward HTTP proxy. An empty string is treated as if proxy was not set.
+
+ [id="plugins-{type}s-{plugin}-schema_registry_secret"]
+ ===== `schema_registry_secret`
+
+ * Value type is <<string,string>>
+ * There is no default value for this setting.
+
+ Set the password for basic authorization to access remote Schema Registry.
+
+ [id="plugins-{type}s-{plugin}-schema_registry_url"]
+ ===== `schema_registry_url`
+
+ * Value type is <<uri,uri>>
+
+ The URI that points to an instance of the
+ https://docs.confluent.io/current/schema-registry/index.html[Schema Registry] service,
+ used to manage Avro schemas. Be sure that the Avro schemas for deserializing the data from
+ the specified topics have been uploaded to the Schema Registry service.
+ The schemas must follow a naming convention with the pattern <topic name>-value.
+
+ Use either the Schema Registry config option or the
+ <<plugins-{type}s-{plugin}-value_deserializer_class>> config option, but not both.
+
  [id="plugins-{type}s-{plugin}-security_protocol"]
  ===== `security_protocol`

@@ -641,7 +683,10 @@ The topics configuration will be ignored when using this configuration.
  * Value type is <<string,string>>
  * Default value is `"org.apache.kafka.common.serialization.StringDeserializer"`

- Java Class used to deserialize the record's value
+ Java Class used to deserialize the record's value.
+ A custom value deserializer can be used only if you are not using a Schema Registry.
+ Use either the value_deserializer_class config option or the
+ <<plugins-{type}s-{plugin}-schema_registry_url>> config option, but not both.

  [id="plugins-{type}s-{plugin}-common-options"]
  include::{include_path}/{type}.asciidoc[]
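Editor's sketch (not part of the gem diff): a minimal example of how the new schema registry options fit together, written in the same hash-config style the specs added in this release use. It assumes a JRuby/Logstash environment and a local registry at http://localhost:8081; the URL, topic, and group are placeholders, and a `temperature_stream-value` subject must already exist on the registry or `register` raises a ConfigurationError.

require "logstash/inputs/kafka"

# Placeholder values throughout; register validates registry connectivity
# and the presence of a "<topic>-value" subject for every listed topic.
config = {
  'schema_registry_url' => 'http://localhost:8081',
  'topics'              => ['temperature_stream'],
  'group_id'            => 'schema_registry_demo'
}

kafka_input = LogStash::Inputs::Kafka.new(config)
kafka_input.register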
data/lib/logstash-integration-kafka_jars.rb CHANGED
@@ -1,8 +1,17 @@
  # AUTOGENERATED BY THE GRADLE SCRIPT. DO NOT EDIT.

  require 'jar_dependencies'
- require_jar('org.apache.kafka', 'kafka-clients', '2.4.1')
- require_jar('com.github.luben', 'zstd-jni', '1.4.3-1')
- require_jar('org.slf4j', 'slf4j-api', '1.7.28')
- require_jar('org.lz4', 'lz4-java', '1.6.0')
+ require_jar('io.confluent', 'kafka-avro-serializer', '5.5.1')
+ require_jar('io.confluent', 'kafka-schema-serializer', '5.5.1')
+ require_jar('io.confluent', 'common-config', '5.5.1')
+ require_jar('org.apache.avro', 'avro', '1.9.2')
+ require_jar('io.confluent', 'kafka-schema-registry-client', '5.5.1')
+ require_jar('org.apache.kafka', 'kafka_2.12', '2.5.1')
+ require_jar('io.confluent', 'common-utils', '5.5.1')
+ require_jar('javax.ws.rs', 'javax.ws.rs-api', '2.1.1')
+ require_jar('org.glassfish.jersey.core', 'jersey-common', '2.33')
+ require_jar('org.apache.kafka', 'kafka-clients', '2.5.1')
+ require_jar('com.github.luben', 'zstd-jni', '1.4.4-7')
+ require_jar('org.slf4j', 'slf4j-api', '1.7.30')
+ require_jar('org.lz4', 'lz4-java', '1.7.1')
  require_jar('org.xerial.snappy', 'snappy-java', '1.1.7.3')
data/lib/logstash/inputs/kafka.rb CHANGED
@@ -3,6 +3,11 @@ require 'logstash/inputs/base'
  require 'stud/interval'
  require 'java'
  require 'logstash-integration-kafka_jars.rb'
+ require 'logstash/plugin_mixins/kafka_support'
+ require 'manticore'
+ require "json"
+ require "logstash/json"
+ require_relative '../plugin_mixins/common'

  # This input will read events from a Kafka topic. It uses the 0.10 version of
  # the consumer API provided by Kafka to read messages from the broker.
@@ -48,6 +53,12 @@ require 'logstash-integration-kafka_jars.rb'
  # Kafka consumer configuration: http://kafka.apache.org/documentation.html#consumerconfigs
  #
  class LogStash::Inputs::Kafka < LogStash::Inputs::Base
+
+ DEFAULT_DESERIALIZER_CLASS = "org.apache.kafka.common.serialization.StringDeserializer"
+
+ include LogStash::PluginMixins::KafkaSupport
+ include ::LogStash::PluginMixins::KafkaAvroSchemaRegistry
+
  config_name 'kafka'

  default :codec, 'plain'
@@ -163,7 +174,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  # and a rebalance operation is triggered for the group identified by `group_id`
  config :session_timeout_ms, :validate => :number, :default => 10_000 # (10s) Kafka default
  # Java Class used to deserialize the record's value
- config :value_deserializer_class, :validate => :string, :default => "org.apache.kafka.common.serialization.StringDeserializer"
+ config :value_deserializer_class, :validate => :string, :default => DEFAULT_DESERIALIZER_CLASS
  # A list of topics to subscribe to, defaults to ["logstash"].
  config :topics, :validate => :array, :default => ["logstash"]
  # A topic regex pattern to subscribe to.
@@ -232,11 +243,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  # `timestamp`: The timestamp of this message
  config :decorate_events, :validate => :boolean, :default => false

-
  public
  def register
  @runner_threads = []
- end # def register
+ check_schema_registry_parameters
+ end

  public
  def run(logstash_queue)
@@ -274,6 +285,13 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  for record in records do
  codec_instance.decode(record.value.to_s) do |event|
  decorate(event)
+ if schema_registry_url
+ json = LogStash::Json.load(record.value.to_s)
+ json.each do |k, v|
+ event.set(k, v)
+ end
+ event.remove("message")
+ end
  if @decorate_events
  event.set("[@metadata][kafka][topic]", record.topic)
  event.set("[@metadata][kafka][consumer_group]", @group_id)
@@ -333,7 +351,18 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  props.put(kafka::CLIENT_RACK_CONFIG, client_rack) unless client_rack.nil?

  props.put("security.protocol", security_protocol) unless security_protocol.nil?
-
+ if schema_registry_url
+ props.put(kafka::VALUE_DESERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroDeserializer.java_class)
+ serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
+ props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, schema_registry_url.to_s)
+ if schema_registry_proxy && !schema_registry_proxy.empty?
+ props.put(serdes_config::PROXY_HOST, @schema_registry_proxy_host)
+ props.put(serdes_config::PROXY_PORT, @schema_registry_proxy_port)
+ end
+ if schema_registry_key && !schema_registry_key.empty?
+ props.put(serdes_config::USER_INFO_CONFIG, schema_registry_key + ":" + schema_registry_secret.value)
+ end
+ end
  if security_protocol == "SSL"
  set_trustore_keystore_config(props)
  elsif security_protocol == "SASL_PLAINTEXT"
@@ -370,29 +399,4 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  end
  end

- def set_trustore_keystore_config(props)
- props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
- props.put("ssl.truststore.location", ssl_truststore_location) unless ssl_truststore_location.nil?
- props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
-
- # Client auth stuff
- props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
- props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
- props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
- props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
- props.put("ssl.endpoint.identification.algorithm", ssl_endpoint_identification_algorithm) unless ssl_endpoint_identification_algorithm.nil?
- end
-
- def set_sasl_config(props)
- java.lang.System.setProperty("java.security.auth.login.config", jaas_path) unless jaas_path.nil?
- java.lang.System.setProperty("java.security.krb5.conf", kerberos_config) unless kerberos_config.nil?
-
- props.put("sasl.mechanism", sasl_mechanism)
- if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
- raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
- end
-
- props.put("sasl.kerberos.service.name", sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
- props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
- end
  end #class LogStash::Inputs::Kafka
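Editor's sketch (not part of the gem diff): for readers less familiar with the Confluent serde constants used in the consumer-properties branch above, they resolve to roughly the following settings, shown as a plain Ruby hash. All values are placeholders, and the exact property keys are my reading of the Confluent 5.5.x constants, so treat them as assumptions rather than something stated by this diff.

# Rough equivalent of the schema-registry branch above, with placeholder values;
# the string keys mirror what the Java constants are believed to resolve to.
schema_registry_props = {
  "value.deserializer"   => "io.confluent.kafka.serializers.KafkaAvroDeserializer",
  "schema.registry.url"  => "http://localhost:8081",
  # only when schema_registry_proxy is set:
  "proxy.host"           => "my-proxy.local",
  "proxy.port"           => 3128,
  # only when schema_registry_key / schema_registry_secret are set:
  "basic.auth.user.info" => "registry-key:registry-secret"
}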
data/lib/logstash/outputs/kafka.rb CHANGED
@@ -2,6 +2,7 @@ require 'logstash/namespace'
  require 'logstash/outputs/base'
  require 'java'
  require 'logstash-integration-kafka_jars.rb'
+ require 'logstash/plugin_mixins/kafka_support'

  # Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on
  # the broker.
@@ -50,6 +51,8 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base

  java_import org.apache.kafka.clients.producer.ProducerRecord

+ include LogStash::PluginMixins::KafkaSupport
+
  declare_threadsafe!
  config_name 'kafka'

@@ -221,7 +224,6 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
  end

  events.each do |event|
- break if event == LogStash::SHUTDOWN
  @codec.encode(event)
  end

@@ -389,35 +391,4 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
  end
  end

- def set_trustore_keystore_config(props)
- unless ssl_endpoint_identification_algorithm.to_s.strip.empty?
- if ssl_truststore_location.nil?
- raise LogStash::ConfigurationError, "ssl_truststore_location must be set when SSL is enabled"
- end
- props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
- props.put("ssl.truststore.location", ssl_truststore_location)
- props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
- end
-
- # Client auth stuff
- props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
- props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
- props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
- props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
- props.put("ssl.endpoint.identification.algorithm", ssl_endpoint_identification_algorithm) unless ssl_endpoint_identification_algorithm.nil?
- end
-
- def set_sasl_config(props)
- java.lang.System.setProperty("java.security.auth.login.config", jaas_path) unless jaas_path.nil?
- java.lang.System.setProperty("java.security.krb5.conf", kerberos_config) unless kerberos_config.nil?
-
- props.put("sasl.mechanism",sasl_mechanism)
- if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
- raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
- end
-
- props.put("sasl.kerberos.service.name", sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
- props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
- end
-
  end #class LogStash::Outputs::Kafka
data/lib/logstash/plugin_mixins/common.rb ADDED
@@ -0,0 +1,93 @@
+ module LogStash
+ module PluginMixins
+ module KafkaAvroSchemaRegistry
+
+ def self.included(base)
+ base.extend(self)
+ base.setup_schema_registry_config
+ end
+
+ def setup_schema_registry_config
+ # Option to set key to access Schema Registry.
+ config :schema_registry_key, :validate => :string
+
+ # Option to set secret to access Schema Registry.
+ config :schema_registry_secret, :validate => :password
+
+ # Option to set the endpoint of the Schema Registry.
+ # This option permit the usage of Avro Kafka deserializer which retrieve the schema of the Avro message from an
+ # instance of schema registry. If this option has value `value_deserializer_class` nor `topics_pattern` could be valued
+ config :schema_registry_url, :validate => :uri
+
+ # Option to set the proxy of the Schema Registry.
+ # This option permits to define a proxy to be used to reach the schema registry service instance.
+ config :schema_registry_proxy, :validate => :uri
+ end
+
+ def check_schema_registry_parameters
+ if @schema_registry_url
+ check_for_schema_registry_conflicts
+ @schema_registry_proxy_host, @schema_registry_proxy_port = split_proxy_into_host_and_port(schema_registry_proxy)
+ check_for_key_and_secret
+ check_for_schema_registry_connectivity_and_subjects
+ end
+ end
+
+ private
+ def check_for_schema_registry_conflicts
+ if @value_deserializer_class != LogStash::Inputs::Kafka::DEFAULT_DESERIALIZER_CLASS
+ raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of value_deserializer_class'
+ end
+ if @topics_pattern && !@topics_pattern.empty?
+ raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of topics_pattern'
+ end
+ end
+
+ private
+ def check_for_schema_registry_connectivity_and_subjects
+ options = {}
+ if schema_registry_proxy && !schema_registry_proxy.empty?
+ options[:proxy] = schema_registry_proxy.to_s
+ end
+ if schema_registry_key and !schema_registry_key.empty?
+ options[:auth] = {:user => schema_registry_key, :password => schema_registry_secret.value}
+ end
+ client = Manticore::Client.new(options)
+
+ begin
+ response = client.get(@schema_registry_url.to_s + '/subjects').body
+ rescue Manticore::ManticoreException => e
+ raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
+ end
+ registered_subjects = JSON.parse response
+ expected_subjects = @topics.map { |t| "#{t}-value"}
+ if (expected_subjects & registered_subjects).size != expected_subjects.size
+ undefined_topic_subjects = expected_subjects - registered_subjects
+ raise LogStash::ConfigurationError, "The schema registry does not contain definitions for required topic subjects: #{undefined_topic_subjects}"
+ end
+ end
+
+ def split_proxy_into_host_and_port(proxy_uri)
+ return nil unless proxy_uri && !proxy_uri.empty?
+
+ port = proxy_uri.port
+
+ host_spec = ""
+ host_spec << proxy_uri.scheme || "http"
+ host_spec << "://"
+ host_spec << "#{proxy_uri.userinfo}@" if proxy_uri.userinfo
+ host_spec << proxy_uri.host
+
+ [host_spec, port]
+ end
+
+ def check_for_key_and_secret
+ if schema_registry_key and !schema_registry_key.empty?
+ if !schema_registry_secret or schema_registry_secret.value.empty?
+ raise LogStash::ConfigurationError, "Setting `schema_registry_secret` is required when `schema_registry_key` is provided."
+ end
+ end
+ end
+ end
+ end
+ end
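Editor's sketch (not part of the gem diff): the subject check in check_for_schema_registry_connectivity_and_subjects boils down to a set difference between the expected `<topic>-value` subjects and whatever `GET /subjects` returns. A standalone sketch of that logic follows; it uses plain Net::HTTP for brevity (the mixin itself uses Manticore), and the registry URL is a placeholder.

require 'json'
require 'net/http'
require 'uri'

# Returns the "<topic>-value" subjects that are expected but not yet registered.
def missing_subjects(registry_url, topics)
  registered = JSON.parse(Net::HTTP.get(URI("#{registry_url}/subjects")))
  expected   = topics.map { |t| "#{t}-value" }
  expected - registered
end

# missing_subjects("http://localhost:8081", ["temperature_stream"])
# => ["temperature_stream-value"] until the schema has been uploaded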
data/lib/logstash/plugin_mixins/kafka_support.rb ADDED
@@ -0,0 +1,29 @@
+ module LogStash module PluginMixins module KafkaSupport
+
+ def set_trustore_keystore_config(props)
+ props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
+ props.put("ssl.truststore.location", ssl_truststore_location) unless ssl_truststore_location.nil?
+ props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
+
+ # Client auth stuff
+ props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
+ props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
+ props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
+ props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
+ props.put("ssl.endpoint.identification.algorithm", ssl_endpoint_identification_algorithm) unless ssl_endpoint_identification_algorithm.nil?
+ end
+
+ def set_sasl_config(props)
+ java.lang.System.setProperty("java.security.auth.login.config", jaas_path) unless jaas_path.nil?
+ java.lang.System.setProperty("java.security.krb5.conf", kerberos_config) unless kerberos_config.nil?
+
+ props.put("sasl.mechanism", sasl_mechanism)
+ if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
+ raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
+ end
+
+ props.put("sasl.kerberos.service.name", sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
+ props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
+ end
+
+ end end end
data/logstash-integration-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-integration-kafka'
- s.version = '10.5.2'
+ s.version = '10.7.2'
  s.licenses = ['Apache-2.0']
  s.summary = "Integration with Kafka - input and output plugins"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
@@ -46,6 +46,7 @@ Gem::Specification.new do |s|
  s.add_runtime_dependency 'logstash-codec-json'
  s.add_runtime_dependency 'logstash-codec-plain'
  s.add_runtime_dependency 'stud', '>= 0.0.22', '< 0.1.0'
+ s.add_runtime_dependency "manticore", '>= 0.5.4', '< 1.0.0'

  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency 'rspec-wait'
data/spec/fixtures/trust-store_stub.jks ADDED
File without changes
data/spec/integration/inputs/kafka_spec.rb CHANGED
@@ -2,6 +2,9 @@
  require "logstash/devutils/rspec/spec_helper"
  require "logstash/inputs/kafka"
  require "rspec/wait"
+ require "stud/try"
+ require "manticore"
+ require "json"

  # Please run kafka_test_setup.sh prior to executing this integration test.
  describe "inputs/kafka", :integration => true do
@@ -120,20 +123,192 @@ describe "inputs/kafka", :integration => true do
  end
  end
  end
+ end
+
+ private
+
+ def consume_messages(config, queue: Queue.new, timeout:, event_count:)
+ kafka_input = LogStash::Inputs::Kafka.new(config)
+ t = Thread.new { kafka_input.run(queue) }
+ begin
+ t.run
+ wait(timeout).for { queue.length }.to eq(event_count) unless timeout.eql?(false)
+ block_given? ? yield(queue, kafka_input) : queue
+ ensure
+ t.kill
+ t.join(30_000)
+ end
+ end
+
+
+ describe "schema registry connection options" do
+ context "remote endpoint validation" do
+ it "should fail if not reachable" do
+ config = {'schema_registry_url' => 'http://localnothost:8081'}
+ kafka_input = LogStash::Inputs::Kafka.new(config)
+ expect { kafka_input.register }.to raise_error LogStash::ConfigurationError, /Schema registry service doesn't respond.*/
+ end
+
+ it "should fail if any topic is not matched by a subject on the schema registry" do
+ config = {
+ 'schema_registry_url' => 'http://localhost:8081',
+ 'topics' => ['temperature_stream']
+ }
+
+ kafka_input = LogStash::Inputs::Kafka.new(config)
+ expect { kafka_input.register }.to raise_error LogStash::ConfigurationError, /The schema registry does not contain definitions for required topic subjects: \["temperature_stream-value"\]/
+ end
+
+ context "register with subject present" do
+ SUBJECT_NAME = "temperature_stream-value"
+
+ before(:each) do
+ response = save_avro_schema_to_schema_registry(File.join(Dir.pwd, "spec", "unit", "inputs", "avro_schema_fixture_payment.asvc"), SUBJECT_NAME)
+ expect( response.code ).to be(200)
+ end

- private
+ after(:each) do
+ schema_registry_client = Manticore::Client.new
+ delete_remote_schema(schema_registry_client, SUBJECT_NAME)
+ end

- def consume_messages(config, queue: Queue.new, timeout:, event_count:)
- kafka_input = LogStash::Inputs::Kafka.new(config)
- t = Thread.new { kafka_input.run(queue) }
- begin
- t.run
- wait(timeout).for { queue.length }.to eq(event_count) unless timeout.eql?(false)
- block_given? ? yield(queue, kafka_input) : queue
- ensure
- t.kill
- t.join(30_000)
+ it "should correctly complete registration phase" do
+ config = {
+ 'schema_registry_url' => 'http://localhost:8081',
+ 'topics' => ['temperature_stream']
+ }
+ kafka_input = LogStash::Inputs::Kafka.new(config)
+ kafka_input.register
+ end
  end
  end
+ end

+ def save_avro_schema_to_schema_registry(schema_file, subject_name)
+ raw_schema = File.readlines(schema_file).map(&:chomp).join
+ raw_schema_quoted = raw_schema.gsub('"', '\"')
+ response = Manticore.post("http://localhost:8081/subjects/#{subject_name}/versions",
+ body: '{"schema": "' + raw_schema_quoted + '"}',
+ headers: {"Content-Type" => "application/vnd.schemaregistry.v1+json"})
+ response
  end
+
+ def delete_remote_schema(schema_registry_client, subject_name)
+ expect(schema_registry_client.delete("http://localhost:8081/subjects/#{subject_name}").code ).to be(200)
+ expect(schema_registry_client.delete("http://localhost:8081/subjects/#{subject_name}?permanent=true").code ).to be(200)
+ end
+
+ # AdminClientConfig = org.alpache.kafka.clients.admin.AdminClientConfig
+
+ describe "Schema registry API", :integration => true do
+
+ let(:schema_registry) { Manticore::Client.new }
+
+ context 'listing subject on clean instance' do
+ it "should return an empty set" do
+ subjects = JSON.parse schema_registry.get('http://localhost:8081/subjects').body
+ expect( subjects ).to be_empty
+ end
+ end
+
+ context 'send a schema definition' do
+ it "save the definition" do
+ response = save_avro_schema_to_schema_registry(File.join(Dir.pwd, "spec", "unit", "inputs", "avro_schema_fixture_payment.asvc"), "schema_test_1")
+ expect( response.code ).to be(200)
+ delete_remote_schema(schema_registry, "schema_test_1")
+ end
+
+ it "delete the schema just added" do
+ response = save_avro_schema_to_schema_registry(File.join(Dir.pwd, "spec", "unit", "inputs", "avro_schema_fixture_payment.asvc"), "schema_test_1")
+ expect( response.code ).to be(200)
+
+ expect( schema_registry.delete('http://localhost:8081/subjects/schema_test_1?permanent=false').code ).to be(200)
+ sleep(1)
+ subjects = JSON.parse schema_registry.get('http://localhost:8081/subjects').body
+ expect( subjects ).to be_empty
+ end
+ end
+
+ context 'use the schema to serialize' do
+ after(:each) do
+ expect( schema_registry.delete('http://localhost:8081/subjects/topic_avro-value').code ).to be(200)
+ sleep 1
+ expect( schema_registry.delete('http://localhost:8081/subjects/topic_avro-value?permanent=true').code ).to be(200)
+
+ Stud.try(3.times, [StandardError, RSpec::Expectations::ExpectationNotMetError]) do
+ wait(10).for do
+ subjects = JSON.parse schema_registry.get('http://localhost:8081/subjects').body
+ subjects.empty?
+ end.to be_truthy
+ end
+ end
+
+ let(:group_id_1) {rand(36**8).to_s(36)}
+
+ let(:avro_topic_name) { "topic_avro" }
+
+ let(:plain_config) do
+ { 'schema_registry_url' => 'http://localhost:8081',
+ 'topics' => [avro_topic_name],
+ 'codec' => 'plain',
+ 'group_id' => group_id_1,
+ 'auto_offset_reset' => 'earliest' }
+ end
+
+ def delete_topic_if_exists(topic_name)
+ props = java.util.Properties.new
+ props.put(Java::org.apache.kafka.clients.admin.AdminClientConfig::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
+
+ admin_client = org.apache.kafka.clients.admin.AdminClient.create(props)
+ topics_list = admin_client.listTopics().names().get()
+ if topics_list.contains(topic_name)
+ result = admin_client.deleteTopics([topic_name])
+ result.values.get(topic_name).get()
+ end
+ end
+
+ def write_some_data_to(topic_name)
+ props = java.util.Properties.new
+ config = org.apache.kafka.clients.producer.ProducerConfig
+
+ serdes_config = Java::io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig
+ props.put(serdes_config::SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081")
+
+ props.put(config::BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
+ props.put(config::KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.java_class)
+ props.put(config::VALUE_SERIALIZER_CLASS_CONFIG, Java::io.confluent.kafka.serializers.KafkaAvroSerializer.java_class)
+
+ parser = org.apache.avro.Schema::Parser.new()
+ user_schema = '''{"type":"record",
+ "name":"myrecord",
+ "fields":[
+ {"name":"str_field", "type": "string"},
+ {"name":"map_field", "type": {"type": "map", "values": "string"}}
+ ]}'''
+ schema = parser.parse(user_schema)
+ avro_record = org.apache.avro.generic.GenericData::Record.new(schema)
+ avro_record.put("str_field", "value1")
+ avro_record.put("map_field", {"inner_field" => "inner value"})
+
+ producer = org.apache.kafka.clients.producer.KafkaProducer.new(props)
+ record = org.apache.kafka.clients.producer.ProducerRecord.new(topic_name, "avro_key", avro_record)
+ producer.send(record)
+ end
+
+ it "stored a new schema using Avro Kafka serdes" do
+ delete_topic_if_exists avro_topic_name
+ write_some_data_to avro_topic_name
+
+ subjects = JSON.parse schema_registry.get('http://localhost:8081/subjects').body
+ expect( subjects ).to contain_exactly("topic_avro-value")
+
+ num_events = 1
+ queue = consume_messages(plain_config, timeout: 30, event_count: num_events)
+ expect(queue.length).to eq(num_events)
+ elem = queue.pop
+ expect( elem.to_hash).not_to include("message")
+ expect( elem.get("str_field") ).to eq("value1")
+ expect( elem.get("map_field")["inner_field"] ).to eq("inner value")
+ end
+ end
+ end
data/spec/unit/inputs/avro_schema_fixture_payment.asvc ADDED
@@ -0,0 +1,8 @@
+ {"namespace": "io.confluent.examples.clients.basicavro",
+ "type": "record",
+ "name": "Payment",
+ "fields": [
+ {"name": "id", "type": "string"},
+ {"name": "amount", "type": "double"}
+ ]
+ }
data/spec/unit/inputs/kafka_spec.rb CHANGED
@@ -37,8 +37,24 @@ describe LogStash::Inputs::Kafka do
  expect { subject.register }.to_not raise_error
  end

+ context "register parameter verification" do
+ let(:config) do
+ { 'schema_registry_url' => 'http://localhost:8081', 'topics' => ['logstash'], 'consumer_threads' => 4 }
+ end
+
+ it "schema_registry_url conflict with value_deserializer_class should fail" do
+ config['value_deserializer_class'] = 'my.fantasy.Deserializer'
+ expect { subject.register }.to raise_error LogStash::ConfigurationError, /Option schema_registry_url prohibit the customization of value_deserializer_class/
+ end
+
+ it "schema_registry_url conflict with topics_pattern should fail" do
+ config['topics_pattern'] = 'topic_.*'
+ expect { subject.register }.to raise_error LogStash::ConfigurationError, /Option schema_registry_url prohibit the customization of topics_pattern/
+ end
+ end
+
  context 'with client_rack' do
- let(:config) { super.merge('client_rack' => 'EU-R1') }
+ let(:config) { super().merge('client_rack' => 'EU-R1') }

  it "sets broker rack parameter" do
  expect(org.apache.kafka.clients.consumer.KafkaConsumer).
@@ -50,7 +66,7 @@ describe LogStash::Inputs::Kafka do
  end

  context 'string integer config' do
- let(:config) { super.merge('session_timeout_ms' => '25000', 'max_poll_interval_ms' => '345000') }
+ let(:config) { super().merge('session_timeout_ms' => '25000', 'max_poll_interval_ms' => '345000') }

  it "sets integer values" do
  expect(org.apache.kafka.clients.consumer.KafkaConsumer).
@@ -62,7 +78,7 @@ describe LogStash::Inputs::Kafka do
  end

  context 'integer config' do
- let(:config) { super.merge('session_timeout_ms' => 25200, 'max_poll_interval_ms' => 123_000) }
+ let(:config) { super().merge('session_timeout_ms' => 25200, 'max_poll_interval_ms' => 123_000) }

  it "sets integer values" do
  expect(org.apache.kafka.clients.consumer.KafkaConsumer).
@@ -74,7 +90,7 @@ describe LogStash::Inputs::Kafka do
  end

  context 'string boolean config' do
- let(:config) { super.merge('enable_auto_commit' => 'false', 'check_crcs' => 'true') }
+ let(:config) { super().merge('enable_auto_commit' => 'false', 'check_crcs' => 'true') }

  it "sets parameters" do
  expect(org.apache.kafka.clients.consumer.KafkaConsumer).
@@ -87,7 +103,7 @@ describe LogStash::Inputs::Kafka do
  end

  context 'boolean config' do
- let(:config) { super.merge('enable_auto_commit' => true, 'check_crcs' => false) }
+ let(:config) { super().merge('enable_auto_commit' => true, 'check_crcs' => false) }

  it "sets parameters" do
  expect(org.apache.kafka.clients.consumer.KafkaConsumer).
data/spec/unit/outputs/kafka_spec.rb CHANGED
@@ -50,9 +50,10 @@ describe "outputs/kafka" do
  kafka.multi_receive([event])
  end

- it 'should raise config error when truststore location is not set and ssl is enabled' do
+ it 'should not raise config error when truststore location is not set and ssl is enabled' do
  kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("security_protocol" => "SSL"))
- expect { kafka.register }.to raise_error(LogStash::ConfigurationError, /ssl_truststore_location must be set when SSL is enabled/)
+ expect(org.apache.kafka.clients.producer.KafkaProducer).to receive(:new)
+ expect { kafka.register }.to_not raise_error
  end
  end

@@ -225,21 +226,31 @@ describe "outputs/kafka" do
  context 'when ssl endpoint identification disabled' do

  let(:config) do
- simple_kafka_config.merge('ssl_endpoint_identification_algorithm' => '', 'security_protocol' => 'SSL')
+ simple_kafka_config.merge(
+ 'security_protocol' => 'SSL',
+ 'ssl_endpoint_identification_algorithm' => '',
+ 'ssl_truststore_location' => truststore_path,
+ )
+ end
+
+ let(:truststore_path) do
+ File.join(File.dirname(__FILE__), '../../fixtures/trust-store_stub.jks')
  end

  subject { LogStash::Outputs::Kafka.new(config) }

- it 'does not configure truststore' do
+ it 'sets empty ssl.endpoint.identification.algorithm' do
  expect(org.apache.kafka.clients.producer.KafkaProducer).
- to receive(:new).with(hash_excluding('ssl.truststore.location' => anything))
+ to receive(:new).with(hash_including('ssl.endpoint.identification.algorithm' => ''))
  subject.register
  end

- it 'sets empty ssl.endpoint.identification.algorithm' do
+ it 'configures truststore' do
  expect(org.apache.kafka.clients.producer.KafkaProducer).
- to receive(:new).with(hash_including('ssl.endpoint.identification.algorithm' => ''))
+ to receive(:new).with(hash_including('ssl.truststore.location' => truststore_path))
  subject.register
  end
+
  end
+
  end
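Editor's sketch (not part of the gem diff): the spec change above is the user-visible effect of the 10.5.3 fix noted in the changelog: with host-name verification disabled, a configured truststore is still applied. A minimal sketch of the relevant output settings follows, in the same hash style as the spec; every value is a placeholder.

# Placeholder settings only; mirrors the spec's simple_kafka_config.merge(...).
output_config = {
  'topic_id'                              => 'logstash',
  'security_protocol'                     => 'SSL',
  'ssl_endpoint_identification_algorithm' => '',                        # disable host-name check
  'ssl_truststore_location'               => '/path/to/truststore.jks'  # honored again since 10.5.3
}
# kafka_output = LogStash::Outputs::Kafka.new(output_config)
# kafka_output.register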
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-kafka
  version: !ruby/object:Gem::Version
- version: 10.5.2
+ version: 10.7.2
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-10-15 00:00:00.000000000 Z
+ date: 2021-03-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -106,6 +106,26 @@ dependencies:
  - - "<"
  - !ruby/object:Gem::Version
  version: 0.1.0
+ - !ruby/object:Gem::Dependency
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: 0.5.4
+ - - "<"
+ - !ruby/object:Gem::Version
+ version: 1.0.0
+ name: manticore
+ prerelease: false
+ type: :runtime
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: 0.5.4
+ - - "<"
+ - !ruby/object:Gem::Version
+ version: 1.0.0
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
@@ -183,15 +203,28 @@ files:
  - lib/logstash-integration-kafka_jars.rb
  - lib/logstash/inputs/kafka.rb
  - lib/logstash/outputs/kafka.rb
+ - lib/logstash/plugin_mixins/common.rb
+ - lib/logstash/plugin_mixins/kafka_support.rb
  - logstash-integration-kafka.gemspec
+ - spec/fixtures/trust-store_stub.jks
  - spec/integration/inputs/kafka_spec.rb
  - spec/integration/outputs/kafka_spec.rb
+ - spec/unit/inputs/avro_schema_fixture_payment.asvc
  - spec/unit/inputs/kafka_spec.rb
  - spec/unit/outputs/kafka_spec.rb
- - vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.3-1/zstd-jni-1.4.3-1.jar
- - vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.4.1/kafka-clients-2.4.1.jar
- - vendor/jar-dependencies/org/lz4/lz4-java/1.6.0/lz4-java-1.6.0.jar
- - vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.28/slf4j-api-1.7.28.jar
+ - vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar
+ - vendor/jar-dependencies/io/confluent/common-config/5.5.1/common-config-5.5.1.jar
+ - vendor/jar-dependencies/io/confluent/common-utils/5.5.1/common-utils-5.5.1.jar
+ - vendor/jar-dependencies/io/confluent/kafka-avro-serializer/5.5.1/kafka-avro-serializer-5.5.1.jar
+ - vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/5.5.1/kafka-schema-registry-client-5.5.1.jar
+ - vendor/jar-dependencies/io/confluent/kafka-schema-serializer/5.5.1/kafka-schema-serializer-5.5.1.jar
+ - vendor/jar-dependencies/javax/ws/rs/javax.ws.rs-api/2.1.1/javax.ws.rs-api-2.1.1.jar
+ - vendor/jar-dependencies/org/apache/avro/avro/1.9.2/avro-1.9.2.jar
+ - vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.5.1/kafka-clients-2.5.1.jar
+ - vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.5.1/kafka_2.12-2.5.1.jar
+ - vendor/jar-dependencies/org/glassfish/jersey/core/jersey-common/2.33/jersey-common-2.33.jar
+ - vendor/jar-dependencies/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar
+ - vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.30/slf4j-api-1.7.30.jar
  - vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar
  homepage: http://www.elastic.co/guide/en/logstash/current/index.html
  licenses:
@@ -222,7 +255,9 @@ signing_key:
  specification_version: 4
  summary: Integration with Kafka - input and output plugins
  test_files:
+ - spec/fixtures/trust-store_stub.jks
  - spec/integration/inputs/kafka_spec.rb
  - spec/integration/outputs/kafka_spec.rb
+ - spec/unit/inputs/avro_schema_fixture_payment.asvc
  - spec/unit/inputs/kafka_spec.rb
  - spec/unit/outputs/kafka_spec.rb