logstash-integration-kafka 10.9.0-java → 10.12.1-java

Files changed (35)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +16 -2
  3. data/DEVELOPER.md +1 -1
  4. data/docs/index.asciidoc +1 -1
  5. data/docs/input-kafka.asciidoc +1 -1
  6. data/docs/output-kafka.asciidoc +13 -4
  7. data/lib/logstash/inputs/kafka.rb +5 -15
  8. data/lib/logstash/outputs/kafka.rb +4 -11
  9. data/lib/logstash/plugin_mixins/kafka/avro_schema_registry.rb +108 -0
  10. data/lib/logstash/plugin_mixins/kafka/common.rb +47 -0
  11. data/lib/logstash-integration-kafka_jars.rb +11 -11
  12. data/logstash-integration-kafka.gemspec +1 -1
  13. data/spec/integration/outputs/kafka_spec.rb +21 -1
  14. data/vendor/jar-dependencies/com/github/luben/zstd-jni/1.5.2-2/zstd-jni-1.5.2-2.jar +0 -0
  15. data/vendor/jar-dependencies/io/confluent/common-config/{5.5.1/common-config-5.5.1.jar → 6.2.2/common-config-6.2.2.jar} +0 -0
  16. data/vendor/jar-dependencies/io/confluent/common-utils/{5.5.1/common-utils-5.5.1.jar → 6.2.2/common-utils-6.2.2.jar} +0 -0
  17. data/vendor/jar-dependencies/io/confluent/kafka-avro-serializer/6.2.2/kafka-avro-serializer-6.2.2.jar +0 -0
  18. data/vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/6.2.2/kafka-schema-registry-client-6.2.2.jar +0 -0
  19. data/vendor/jar-dependencies/io/confluent/kafka-schema-serializer/6.2.2/kafka-schema-serializer-6.2.2.jar +0 -0
  20. data/vendor/jar-dependencies/org/apache/avro/avro/1.11.3/avro-1.11.3.jar +0 -0
  21. data/vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.8.1/kafka-clients-2.8.1.jar +0 -0
  22. data/vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.8.1/kafka_2.12-2.8.1.jar +0 -0
  23. data/vendor/jar-dependencies/org/slf4j/slf4j-api/{1.7.30/slf4j-api-1.7.30.jar → 1.7.36/slf4j-api-1.7.36.jar} +0 -0
  24. data/vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.8.4/snappy-java-1.1.8.4.jar +0 -0
  25. metadata +16 -16
  26. data/lib/logstash/plugin_mixins/common.rb +0 -107
  27. data/lib/logstash/plugin_mixins/kafka_support.rb +0 -29
  28. data/vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar +0 -0
  29. data/vendor/jar-dependencies/io/confluent/kafka-avro-serializer/5.5.1/kafka-avro-serializer-5.5.1.jar +0 -0
  30. data/vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/5.5.1/kafka-schema-registry-client-5.5.1.jar +0 -0
  31. data/vendor/jar-dependencies/io/confluent/kafka-schema-serializer/5.5.1/kafka-schema-serializer-5.5.1.jar +0 -0
  32. data/vendor/jar-dependencies/org/apache/avro/avro/1.9.2/avro-1.9.2.jar +0 -0
  33. data/vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.5.1/kafka-clients-2.5.1.jar +0 -0
  34. data/vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.5.1/kafka_2.12-2.5.1.jar +0 -0
  35. data/vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: df9c89cdfcc2db6702409ec45ffb8d9f8f2b9274304889df1527e0697ccfcf95
- data.tar.gz: e907ad2e277d27c8cdbe98ebd203af86a3505a85574d64ace4402607c075a69e
+ metadata.gz: 53d305a9b4c08e49bde08c31edf4005a58b596ce74c03e86327a477bf5a4410c
+ data.tar.gz: 8f717699318bac4ad63f61722de0ac4a61fcc8686269bb26ddee22c8aaa4cddc
  SHA512:
- metadata.gz: bac93eb957af9028a6efc6e31a66c94818ae61333fa738daa6606abdd325dbea206fea2cd905d891f2b341a7bc983f8eaf5a5471015ac9548bc902f941b4a0d9
- data.tar.gz: 456739a2409ef5a42f007a23c8d0dbfceb3518e8e65b528c0f48266f2a219c2415a83a507fdab3ba028cbc5493d645c080ce191a0d39d7c1787557abded9a0e1
+ metadata.gz: 0bed7cd4a84af389ee1ffb23d7dd068113268a65c79ecc4fbbffa07f2d3f6a50a61c380873e24b25e2c555f33d7d1e9a34b00e10fdf6a03cab9e09f264ea8684
+ data.tar.gz: 5bf4ff031097a0fe5617760cf5238f125ee00cd029a67bc9d58e4387e7af6d816ac48057dd7c3e37e305adc0159f907d264cfb42177d929c4e9310736aa3cf5d
data/CHANGELOG.md CHANGED
@@ -1,6 +1,21 @@
+ ## 10.12.1
+ - Fix: update Avro library on 10.x [#149](https://api.github.com/repos/logstash-plugins/logstash-integration-kafka/pulls/149)
+
+ ## 10.12.0
+ - bump kafka client to 2.8.1 [#115](https://api.github.com/repos/logstash-plugins/logstash-integration-kafka/pulls/115)
+
+ ## 10.11.0
+ - Feat: added connections_max_idle_ms setting for output [#118](https://github.com/logstash-plugins/logstash-integration-kafka/pull/118)
+ - Refactor: mixins to follow shared mixin module naming
+
+ ## 10.10.1
+ - Update CHANGELOG.md [#114](https://api.github.com/repos/logstash-plugins/logstash-integration-kafka/pulls/114)
+
+ ## 10.10.0
+ - Added config setting to enable 'zstd' compression in the Kafka output [#112](https://github.com/logstash-plugins/logstash-integration-kafka/pull/112)
+
  ## 10.9.0
  - Refactor: leverage codec when using schema registry [#106](https://github.com/logstash-plugins/logstash-integration-kafka/pull/106)
-
  Previously using `schema_registry_url` parsed the payload as JSON even if `codec => 'plain'` was set, this is no longer the case.

  ## 10.8.2
@@ -87,7 +102,6 @@
  - Fix links in changelog pointing to stand-alone plugin changelogs.
  - Refactor: scope java_import to plugin class

-
  ## 10.0.0
  - Initial release of the Kafka Integration Plugin, which combines
  previously-separate Kafka plugins and shared dependencies into a single
data/DEVELOPER.md CHANGED
@@ -62,7 +62,7 @@ See http://kafka.apache.org/documentation.html#producerconfigs for details about
  kafka {
  topic_id => ... # string (required), The topic to produce the messages to
  broker_list => ... # string (optional), default: "localhost:9092", This is for bootstrapping and the producer will only use it for getting metadata
- compression_codec => ... # string (optional), one of ["none", "gzip", "snappy"], default: "none"
+ compression_codec => ... # string (optional), one of ["none", "gzip", "snappy", "lz4", "zstd"], default: "none"
  compressed_topics => ... # string (optional), default: "", This parameter allows you to set whether compression should be turned on for particular
  request_required_acks => ... # number (optional), one of [-1, 0, 1], default: 0, This value controls when a produce request is considered completed
  serializer_class => ... # string, (optional) default: "kafka.serializer.StringEncoder", The serializer class for messages. The default encoder takes a byte[] and returns the same byte[]
data/docs/index.asciidoc CHANGED
@@ -1,7 +1,7 @@
  :plugin: kafka
  :type: integration
  :no_codec:
- :kafka_client: 2.5.1
+ :kafka_client: 2.8.1

  ///////////////////////////////////////////
  START - GENERATED VARIABLES, DO NOT EDIT!
data/docs/input-kafka.asciidoc CHANGED
@@ -2,7 +2,7 @@
  :plugin: kafka
  :type: input
  :default_codec: plain
- :kafka_client: 2.5
+ :kafka_client: 2.8
  :kafka_client_doc: 25

  ///////////////////////////////////////////
data/docs/output-kafka.asciidoc CHANGED
@@ -2,7 +2,7 @@
  :plugin: kafka
  :type: output
  :default_codec: plain
- :kafka_client: 2.5
+ :kafka_client: 2.8
  :kafka_client_doc: 25

  ///////////////////////////////////////////
@@ -84,7 +84,8 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
  | <<plugins-{type}s-{plugin}-buffer_memory>> |<<number,number>>|No
  | <<plugins-{type}s-{plugin}-client_dns_lookup>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-client_id>> |<<string,string>>|No
- | <<plugins-{type}s-{plugin}-compression_type>> |<<string,string>>, one of `["none", "gzip", "snappy", "lz4"]`|No
+ | <<plugins-{type}s-{plugin}-compression_type>> |<<string,string>>, one of `["none", "gzip", "snappy", "lz4", "zstd"]`|No
+ | <<plugins-{type}s-{plugin}-connections_max_idle_ms>> |<<number,number>>|No
  | <<plugins-{type}s-{plugin}-jaas_path>> |a valid filesystem path|No
  | <<plugins-{type}s-{plugin}-kerberos_config>> |a valid filesystem path|No
  | <<plugins-{type}s-{plugin}-key_serializer>> |<<string,string>>|No
@@ -193,11 +194,19 @@ ip/port by allowing a logical application name to be included with the request
  [id="plugins-{type}s-{plugin}-compression_type"]
  ===== `compression_type`

- * Value can be any of: `none`, `gzip`, `snappy`, `lz4`
+ * Value can be any of: `none`, `gzip`, `snappy`, `lz4`, `zstd`
  * Default value is `"none"`

  The compression type for all data generated by the producer.
- The default is none (i.e. no compression). Valid values are none, gzip, snappy, or lz4.
+ The default is none (meaning no compression). Valid values are none, gzip, snappy, lz4, or zstd.
+
+ [id="plugins-{type}s-{plugin}-connections_max_idle_ms"]
+ ===== `connections_max_idle_ms`
+
+ * Value type is <<number,number>>
+ * Default value is `540000` milliseconds (9 minutes).
+
+ Close idle connections after the number of milliseconds specified by this config.

  [id="plugins-{type}s-{plugin}-jaas_path"]
  ===== `jaas_path`
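For context (not part of the diff), the two options documented above can be exercised from Ruby the same way the gem's integration spec does, by passing them as plugin params. This is an illustrative sketch only; the topic name is a made-up placeholder.

```ruby
# Illustrative sketch, not part of the diff: configuring the 10.12.x output
# with the newly documented options. The topic name is a placeholder.
require "logstash/outputs/kafka"

output = LogStash::Outputs::Kafka.new(
  "topic_id"                => "example_topic", # placeholder topic
  "compression_type"        => "zstd",          # value accepted since 10.10.0
  "connections_max_idle_ms" => 540_000          # option added in 10.11.0 (Kafka default, 9 minutes)
)
output.register
```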
data/lib/logstash/inputs/kafka.rb CHANGED
@@ -2,12 +2,11 @@ require 'logstash/namespace'
  require 'logstash/inputs/base'
  require 'stud/interval'
  require 'java'
- require 'logstash-integration-kafka_jars.rb'
- require 'logstash/plugin_mixins/kafka_support'
- require 'manticore'
  require "json"
  require "logstash/json"
- require_relative '../plugin_mixins/common'
+ require 'logstash-integration-kafka_jars.rb'
+ require 'logstash/plugin_mixins/kafka/common'
+ require 'logstash/plugin_mixins/kafka/avro_schema_registry'
  require 'logstash/plugin_mixins/deprecation_logger_support'

  # This input will read events from a Kafka topic. It uses the 0.10 version of
@@ -57,8 +56,8 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base

  DEFAULT_DESERIALIZER_CLASS = "org.apache.kafka.common.serialization.StringDeserializer"

- include LogStash::PluginMixins::KafkaSupport
- include ::LogStash::PluginMixins::KafkaAvroSchemaRegistry
+ include LogStash::PluginMixins::Kafka::Common
+ include LogStash::PluginMixins::Kafka::AvroSchemaRegistry
  include LogStash::PluginMixins::DeprecationLoggerSupport

  config_name 'kafka'
@@ -98,8 +97,6 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  # is to be able to track the source of requests beyond just ip/port by allowing
  # a logical application name to be included.
  config :client_id, :validate => :string, :default => "logstash"
- # Close idle connections after the number of milliseconds specified by this config.
- config :connections_max_idle_ms, :validate => :number, :default => 540_000 # (9m) Kafka default
  # Ideally you should have as many threads as the number of partitions for a perfect
  # balance — more threads than partitions means that some threads will be idle
  config :consumer_threads, :validate => :number, :default => 1
@@ -152,9 +149,6 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  config :max_partition_fetch_bytes, :validate => :number, :default => 1_048_576 # (1MB) Kafka default
  # The maximum number of records returned in a single call to poll().
  config :max_poll_records, :validate => :number, :default => 500 # Kafka default
- # The period of time in milliseconds after which we force a refresh of metadata even if
- # we haven't seen any partition leadership changes to proactively discover any new brokers or partitions
- config :metadata_max_age_ms, :validate => :number, :default => 300_000 # (5m) Kafka default
  # The name of the partition assignment strategy that the client uses to distribute
  # partition ownership amongst consumer instances, supported options are `range`,
  # `round_robin`, `sticky` and `cooperative_sticky`
@@ -167,10 +161,6 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  # This avoids repeatedly connecting to a host in a tight loop.
  # This backoff applies to all connection attempts by the client to a broker.
  config :reconnect_backoff_ms, :validate => :number, :default => 50 # Kafka default
- # The configuration controls the maximum amount of time the client will wait for the response of a request.
- # If the response is not received before the timeout elapses the client will resend the request if necessary
- # or fail the request if retries are exhausted.
- config :request_timeout_ms, :validate => :number, :default => 40_000 # Kafka default
  # The amount of time to wait before attempting to retry a failed fetch request
  # to a given topic partition. This avoids repeated fetching-and-failing in a tight loop.
  config :retry_backoff_ms, :validate => :number, :default => 100 # Kafka default
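The option removals above are a relocation rather than a feature change: `connections_max_idle_ms`, `metadata_max_age_ms`, and `request_timeout_ms` are re-declared by the shared `LogStash::PluginMixins::Kafka::Common` mixin added later in this diff, so existing pipeline configurations keep working. A minimal sketch (not part of the diff; the topic name is a placeholder):

```ruby
# Illustrative sketch, not part of the diff: the settings removed from the input
# class are still accepted because the shared mixin re-declares them.
require "logstash/inputs/kafka"

input = LogStash::Inputs::Kafka.new(
  "topics"              => ["example_topic"], # placeholder topic
  "request_timeout_ms"  => 40_000,            # declared by LogStash::PluginMixins::Kafka::Common
  "metadata_max_age_ms" => 300_000            # declared by LogStash::PluginMixins::Kafka::Common
)
```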
data/lib/logstash/outputs/kafka.rb CHANGED
@@ -2,7 +2,7 @@ require 'logstash/namespace'
  require 'logstash/outputs/base'
  require 'java'
  require 'logstash-integration-kafka_jars.rb'
- require 'logstash/plugin_mixins/kafka_support'
+ require 'logstash/plugin_mixins/kafka/common'

  # Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on
  # the broker.
@@ -51,7 +51,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base

  java_import org.apache.kafka.clients.producer.ProducerRecord

- include LogStash::PluginMixins::KafkaSupport
+ include LogStash::PluginMixins::Kafka::Common

  declare_threadsafe!

@@ -80,8 +80,8 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
  # The total bytes of memory the producer can use to buffer records waiting to be sent to the server.
  config :buffer_memory, :validate => :number, :default => 33_554_432 # (32M) Kafka default
  # The compression type for all data generated by the producer.
- # The default is none (i.e. no compression). Valid values are none, gzip, or snappy.
- config :compression_type, :validate => ["none", "gzip", "snappy", "lz4"], :default => "none"
+ # The default is none (i.e. no compression). Valid values are none, gzip, snappy, lz4 or zstd.
+ config :compression_type, :validate => ["none", "gzip", "snappy", "lz4", "zstd"], :default => "none"
  # How DNS lookups should be done. If set to `use_all_dns_ips`, when the lookup returns multiple
  # IP addresses for a hostname, they will all be attempted to connect to before failing the
  # connection. If the value is `resolve_canonical_bootstrap_servers_only` each entry will be
@@ -107,19 +107,12 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
  config :message_key, :validate => :string
  # the timeout setting for initial metadata request to fetch topic metadata.
  config :metadata_fetch_timeout_ms, :validate => :number, :default => 60_000
- # the max time in milliseconds before a metadata refresh is forced.
- config :metadata_max_age_ms, :validate => :number, :default => 300_000 # (5m) Kafka default
  # Partitioner to use - can be `default`, `uniform_sticky`, `round_robin` or a fully qualified class name of a custom partitioner.
  config :partitioner, :validate => :string
  # The size of the TCP receive buffer to use when reading data
  config :receive_buffer_bytes, :validate => :number, :default => 32_768 # (32KB) Kafka default
  # The amount of time to wait before attempting to reconnect to a given host when a connection fails.
  config :reconnect_backoff_ms, :validate => :number, :default => 50 # Kafka default
- # The configuration controls the maximum amount of time the client will wait
- # for the response of a request. If the response is not received before the timeout
- # elapses the client will resend the request if necessary or fail the request if
- # retries are exhausted.
- config :request_timeout_ms, :validate => :number, :default => 40_000 # (40s) Kafka default
  # The default retry behavior is to retry until successful. To prevent data loss,
  # the use of this setting is discouraged.
  #
data/lib/logstash/plugin_mixins/kafka/avro_schema_registry.rb ADDED
@@ -0,0 +1,108 @@
+ require 'manticore'
+
+ module LogStash module PluginMixins module Kafka
+ module AvroSchemaRegistry
+
+ def self.included(base)
+ base.extend(self)
+ base.setup_schema_registry_config
+ end
+
+ def setup_schema_registry_config
+ # Option to set key to access Schema Registry.
+ config :schema_registry_key, :validate => :string
+
+ # Option to set secret to access Schema Registry.
+ config :schema_registry_secret, :validate => :password
+
+ # Option to set the endpoint of the Schema Registry.
+ # This option permit the usage of Avro Kafka deserializer which retrieve the schema of the Avro message from an
+ # instance of schema registry. If this option has value `value_deserializer_class` nor `topics_pattern` could be valued
+ config :schema_registry_url, :validate => :uri
+
+ # Option to set the proxy of the Schema Registry.
+ # This option permits to define a proxy to be used to reach the schema registry service instance.
+ config :schema_registry_proxy, :validate => :uri
+
+ # Option to skip validating the schema registry during registration. This can be useful when using
+ # certificate based auth
+ config :schema_registry_validation, :validate => ['auto', 'skip'], :default => 'auto'
+ end
+
+ def check_schema_registry_parameters
+ if @schema_registry_url
+ check_for_schema_registry_conflicts
+ @schema_registry_proxy_host, @schema_registry_proxy_port = split_proxy_into_host_and_port(schema_registry_proxy)
+ check_for_key_and_secret
+ check_for_schema_registry_connectivity_and_subjects if schema_registry_validation?
+ end
+ end
+
+ def schema_registry_validation?
+ return false if schema_registry_validation.to_s == 'skip'
+ return false if using_kerberos? # pre-validation doesn't support kerberos
+
+ true
+ end
+
+ def using_kerberos?
+ security_protocol == "SASL_PLAINTEXT" || security_protocol == "SASL_SSL"
+ end
+
+ private
+ def check_for_schema_registry_conflicts
+ if @value_deserializer_class != LogStash::Inputs::Kafka::DEFAULT_DESERIALIZER_CLASS
+ raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of value_deserializer_class'
+ end
+ if @topics_pattern && !@topics_pattern.empty?
+ raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of topics_pattern'
+ end
+ end
+
+ private
+ def check_for_schema_registry_connectivity_and_subjects
+ options = {}
+ if schema_registry_proxy && !schema_registry_proxy.empty?
+ options[:proxy] = schema_registry_proxy.to_s
+ end
+ if schema_registry_key and !schema_registry_key.empty?
+ options[:auth] = {:user => schema_registry_key, :password => schema_registry_secret.value}
+ end
+ client = Manticore::Client.new(options)
+ begin
+ response = client.get(@schema_registry_url.uri.to_s + '/subjects').body
+ rescue Manticore::ManticoreException => e
+ raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
+ end
+ registered_subjects = JSON.parse response
+ expected_subjects = @topics.map { |t| "#{t}-value"}
+ if (expected_subjects & registered_subjects).size != expected_subjects.size
+ undefined_topic_subjects = expected_subjects - registered_subjects
+ raise LogStash::ConfigurationError, "The schema registry does not contain definitions for required topic subjects: #{undefined_topic_subjects}"
+ end
+ end
+
+ def split_proxy_into_host_and_port(proxy_uri)
+ return nil unless proxy_uri && !proxy_uri.empty?
+
+ port = proxy_uri.port
+
+ host_spec = ""
+ host_spec << proxy_uri.scheme || "http"
+ host_spec << "://"
+ host_spec << "#{proxy_uri.userinfo}@" if proxy_uri.userinfo
+ host_spec << proxy_uri.host
+
+ [host_spec, port]
+ end
+
+ def check_for_key_and_secret
+ if schema_registry_key and !schema_registry_key.empty?
+ if !schema_registry_secret or schema_registry_secret.value.empty?
+ raise LogStash::ConfigurationError, "Setting `schema_registry_secret` is required when `schema_registry_key` is provided."
+ end
+ end
+ end
+
+ end
+ end end end
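A hedged usage sketch (not part of the diff): this mixin is included into the kafka input, so the schema registry settings are ordinary input params, and `check_schema_registry_parameters` is what raises `LogStash::ConfigurationError` when, for example, a `schema_registry_key` is given without a `schema_registry_secret`. The endpoint, credentials, and topic below are placeholders.

```ruby
# Illustrative sketch, not part of the diff: exercising the options this mixin
# adds to the kafka input. URL, credentials and topic are placeholders.
require "logstash/inputs/kafka"

input = LogStash::Inputs::Kafka.new(
  "topics"                     => ["payments"],            # placeholder topic
  "schema_registry_url"        => "http://localhost:8081", # placeholder registry endpoint
  "schema_registry_key"        => "registry-user",         # must be paired with a secret
  "schema_registry_secret"     => "registry-pass",
  "schema_registry_validation" => "skip"                   # skip the /subjects connectivity pre-check
)
input.check_schema_registry_parameters # raises LogStash::ConfigurationError on invalid combinations
```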
data/lib/logstash/plugin_mixins/kafka/common.rb ADDED
@@ -0,0 +1,47 @@
+ module LogStash module PluginMixins module Kafka
+ module Common
+
+ def self.included(base)
+ # COMMON CONFIGURATION SUPPORTED BY BOTH PRODUCER/CONSUMER
+
+ # Close idle connections after the number of milliseconds specified by this config.
+ base.config :connections_max_idle_ms, :validate => :number, :default => 540_000 # (9m) Kafka default
+
+ # The period of time in milliseconds after which we force a refresh of metadata even if
+ # we haven't seen any partition leadership changes to proactively discover any new brokers or partitions
+ base.config :metadata_max_age_ms, :validate => :number, :default => 300_000 # (5m) Kafka default
+
+ # The configuration controls the maximum amount of time the client will wait for the response of a request.
+ # If the response is not received before the timeout elapses the client will resend the request if necessary
+ # or fail the request if retries are exhausted.
+ base.config :request_timeout_ms, :validate => :number, :default => 40_000 # Kafka default
+ end
+
+ def set_trustore_keystore_config(props)
+ props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
+ props.put("ssl.truststore.location", ssl_truststore_location) unless ssl_truststore_location.nil?
+ props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
+
+ # Client auth stuff
+ props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
+ props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
+ props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
+ props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
+ props.put("ssl.endpoint.identification.algorithm", ssl_endpoint_identification_algorithm) unless ssl_endpoint_identification_algorithm.nil?
+ end
+
+ def set_sasl_config(props)
+ java.lang.System.setProperty("java.security.auth.login.config", jaas_path) unless jaas_path.nil?
+ java.lang.System.setProperty("java.security.krb5.conf", kerberos_config) unless kerberos_config.nil?
+
+ props.put("sasl.mechanism", sasl_mechanism)
+ if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
+ raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
+ end
+
+ props.put("sasl.kerberos.service.name", sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
+ props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
+ end
+
+ end
+ end end end
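A minimal sketch (hypothetical plugin, not part of the diff) of how the `included` hook is meant to work: because it calls `base.config`, any plugin class that includes the mixin gains the three shared client settings as ordinary options, in addition to the SSL/SASL helper methods. The class and plugin name below are made up for illustration.

```ruby
# Illustrative sketch, not part of the diff: a made-up output gains the shared
# Kafka client settings simply by including the mixin.
require "logstash/outputs/base"
require "logstash/plugin_mixins/kafka/common"

class LogStash::Outputs::ExampleKafkaLike < LogStash::Outputs::Base
  config_name "example_kafka_like"              # hypothetical plugin name
  include LogStash::PluginMixins::Kafka::Common # runs the included hook -> base.config calls

  def register
    # These accessors exist only because the mixin declared the options.
    logger.info("kafka client timeouts",
                :connections_max_idle_ms => connections_max_idle_ms, # default 540_000 (9m)
                :metadata_max_age_ms     => metadata_max_age_ms,     # default 300_000 (5m)
                :request_timeout_ms      => request_timeout_ms)      # default 40_000
  end

  def receive(event)
    # no-op for the sketch
  end
end
```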
data/lib/logstash-integration-kafka_jars.rb CHANGED
@@ -1,17 +1,17 @@
  # AUTOGENERATED BY THE GRADLE SCRIPT. DO NOT EDIT.

  require 'jar_dependencies'
- require_jar('io.confluent', 'kafka-avro-serializer', '5.5.1')
- require_jar('io.confluent', 'kafka-schema-serializer', '5.5.1')
- require_jar('io.confluent', 'common-config', '5.5.1')
- require_jar('org.apache.avro', 'avro', '1.9.2')
- require_jar('io.confluent', 'kafka-schema-registry-client', '5.5.1')
- require_jar('org.apache.kafka', 'kafka_2.12', '2.5.1')
- require_jar('io.confluent', 'common-utils', '5.5.1')
+ require_jar('io.confluent', 'kafka-avro-serializer', '6.2.2')
+ require_jar('io.confluent', 'kafka-schema-serializer', '6.2.2')
+ require_jar('io.confluent', 'common-config', '6.2.2')
+ require_jar('org.apache.avro', 'avro', '1.11.3')
+ require_jar('io.confluent', 'kafka-schema-registry-client', '6.2.2')
+ require_jar('org.apache.kafka', 'kafka_2.12', '2.8.1')
+ require_jar('io.confluent', 'common-utils', '6.2.2')
  require_jar('javax.ws.rs', 'javax.ws.rs-api', '2.1.1')
  require_jar('org.glassfish.jersey.core', 'jersey-common', '2.33')
- require_jar('org.apache.kafka', 'kafka-clients', '2.5.1')
- require_jar('com.github.luben', 'zstd-jni', '1.4.4-7')
- require_jar('org.slf4j', 'slf4j-api', '1.7.30')
+ require_jar('org.apache.kafka', 'kafka-clients', '2.8.1')
+ require_jar('com.github.luben', 'zstd-jni', '1.5.2-2')
+ require_jar('org.slf4j', 'slf4j-api', '1.7.36')
  require_jar('org.lz4', 'lz4-java', '1.7.1')
- require_jar('org.xerial.snappy', 'snappy-java', '1.1.7.3')
+ require_jar('org.xerial.snappy', 'snappy-java', '1.1.8.4')
data/logstash-integration-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-integration-kafka'
- s.version = '10.9.0'
+ s.version = '10.12.1'
  s.licenses = ['Apache-2.0']
  s.summary = "Integration with Kafka - input and output plugins"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/integration/outputs/kafka_spec.rb CHANGED
@@ -28,7 +28,8 @@ describe "outputs/kafka", :integration => true do
  let(:num_events) { 3 }

  before :each do
- config = base_config.merge({"topic_id" => test_topic})
+ # NOTE: the connections_max_idle_ms is irrelevant just testing that configuration works ...
+ config = base_config.merge({"topic_id" => test_topic, "connections_max_idle_ms" => 540_000})
  load_kafka_data(config)
  end

@@ -139,6 +140,25 @@ describe "outputs/kafka", :integration => true do
  # end
  end

+ context 'when using zstd compression' do
+ let(:test_topic) { 'logstash_integration_zstd_topic' }
+
+ before :each do
+ config = base_config.merge({"topic_id" => test_topic, "compression_type" => "zstd"})
+ load_kafka_data(config)
+ end
+
+ # NOTE: depends on zstd-ruby gem which is using a C-extension
+ # it 'should have data integrity' do
+ # messages = fetch_messages(test_topic)
+ #
+ # expect(messages.size).to eq(num_events)
+ # messages.each do |m|
+ # expect(m.value).to eq(event.to_s)
+ # end
+ # end
+ end
+
  context 'when using multi partition topic' do
  let(:num_events) { 100 } # ~ more than (batch.size) 16,384 bytes
  let(:test_topic) { 'logstash_integration_topic3' }
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-kafka
  version: !ruby/object:Gem::Version
- version: 10.9.0
+ version: 10.12.1
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-12-14 00:00:00.000000000 Z
+ date: 2023-10-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -245,8 +245,8 @@ files:
  - lib/logstash-integration-kafka_jars.rb
  - lib/logstash/inputs/kafka.rb
  - lib/logstash/outputs/kafka.rb
- - lib/logstash/plugin_mixins/common.rb
- - lib/logstash/plugin_mixins/kafka_support.rb
+ - lib/logstash/plugin_mixins/kafka/avro_schema_registry.rb
+ - lib/logstash/plugin_mixins/kafka/common.rb
  - logstash-integration-kafka.gemspec
  - spec/check_docs_spec.rb
  - spec/fixtures/jaas.config
@@ -257,20 +257,20 @@ files:
  - spec/unit/inputs/avro_schema_fixture_payment.asvc
  - spec/unit/inputs/kafka_spec.rb
  - spec/unit/outputs/kafka_spec.rb
- - vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.4-7/zstd-jni-1.4.4-7.jar
- - vendor/jar-dependencies/io/confluent/common-config/5.5.1/common-config-5.5.1.jar
- - vendor/jar-dependencies/io/confluent/common-utils/5.5.1/common-utils-5.5.1.jar
- - vendor/jar-dependencies/io/confluent/kafka-avro-serializer/5.5.1/kafka-avro-serializer-5.5.1.jar
- - vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/5.5.1/kafka-schema-registry-client-5.5.1.jar
- - vendor/jar-dependencies/io/confluent/kafka-schema-serializer/5.5.1/kafka-schema-serializer-5.5.1.jar
+ - vendor/jar-dependencies/com/github/luben/zstd-jni/1.5.2-2/zstd-jni-1.5.2-2.jar
+ - vendor/jar-dependencies/io/confluent/common-config/6.2.2/common-config-6.2.2.jar
+ - vendor/jar-dependencies/io/confluent/common-utils/6.2.2/common-utils-6.2.2.jar
+ - vendor/jar-dependencies/io/confluent/kafka-avro-serializer/6.2.2/kafka-avro-serializer-6.2.2.jar
+ - vendor/jar-dependencies/io/confluent/kafka-schema-registry-client/6.2.2/kafka-schema-registry-client-6.2.2.jar
+ - vendor/jar-dependencies/io/confluent/kafka-schema-serializer/6.2.2/kafka-schema-serializer-6.2.2.jar
  - vendor/jar-dependencies/javax/ws/rs/javax.ws.rs-api/2.1.1/javax.ws.rs-api-2.1.1.jar
- - vendor/jar-dependencies/org/apache/avro/avro/1.9.2/avro-1.9.2.jar
- - vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.5.1/kafka-clients-2.5.1.jar
- - vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.5.1/kafka_2.12-2.5.1.jar
+ - vendor/jar-dependencies/org/apache/avro/avro/1.11.3/avro-1.11.3.jar
+ - vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.8.1/kafka-clients-2.8.1.jar
+ - vendor/jar-dependencies/org/apache/kafka/kafka_2.12/2.8.1/kafka_2.12-2.8.1.jar
  - vendor/jar-dependencies/org/glassfish/jersey/core/jersey-common/2.33/jersey-common-2.33.jar
  - vendor/jar-dependencies/org/lz4/lz4-java/1.7.1/lz4-java-1.7.1.jar
- - vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.30/slf4j-api-1.7.30.jar
- - vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar
+ - vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.36/slf4j-api-1.7.36.jar
+ - vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.8.4/snappy-java-1.1.8.4.jar
  homepage: http://www.elastic.co/guide/en/logstash/current/index.html
  licenses:
  - Apache-2.0
@@ -294,7 +294,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.1.6
+ rubygems_version: 3.2.33
  signing_key:
  specification_version: 4
  summary: Integration with Kafka - input and output plugins
data/lib/logstash/plugin_mixins/common.rb DELETED
@@ -1,107 +0,0 @@
- module LogStash
- module PluginMixins
- module KafkaAvroSchemaRegistry
-
- def self.included(base)
- base.extend(self)
- base.setup_schema_registry_config
- end
-
- def setup_schema_registry_config
- # Option to set key to access Schema Registry.
- config :schema_registry_key, :validate => :string
-
- # Option to set secret to access Schema Registry.
- config :schema_registry_secret, :validate => :password
-
- # Option to set the endpoint of the Schema Registry.
- # This option permit the usage of Avro Kafka deserializer which retrieve the schema of the Avro message from an
- # instance of schema registry. If this option has value `value_deserializer_class` nor `topics_pattern` could be valued
- config :schema_registry_url, :validate => :uri
-
- # Option to set the proxy of the Schema Registry.
- # This option permits to define a proxy to be used to reach the schema registry service instance.
- config :schema_registry_proxy, :validate => :uri
-
- # Option to skip validating the schema registry during registration. This can be useful when using
- # certificate based auth
- config :schema_registry_validation, :validate => ['auto', 'skip'], :default => 'auto'
- end
-
- def check_schema_registry_parameters
- if @schema_registry_url
- check_for_schema_registry_conflicts
- @schema_registry_proxy_host, @schema_registry_proxy_port = split_proxy_into_host_and_port(schema_registry_proxy)
- check_for_key_and_secret
- check_for_schema_registry_connectivity_and_subjects if schema_registry_validation?
- end
- end
-
- def schema_registry_validation?
- return false if schema_registry_validation.to_s == 'skip'
- return false if using_kerberos? # pre-validation doesn't support kerberos
-
- true
- end
-
- def using_kerberos?
- security_protocol == "SASL_PLAINTEXT" || security_protocol == "SASL_SSL"
- end
-
- private
- def check_for_schema_registry_conflicts
- if @value_deserializer_class != LogStash::Inputs::Kafka::DEFAULT_DESERIALIZER_CLASS
- raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of value_deserializer_class'
- end
- if @topics_pattern && !@topics_pattern.empty?
- raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of topics_pattern'
- end
- end
-
- private
- def check_for_schema_registry_connectivity_and_subjects
- options = {}
- if schema_registry_proxy && !schema_registry_proxy.empty?
- options[:proxy] = schema_registry_proxy.to_s
- end
- if schema_registry_key and !schema_registry_key.empty?
- options[:auth] = {:user => schema_registry_key, :password => schema_registry_secret.value}
- end
- client = Manticore::Client.new(options)
- begin
- response = client.get(@schema_registry_url.uri.to_s + '/subjects').body
- rescue Manticore::ManticoreException => e
- raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
- end
- registered_subjects = JSON.parse response
- expected_subjects = @topics.map { |t| "#{t}-value"}
- if (expected_subjects & registered_subjects).size != expected_subjects.size
- undefined_topic_subjects = expected_subjects - registered_subjects
- raise LogStash::ConfigurationError, "The schema registry does not contain definitions for required topic subjects: #{undefined_topic_subjects}"
- end
- end
-
- def split_proxy_into_host_and_port(proxy_uri)
- return nil unless proxy_uri && !proxy_uri.empty?
-
- port = proxy_uri.port
-
- host_spec = ""
- host_spec << proxy_uri.scheme || "http"
- host_spec << "://"
- host_spec << "#{proxy_uri.userinfo}@" if proxy_uri.userinfo
- host_spec << proxy_uri.host
-
- [host_spec, port]
- end
-
- def check_for_key_and_secret
- if schema_registry_key and !schema_registry_key.empty?
- if !schema_registry_secret or schema_registry_secret.value.empty?
- raise LogStash::ConfigurationError, "Setting `schema_registry_secret` is required when `schema_registry_key` is provided."
- end
- end
- end
- end
- end
- end
data/lib/logstash/plugin_mixins/kafka_support.rb DELETED
@@ -1,29 +0,0 @@
- module LogStash module PluginMixins module KafkaSupport
-
- def set_trustore_keystore_config(props)
- props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
- props.put("ssl.truststore.location", ssl_truststore_location) unless ssl_truststore_location.nil?
- props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
-
- # Client auth stuff
- props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
- props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
- props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
- props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
- props.put("ssl.endpoint.identification.algorithm", ssl_endpoint_identification_algorithm) unless ssl_endpoint_identification_algorithm.nil?
- end
-
- def set_sasl_config(props)
- java.lang.System.setProperty("java.security.auth.login.config", jaas_path) unless jaas_path.nil?
- java.lang.System.setProperty("java.security.krb5.conf", kerberos_config) unless kerberos_config.nil?
-
- props.put("sasl.mechanism", sasl_mechanism)
- if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
- raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
- end
-
- props.put("sasl.kerberos.service.name", sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
- props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
- end
-
- end end end