logstash-integration-kafka 10.10.0-java → 10.11.0-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 1033b1bc88694b441cc6b117c431792780093b27ead742684f4e903048ed54a5
- data.tar.gz: e2a74687db7bba3ccc192a544142226dccb3b144bb11e5cfecd84dd4c26cbdf0
+ metadata.gz: 2833139d65282055a4e924ccfb50b19341a61af69e0834ef318a62f1459f3a16
+ data.tar.gz: fbfd7e6b94e9a74109b44b838df344ed4d7447d56d624f24e642235eb62b00e3
  SHA512:
- metadata.gz: b1e206f1bfbd4acbf6ca66d11f974c2116faf357da1212dfa740675dbce47ca1dea661fb0c185df687798d2f6a053dabb781d63d687cd981769d38de938c148a
- data.tar.gz: ca5f79ea95cd3901b1f47b06e9465a4962c4aa585cb1441254a916fe38a5603c98b87178eb85883caac5c28215b0e42de39c1d810e742d79d33bb8c3df82501b
+ metadata.gz: b35d99bf553fbfd3b78f3f7de62b427b18001853573c5160090bebba5dfb78f4d17aa9f879a94a98805f949b1ed76a3fa82e9c1a7b95815f223a5d04104f5bce
+ data.tar.gz: 0e8edf4f07b00443c39ac55523fb5663395ba0ae524dfedc5181a56b5160c2c33fe110e24bccbb060b43c6be0aa2211b50422d041fa1dea59561a26efc79d687
data/CHANGELOG.md CHANGED
@@ -1,10 +1,15 @@
- ## 10.10.0
+ ## 10.11.0
+ - Feat: added connections_max_idle_ms setting for output [#118](https://github.com/logstash-plugins/logstash-integration-kafka/pull/118)
+ - Refactor: mixins to follow shared mixin module naming
+
+ ## 10.10.1
+ - Update CHANGELOG.md [#114](https://api.github.com/repos/logstash-plugins/logstash-integration-kafka/pulls/114)
 
+ ## 10.10.0
  - Added config setting to enable 'zstd' compression in the Kafka output [#112](https://github.com/logstash-plugins/logstash-integration-kafka/pull/112)
 
  ## 10.9.0
  - Refactor: leverage codec when using schema registry [#106](https://github.com/logstash-plugins/logstash-integration-kafka/pull/106)
-
  Previously using `schema_registry_url` parsed the payload as JSON even if `codec => 'plain'` was set, this is no longer the case.
 
  ## 10.8.2
@@ -91,7 +96,6 @@
  - Fix links in changelog pointing to stand-alone plugin changelogs.
  - Refactor: scope java_import to plugin class
 
-
  ## 10.0.0
  - Initial release of the Kafka Integration Plugin, which combines
  previously-separate Kafka plugins and shared dependencies into a single
data/docs/output-kafka.asciidoc CHANGED
@@ -85,6 +85,7 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
  | <<plugins-{type}s-{plugin}-client_dns_lookup>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-client_id>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-compression_type>> |<<string,string>>, one of `["none", "gzip", "snappy", "lz4", "zstd"]`|No
+ | <<plugins-{type}s-{plugin}-connections_max_idle_ms>> |<<number,number>>|No
  | <<plugins-{type}s-{plugin}-jaas_path>> |a valid filesystem path|No
  | <<plugins-{type}s-{plugin}-kerberos_config>> |a valid filesystem path|No
  | <<plugins-{type}s-{plugin}-key_serializer>> |<<string,string>>|No
@@ -199,6 +200,14 @@ ip/port by allowing a logical application name to be included with the request
  The compression type for all data generated by the producer.
  The default is none (meaning no compression). Valid values are none, gzip, snappy, lz4, or zstd.
 
+ [id="plugins-{type}s-{plugin}-connections_max_idle_ms"]
+ ===== `connections_max_idle_ms`
+
+ * Value type is <<number,number>>
+ * Default value is `540000` milliseconds (9 minutes).
+
+ Close idle connections after the number of milliseconds specified by this config.
+
  [id="plugins-{type}s-{plugin}-jaas_path"]
  ===== `jaas_path`
 
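For context, here is a minimal plain-Ruby sketch (not taken from this diff; the hash names are hypothetical) of the usual convention these settings follow: a snake_case Logstash option such as `connections_max_idle_ms` corresponds to the dotted Kafka client property `connections.max.idle.ms`.

```ruby
# Hypothetical illustration: Logstash option names use underscores, while the
# Kafka client expects dotted property keys and string values.
settings = { "connections_max_idle_ms" => 540_000 } # 9 minutes, the Kafka default

props = settings.each_with_object({}) do |(name, value), acc|
  acc[name.tr("_", ".")] = value.to_s # "connections.max.idle.ms" => "540000"
end

puts props.inspect # => {"connections.max.idle.ms"=>"540000"}
```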
data/lib/logstash/inputs/kafka.rb CHANGED
@@ -2,12 +2,11 @@ require 'logstash/namespace'
  require 'logstash/inputs/base'
  require 'stud/interval'
  require 'java'
- require 'logstash-integration-kafka_jars.rb'
- require 'logstash/plugin_mixins/kafka_support'
- require 'manticore'
  require "json"
  require "logstash/json"
- require_relative '../plugin_mixins/common'
+ require 'logstash-integration-kafka_jars.rb'
+ require 'logstash/plugin_mixins/kafka/common'
+ require 'logstash/plugin_mixins/kafka/avro_schema_registry'
  require 'logstash/plugin_mixins/deprecation_logger_support'
 
  # This input will read events from a Kafka topic. It uses the 0.10 version of
@@ -57,8 +56,8 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 
  DEFAULT_DESERIALIZER_CLASS = "org.apache.kafka.common.serialization.StringDeserializer"
 
- include LogStash::PluginMixins::KafkaSupport
- include ::LogStash::PluginMixins::KafkaAvroSchemaRegistry
+ include LogStash::PluginMixins::Kafka::Common
+ include LogStash::PluginMixins::Kafka::AvroSchemaRegistry
  include LogStash::PluginMixins::DeprecationLoggerSupport
 
  config_name 'kafka'
@@ -98,8 +97,6 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  # is to be able to track the source of requests beyond just ip/port by allowing
  # a logical application name to be included.
  config :client_id, :validate => :string, :default => "logstash"
- # Close idle connections after the number of milliseconds specified by this config.
- config :connections_max_idle_ms, :validate => :number, :default => 540_000 # (9m) Kafka default
  # Ideally you should have as many threads as the number of partitions for a perfect
  # balance — more threads than partitions means that some threads will be idle
  config :consumer_threads, :validate => :number, :default => 1
@@ -152,9 +149,6 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  config :max_partition_fetch_bytes, :validate => :number, :default => 1_048_576 # (1MB) Kafka default
  # The maximum number of records returned in a single call to poll().
  config :max_poll_records, :validate => :number, :default => 500 # Kafka default
- # The period of time in milliseconds after which we force a refresh of metadata even if
- # we haven't seen any partition leadership changes to proactively discover any new brokers or partitions
- config :metadata_max_age_ms, :validate => :number, :default => 300_000 # (5m) Kafka default
  # The name of the partition assignment strategy that the client uses to distribute
  # partition ownership amongst consumer instances, supported options are `range`,
  # `round_robin`, `sticky` and `cooperative_sticky`
@@ -167,10 +161,6 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
  # This avoids repeatedly connecting to a host in a tight loop.
  # This backoff applies to all connection attempts by the client to a broker.
  config :reconnect_backoff_ms, :validate => :number, :default => 50 # Kafka default
- # The configuration controls the maximum amount of time the client will wait for the response of a request.
- # If the response is not received before the timeout elapses the client will resend the request if necessary
- # or fail the request if retries are exhausted.
- config :request_timeout_ms, :validate => :number, :default => 40_000 # Kafka default
  # The amount of time to wait before attempting to retry a failed fetch request
  # to a given topic partition. This avoids repeated fetching-and-failing in a tight loop.
  config :retry_backoff_ms, :validate => :number, :default => 100 # Kafka default
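The deletions above move shared options out of the input plugin and into `LogStash::PluginMixins::Kafka::Common` (shown later in this diff). A self-contained sketch of the underlying pattern, with stand-in class names and a stub for Logstash core's `config` DSL:

```ruby
# Simplified stand-alone illustration of the `self.included` hook pattern used
# by the new mixins: including the module registers shared options on the host
# class and contributes shared instance methods.
module Common
  def self.included(base)
    # Runs at include-time, so shared settings are declared once per host class.
    base.config :connections_max_idle_ms, :default => 540_000
    base.config :request_timeout_ms,      :default => 40_000
  end

  def idle_timeout_seconds # shared helper available to input and output alike
    connections_max_idle_ms / 1000
  end
end

class FakeKafkaPlugin
  def self.config(name, opts = {}) # stand-in for Logstash's config DSL
    define_method(name) { opts[:default] }
  end
  include Common
end

puts FakeKafkaPlugin.new.idle_timeout_seconds # => 540
```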
data/lib/logstash/outputs/kafka.rb CHANGED
@@ -2,7 +2,7 @@ require 'logstash/namespace'
  require 'logstash/outputs/base'
  require 'java'
  require 'logstash-integration-kafka_jars.rb'
- require 'logstash/plugin_mixins/kafka_support'
+ require 'logstash/plugin_mixins/kafka/common'
 
  # Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on
  # the broker.
@@ -51,7 +51,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
 
  java_import org.apache.kafka.clients.producer.ProducerRecord
 
- include LogStash::PluginMixins::KafkaSupport
+ include LogStash::PluginMixins::Kafka::Common
 
  declare_threadsafe!
 
@@ -107,19 +107,12 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
  config :message_key, :validate => :string
  # the timeout setting for initial metadata request to fetch topic metadata.
  config :metadata_fetch_timeout_ms, :validate => :number, :default => 60_000
- # the max time in milliseconds before a metadata refresh is forced.
- config :metadata_max_age_ms, :validate => :number, :default => 300_000 # (5m) Kafka default
  # Partitioner to use - can be `default`, `uniform_sticky`, `round_robin` or a fully qualified class name of a custom partitioner.
  config :partitioner, :validate => :string
  # The size of the TCP receive buffer to use when reading data
  config :receive_buffer_bytes, :validate => :number, :default => 32_768 # (32KB) Kafka default
  # The amount of time to wait before attempting to reconnect to a given host when a connection fails.
  config :reconnect_backoff_ms, :validate => :number, :default => 50 # Kafka default
- # The configuration controls the maximum amount of time the client will wait
- # for the response of a request. If the response is not received before the timeout
- # elapses the client will resend the request if necessary or fail the request if
- # retries are exhausted.
- config :request_timeout_ms, :validate => :number, :default => 40_000 # (40s) Kafka default
  # The default retry behavior is to retry until successful. To prevent data loss,
  # the use of this setting is discouraged.
  #
data/lib/logstash/plugin_mixins/kafka/avro_schema_registry.rb ADDED
@@ -0,0 +1,108 @@
+ require 'manticore'
+
+ module LogStash module PluginMixins module Kafka
+ module AvroSchemaRegistry
+
+ def self.included(base)
+ base.extend(self)
+ base.setup_schema_registry_config
+ end
+
+ def setup_schema_registry_config
+ # Option to set key to access Schema Registry.
+ config :schema_registry_key, :validate => :string
+
+ # Option to set secret to access Schema Registry.
+ config :schema_registry_secret, :validate => :password
+
+ # Option to set the endpoint of the Schema Registry.
+ # This option permit the usage of Avro Kafka deserializer which retrieve the schema of the Avro message from an
+ # instance of schema registry. If this option has value `value_deserializer_class` nor `topics_pattern` could be valued
+ config :schema_registry_url, :validate => :uri
+
+ # Option to set the proxy of the Schema Registry.
+ # This option permits to define a proxy to be used to reach the schema registry service instance.
+ config :schema_registry_proxy, :validate => :uri
+
+ # Option to skip validating the schema registry during registration. This can be useful when using
+ # certificate based auth
+ config :schema_registry_validation, :validate => ['auto', 'skip'], :default => 'auto'
+ end
+
+ def check_schema_registry_parameters
+ if @schema_registry_url
+ check_for_schema_registry_conflicts
+ @schema_registry_proxy_host, @schema_registry_proxy_port = split_proxy_into_host_and_port(schema_registry_proxy)
+ check_for_key_and_secret
+ check_for_schema_registry_connectivity_and_subjects if schema_registry_validation?
+ end
+ end
+
+ def schema_registry_validation?
+ return false if schema_registry_validation.to_s == 'skip'
+ return false if using_kerberos? # pre-validation doesn't support kerberos
+
+ true
+ end
+
+ def using_kerberos?
+ security_protocol == "SASL_PLAINTEXT" || security_protocol == "SASL_SSL"
+ end
+
+ private
+ def check_for_schema_registry_conflicts
+ if @value_deserializer_class != LogStash::Inputs::Kafka::DEFAULT_DESERIALIZER_CLASS
+ raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of value_deserializer_class'
+ end
+ if @topics_pattern && !@topics_pattern.empty?
+ raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of topics_pattern'
+ end
+ end
+
+ private
+ def check_for_schema_registry_connectivity_and_subjects
+ options = {}
+ if schema_registry_proxy && !schema_registry_proxy.empty?
+ options[:proxy] = schema_registry_proxy.to_s
+ end
+ if schema_registry_key and !schema_registry_key.empty?
+ options[:auth] = {:user => schema_registry_key, :password => schema_registry_secret.value}
+ end
+ client = Manticore::Client.new(options)
+ begin
+ response = client.get(@schema_registry_url.uri.to_s + '/subjects').body
+ rescue Manticore::ManticoreException => e
+ raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
+ end
+ registered_subjects = JSON.parse response
+ expected_subjects = @topics.map { |t| "#{t}-value"}
+ if (expected_subjects & registered_subjects).size != expected_subjects.size
+ undefined_topic_subjects = expected_subjects - registered_subjects
+ raise LogStash::ConfigurationError, "The schema registry does not contain definitions for required topic subjects: #{undefined_topic_subjects}"
+ end
+ end
+
+ def split_proxy_into_host_and_port(proxy_uri)
+ return nil unless proxy_uri && !proxy_uri.empty?
+
+ port = proxy_uri.port
+
+ host_spec = ""
+ host_spec << proxy_uri.scheme || "http"
+ host_spec << "://"
+ host_spec << "#{proxy_uri.userinfo}@" if proxy_uri.userinfo
+ host_spec << proxy_uri.host
+
+ [host_spec, port]
+ end
+
+ def check_for_key_and_secret
+ if schema_registry_key and !schema_registry_key.empty?
+ if !schema_registry_secret or schema_registry_secret.value.empty?
+ raise LogStash::ConfigurationError, "Setting `schema_registry_secret` is required when `schema_registry_key` is provided."
+ end
+ end
+ end
+
+ end
+ end end end
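A tiny plain-Ruby illustration (hypothetical topic names) of the subject check in `check_for_schema_registry_connectivity_and_subjects` above: every consumed topic is expected to have a `<topic>-value` subject registered, and set intersection exposes any that are missing.

```ruby
# Each topic should have a "<topic>-value" subject in the registry; comparing
# the intersection size against the expected size detects missing subjects.
registered_subjects = ["logs-value", "metrics-value"] # as returned by GET /subjects
topics              = ["logs", "audit"]
expected_subjects   = topics.map { |t| "#{t}-value" }

if (expected_subjects & registered_subjects).size != expected_subjects.size
  missing = expected_subjects - registered_subjects
  puts "missing subjects: #{missing.inspect}" # => missing subjects: ["audit-value"]
end
```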
data/lib/logstash/plugin_mixins/kafka/common.rb ADDED
@@ -0,0 +1,47 @@
+ module LogStash module PluginMixins module Kafka
+ module Common
+
+ def self.included(base)
+ # COMMON CONFIGURATION SUPPORTED BY BOTH PRODUCER/CONSUMER
+
+ # Close idle connections after the number of milliseconds specified by this config.
+ base.config :connections_max_idle_ms, :validate => :number, :default => 540_000 # (9m) Kafka default
+
+ # The period of time in milliseconds after which we force a refresh of metadata even if
+ # we haven't seen any partition leadership changes to proactively discover any new brokers or partitions
+ base.config :metadata_max_age_ms, :validate => :number, :default => 300_000 # (5m) Kafka default
+
+ # The configuration controls the maximum amount of time the client will wait for the response of a request.
+ # If the response is not received before the timeout elapses the client will resend the request if necessary
+ # or fail the request if retries are exhausted.
+ base.config :request_timeout_ms, :validate => :number, :default => 40_000 # Kafka default
+ end
+
+ def set_trustore_keystore_config(props)
+ props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
+ props.put("ssl.truststore.location", ssl_truststore_location) unless ssl_truststore_location.nil?
+ props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
+
+ # Client auth stuff
+ props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
+ props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
+ props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
+ props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
+ props.put("ssl.endpoint.identification.algorithm", ssl_endpoint_identification_algorithm) unless ssl_endpoint_identification_algorithm.nil?
+ end
+
+ def set_sasl_config(props)
+ java.lang.System.setProperty("java.security.auth.login.config", jaas_path) unless jaas_path.nil?
+ java.lang.System.setProperty("java.security.krb5.conf", kerberos_config) unless kerberos_config.nil?
+
+ props.put("sasl.mechanism", sasl_mechanism)
+ if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
+ raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
+ end
+
+ props.put("sasl.kerberos.service.name", sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
+ props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
+ end
+
+ end
+ end end end
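The `set_sasl_config` helper above enforces one invariant worth calling out: the GSSAPI mechanism requires a Kerberos service name. A stand-alone restatement of that guard (hypothetical method name, plain Ruby):

```ruby
# Hypothetical restatement of the guard in set_sasl_config: GSSAPI without a
# Kerberos service name is rejected before any connection is attempted.
def validate_sasl!(sasl_mechanism, sasl_kerberos_service_name)
  if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
    raise ArgumentError,
          "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
  end
end

validate_sasl!("PLAIN", nil)      # passes: service name only required for GSSAPI
validate_sasl!("GSSAPI", "kafka") # passes
validate_sasl!("GSSAPI", nil) rescue puts "rejected" # => rejected
```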
data/logstash-integration-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-integration-kafka'
- s.version = '10.10.0'
+ s.version = '10.11.0'
  s.licenses = ['Apache-2.0']
  s.summary = "Integration with Kafka - input and output plugins"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/integration/outputs/kafka_spec.rb CHANGED
@@ -28,7 +28,8 @@ describe "outputs/kafka", :integration => true do
  let(:num_events) { 3 }
 
  before :each do
- config = base_config.merge({"topic_id" => test_topic})
+ # NOTE: the connections_max_idle_ms is irrelevant just testing that configuration works ...
+ config = base_config.merge({"topic_id" => test_topic, "connections_max_idle_ms" => 540_000})
  load_kafka_data(config)
  end
 
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-kafka
  version: !ruby/object:Gem::Version
- version: 10.10.0
+ version: 10.11.0
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-02-02 00:00:00.000000000 Z
+ date: 2022-04-25 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -245,8 +245,8 @@ files:
  - lib/logstash-integration-kafka_jars.rb
  - lib/logstash/inputs/kafka.rb
  - lib/logstash/outputs/kafka.rb
- - lib/logstash/plugin_mixins/common.rb
- - lib/logstash/plugin_mixins/kafka_support.rb
+ - lib/logstash/plugin_mixins/kafka/avro_schema_registry.rb
+ - lib/logstash/plugin_mixins/kafka/common.rb
  - logstash-integration-kafka.gemspec
  - spec/check_docs_spec.rb
  - spec/fixtures/jaas.config
data/lib/logstash/plugin_mixins/common.rb DELETED
@@ -1,107 +0,0 @@
- module LogStash
- module PluginMixins
- module KafkaAvroSchemaRegistry
-
- def self.included(base)
- base.extend(self)
- base.setup_schema_registry_config
- end
-
- def setup_schema_registry_config
- # Option to set key to access Schema Registry.
- config :schema_registry_key, :validate => :string
-
- # Option to set secret to access Schema Registry.
- config :schema_registry_secret, :validate => :password
-
- # Option to set the endpoint of the Schema Registry.
- # This option permit the usage of Avro Kafka deserializer which retrieve the schema of the Avro message from an
- # instance of schema registry. If this option has value `value_deserializer_class` nor `topics_pattern` could be valued
- config :schema_registry_url, :validate => :uri
-
- # Option to set the proxy of the Schema Registry.
- # This option permits to define a proxy to be used to reach the schema registry service instance.
- config :schema_registry_proxy, :validate => :uri
-
- # Option to skip validating the schema registry during registration. This can be useful when using
- # certificate based auth
- config :schema_registry_validation, :validate => ['auto', 'skip'], :default => 'auto'
- end
-
- def check_schema_registry_parameters
- if @schema_registry_url
- check_for_schema_registry_conflicts
- @schema_registry_proxy_host, @schema_registry_proxy_port = split_proxy_into_host_and_port(schema_registry_proxy)
- check_for_key_and_secret
- check_for_schema_registry_connectivity_and_subjects if schema_registry_validation?
- end
- end
-
- def schema_registry_validation?
- return false if schema_registry_validation.to_s == 'skip'
- return false if using_kerberos? # pre-validation doesn't support kerberos
-
- true
- end
-
- def using_kerberos?
- security_protocol == "SASL_PLAINTEXT" || security_protocol == "SASL_SSL"
- end
-
- private
- def check_for_schema_registry_conflicts
- if @value_deserializer_class != LogStash::Inputs::Kafka::DEFAULT_DESERIALIZER_CLASS
- raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of value_deserializer_class'
- end
- if @topics_pattern && !@topics_pattern.empty?
- raise LogStash::ConfigurationError, 'Option schema_registry_url prohibit the customization of topics_pattern'
- end
- end
-
- private
- def check_for_schema_registry_connectivity_and_subjects
- options = {}
- if schema_registry_proxy && !schema_registry_proxy.empty?
- options[:proxy] = schema_registry_proxy.to_s
- end
- if schema_registry_key and !schema_registry_key.empty?
- options[:auth] = {:user => schema_registry_key, :password => schema_registry_secret.value}
- end
- client = Manticore::Client.new(options)
- begin
- response = client.get(@schema_registry_url.uri.to_s + '/subjects').body
- rescue Manticore::ManticoreException => e
- raise LogStash::ConfigurationError.new("Schema registry service doesn't respond, error: #{e.message}")
- end
- registered_subjects = JSON.parse response
- expected_subjects = @topics.map { |t| "#{t}-value"}
- if (expected_subjects & registered_subjects).size != expected_subjects.size
- undefined_topic_subjects = expected_subjects - registered_subjects
- raise LogStash::ConfigurationError, "The schema registry does not contain definitions for required topic subjects: #{undefined_topic_subjects}"
- end
- end
-
- def split_proxy_into_host_and_port(proxy_uri)
- return nil unless proxy_uri && !proxy_uri.empty?
-
- port = proxy_uri.port
-
- host_spec = ""
- host_spec << proxy_uri.scheme || "http"
- host_spec << "://"
- host_spec << "#{proxy_uri.userinfo}@" if proxy_uri.userinfo
- host_spec << proxy_uri.host
-
- [host_spec, port]
- end
-
- def check_for_key_and_secret
- if schema_registry_key and !schema_registry_key.empty?
- if !schema_registry_secret or schema_registry_secret.value.empty?
- raise LogStash::ConfigurationError, "Setting `schema_registry_secret` is required when `schema_registry_key` is provided."
- end
- end
- end
- end
- end
- end
data/lib/logstash/plugin_mixins/kafka_support.rb DELETED
@@ -1,29 +0,0 @@
- module LogStash module PluginMixins module KafkaSupport
-
- def set_trustore_keystore_config(props)
- props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
- props.put("ssl.truststore.location", ssl_truststore_location) unless ssl_truststore_location.nil?
- props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
-
- # Client auth stuff
- props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
- props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
- props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
- props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
- props.put("ssl.endpoint.identification.algorithm", ssl_endpoint_identification_algorithm) unless ssl_endpoint_identification_algorithm.nil?
- end
-
- def set_sasl_config(props)
- java.lang.System.setProperty("java.security.auth.login.config", jaas_path) unless jaas_path.nil?
- java.lang.System.setProperty("java.security.krb5.conf", kerberos_config) unless kerberos_config.nil?
-
- props.put("sasl.mechanism", sasl_mechanism)
- if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
- raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
- end
-
- props.put("sasl.kerberos.service.name", sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
- props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
- end
-
- end end end