logstash-output-kafka 6.0.0 → 6.1.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 3fbe7d272d3d724287711d102a7f95c9ebb0a12d
4
- data.tar.gz: 26bca0940efcb7a509ee256f939043aada0618d0
3
+ metadata.gz: a1dead8df45c0c2a4e381d7434aecf563c7b0fa4
4
+ data.tar.gz: db1029642c471a2e8a086c3c640191bbef1e5434
5
5
  SHA512:
6
- metadata.gz: 6947a968db077a2b434565940083e5f2727e1b303af01ffb33e7b7a80629707c2efaeaf84ad85ad02e0abfbc1eee625ff317d40e582b37def0d48bfcd32bfab3
7
- data.tar.gz: 92eae1ac1ca9812e11bf042167cefa518616c6b2a142d14c354cd47461bca823c5ce0cd79a43b7314b80c2077d7acb47ce0f4b4f6a41a49d67010c4a9facfe54
6
+ metadata.gz: 84ac4e33c643dd958dc31232219c806b5e923103e57cda756792b389fcdb7fbc06ec715eead06ac4da781fefbc8c2caeede0e2b8500412511157cca654f79a6b
7
+ data.tar.gz: fdd5a63002d004cd87476a3639cd35af40949e230791fd0924e5a45a89df80ebe3e7fef61bb0808d48928acff9e3a9db4e42cc496cb6fcc08fa008666eb2b4ed
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
1
+ ## 6.1.0
2
+ - Add Kerberos authentication feature.
3
+
1
4
  ## 6.0.0
2
5
  - BREAKING: update to 0.10.1.0 client protocol. not backwards compatible with 5.0 (protocol versions <= 0.10.0.1)
3
6
 
@@ -23,8 +23,8 @@ require 'logstash-output-kafka_jars.rb'
23
23
  # upgrade brokers before clients because brokers target backwards compatibility. For example, the 0.9 broker
24
24
  # is compatible with both the 0.8 consumer and 0.9 consumer APIs, but not the other way around.
25
25
  #
26
- # The only required configuration is the topic_id. The default codec is json,
27
- # so events will be persisted on the broker in json format. If you select a codec of plain,
26
+ # The only required configuration is the topic_id. The default codec is plain,
27
+ # so events will be persisted on the broker in plain format. With the plain codec,
28
28
  # Logstash will encode your messages with not only the message but also with a timestamp and
29
29
  # hostname. If you do not want anything but your message passing through, you should make the output
30
30
  # configuration something like:
@@ -113,15 +113,49 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
113
113
  # The size of the TCP send buffer to use when sending data.
114
114
  config :send_buffer_bytes, :validate => :number, :default => 131072
115
115
  # Enable SSL/TLS secured communication to Kafka broker.
116
- config :ssl, :validate => :boolean, :default => false
116
+ config :ssl, :validate => :boolean, :default => false, :deprecated => "Use security_protocol => 'ssl'"
117
+ # The truststore type.
118
+ config :ssl_truststore_type, :validate => :string
117
119
  # The JKS truststore path to validate the Kafka broker's certificate.
118
120
  config :ssl_truststore_location, :validate => :path
119
121
  # The truststore password
120
122
  config :ssl_truststore_password, :validate => :password
123
+ # The keystore type.
124
+ config :ssl_keystore_type, :validate => :string
121
125
  # If client authentication is required, this setting stores the keystore path.
122
126
  config :ssl_keystore_location, :validate => :path
123
127
  # If client authentication is required, this setting stores the keystore password
124
128
  config :ssl_keystore_password, :validate => :password
129
+ # The password of the private key in the key store file.
130
+ config :ssl_key_password, :validate => :password
131
+ # Security protocol to use, which can be either of PLAINTEXT,SSL,SASL_PLAINTEXT,SASL_SSL
132
+ config :security_protocol, :validate => ["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"], :default => "PLAINTEXT"
133
+ # http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used for client connections.
134
+ # This may be any mechanism for which a security provider is available.
135
+ # GSSAPI is the default mechanism.
136
+ config :sasl_mechanism, :validate => :string, :default => "GSSAPI"
137
+ # The Kerberos principal name that Kafka broker runs as.
138
+ # This can be defined either in Kafka's JAAS config or in Kafka's config.
139
+ config :sasl_kerberos_service_name, :validate => :string
140
+ # The Java Authentication and Authorization Service (JAAS) API supplies user authentication and authorization
141
+ # services for Kafka. This setting provides the path to the JAAS file. Sample JAAS file for Kafka client:
142
+ # [source,java]
143
+ # ----------------------------------
144
+ # KafkaClient {
145
+ # com.sun.security.auth.module.Krb5LoginModule required
146
+ # useTicketCache=true
147
+ # renewTicket=true
148
+ # serviceName="kafka";
149
+ # };
150
+ # ----------------------------------
151
+ #
152
+ # Please note that specifying `jaas_path` and `kerberos_config` in the config file will add these
153
+ # to the global JVM system properties. This means if you have multiple Kafka outputs, all of them would be sharing the same
154
+ # `jaas_path` and `kerberos_config`. If this is not desirable, you would have to run separate instances of Logstash on
155
+ # different JVM instances.
156
+ config :jaas_path, :validate => :path
157
+ # Optional path to kerberos config file. This is krb5.conf style as detailed in https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html
158
+ config :kerberos_config, :validate => :path
125
159
  # The configuration controls the maximum amount of time the server will wait for acknowledgments
126
160
  # from followers to meet the acknowledgment requirements the producer has specified with the
127
161
  # acks configuration. If the requested number of acknowledgments are not met when the timeout
@@ -187,19 +221,18 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
187
221
  props.put(kafka::SEND_BUFFER_CONFIG, send_buffer_bytes.to_s)
188
222
  props.put(kafka::VALUE_SERIALIZER_CLASS_CONFIG, value_serializer)
189
223
 
190
- if ssl
191
- if ssl_truststore_location.nil?
192
- raise LogStash::ConfigurationError, "ssl_truststore_location must be set when SSL is enabled"
193
- end
194
- props.put("security.protocol", "SSL")
195
- props.put("ssl.truststore.location", ssl_truststore_location)
196
- props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
224
+ props.put("security.protocol", security_protocol) unless security_protocol.nil?
197
225
 
198
- #Client auth stuff
199
- props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
200
- props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
226
+ if security_protocol == "SSL" || ssl
227
+ set_trustore_keystore_config(props)
228
+ elsif security_protocol == "SASL_PLAINTEXT"
229
+ set_sasl_config(props)
230
+ elsif security_protocol == "SASL_SSL"
231
+ set_trustore_keystore_config(props)
232
+ set_sasl_config(props)
201
233
  end
202
234
 
235
+
203
236
  org.apache.kafka.clients.producer.KafkaProducer.new(props)
204
237
  rescue => e
205
238
  logger.error("Unable to create Kafka producer from given configuration", :kafka_error_message => e)
@@ -207,4 +240,31 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
207
240
  end
208
241
  end
209
242
 
243
+ def set_trustore_keystore_config(props)
244
+ if ssl_truststore_location.nil?
245
+ raise LogStash::ConfigurationError, "ssl_truststore_location must be set when SSL is enabled"
246
+ end
247
+ props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
248
+ props.put("ssl.truststore.location", ssl_truststore_location)
249
+ props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
250
+
251
+ # Client auth stuff
252
+ props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
253
+ props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
254
+ props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
255
+ props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
256
+ end
257
+
258
+ def set_sasl_config(props)
259
+ java.lang.System.setProperty("java.security.auth.login.config",jaas_path) unless jaas_path.nil?
260
+ java.lang.System.setProperty("java.security.krb5.conf",kerberos_config) unless kerberos_config.nil?
261
+
262
+ props.put("sasl.mechanism",sasl_mechanism)
263
+ if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
264
+ raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
265
+ end
266
+
267
+ props.put("sasl.kerberos.service.name",sasl_kerberos_service_name)
268
+ end
269
+
210
270
  end #class LogStash::Outputs::Kafka
@@ -1,7 +1,7 @@
1
1
  Gem::Specification.new do |s|
2
2
 
3
3
  s.name = 'logstash-output-kafka'
4
- s.version = '6.0.0'
4
+ s.version = '6.1.0'
5
5
  s.licenses = ['Apache License (2.0)']
6
6
  s.summary = 'Output events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on the broker'
7
7
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-kafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 6.0.0
4
+ version: 6.1.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elasticsearch
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2016-11-08 00:00:00.000000000 Z
11
+ date: 2016-11-18 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement