logstash-output-kafka 5.0.4 → 5.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/lib/logstash/outputs/kafka.rb +81 -20
- data/logstash-output-kafka.gemspec +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: e9279e067fa0adfbc321675a10328320ef8f50d8
|
4
|
+
data.tar.gz: 6d1b7bf832ca676cfde01fca475f65a60f2ae33d
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 10cb4458e38f2b49e48a2b5886a6a0751a1161a8ebdda893e80de8de38d2cef6036f50d15f8b40f7385f39b248373036628952878125edc38c58a630ba615726
|
7
|
+
data.tar.gz: 51e40dae2b3450ddf3d896bdcfe104f73553ef812cd3e397aceb90bcecde3764d93547bb6d0d4acd52ecbd0179954da24f950ee95858033cd0f674f621a6ddea
|
data/lib/logstash/outputs/kafka.rb
CHANGED
@@ -6,20 +6,21 @@ require 'logstash-output-kafka_jars.rb'
|
|
6
6
|
# Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on
|
7
7
|
# the broker.
|
8
8
|
#
|
9
|
-
# Here's a compatibility matrix that shows the Kafka
|
9
|
+
# Here's a compatibility matrix that shows the Kafka client versions that are compatible with each combination
|
10
10
|
# of Logstash and the Kafka output plugin:
|
11
11
|
#
|
12
12
|
# [options="header"]
|
13
13
|
# |==========================================================
|
14
|
-
# |Kafka
|
15
|
-
# |0.8 |
|
16
|
-
# |0.9 |
|
17
|
-
# |0.9 |
|
18
|
-
# |0.10 |
|
14
|
+
# |Kafka Client Version |Logstash Version |Plugin Version |Security Features |Why?
|
15
|
+
# |0.8 |2.0.0 - 2.x.x |<3.0.0 | |Legacy, 0.8 is still popular
|
16
|
+
# |0.9 |2.0.0 - 2.3.x | 3.x.x |Basic Auth, SSL |Works with the old Ruby Event API (`event['product']['price'] = 10`)
|
17
|
+
# |0.9 |2.4.0 - 5.0.x | 4.x.x |Basic Auth, SSL |Works with the new getter/setter APIs (`event.set('[product][price]', 10)`)
|
18
|
+
# |0.10 |2.4.0 - 5.0.x | 5.x.x |Basic Auth, SSL |Not compatible with the 0.9 broker
|
19
19
|
# |==========================================================
|
20
20
|
#
|
21
|
-
# NOTE:
|
22
|
-
#
|
21
|
+
# NOTE: We recommend that you use matching Kafka client and broker versions. During upgrades, you should
|
22
|
+
# upgrade brokers before clients because brokers target backwards compatibility. For example, the 0.9 broker
|
23
|
+
# is compatible with both the 0.8 consumer and 0.9 consumer APIs, but not the other way around.
|
23
24
|
#
|
24
25
|
# The only required configuration is the topic_id. The default codec is json,
|
25
26
|
# so events will be persisted on the broker in json format. If you select a codec of plain,
|
@@ -111,15 +112,49 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
111
112
|
# The size of the TCP send buffer to use when sending data.
|
112
113
|
config :send_buffer_bytes, :validate => :number, :default => 131072
|
113
114
|
# Enable SSL/TLS secured communication to Kafka broker.
|
114
|
-
config :ssl, :validate => :boolean, :default => false
|
115
|
+
config :ssl, :validate => :boolean, :default => false, :deprecated => "Use security_protocol => 'ssl'"
|
116
|
+
# The truststore type.
|
117
|
+
config :ssl_truststore_type, :validate => :string
|
115
118
|
# The JKS truststore path to validate the Kafka broker's certificate.
|
116
119
|
config :ssl_truststore_location, :validate => :path
|
117
120
|
# The truststore password
|
118
121
|
config :ssl_truststore_password, :validate => :password
|
122
|
+
# The keystore type.
|
123
|
+
config :ssl_keystore_type, :validate => :string
|
119
124
|
# If client authentication is required, this setting stores the keystore path.
|
120
125
|
config :ssl_keystore_location, :validate => :path
|
121
126
|
# If client authentication is required, this setting stores the keystore password
|
122
127
|
config :ssl_keystore_password, :validate => :password
|
128
|
+
# The password of the private key in the key store file.
|
129
|
+
config :ssl_key_password, :validate => :password
|
130
|
+
# Security protocol to use, which can be either of PLAINTEXT,SSL,SASL_PLAINTEXT,SASL_SSL
|
131
|
+
config :security_protocol, :validate => ["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"], :default => "PLAINTEXT"
|
132
|
+
# http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used for client connections.
|
133
|
+
# This may be any mechanism for which a security provider is available.
|
134
|
+
# GSSAPI is the default mechanism.
|
135
|
+
config :sasl_mechanism, :validate => :string, :default => "GSSAPI"
|
136
|
+
# The Kerberos principal name that Kafka broker runs as.
|
137
|
+
# This can be defined either in Kafka's JAAS config or in Kafka's config.
|
138
|
+
config :sasl_kerberos_service_name, :validate => :string
|
139
|
+
# The Java Authentication and Authorization Service (JAAS) API supplies user authentication and authorization
|
140
|
+
# services for Kafka. This setting provides the path to the JAAS file. Sample JAAS file for Kafka client:
|
141
|
+
# [source,java]
|
142
|
+
# ----------------------------------
|
143
|
+
# KafkaClient {
|
144
|
+
# com.sun.security.auth.module.Krb5LoginModule required
|
145
|
+
# useTicketCache=true
|
146
|
+
# renewTicket=true
|
147
|
+
# serviceName="kafka";
|
148
|
+
# };
|
149
|
+
# ----------------------------------
|
150
|
+
#
|
151
|
+
# Please note that specifying `jaas_path` and `kerberos_config` in the config file will add these
|
152
|
+
# to the global JVM system properties. This means if you have multiple Kafka inputs, all of them would be sharing the same
|
153
|
+
# `jaas_path` and `kerberos_config`. If this is not desirable, you would have to run separate instances of Logstash on
|
154
|
+
# different JVM instances.
|
155
|
+
config :jaas_path, :validate => :path
|
156
|
+
# Optional path to kerberos config file. This is krb5.conf style as detailed in https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html
|
157
|
+
config :kerberos_config, :validate => :path
|
123
158
|
# The configuration controls the maximum amount of time the server will wait for acknowledgments
|
124
159
|
# from followers to meet the acknowledgment requirements the producer has specified with the
|
125
160
|
# acks configuration. If the requested number of acknowledgments are not met when the timeout
|
@@ -143,7 +178,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
143
178
|
end
|
144
179
|
@producer.send(record)
|
145
180
|
rescue LogStash::ShutdownSignal
|
146
|
-
@logger.
|
181
|
+
@logger.debug('Kafka producer got shutdown signal')
|
147
182
|
rescue => e
|
148
183
|
@logger.warn('kafka producer threw exception, restarting',
|
149
184
|
:exception => e)
|
@@ -185,19 +220,18 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
185
220
|
props.put(kafka::SEND_BUFFER_CONFIG, send_buffer_bytes.to_s)
|
186
221
|
props.put(kafka::VALUE_SERIALIZER_CLASS_CONFIG, value_serializer)
|
187
222
|
|
188
|
-
|
189
|
-
if ssl_truststore_location.nil?
|
190
|
-
raise LogStash::ConfigurationError, "ssl_truststore_location must be set when SSL is enabled"
|
191
|
-
end
|
192
|
-
props.put("security.protocol", "SSL")
|
193
|
-
props.put("ssl.truststore.location", ssl_truststore_location)
|
194
|
-
props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
|
223
|
+
props.put("security.protocol", security_protocol) unless security_protocol.nil?
|
195
224
|
|
196
|
-
|
197
|
-
props
|
198
|
-
|
225
|
+
if security_protocol == "SSL" || ssl
|
226
|
+
set_trustore_keystore_config(props)
|
227
|
+
elsif security_protocol == "SASL_PLAINTEXT"
|
228
|
+
set_sasl_config(props)
|
229
|
+
elsif security_protocol == "SASL_SSL"
|
230
|
+
set_trustore_keystore_config(props)
|
231
|
+
set_sasl_config(props)
|
199
232
|
end
|
200
233
|
|
234
|
+
|
201
235
|
org.apache.kafka.clients.producer.KafkaProducer.new(props)
|
202
236
|
rescue => e
|
203
237
|
logger.error("Unable to create Kafka producer from given configuration", :kafka_error_message => e)
|
@@ -205,4 +239,31 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
|
|
205
239
|
end
|
206
240
|
end
|
207
241
|
|
242
|
+
def set_trustore_keystore_config(props)
|
243
|
+
if ssl_truststore_location.nil?
|
244
|
+
raise LogStash::ConfigurationError, "ssl_truststore_location must be set when SSL is enabled"
|
245
|
+
end
|
246
|
+
props.put("ssl.truststore.type", ssl_truststore_type) unless ssl_truststore_type.nil?
|
247
|
+
props.put("ssl.truststore.location", ssl_truststore_location)
|
248
|
+
props.put("ssl.truststore.password", ssl_truststore_password.value) unless ssl_truststore_password.nil?
|
249
|
+
|
250
|
+
# Client auth stuff
|
251
|
+
props.put("ssl.keystore.type", ssl_keystore_type) unless ssl_keystore_type.nil?
|
252
|
+
props.put("ssl.key.password", ssl_key_password.value) unless ssl_key_password.nil?
|
253
|
+
props.put("ssl.keystore.location", ssl_keystore_location) unless ssl_keystore_location.nil?
|
254
|
+
props.put("ssl.keystore.password", ssl_keystore_password.value) unless ssl_keystore_password.nil?
|
255
|
+
end
|
256
|
+
|
257
|
+
def set_sasl_config(props)
|
258
|
+
java.lang.System.setProperty("java.security.auth.login.config",jaas_path) unless jaas_path.nil?
|
259
|
+
java.lang.System.setProperty("java.security.krb5.conf",kerberos_config) unless kerberos_config.nil?
|
260
|
+
|
261
|
+
props.put("sasl.mechanism",sasl_mechanism)
|
262
|
+
if sasl_mechanism == "GSSAPI" && sasl_kerberos_service_name.nil?
|
263
|
+
raise LogStash::ConfigurationError, "sasl_kerberos_service_name must be specified when SASL mechanism is GSSAPI"
|
264
|
+
end
|
265
|
+
|
266
|
+
props.put("sasl.kerberos.service.name",sasl_kerberos_service_name)
|
267
|
+
end
|
268
|
+
|
208
269
|
end #class LogStash::Outputs::Kafka
|
@@ -1,7 +1,7 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
|
3
3
|
s.name = 'logstash-output-kafka'
|
4
|
-
s.version = '5.0.4'
|
4
|
+
s.version = '5.1.0'
|
5
5
|
s.licenses = ['Apache License (2.0)']
|
6
6
|
s.summary = 'Output events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on the broker'
|
7
7
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-output-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 5.0.4
|
4
|
+
version: 5.1.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Elasticsearch
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2016-
|
11
|
+
date: 2016-11-18 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|