logstash-integration-kafka 12.0.4-java → 12.0.6-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/docs/output-kafka.asciidoc +18 -0
- data/lib/logstash/inputs/kafka.rb +1 -1
- data/lib/logstash/plugin_mixins/kafka/common.rb +1 -1
- data/logstash-integration-kafka.gemspec +1 -1
- data/spec/integration/inputs/kafka_spec.rb +47 -0
- data/spec/unit/inputs/kafka_spec.rb +37 -0
- metadata +2 -2
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: afb32084af8da2c0d6fc1742cedff29093a8d55c2a4908afc636728e3cbdee2f
|
|
4
|
+
data.tar.gz: 289f441a9b23b6a5843b7e45bceafbc487301f9f44902ef8d5476e1f2c488a82
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 12bf222b6766785f64ee6a6aa988505dfee1e90eed131aeb7b2d8432d7c996d3b53f5fcbbe00ef0e1e483576afc0592b0057ff0438b921bc57230b767d22c600
|
|
7
|
+
data.tar.gz: 0cdf0beda8aaadd10cff0f5c2921189feafdf70aeaf2f45bc6a8737832162a650ec2e546a44d7afb5f27ec83e755f5f96928d46439b33b8d83a01a11e3592530
|
data/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,9 @@
|
|
|
1
|
+
## 12.0.6
|
|
2
|
+
- [DOC] Add info about Kafka timestamp behavior [#240](https://github.com/logstash-plugins/logstash-integration-kafka/pull/240)
|
|
3
|
+
|
|
4
|
+
## 12.0.5
|
|
5
|
+
- Redact `sasl_jaas_config` to prevent credentials from appearing in debug logs. [#232](https://github.com/logstash-plugins/logstash-integration-kafka/pull/232)
|
|
6
|
+
|
|
1
7
|
## 12.0.4
|
|
2
8
|
- Re-packaging the plugin [#221](https://github.com/logstash-plugins/logstash-integration-kafka/pull/221)
|
|
3
9
|
|
data/docs/output-kafka.asciidoc
CHANGED
|
@@ -66,6 +66,24 @@ https://kafka.apache.org/{kafka_client_doc}/documentation.html#producerconfigs
|
|
|
66
66
|
|
|
67
67
|
NOTE: This plugin does not support using a proxy when communicating to the Kafka broker.
|
|
68
68
|
|
|
69
|
+
.Kafka timestamps and Logstash
|
|
70
|
+
****
|
|
71
|
+
* Kafka 3.6+ introduces stricter timestamp validation with the introduction of two new broker/topic-level properties: https://docs.confluent.io/platform/current/installation/configuration/topic-configs.html#message-timestamp-before-max-ms[log.message.timestamp.before.max.ms] and
|
|
72
|
+
https://docs.confluent.io/platform/current/installation/configuration/topic-configs.html#message-timestamp-after-max-ms[log.message.timestamp.after.max.ms].
|
|
73
|
+
+
|
|
74
|
+
These properties limit the time difference between the message timestamp (from Logstash) and the Kafka broker receive time.
|
|
75
|
+
Messages can be rejected if the values are exceeded and `log.message.timestamp.type=CreateTime` is set.
|
|
76
|
+
+
|
|
77
|
+
These checks are ignored if `log.message.timestamp.type=LogAppendTime` is set.
|
|
78
|
+
|
|
79
|
+
* For Kafka version 0.10.0.0+ the message creation timestamp is set by Logstash and equals the initial timestamp of the event.
|
|
80
|
+
This behavior affects Kafka’s retention policy.
|
|
81
|
+
For example, if a Logstash event was created two weeks ago and Kafka retention is set to seven days, then when the message arrives in Kafka today it may be discarded immediately, because its timestamp is older than seven days.
|
|
82
|
+
+
|
|
83
|
+
You can change this behavior by setting timestamps on message arrival instead.
|
|
84
|
+
The message is not discarded but kept for 7 more days. Set `log.message.timestamp.type` to `LogAppendTime` (default `CreateTime`) in your Kafka configuration.
|
|
85
|
+
****
|
|
86
|
+
|
|
69
87
|
[id="plugins-{type}s-{plugin}-aws_msk_iam_auth"]
|
|
70
88
|
==== AWS MSK IAM authentication
|
|
71
89
|
If you use AWS MSK, the AWS MSK IAM access control enables you to handle both authentication and authorization for your MSK cluster with AWS IAM.
|
|
@@ -260,7 +260,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
|
|
|
260
260
|
# different JVM instances.
|
|
261
261
|
config :jaas_path, :validate => :path
|
|
262
262
|
# JAAS configuration settings. This allows JAAS config to be a part of the plugin configuration and allows for different JAAS configuration per each plugin config.
|
|
263
|
-
config :sasl_jaas_config, :validate => :string
|
|
263
|
+
config :sasl_jaas_config, :validate => :password
|
|
264
264
|
# Optional path to kerberos config file. This is krb5.conf style as detailed in https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html
|
|
265
265
|
config :kerberos_config, :validate => :path
|
|
266
266
|
# Option to add Kafka metadata like topic, message size and header key values to the event.
|
|
@@ -40,7 +40,7 @@ module LogStash module PluginMixins module Kafka
|
|
|
40
40
|
end
|
|
41
41
|
|
|
42
42
|
props.put("sasl.kerberos.service.name", sasl_kerberos_service_name) unless sasl_kerberos_service_name.nil?
|
|
43
|
-
props.put("sasl.jaas.config", sasl_jaas_config) unless sasl_jaas_config.nil?
|
|
43
|
+
props.put("sasl.jaas.config", sasl_jaas_config.value) unless sasl_jaas_config.nil?
|
|
44
44
|
props.put("sasl.client.callback.handler.class", sasl_client_callback_handler_class) unless sasl_client_callback_handler_class.nil?
|
|
45
45
|
props.put("sasl.oauthbearer.token.endpoint.url", sasl_oauthbearer_token_endpoint_url) unless sasl_oauthbearer_token_endpoint_url.nil?
|
|
46
46
|
props.put("sasl.oauthbearer.scope.claim.name", sasl_oauthbearer_scope_claim_name) unless sasl_oauthbearer_scope_claim_name.nil?
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Gem::Specification.new do |s|
|
|
2
2
|
s.name = 'logstash-integration-kafka'
|
|
3
|
-
s.version = '12.0.4'
|
|
3
|
+
s.version = '12.0.6'
|
|
4
4
|
s.licenses = ['Apache-2.0']
|
|
5
5
|
s.summary = "Integration with Kafka - input and output plugins"
|
|
6
6
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
|
|
@@ -264,6 +264,53 @@ describe "inputs/kafka", :integration => true do
|
|
|
264
264
|
end
|
|
265
265
|
end
|
|
266
266
|
|
|
267
|
+
# ToDo: add tests for other sasl config options as well (https://github.com/logstash-plugins/logstash-integration-kafka/issues/234)
|
|
268
|
+
context 'setting sasl_jaas_config' do
|
|
269
|
+
let(:base_config) do
|
|
270
|
+
{
|
|
271
|
+
'topics' => ['logstash_integration_topic_plain'],
|
|
272
|
+
'group_id' => rand(36**8).to_s(36),
|
|
273
|
+
}
|
|
274
|
+
end
|
|
275
|
+
|
|
276
|
+
shared_examples 'sasl_jaas_config password handling' do
|
|
277
|
+
it 'stores sasl_jaas_config as password type' do
|
|
278
|
+
kafka_input = LogStash::Inputs::Kafka.new(consumer_config)
|
|
279
|
+
expect(kafka_input.sasl_jaas_config).to be_a(LogStash::Util::Password)
|
|
280
|
+
expect(kafka_input.sasl_jaas_config.value).to eq(jaas_config_value)
|
|
281
|
+
end
|
|
282
|
+
|
|
283
|
+
it 'does not expose password in inspect output' do
|
|
284
|
+
kafka_input = LogStash::Inputs::Kafka.new(consumer_config)
|
|
285
|
+
expect(kafka_input.sasl_jaas_config.inspect).to eq('<password>')
|
|
286
|
+
expect(kafka_input.sasl_jaas_config.inspect).not_to include('admin-secret')
|
|
287
|
+
end
|
|
288
|
+
end
|
|
289
|
+
|
|
290
|
+
context 'with single-line config' do
|
|
291
|
+
let(:jaas_config_value) { 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";' }
|
|
292
|
+
let(:consumer_config) { base_config.merge('sasl_jaas_config' => jaas_config_value) }
|
|
293
|
+
|
|
294
|
+
include_examples 'sasl_jaas_config password handling'
|
|
295
|
+
end
|
|
296
|
+
|
|
297
|
+
context 'with multiline config' do
|
|
298
|
+
let(:jaas_config_value) do
|
|
299
|
+
<<~JAAS
|
|
300
|
+
org.apache.kafka.common.security.plain.PlainLoginModule required
|
|
301
|
+
username="admin"
|
|
302
|
+
password="admin-secret"
|
|
303
|
+
user_admin="admin-secret"
|
|
304
|
+
user_alice="alice-secret";
|
|
305
|
+
JAAS
|
|
306
|
+
end
|
|
307
|
+
let(:consumer_config) { base_config.merge('sasl_jaas_config' => jaas_config_value) }
|
|
308
|
+
|
|
309
|
+
include_examples 'sasl_jaas_config password handling'
|
|
310
|
+
end
|
|
311
|
+
end
|
|
312
|
+
|
|
313
|
+
|
|
267
314
|
context "static membership 'group.instance.id' setting" do
|
|
268
315
|
let(:base_config) do
|
|
269
316
|
{
|
|
@@ -264,6 +264,43 @@ describe LogStash::Inputs::Kafka do
|
|
|
264
264
|
|
|
265
265
|
expect(subject.send(:create_consumer, 'test-client-2', 'group_instance_id')).to be kafka_client
|
|
266
266
|
end
|
|
267
|
+
|
|
268
|
+
context 'with sasl_jaas_config' do
|
|
269
|
+
shared_examples 'sasl_jaas_config password handling' do
|
|
270
|
+
it "sasl_jaas_config.value returns the original string" do
|
|
271
|
+
subject.register
|
|
272
|
+
expect(subject.sasl_jaas_config.value).to eq(jaas_config_value)
|
|
273
|
+
end
|
|
274
|
+
|
|
275
|
+
it "sasl_jaas_config.inspect does not expose the password" do
|
|
276
|
+
subject.register
|
|
277
|
+
expect(subject.sasl_jaas_config.inspect).not_to include('admin-secret')
|
|
278
|
+
expect(subject.sasl_jaas_config.inspect).to eq('<password>')
|
|
279
|
+
end
|
|
280
|
+
end
|
|
281
|
+
|
|
282
|
+
context 'with single-line config' do
|
|
283
|
+
let(:jaas_config_value) { 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";' }
|
|
284
|
+
let(:config) { super().merge('sasl_jaas_config' => jaas_config_value) }
|
|
285
|
+
|
|
286
|
+
include_examples 'sasl_jaas_config password handling'
|
|
287
|
+
end
|
|
288
|
+
|
|
289
|
+
context 'with multiline config' do
|
|
290
|
+
let(:jaas_config_value) {
|
|
291
|
+
<<~JAAS
|
|
292
|
+
org.apache.kafka.common.security.plain.PlainLoginModule required
|
|
293
|
+
username="admin"
|
|
294
|
+
password="admin-secret"
|
|
295
|
+
user_admin="admin-secret"
|
|
296
|
+
user_alice="alice-secret";
|
|
297
|
+
JAAS
|
|
298
|
+
}
|
|
299
|
+
let(:config) { super().merge('sasl_jaas_config' => jaas_config_value) }
|
|
300
|
+
|
|
301
|
+
include_examples 'sasl_jaas_config password handling'
|
|
302
|
+
end
|
|
303
|
+
end
|
|
267
304
|
end
|
|
268
305
|
|
|
269
306
|
describe "schema registry" do
|
metadata
CHANGED
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: logstash-integration-kafka
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 12.0.4
|
|
4
|
+
version: 12.0.6
|
|
5
5
|
platform: java
|
|
6
6
|
authors:
|
|
7
7
|
- Elastic
|
|
8
8
|
bindir: bin
|
|
9
9
|
cert_chain: []
|
|
10
|
-
date: 2026- [truncated in diff extraction; original 12.0.4 release date not recoverable from this listing]
|
|
10
|
+
date: 2026-03-16 00:00:00.000000000 Z
|
|
11
11
|
dependencies:
|
|
12
12
|
- !ruby/object:Gem::Dependency
|
|
13
13
|
name: logstash-core-plugin-api
|