logstash-integration-kafka 11.3.3-java → 11.4.0-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/docs/output-kafka.asciidoc +19 -2
- data/lib/logstash/outputs/kafka.rb +13 -3
- data/logstash-integration-kafka.gemspec +1 -1
- data/spec/integration/outputs/kafka_spec.rb +19 -0
- data/spec/unit/outputs/kafka_spec.rb +30 -0
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fa23de31ba8ec88920997f731d28d11e68148b0179b85abeddcfc999f9e96bcf
+  data.tar.gz: bed04421d5654eb1cb3774c60e0a2989be5d79edf537ec1c23ca5c70bedfec82
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 38688f3130feb2ad66bef528007250d98adb28d9d8f3ab1df3817c894dceadbadbd7fa6a62f98b046c97b79089dcaadabda9f05b1663d58249e1f4e37fa845c0
+  data.tar.gz: 618a06ed2dd6c15c05f76e293ac83dd226e9b5d60876f35f58027468a8701468c0dcf4b621fdadb245697211ea7cde9d498af0f7123e0ab0c7299d229cd95c2d
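A `.gem` artifact is a tar archive whose `metadata.gz` and `data.tar.gz` members are exactly what the digests above cover, so they can be re-checked locally. A minimal Ruby sketch, assuming the 11.4.0 gem has been downloaded under its conventional file name (the path is an assumption, not taken from this page):

```ruby
require 'digest'
require 'rubygems/package'

# Hypothetical local path to the downloaded gem artifact.
File.open('logstash-integration-kafka-11.4.0-java.gem', 'rb') do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    # These digests should match the SHA256 entries in checksums.yaml above.
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
```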
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,9 @@
+## 11.4.0
+  - Add support for setting Kafka message headers in output plugin [#162](https://github.com/logstash-plugins/logstash-integration-kafka/pull/162)
+
+## 11.3.4
+  - Fix "retries" and "value_serializer" error handling in output plugin [#160](https://github.com/logstash-plugins/logstash-integration-kafka/pull/160)
+
 ## 11.3.3
   - Fix "Can't modify frozen string" error when record value is `nil` (tombstones) [#155](https://github.com/logstash-plugins/logstash-integration-kafka/pull/155)
 
data/docs/output-kafka.asciidoc
CHANGED
@@ -91,6 +91,7 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
 | <<plugins-{type}s-{plugin}-key_serializer>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-linger_ms>> |<<number,number>>|No
 | <<plugins-{type}s-{plugin}-max_request_size>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-message_headers>> |<<hash,hash>>|No
 | <<plugins-{type}s-{plugin}-message_key>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-metadata_fetch_timeout_ms>> |<<number,number>>|No
 | <<plugins-{type}s-{plugin}-metadata_max_age_ms>> |<<number,number>>|No
@@ -184,7 +185,7 @@ resolved and expanded into a list of canonical names.
 [NOTE]
 ====
 Starting from Kafka 3 the `default` value for `client.dns.lookup` has been removed.
-If explicitly configured it
+If not explicitly configured it defaults to `use_all_dns_ips`.
 ====
 
 [id="plugins-{type}s-{plugin}-client_id"]
@@ -275,6 +276,22 @@ to allow other records to be sent so that the sends can be batched together.
 
 The maximum size of a request
 
+[id="plugins-{type}s-{plugin}-message_headers"]
+===== `message_headers`
+
+* Value type is <<hash,hash>>
+** Keys are header names, and must be <<string,string>>
+** Values are header values, and must be <<string,string>>
+** Values support interpolation from event field values
+* There is no default value for this setting.
+
+A map of key-value pairs, each pair corresponding to a header name and its value.
+Example:
+[source,ruby]
+----------------------------------
+    message_headers => { "event_timestamp" => "%{@timestamp}" }
+----------------------------------
+
 [id="plugins-{type}s-{plugin}-message_key"]
 ===== `message_key`
 
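To make the new option concrete, a hedged sketch of a complete `kafka` output block using `message_headers`; the broker address, topic name, and header names are assumptions, not values from this release:

```ruby
output {
  kafka {
    bootstrap_servers => "localhost:9092"    # assumed broker address
    topic_id          => "events"            # hypothetical topic
    message_headers   => {
      "event_timestamp" => "%{@timestamp}"   # interpolated from the event
      "source"          => "logstash"        # static header value
    }
  }
}
```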
@@ -348,7 +365,7 @@ retries are exhausted.
 * There is no default value for this setting.
 
 The default retry behavior is to retry until successful. To prevent data loss,
-
+changing this setting is discouraged.
 
 If you choose to set `retries`, a value greater than zero will cause the
 client to only retry a fixed number of times. This will result in data loss
data/lib/logstash/outputs/kafka.rb
CHANGED
@@ -106,6 +106,8 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
   config :max_request_size, :validate => :number, :default => 1_048_576 # (1MB) Kafka default
   # The key for the message
   config :message_key, :validate => :string
+  # Headers added to the Kafka message in the form of key-value pairs
+  config :message_headers, :validate => :hash, :default => {}
   # the timeout setting for initial metadata request to fetch topic metadata.
   config :metadata_fetch_timeout_ms, :validate => :number, :default => 60_000
   # Partitioner to use - can be `default`, `uniform_sticky`, `round_robin` or a fully qualified class name of a custom partitioner.
@@ -185,7 +187,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
 
     if !@retries.nil?
       if @retries < 0
-        raise ConfigurationError, "A negative retry count (#{@retries}) is not valid. Must be a value >= 0"
+        raise LogStash::ConfigurationError, "A negative retry count (#{@retries}) is not valid. Must be a value >= 0"
       end
 
       logger.warn("Kafka output is configured with finite retry. This instructs Logstash to LOSE DATA after a set number of send attempts fails. If you do not want to lose data if Kafka is down, then you must remove the retry setting.", :retries => @retries)
@@ -193,7 +195,6 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
 
     reassign_dns_lookup
 
-    @producer = create_producer
     if value_serializer == 'org.apache.kafka.common.serialization.StringSerializer'
       @codec.on_event do |event, data|
         write_to_kafka(event, data)
@@ -203,8 +204,14 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
         write_to_kafka(event, data.to_java_bytes)
       end
     else
-      raise ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
+      raise LogStash::ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
+    end
+    @message_headers.each do |key, value|
+      if !key.is_a? String
+        raise LogStash::ConfigurationError, "'message_headers' contains a key that is not a string!"
+      end
     end
+    @producer = create_producer
   end
 
   def prepare(record)
@@ -315,6 +322,9 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
     else
       record = ProducerRecord.new(event.sprintf(@topic_id), event.sprintf(@message_key), serialized_data)
     end
+    @message_headers.each do |key, value|
+      record.headers().add(key, event.sprintf(value).to_java_bytes)
+    end
     prepare(record)
   rescue LogStash::ShutdownSignal
     logger.debug('producer received shutdown signal')
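For readers less familiar with the Java client calls used in `write_to_kafka`, a standalone JRuby sketch of the same header-attachment pattern; the topic, key, payload, and header value are illustrative, and the Kafka client jars are assumed to be on the classpath:

```ruby
require 'java'
java_import org.apache.kafka.clients.producer.ProducerRecord

record = ProducerRecord.new('events', 'my-key', 'payload')
# Kafka header values are raw bytes, hence the to_java_bytes call --
# mirroring event.sprintf(value).to_java_bytes in the plugin above.
{ 'event_timestamp' => '2024-04-09T00:00:00Z' }.each do |key, value|
  record.headers().add(key, value.to_java_bytes)
end
```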
data/logstash-integration-kafka.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-kafka'
-  s.version = '11.3.3'
+  s.version = '11.4.0'
   s.licenses = ['Apache-2.0']
   s.summary = "Integration with Kafka - input and output plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/integration/outputs/kafka_spec.rb
CHANGED
@@ -191,6 +191,25 @@ describe "outputs/kafka", :integration => true do
     end
   end
 
+  context 'when setting message_headers' do
+    let(:num_events) { 10 }
+    let(:test_topic) { 'logstash_integration_topic4' }
+
+    before :each do
+      config = base_config.merge({"topic_id" => test_topic, "message_headers" => {"event_timestamp" => "%{@timestamp}"}})
+      load_kafka_data(config)
+    end
+
+    it 'messages should contain headers' do
+      messages = fetch_messages(test_topic)
+
+      expect(messages.size).to eq(num_events)
+      messages.each do |m|
+        expect(m.headers).to eq({"event_timestamp" => LogStash::Timestamp.at(0).to_s})
+      end
+    end
+  end
+
   context 'setting partitioner' do
     let(:test_topic) { 'logstash_integration_partitioner_topic' }
     let(:partitioner) { nil }
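For completeness, a hedged JRuby sketch of reading such headers back with the Java consumer API, as the integration test's `fetch_messages` helper ultimately does; the broker address, group id, and poll timeout are assumptions:

```ruby
require 'java'
java_import org.apache.kafka.clients.consumer.KafkaConsumer

props = java.util.Properties.new
props.put('bootstrap.servers', 'localhost:9092')  # assumed broker
props.put('group.id', 'header-check')             # hypothetical group id
props.put('key.deserializer', 'org.apache.kafka.common.serialization.StringDeserializer')
props.put('value.deserializer', 'org.apache.kafka.common.serialization.StringDeserializer')
props.put('auto.offset.reset', 'earliest')

consumer = KafkaConsumer.new(props)
consumer.subscribe(['logstash_integration_topic4'])
consumer.poll(java.time.Duration.of_seconds(5)).each do |record|
  record.headers.each do |header|
    # Header values arrive as raw bytes, matching to_java_bytes on the producer side.
    puts "#{header.key} => #{String.from_java_bytes(header.value)}"
  end
end
```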
data/spec/unit/outputs/kafka_spec.rb
CHANGED
@@ -60,6 +60,16 @@ describe "outputs/kafka" do
     kafka.multi_receive([event])
   end
 
+  it 'should support field referenced message_headers' do
+    expect(org.apache.kafka.clients.producer.ProducerRecord).to receive(:new).
+      with("test", event.to_s).and_call_original
+    expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+    expect_any_instance_of(org.apache.kafka.common.header.internals.RecordHeaders).to receive(:add).with("host", "172.0.0.1".to_java_bytes).and_call_original
+    kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge({"message_headers" => { "host" => "%{host}" }}))
+    kafka.register
+    kafka.multi_receive([event])
+  end
+
   it 'should not raise config error when truststore location is not set and ssl is enabled' do
     kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("security_protocol" => "SSL"))
     expect(org.apache.kafka.clients.producer.KafkaProducer).to receive(:new)
@@ -221,6 +231,26 @@ describe "outputs/kafka" do
         kafka.multi_receive([event])
       end
     end
+    context 'when retries is -1' do
+      let(:retries) { -1 }
+
+      it "should raise a Configuration error" do
+        kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+        expect { kafka.register }.to raise_error(LogStash::ConfigurationError)
+      end
+    end
+  end
+
+  describe "value_serializer" do
+    let(:output) { LogStash::Plugin.lookup("output", "kafka").new(config) }
+
+    context "when a random string is set" do
+      let(:config) { { "topic_id" => "random", "value_serializer" => "test_string" } }
+
+      it "raises a ConfigurationError" do
+        expect { output.register }.to raise_error(LogStash::ConfigurationError)
+      end
+    end
+  end
 
 context 'when ssl endpoint identification disabled' do
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 11.3.3
+  version: 11.4.0
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-
+date: 2024-04-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement