logstash-integration-kafka 11.3.4-java → 11.4.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: fb2e5a937c705523e56611e75be5b76453c42b6d5ed241cf86f990251aa673c7
-  data.tar.gz: 379bad770fdc823d19d676041247949f55f51f2f77fe267c7be5fc9f67141053
+  metadata.gz: fa23de31ba8ec88920997f731d28d11e68148b0179b85abeddcfc999f9e96bcf
+  data.tar.gz: bed04421d5654eb1cb3774c60e0a2989be5d79edf537ec1c23ca5c70bedfec82
 SHA512:
-  metadata.gz: 320e5ee1e26113cceb2692c2ec6852f8c9b41adbcba9609e7e917116040c85196c91a0051fcc6a40213e97092c1f3ea4507930dc6b9f7bbf9501dd1c2caaab35
-  data.tar.gz: ab200d48d0d3713f14ab97095d5b56467e79b6c4c22ef23bb32cf26eb7e80f1497565d4b6577d31373bae472bd2955a8dbb59fcd89674c20c3ab2acbe01842bb
+  metadata.gz: 38688f3130feb2ad66bef528007250d98adb28d9d8f3ab1df3817c894dceadbadbd7fa6a62f98b046c97b79089dcaadabda9f05b1663d58249e1f4e37fa845c0
+  data.tar.gz: 618a06ed2dd6c15c05f76e293ac83dd226e9b5d60876f35f58027468a8701468c0dcf4b621fdadb245697211ea7cde9d498af0f7123e0ab0c7299d229cd95c2d
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+## 11.4.0
+  - Add support for setting Kafka message headers in output plugin [#162](https://github.com/logstash-plugins/logstash-integration-kafka/pull/162)
+
 ## 11.3.4
   - Fix "retries" and "value_serializer" error handling in output plugin [#160](https://github.com/logstash-plugins/logstash-integration-kafka/pull/160)
 
data/docs/output-kafka.asciidoc CHANGED
@@ -91,6 +91,7 @@ See the https://kafka.apache.org/{kafka_client_doc}/documentation for more detai
 | <<plugins-{type}s-{plugin}-key_serializer>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-linger_ms>> |<<number,number>>|No
 | <<plugins-{type}s-{plugin}-max_request_size>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-message_headers>> |<<hash,hash>>|No
 | <<plugins-{type}s-{plugin}-message_key>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-metadata_fetch_timeout_ms>> |<<number,number>>|No
 | <<plugins-{type}s-{plugin}-metadata_max_age_ms>> |<<number,number>>|No
@@ -275,6 +276,22 @@ to allow other records to be sent so that the sends can be batched together.
 
 The maximum size of a request
 
+[id="plugins-{type}s-{plugin}-message_headers"]
+===== `message_headers`
+
+* Value type is <<hash,hash>>
+** Keys are header names, and must be <<string,string>>
+** Values are header values, and must be <<string,string>>
+** Values support interpolation from event field values
+* There is no default value for this setting.
+
+A map of key-value pairs, where each key is a header name and each value is that header's value.
+Example:
+[source,ruby]
+----------------------------------
+    message_headers => { "event_timestamp" => "%{@timestamp}" }
+----------------------------------
+
 [id="plugins-{type}s-{plugin}-message_key"]
 ===== `message_key`
 
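For orientation, a minimal pipeline sketch using the new option follows; the broker address, topic name, and header names are illustrative placeholders rather than values taken from the diff. Each header value is passed through `event.sprintf`, so `%{...}` field references are resolved per event before the record is sent.

[source,ruby]
----
output {
  kafka {
    bootstrap_servers => "localhost:9092"   # placeholder broker address
    topic_id => "events"                    # placeholder topic name
    # Header values are interpolated from each event via sprintf,
    # so field references such as %{host} resolve per message.
    message_headers => {
      "event_timestamp" => "%{@timestamp}"
      "source_host" => "%{host}"
    }
  }
}
----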
data/lib/logstash/outputs/kafka.rb CHANGED
@@ -106,6 +106,8 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
   config :max_request_size, :validate => :number, :default => 1_048_576 # (1MB) Kafka default
   # The key for the message
   config :message_key, :validate => :string
+  # Headers added to the Kafka message, in the form of key-value pairs
+  config :message_headers, :validate => :hash, :default => {}
   # the timeout setting for initial metadata request to fetch topic metadata.
   config :metadata_fetch_timeout_ms, :validate => :number, :default => 60_000
   # Partitioner to use - can be `default`, `uniform_sticky`, `round_robin` or a fully qualified class name of a custom partitioner.
@@ -204,6 +206,11 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
     else
       raise LogStash::ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
     end
+    @message_headers.each do |key, value|
+      if !key.is_a? String
+        raise LogStash::ConfigurationError, "'message_headers' contains a key that is not a string!"
+      end
+    end
     @producer = create_producer
   end
 
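To illustrate the register-time guard just added: only header names are validated up front; header values are exercised later, when `event.sprintf` runs per event. A standalone sketch with a hypothetical hash (using `ArgumentError` in place of `LogStash::ConfigurationError` so it runs outside Logstash):

[source,ruby]
----
# Hypothetical headers hash: one valid entry, one invalid key.
message_headers = { "trace_id" => "%{trace_id}", 42 => "oops" }

message_headers.each do |key, _value|
  unless key.is_a?(String)
    # The plugin raises LogStash::ConfigurationError at this point.
    raise ArgumentError, "'message_headers' contains a key that is not a string!"
  end
end
# => raises, because 42 is not a String
----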
@@ -315,6 +322,9 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
       else
         record = ProducerRecord.new(event.sprintf(@topic_id), event.sprintf(@message_key), serialized_data)
       end
+      @message_headers.each do |key, value|
+        record.headers().add(key, event.sprintf(value).to_java_bytes)
+      end
       prepare(record)
     rescue LogStash::ShutdownSignal
       logger.debug('producer received shutdown signal')
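On the send path above, `to_java_bytes` converts the interpolated Ruby string into a Java `byte[]`, the type Kafka's `Headers#add` expects, so headers always travel as raw bytes. A small sketch, assuming a Logstash (JRuby) runtime where `LogStash::Event` is available:

[source,ruby]
----
# Hedged sketch of per-event header resolution under JRuby.
event = LogStash::Event.new("host" => "172.0.0.1")

value = event.sprintf("%{host}")  # => "172.0.0.1", resolved from the event
bytes = value.to_java_bytes       # Java byte[]; this is what ends up in the record header

# A consumer sees raw bytes and must decode them itself, e.g. with
# String.from_java_bytes(bytes) under JRuby.
----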
data/logstash-integration-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-kafka'
-  s.version = '11.3.4'
+  s.version = '11.4.0'
   s.licenses = ['Apache-2.0']
   s.summary = "Integration with Kafka - input and output plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/integration/outputs/kafka_spec.rb CHANGED
@@ -191,6 +191,25 @@ describe "outputs/kafka", :integration => true do
     end
   end
 
+  context 'when setting message_headers' do
+    let(:num_events) { 10 }
+    let(:test_topic) { 'logstash_integration_topic4' }
+
+    before :each do
+      config = base_config.merge({"topic_id" => test_topic, "message_headers" => {"event_timestamp" => "%{@timestamp}"}})
+      load_kafka_data(config)
+    end
+
+    it 'messages should contain headers' do
+      messages = fetch_messages(test_topic)
+
+      expect(messages.size).to eq(num_events)
+      messages.each do |m|
+        expect(m.headers).to eq({"event_timestamp" => LogStash::Timestamp.at(0).to_s})
+      end
+    end
+  end
+
   context 'setting partitioner' do
     let(:test_topic) { 'logstash_integration_partitioner_topic' }
     let(:partitioner) { nil }
data/spec/unit/outputs/kafka_spec.rb CHANGED
@@ -60,6 +60,16 @@ describe "outputs/kafka" do
     kafka.multi_receive([event])
   end
 
+  it 'should support field referenced message_headers' do
+    expect(org.apache.kafka.clients.producer.ProducerRecord).to receive(:new).
+      with("test", event.to_s).and_call_original
+    expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+    expect_any_instance_of(org.apache.kafka.common.header.internals.RecordHeaders).to receive(:add).with("host", "172.0.0.1".to_java_bytes).and_call_original
+    kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge({"message_headers" => { "host" => "%{host}" }}))
+    kafka.register
+    kafka.multi_receive([event])
+  end
+
   it 'should not raise config error when truststore location is not set and ssl is enabled' do
     kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("security_protocol" => "SSL"))
     expect(org.apache.kafka.clients.producer.KafkaProducer).to receive(:new)
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 11.3.4
+  version: 11.4.0
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-02-06 00:00:00.000000000 Z
+date: 2024-04-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement