logstash-input-kafka 6.3.4 → 7.0.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/docs/index.asciidoc +13 -0
- data/lib/logstash/inputs/kafka.rb +6 -6
- data/logstash-input-kafka.gemspec +1 -1
- data/spec/integration/inputs/kafka_spec.rb +3 -3
- metadata +2 -2
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f602cb85f24f6c18a37f9c80dbad01b5e5a17c1d
+  data.tar.gz: 5538817ea5cbd3ddc44c092e57fc35733780d7ac
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a55bb633f746ebe5643bc1c765786f6b87807e26635ad088da30786369631833eb7b1be0b64687d6682b4c0d58f5bc3589d28718e9c299bf79b5cf115b3b0846
+  data.tar.gz: 77ec54a43e2ee2b18aa0612b206b73827fa19639c6e031f4479778c9a71374e2a8913c53a6837fd099c0658e91e4de8894393c2f7d2a5b4ab0d3d9ab0a404cc5
data/CHANGELOG.md
CHANGED
data/docs/index.asciidoc
CHANGED

@@ -64,6 +64,19 @@ For more information see http://kafka.apache.org/documentation.html#theconsumer
 
 Kafka consumer configuration: http://kafka.apache.org/documentation.html#consumerconfigs
 
+==== Metadata fields
+
+The following metadata from the Kafka broker is added under the `[@metadata]` field:
+
+* `[@metadata][kafka][topic]`: Original Kafka topic from which the message was consumed.
+* `[@metadata][kafka][consumer_group]`: Consumer group.
+* `[@metadata][kafka][partition]`: Partition info for this message.
+* `[@metadata][kafka][offset]`: Original record offset for this message.
+* `[@metadata][kafka][key]`: Record key, if any.
+* `[@metadata][kafka][timestamp]`: Timestamp when this message was received by the Kafka broker.
+
+Please note that `@metadata` fields are not part of any of your events at output time. If you need this information
+inserted into your original event, you'll have to use the `mutate` filter to manually copy the required fields into your `event`.
 
 [id="plugins-{type}s-{plugin}-options"]
 ==== Kafka Input Configuration Options
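As a quick illustration of the workaround the new documentation paragraph describes, here is a minimal pipeline sketch; the broker address, topic name, and destination field names are placeholder assumptions, not taken from this package:

```
input {
  kafka {
    bootstrap_servers => "localhost:9092"    # placeholder broker
    topics => ["logstash_topic_plain"]       # placeholder topic
    # Without decorate_events => true the [@metadata][kafka] fields are never set.
    decorate_events => true
  }
}

filter {
  mutate {
    # Copy the broker metadata into the event proper so it survives output.
    copy => {
      "[@metadata][kafka][topic]"  => "[kafka][topic]"
      "[@metadata][kafka][offset]" => "[kafka][offset]"
    }
  }
}
```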
data/lib/logstash/inputs/kafka.rb
CHANGED

@@ -254,12 +254,12 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
         codec_instance.decode(record.value.to_s) do |event|
           decorate(event)
           if @decorate_events
-            event.set("[kafka][topic]", record.topic)
-            event.set("[kafka][consumer_group]", @group_id)
-            event.set("[kafka][partition]", record.partition)
-            event.set("[kafka][offset]", record.offset)
-            event.set("[kafka][key]", record.key)
-            event.set("[kafka][timestamp]", record.timestamp)
+            event.set("[@metadata][kafka][topic]", record.topic)
+            event.set("[@metadata][kafka][consumer_group]", @group_id)
+            event.set("[@metadata][kafka][partition]", record.partition)
+            event.set("[@metadata][kafka][offset]", record.offset)
+            event.set("[@metadata][kafka][key]", record.key)
+            event.set("[@metadata][kafka][timestamp]", record.timestamp)
           end
           logstash_queue << event
         end
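Since the plugin now writes these fields under `@metadata`, they remain usable in sprintf-style references without being indexed with the event. A sketch of one common use, assuming an elasticsearch output (illustrative only, not part of this diff):

```
output {
  elasticsearch {
    hosts => ["localhost:9200"]    # placeholder
    # Route each event to an index named after its source Kafka topic;
    # the @metadata field itself is not written into the stored document.
    index => "kafka-%{[@metadata][kafka][topic]}"
  }
}
```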
data/logstash-input-kafka.gemspec
CHANGED

@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-kafka'
-  s.version = '6.3.4'
+  s.version = '7.0.0'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/integration/inputs/kafka_spec.rb
CHANGED

@@ -134,9 +134,9 @@ describe "inputs/kafka", :integration => true do
       wait(timeout_seconds).for {queue.length}.to eq(num_events)
       expect(queue.length).to eq(num_events)
       event = queue.shift
-      expect(event.get("[kafka][topic]")).to eq("logstash_topic_plain")
-      expect(event.get("[kafka][consumer_group]")).to eq(group_id_3)
-      expect(event.get("[kafka][timestamp]")).to be >= start
+      expect(event.get("[@metadata][kafka][topic]")).to eq("logstash_topic_plain")
+      expect(event.get("[@metadata][kafka][consumer_group]")).to eq(group_id_3)
+      expect(event.get("[@metadata][kafka][timestamp]")).to be >= start
     ensure
       t.kill
       t.join(30_000)
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version: 6.3.4
+  version: 7.0.0
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-07-
+date: 2017-07-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement