logstash-input-kafka 6.3.4 → 7.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 76720171728fa67ea7f524fbf533a23ac91cae85
4
- data.tar.gz: 715a70f704452226da80e304f1ac56f4a5b92844
3
+ metadata.gz: f602cb85f24f6c18a37f9c80dbad01b5e5a17c1d
4
+ data.tar.gz: 5538817ea5cbd3ddc44c092e57fc35733780d7ac
5
5
  SHA512:
6
- metadata.gz: b2ed0780870168cc55bf709925d39ce22fcefa4e1e509085ba1c230da1510e624f50f351fade71c0628fc7cf2e2b8363e327f87fccf39b3a9167c52048f89733
7
- data.tar.gz: e9a9112d7a2d08a8b268041d690fad6d521a2267f8a1c52f06c729b03b188a0df0195bae7c38e2004e9681dcaad004b4c796b0608173d28df05edba3551a31b4
6
+ metadata.gz: a55bb633f746ebe5643bc1c765786f6b87807e26635ad088da30786369631833eb7b1be0b64687d6682b4c0d58f5bc3589d28718e9c299bf79b5cf115b3b0846
7
+ data.tar.gz: 77ec54a43e2ee2b18aa0612b206b73827fa19639c6e031f4479778c9a71374e2a8913c53a6837fd099c0658e91e4de8894393c2f7d2a5b4ab0d3d9ab0a404cc5
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
1
+ ## 7.0.0
2
+ - Breaking: Nest the decorated fields under the `@metadata` field to avoid mapping conflicts with beats.
3
+ Fixes #198, #180
4
+
1
5
  ## 6.3.4
2
6
  - Fix an issue that led to random failures in decoding messages when using more than one input thread
3
7
 
data/docs/index.asciidoc CHANGED
@@ -64,6 +64,19 @@ For more information see http://kafka.apache.org/documentation.html#theconsumer
64
64
 
65
65
  Kafka consumer configuration: http://kafka.apache.org/documentation.html#consumerconfigs
66
66
 
67
+ ==== Metadata fields
68
+
69
+ The following metadata from the Kafka broker is added under the `[@metadata]` field:
70
+
71
+ * `[@metadata][kafka][topic]`: Original Kafka topic from where the message was consumed.
72
+ * `[@metadata][kafka][consumer_group]`: Consumer group
73
+ * `[@metadata][kafka][partition]`: Partition info for this message.
74
+ * `[@metadata][kafka][offset]`: Original record offset for this message.
75
+ * `[@metadata][kafka][key]`: Record key, if any.
76
+ * `[@metadata][kafka][timestamp]`: Timestamp when this message was received by the Kafka broker.
77
+
78
+ Please note that `@metadata` fields are not part of any of your events at output time. If you need this information to be
79
+ inserted into your original event, you'll have to use the `mutate` filter to manually copy the required fields into your `event`.
67
80
 
68
81
  [id="plugins-{type}s-{plugin}-options"]
69
82
  ==== Kafka Input Configuration Options
@@ -254,12 +254,12 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
254
254
  codec_instance.decode(record.value.to_s) do |event|
255
255
  decorate(event)
256
256
  if @decorate_events
257
- event.set("[kafka][topic]", record.topic)
258
- event.set("[kafka][consumer_group]", @group_id)
259
- event.set("[kafka][partition]", record.partition)
260
- event.set("[kafka][offset]", record.offset)
261
- event.set("[kafka][key]", record.key)
262
- event.set("[kafka][timestamp]", record.timestamp)
257
+ event.set("[@metadata][kafka][topic]", record.topic)
258
+ event.set("[@metadata][kafka][consumer_group]", @group_id)
259
+ event.set("[@metadata][kafka][partition]", record.partition)
260
+ event.set("[@metadata][kafka][offset]", record.offset)
261
+ event.set("[@metadata][kafka][key]", record.key)
262
+ event.set("[@metadata][kafka][timestamp]", record.timestamp)
263
263
  end
264
264
  logstash_queue << event
265
265
  end
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-input-kafka'
3
- s.version = '6.3.4'
3
+ s.version = '7.0.0'
4
4
  s.licenses = ['Apache License (2.0)']
5
5
  s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -134,9 +134,9 @@ describe "inputs/kafka", :integration => true do
134
134
  wait(timeout_seconds).for {queue.length}.to eq(num_events)
135
135
  expect(queue.length).to eq(num_events)
136
136
  event = queue.shift
137
- expect(event.get("kafka")["topic"]).to eq("logstash_topic_plain")
138
- expect(event.get("kafka")["consumer_group"]).to eq(group_id_3)
139
- expect(event.get("kafka")["timestamp"]).to be >= start
137
+ expect(event.get("[@metadata][kafka][topic]")).to eq("logstash_topic_plain")
138
+ expect(event.get("[@metadata][kafka][consumer_group]")).to eq(group_id_3)
139
+ expect(event.get("[@metadata][kafka][timestamp]")).to be >= start
140
140
  ensure
141
141
  t.kill
142
142
  t.join(30_000)
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-kafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 6.3.4
4
+ version: 7.0.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elasticsearch
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-07-13 00:00:00.000000000 Z
11
+ date: 2017-07-18 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement