logstash-input-kafka 6.3.0 → 6.3.2

This diff shows the changes between publicly released versions of this package as they appear in their public registry, and is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 67d7e39487de32bc46086f71dc728d0b73a8d51d
-  data.tar.gz: 3e95c206a4142c3c90aa814e33d534edf5a6ce94
+  metadata.gz: b06b71f4df8ecdda42aed0a84edd1b3628e8d9b3
+  data.tar.gz: 7d90da1ec51686b3df424651784eaefb933b0676
 SHA512:
-  metadata.gz: aba387cb731bc155f245f6fd913b4cdd41b0cd9ef39468f392412fb4424b9474bf01f4156465115eb94860d1a22cf8c60a2dcae81be699b30a0d91ce5ba4bb70
-  data.tar.gz: aaebae3fb793842fca36c652c3de82812bbb6811a82d7d3ad6e29ebb3c9de38ebf14fd40e6eb5c14f37ac89b39d244de4991bcb6c605de055810da71094212d7
+  metadata.gz: 8dcf8af78c905b20ef38506c7a16cc4f09065065d2ec3351cc3a9343fb02539142351e53e98063aa5182a6561275eb30ae0b520c5601cec38fbe121bf89a4f67
+  data.tar.gz: 4363abfff3ad562071a4093bd925001ea591dba5f902cd3f724b539c2f4f779daa604d4288d10335e8556b242ac5d79e11ca56602f340d0538cfbc02f7c76955
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+## 6.3.1
+  - fix: Added record timestamp in event decoration
+
 ## 6.3.0
   - Upgrade Kafka client to version 0.10.2.1
 
data/Gemfile CHANGED
@@ -1,3 +1,11 @@
 source 'https://rubygems.org'
 
 gemspec
+
+logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+if Dir.exist?(logstash_path) && use_logstash_source
+  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+end
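Note: this Gemfile change lets the plugin be developed against a local Logstash checkout instead of the released gems. A minimal standalone sketch of the same guard (the `../../logstash` default assumes a sibling checkout, per the Gemfile above):

```ruby
# Local logstash-core gems are used only when the directory exists
# AND LOGSTASH_SOURCE is exported as exactly "1"; otherwise Bundler
# falls back to the released gems.
logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"

if Dir.exist?(logstash_path) && ENV["LOGSTASH_SOURCE"].to_s == "1"
  puts "resolving logstash-core from #{logstash_path}/logstash-core"
else
  puts "resolving logstash-core from rubygems.org"
end
```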
data/README.md CHANGED
@@ -6,6 +6,15 @@ This is a plugin for [Logstash](https://github.com/elastic/logstash).
 
 It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
 
+## Logging
+
+Kafka logs do not respect the Log4j2 root logger level and default to INFO; for other levels, you must explicitly set the log level in your Logstash deployment's `log4j2.properties` file, e.g.:
+```
+logger.kafka.name=org.apache.kafka
+logger.kafka.appenderRef.console.ref=console
+logger.kafka.level=debug
+```
+
 ## Documentation
 
 https://www.elastic.co/guide/en/logstash/current/plugins-inputs-kafka.html
data/docs/index.asciidoc CHANGED
@@ -7,14 +7,14 @@ START - GENERATED VARIABLES, DO NOT EDIT!
 :version: %VERSION%
 :release_date: %RELEASE_DATE%
 :changelog_url: %CHANGELOG_URL%
-:include_path: ../../../logstash/docs/include
+:include_path: ../../../../logstash/docs/include
 ///////////////////////////////////////////
 END - GENERATED VARIABLES, DO NOT EDIT!
 ///////////////////////////////////////////
 
 [id="plugins-{type}-{plugin}"]
 
-=== Kafka
+=== Kafka input plugin
 
 include::{include_path}/plugin_header.asciidoc[]
 
@@ -67,7 +67,7 @@ Kafka consumer configuration: http://kafka.apache.org/documentation.html#consume
 [id="plugins-{type}s-{plugin}-options"]
 ==== Kafka Input Configuration Options
 
-This plugin supports the following configuration options plus the <<plugins-{type}s-common-options>> described later.
+This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>> described later.
 
 [cols="<,<,<",options="header",]
 |=======================================================================
@@ -117,7 +117,7 @@ This plugin supports the following configuration options plus the <<plugins-{typ
 | <<plugins-{type}s-{plugin}-value_deserializer_class>> |<<string,string>>|No
 |=======================================================================
 
-Also see <<plugins-{type}s-common-options>> for a list of options supported by all
+Also see <<plugins-{type}s-{plugin}-common-options>> for a list of options supported by all
 input plugins.
 
 &nbsp;
@@ -548,4 +548,5 @@ Java Class used to deserialize the record's value
 
 
 
-include::{include_path}/{type}.asciidoc[]
+[id="plugins-{type}s-{plugin}-common-options"]
+include::{include_path}/{type}.asciidoc[]
data/lib/logstash/inputs/kafka.rb CHANGED
@@ -210,6 +210,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   # `partition`: The partition this message is associated with
   # `offset`: The offset from the partition this message is associated with
   # `key`: A ByteBuffer containing the message key
+  # `timestamp`: The timestamp of this message
   config :decorate_events, :validate => :boolean, :default => false
 
 
@@ -257,6 +258,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
           event.set("[kafka][partition]", record.partition)
           event.set("[kafka][offset]", record.offset)
           event.set("[kafka][key]", record.key)
+          event.set("[kafka][timestamp]", record.timestamp)
         end
         logstash_queue << event
       end
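Note: with this change, `decorate_events => true` adds the record's timestamp to the `[kafka]` metadata alongside topic, partition, offset, and key. A minimal sketch of the resulting event shape (the field values here are hypothetical; the Kafka client reports `record.timestamp` as epoch milliseconds):

```ruby
require "logstash/event" # provided by logstash-core

# Mimic what the input does for one decorated consumer record.
event = LogStash::Event.new("message" => "hello")
event.set("[kafka][topic]", "logstash_topic_plain") # hypothetical topic
event.set("[kafka][partition]", 0)
event.set("[kafka][offset]", 42)
event.set("[kafka][timestamp]", 1_498_176_000_000)  # epoch millis

event.get("[kafka][timestamp]") # => 1498176000000
```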
data/logstash-input-kafka.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-kafka'
-  s.version = '6.3.0'
+  s.version = '6.3.2'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -19,8 +19,7 @@ Gem::Specification.new do |s|
   s.metadata = { 'logstash_plugin' => 'true', 'group' => 'input'}
 
   s.requirements << "jar 'org.apache.kafka:kafka-clients', '0.10.2.1'"
-  s.requirements << "jar 'org.slf4j:slf4j-log4j12', '1.7.21'"
-  s.requirements << "jar 'org.apache.logging.log4j:log4j-1.2-api', '2.6.2'"
+  s.requirements << "jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.8.2'"
 
   s.add_development_dependency 'jar-dependencies', '~> 0.3.2'
 
data/spec/integration/inputs/kafka_spec.rb CHANGED
@@ -11,14 +11,15 @@ describe "inputs/kafka", :integration => true do
   let(:group_id_2) {rand(36**8).to_s(36)}
   let(:group_id_3) {rand(36**8).to_s(36)}
   let(:group_id_4) {rand(36**8).to_s(36)}
+  let(:group_id_5) {rand(36**8).to_s(36)}
   let(:plain_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
   let(:multi_consumer_config) { plain_config.merge({"group_id" => group_id_4, "client_id" => "spec", "consumer_threads" => 3}) }
   let(:snappy_config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
   let(:lz4_config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
   let(:pattern_config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_2, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
   let(:decorate_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_3, 'auto_offset_reset' => 'earliest', 'decorate_events' => true} }
-  let(:manual_commit_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_4, 'auto_offset_reset' => 'earliest', 'enable_auto_commit' => 'false'} }
-  let(:timeout_seconds) { 120 }
+  let(:manual_commit_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_5, 'auto_offset_reset' => 'earliest', 'enable_auto_commit' => 'false'} }
+  let(:timeout_seconds) { 30 }
   let(:num_events) { 103 }
 
   describe "#kafka-topics" do
@@ -32,40 +33,60 @@ describe "inputs/kafka", :integration => true do
 
     it "should consume all messages from plain 3-partition topic" do
       kafka_input = LogStash::Inputs::Kafka.new(plain_config)
-      queue = Array.new
+      queue = Queue.new
       t = thread_it(kafka_input, queue)
-      t.run
-      wait(timeout_seconds).for { queue.length }.to eq(num_events)
-      expect(queue.length).to eq(num_events)
+      begin
+        t.run
+        wait(timeout_seconds).for {queue.length}.to eq(num_events)
+        expect(queue.length).to eq(num_events)
+      ensure
+        t.kill
+        t.join(30_000)
+      end
     end
 
     it "should consume all messages from snappy 3-partition topic" do
       kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
-      queue = Array.new
+      queue = Queue.new
       t = thread_it(kafka_input, queue)
-      t.run
-      wait(timeout_seconds).for { queue.length }.to eq(num_events)
-      expect(queue.length).to eq(num_events)
+      begin
+        t.run
+        wait(timeout_seconds).for {queue.length}.to eq(num_events)
+        expect(queue.length).to eq(num_events)
+      ensure
+        t.kill
+        t.join(30_000)
+      end
     end
 
     it "should consume all messages from lz4 3-partition topic" do
       kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
-      queue = Array.new
+      queue = Queue.new
       t = thread_it(kafka_input, queue)
-      t.run
-      wait(timeout_seconds).for { queue.length }.to eq(num_events)
-      expect(queue.length).to eq(num_events)
+      begin
+        t.run
+        wait(timeout_seconds).for {queue.length}.to eq(num_events)
+        expect(queue.length).to eq(num_events)
+      ensure
+        t.kill
+        t.join(30_000)
+      end
     end
 
     it "should consume all messages with multiple consumers" do
       kafka_input = LogStash::Inputs::Kafka.new(multi_consumer_config)
-      queue = Array.new
+      queue = Queue.new
       t = thread_it(kafka_input, queue)
-      t.run
-      wait(timeout_seconds).for { queue.length }.to eq(num_events)
-      expect(queue.length).to eq(num_events)
-      kafka_input.kafka_consumers.each_with_index do |consumer, i|
-        expect(consumer.metrics.keys.first.tags["client-id"]).to eq("spec-#{i}")
+      begin
+        t.run
+        wait(timeout_seconds).for {queue.length}.to eq(num_events)
+        expect(queue.length).to eq(num_events)
+        kafka_input.kafka_consumers.each_with_index do |consumer, i|
+          expect(consumer.metrics.keys.first.tags["client-id"]).to eq("spec-#{i}")
+        end
+      ensure
+        t.kill
+        t.join(30_000)
       end
     end
   end
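Note: these specs now collect events in a thread-safe `Queue` (instead of an `Array`) and always kill and join the consumer thread in an `ensure` block, so a failed expectation no longer leaks a running consumer into later tests. The `thread_it` helper itself is outside this diff; presumably it just runs the input on a background thread, roughly:

```ruby
# Hypothetical reconstruction of the spec's thread_it helper: run the
# Kafka input on its own thread, pushing decoded events into `queue`.
def thread_it(kafka_input, queue)
  Thread.new do
    kafka_input.run(queue)
  end
end
```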
@@ -81,11 +102,16 @@ describe "inputs/kafka", :integration => true do
 
     it "should consume all messages from all 3 topics" do
       kafka_input = LogStash::Inputs::Kafka.new(pattern_config)
-      queue = Array.new
+      queue = Queue.new
       t = thread_it(kafka_input, queue)
-      t.run
-      wait(timeout_seconds).for { queue.length }.to eq(3*num_events)
-      expect(queue.length).to eq(3*num_events)
+      begin
+        t.run
+        wait(timeout_seconds).for {queue.length}.to eq(3*num_events)
+        expect(queue.length).to eq(3*num_events)
+      ensure
+        t.kill
+        t.join(30_000)
+      end
     end
   end
 
@@ -99,15 +125,22 @@ describe "inputs/kafka", :integration => true do
     end
 
     it "should show the right topic and group name in decorated kafka section" do
+      start = LogStash::Timestamp.now.time.to_i
       kafka_input = LogStash::Inputs::Kafka.new(decorate_config)
       queue = Queue.new
       t = thread_it(kafka_input, queue)
-      t.run
-      wait(timeout_seconds).for { queue.length }.to eq(num_events)
-      expect(queue.length).to eq(num_events)
-      event = queue.shift
-      expect(event.get("kafka")["topic"]).to eq("logstash_topic_plain")
-      expect(event.get("kafka")["consumer_group"]).to eq(group_id_3)
+      begin
+        t.run
+        wait(timeout_seconds).for {queue.length}.to eq(num_events)
+        expect(queue.length).to eq(num_events)
+        event = queue.shift
+        expect(event.get("kafka")["topic"]).to eq("logstash_topic_plain")
+        expect(event.get("kafka")["consumer_group"]).to eq(group_id_3)
+        expect(event.get("kafka")["timestamp"]).to be >= start
+      ensure
+        t.kill
+        t.join(30_000)
+      end
     end
   end
 
@@ -122,11 +155,16 @@ describe "inputs/kafka", :integration => true do
 
     it "should manually commit offsets" do
       kafka_input = LogStash::Inputs::Kafka.new(manual_commit_config)
-      queue = Array.new
+      queue = Queue.new
       t = thread_it(kafka_input, queue)
-      t.run
-      wait(timeout_seconds).for { queue.length }.to eq(num_events)
-      expect(queue.length).to eq(num_events)
+      begin
+        t.run
+        wait(timeout_seconds).for {queue.length}.to eq(num_events)
+        expect(queue.length).to eq(num_events)
+      ensure
+        t.kill
+        t.join(30_000)
+      end
     end
   end
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version: 6.3.0
+  version: 6.3.2
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-05-11 00:00:00.000000000 Z
+date: 2017-06-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -134,28 +134,22 @@ files:
 - NOTICE.TXT
 - README.md
 - docs/index.asciidoc
-- lib/log4j/log4j/1.2.17/log4j-1.2.17.jar
 - lib/logstash-input-kafka_jars.rb
 - lib/logstash/inputs/kafka.rb
 - lib/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar
 - lib/org/apache/kafka/kafka-clients/0.10.2.1/kafka-clients-0.10.2.1.jar
-- lib/org/apache/logging/log4j/log4j-1.2-api/2.6.2/log4j-1.2-api-2.6.2.jar
-- lib/org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar
-- lib/org/apache/logging/log4j/log4j-core/2.6.2/log4j-core-2.6.2.jar
+- lib/org/apache/logging/log4j/log4j-api/2.8.2/log4j-api-2.8.2.jar
+- lib/org/apache/logging/log4j/log4j-slf4j-impl/2.8.2/log4j-slf4j-impl-2.8.2.jar
 - lib/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar
-- lib/org/slf4j/slf4j-log4j12/1.7.21/slf4j-log4j12-1.7.21.jar
 - lib/org/xerial/snappy/snappy-java/1.1.2.6/snappy-java-1.1.2.6.jar
 - logstash-input-kafka.gemspec
 - spec/integration/inputs/kafka_spec.rb
 - spec/unit/inputs/kafka_spec.rb
 - vendor/jar-dependencies/runtime-jars/kafka-clients-0.10.2.1.jar
-- vendor/jar-dependencies/runtime-jars/log4j-1.2-api-2.6.2.jar
-- vendor/jar-dependencies/runtime-jars/log4j-1.2.17.jar
-- vendor/jar-dependencies/runtime-jars/log4j-api-2.6.2.jar
-- vendor/jar-dependencies/runtime-jars/log4j-core-2.6.2.jar
+- vendor/jar-dependencies/runtime-jars/log4j-api-2.8.2.jar
+- vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.8.2.jar
 - vendor/jar-dependencies/runtime-jars/lz4-1.3.0.jar
 - vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.21.jar
-- vendor/jar-dependencies/runtime-jars/slf4j-log4j12-1.7.21.jar
 - vendor/jar-dependencies/runtime-jars/snappy-java-1.1.2.6.jar
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html
 licenses:
@@ -179,8 +173,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements:
 - jar 'org.apache.kafka:kafka-clients', '0.10.2.1'
-- jar 'org.slf4j:slf4j-log4j12', '1.7.21'
-- jar 'org.apache.logging.log4j:log4j-1.2-api', '2.6.2'
+- jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.8.2'
 rubyforge_project:
 rubygems_version: 2.4.8
 signing_key: