logstash-input-kafka 5.0.1 → 5.0.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: f496e94c2acee0bbb82357e17efe952416381c4c
-  data.tar.gz: 19279054c88de9ecc57dedab85c1e584afd9f160
+  metadata.gz: 1e36a1e371b9818c8112475405e598724b548818
+  data.tar.gz: fb077840636c3ef8f466eafc3c8d6304f2a1bc75
 SHA512:
-  metadata.gz: 6b3ae71f305fe402aa9dbafdb815ace949445e90168b310f2b35f90cf68d83d5b4ec69389e61d3950e897b3b8c62f24126cc2995f98b6ada301b50641d495a00
-  data.tar.gz: 3eb69a86fe5377bc4d348b601db819b76d1ae296ec44fe761a98db2629eae194a34590612f939f88e270d6cf5fc66b227d5ba97cc2c2ad5e129eee6944f863ff
+  metadata.gz: b03b3177c9c875c39f0e21a3408058a3f54d160e91190c19bebd8526b837fc0abaf41a575249e1d1f681f631535a6e88fe8896bca62f9dfd80cdce6a46ce1154
+  data.tar.gz: 4a4fe642ca9fbfc44885646c4b5374d9d74091c6529a8df175af69b95f1510b92e8cc66065419ea3943bc7f8308e6c125f506a1795ab325a059b423d0e120ff9
CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+## 5.0.2
+ - Release a new version of the gem that includes jars
+
 ## 5.0.1
  - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99
 
data/README.md CHANGED
@@ -17,9 +17,10 @@ Here's a table that describes the compatibility matrix for Kafka Broker support.
 | 0.9 | 2.4, 5.0 | 4.0.0 | 4.0.0 | Intermediate release before 0.10 with new get/set API |
 | 0.10 | 2.4, 5.0 | 5.0.0 | 5.0.0 | Track latest Kafka release. Not compatible with 0.9 broker |
 
-
 ## Documentation
 
+https://www.elastic.co/guide/en/logstash/current/plugins-inputs-kafka.html
+
 Logstash provides infrastructure to automatically generate documentation for this plugin. We use the asciidoc format to write documentation so any comments in the source code will be first converted into asciidoc and then into html. All plugin documentation are placed under one [central location](http://www.elastic.co/guide/en/logstash/current/).
 
 - For formatting code or config example, you can use the asciidoc `[source,ruby]` directive
@@ -41,6 +42,7 @@ Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/log
 - Install dependencies
 ```sh
 bundle install
+rake install_jars
 ```
 
 #### Test
@@ -49,6 +51,7 @@ bundle install
 
 ```sh
 bundle install
+rake install_jars
 ```
 
 - Run tests
lib/logstash/inputs/kafka.rb CHANGED
@@ -136,8 +136,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   config :session_timeout_ms, :validate => :string
   # Java Class used to deserialize the record's value
   config :value_deserializer_class, :validate => :string, :default => "org.apache.kafka.common.serialization.StringDeserializer"
-  # A list of topics to subscribe to.
-  config :topics, :validate => :array, :required => true
+  # A list of topics to subscribe to, defaults to ["logstash"].
+  config :topics, :validate => :array, :default => ["logstash"]
+  # A topic regex pattern to subscribe to.
+  # The topics configuration will be ignored when using this configuration.
+  config :topics_pattern, :validate => :string
   # Time kafka consumer will wait to receive new messages from topics
   config :poll_timeout_ms, :validate => :number
   # Enable SSL/TLS secured communication to Kafka broker.
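For illustration, here is a minimal sketch of constructing the input with the new `topics_pattern` option, modeled on the integration spec further down (the pattern and other values are examples, not defaults):

```ruby
require "logstash/inputs/kafka"

# Regex subscription: all topics matching the pattern are consumed,
# and the `topics` setting is ignored while `topics_pattern` is set.
pattern_config = {
  'topics_pattern'    => 'logstash_topic_.*',
  'codec'             => 'plain',
  'auto_offset_reset' => 'earliest'
}
kafka_input = LogStash::Inputs::Kafka.new(pattern_config)
```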
@@ -150,6 +153,14 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   config :ssl_keystore_location, :validate => :path
   # If client authentication is required, this setting stores the keystore password
   config :ssl_keystore_password, :validate => :password
+  # Option to add Kafka metadata like topic, message size to the event.
+  # This will add a field named `kafka` to the logstash event containing the following attributes:
+  # `topic`: The topic this message is associated with
+  # `consumer_group`: The consumer group used to read in this event
+  # `partition`: The partition this message is associated with
+  # `offset`: The offset from the partition this message is associated with
+  # `key`: A ByteBuffer containing the message key
+  config :decorate_events, :validate => :boolean, :default => false
 
 
   public
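With `decorate_events => true`, each event carries a `kafka` field holding the attributes listed in the comment above. A minimal sketch of enabling it and reading the metadata back, modeled on the decorate test in the integration spec (config values are illustrative):

```ruby
require "logstash/inputs/kafka"

decorate_config = {
  'topics'          => ['logstash'],
  'decorate_events' => true
}
kafka_input = LogStash::Inputs::Kafka.new(decorate_config)

# Once events flow, the decorated metadata is readable per event:
#   event.get("[kafka][topic]")          # source topic
#   event.get("[kafka][consumer_group]") # consumer group id
#   event.get("[kafka][partition]")      # partition number
#   event.get("[kafka][offset]")         # offset within the partition
```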
@@ -174,12 +185,25 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   def thread_runner(logstash_queue, consumer)
     Thread.new do
       begin
-        consumer.subscribe(topics);
+        unless @topics_pattern.nil?
+          nooplistener = org.apache.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener.new
+          pattern = java.util.regex.Pattern.compile(@topics_pattern)
+          consumer.subscribe(pattern, nooplistener)
+        else
+          consumer.subscribe(topics);
+        end
         while !stop?
           records = consumer.poll(poll_timeout_ms);
           for record in records do
             @codec.decode(record.value.to_s) do |event|
               decorate(event)
+              if @decorate_events
+                event.set("[kafka][topic]", record.topic)
+                event.set("[kafka][consumer_group]", @group_id)
+                event.set("[kafka][partition]", record.partition)
+                event.set("[kafka][offset]", record.offset)
+                event.set("[kafka][key]", record.key)
+              end
               logstash_queue << event
             end
           end
logstash-input-kafka.gemspec CHANGED
@@ -1,7 +1,6 @@
 Gem::Specification.new do |s|
-
   s.name = 'logstash-input-kafka'
-  s.version = '5.0.1'
+  s.version = '5.0.2'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
spec/integration/inputs/kafka_spec.rb CHANGED
@@ -4,47 +4,97 @@ require "logstash/inputs/kafka"
 require "digest"
 require "rspec/wait"
 
-describe "input/kafka", :integration => true do
-  let(:partition3_config) { { 'topics' => ['topic3'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-  let(:snappy_config) { { 'topics' => ['snappy_topic'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-  let(:lz4_config) { { 'topics' => ['lz4_topic'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-
-  let(:timeout_seconds) { 3600 }
+# Please run kafka_test_setup.sh prior to executing this integration test.
+describe "inputs/kafka", :integration => true do
+  # Group ids to make sure that the consumers get all the logs.
+  let(:group_id_1) {rand(36**8).to_s(36)}
+  let(:group_id_2) {rand(36**8).to_s(36)}
+  let(:group_id_3) {rand(36**8).to_s(36)}
+  let(:plain_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+  let(:snappy_config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+  let(:lz4_config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+  let(:pattern_config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_2, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
+  let(:decorate_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_3, 'auto_offset_reset' => 'earliest', 'decorate_events' => true} }
+  let(:timeout_seconds) { 120 }
   let(:num_events) { 103 }
-
-  def thread_it(kafka_input, queue)
-    Thread.new do
-      begin
-        kafka_input.run(queue)
+
+  describe "#kafka-topics" do
+    def thread_it(kafka_input, queue)
+      Thread.new do
+        begin
+          kafka_input.run(queue)
+        end
       end
     end
-  end
+
+    it "should consume all messages from plain 3-partition topic" do
+      kafka_input = LogStash::Inputs::Kafka.new(plain_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+    end
+
+    it "should consume all messages from snappy 3-partition topic" do
+      kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+    end
+
+    it "should consume all messages from lz4 3-partition topic" do
+      kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+    end
 
-  it "should consume all messages from 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(partition3_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait(timeout_seconds).for { queue.length }.to eq(num_events)
-    expect(queue.length).to eq(num_events)
   end
-
-  it "should consume all messages from snappy 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait(timeout_seconds).for { queue.length }.to eq(num_events)
-    expect(queue.length).to eq(num_events)
+
+  describe "#kafka-topics-pattern" do
+
+    def thread_it(kafka_input, queue)
+      Thread.new do
+        begin
+          kafka_input.run(queue)
+        end
+      end
+    end
+
+    it "should consume all messages from all 3 topics" do
+      kafka_input = LogStash::Inputs::Kafka.new(pattern_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(3*num_events)
+      expect(queue.length).to eq(3*num_events)
+    end
   end
 
-  it "should consume all messages from lz4 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait(timeout_seconds).for { queue.length }.to eq(num_events)
-    expect(queue.length).to eq(num_events)
+  describe "#kafka-decorate" do
+    def thread_it(kafka_input, queue)
+      Thread.new do
+        begin
+          kafka_input.run(queue)
+        end
+      end
+    end
+
+    it "should show the right topic and group name in decorated kafka section" do
+      kafka_input = LogStash::Inputs::Kafka.new(decorate_config)
+      queue = Queue.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+      event = queue.shift
+      expect(event.get("kafka")["topic"]).to eq("logstash_topic_plain")
+      expect(event.get("kafka")["consumer_group"]).to eq(group_id_3)
+    end
   end
-
 end
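A note on the group-id idiom in the spec above: `rand(36**8).to_s(36)` draws a random integer below 36^8 and renders it in base 36, producing a short alphanumeric string, so every test run joins a fresh consumer group and re-reads all messages from the earliest offset. A quick standalone check (output values are examples):

```ruby
# Random base-36 consumer group id, as used in the integration spec.
group_id = rand(36**8).to_s(36)
puts group_id        # e.g. "3kq8zfa1" (different each run)
puts group_id.length # at most 8 characters
```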
spec/unit/inputs/kafka_spec.rb CHANGED
@@ -16,7 +16,7 @@ class MockConsumer
       raise org.apache.kafka.common.errors.WakeupException.new
     else
       10.times.map do
-        org.apache.kafka.clients.consumer.ConsumerRecord.new("test", 0, 0, "key", "value")
+        org.apache.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "key", "value")
       end
     end
   end
@@ -30,7 +30,7 @@ class MockConsumer
 end
 
 describe LogStash::Inputs::Kafka do
-  let(:config) { { 'topics' => ['test'], 'consumer_threads' => 4 } }
+  let(:config) { { 'topics' => ['logstash'], 'consumer_threads' => 4 } }
   subject { LogStash::Inputs::Kafka.new(config) }
 
   it "should register" do
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version: 5.0.1
+  version: 5.0.2
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-07-14 00:00:00.000000000 Z
+date: 2016-08-10 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -133,11 +133,23 @@ files:
 - LICENSE
 - NOTICE.TXT
 - README.md
+- lib/log4j/log4j/1.2.17/log4j-1.2.17.jar
 - lib/logstash-input-kafka_jars.rb
 - lib/logstash/inputs/kafka.rb
+- lib/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar
+- lib/org/apache/kafka/kafka-clients/0.10.0.0/kafka-clients-0.10.0.0.jar
+- lib/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar
+- lib/org/slf4j/slf4j-log4j12/1.7.21/slf4j-log4j12-1.7.21.jar
+- lib/org/xerial/snappy/snappy-java/1.1.2.4/snappy-java-1.1.2.4.jar
 - logstash-input-kafka.gemspec
 - spec/integration/inputs/kafka_spec.rb
 - spec/unit/inputs/kafka_spec.rb
+- vendor/jar-dependencies/runtime-jars/kafka-clients-0.10.0.0.jar
+- vendor/jar-dependencies/runtime-jars/log4j-1.2.17.jar
+- vendor/jar-dependencies/runtime-jars/lz4-1.3.0.jar
+- vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.21.jar
+- vendor/jar-dependencies/runtime-jars/slf4j-log4j12-1.7.21.jar
+- vendor/jar-dependencies/runtime-jars/snappy-java-1.1.2.4.jar
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html
 licenses:
 - Apache License (2.0)
@@ -162,7 +174,7 @@ requirements:
 - jar 'org.apache.kafka:kafka-clients', '0.10.0.0'
 - jar 'org.slf4j:slf4j-log4j12', '1.7.21'
 rubyforge_project:
-rubygems_version: 2.6.3
+rubygems_version: 2.4.8
 signing_key:
 specification_version: 4
 summary: This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker