logstash-input-kafka 3.0.3 → 3.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 15340382c64806f572f5bd3deb5ef912d4cc0f96
- data.tar.gz: e573c0a1b2168d2e4fe0346e9a7269d48e8bcaa8
+ metadata.gz: a987ea01376b0e65e4fd7e8cadcccf2b173f5f84
+ data.tar.gz: 8ddfd1408df8c2a3750bba4c00af7774a69357b1
  SHA512:
- metadata.gz: 2fd86eae77621259aebc77480d4e092fb3ca79e5fa573e4916bbb2492dca53cbd2966c5d82e65904677c3eea4cd98c7619a8c86cb42d1993eeea466d136f2c7f
- data.tar.gz: 4c55185651161fc07d6737c15e3ffe127f2ffb4437bba9d73c49bfb9362b39548a94575e7f8d85e6c308c20ad653cda4c98654f9b4ed577dfe2e559449dbd2ef
+ metadata.gz: abe122982f283fdc82c640171be896646f3b321ddfb7ff96a3c562a56788a049c1868b9e1e4432d895ea2680458da8ab47ac4b5d32b7d3bfc7204c530a165d2f
+ data.tar.gz: 48e2f000909d011655386798ddad46200ede382763a3eec13a716bd29264ffc0f488bd18588922bcda739c09f9e1080b0fbf83c8bc6cab82e695d55dce2f5e01
CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+ ## 3.1.0
+  - bump to client version 0.9.0.1
+  - add back :decorate_events option
+  - update :topics to default to ["logstash"]
+  - add :topics_pattern configuration option
+
  ## 3.0.3
   - Revert back to not configuring log4j here in this plugin. This setup method used
     will hardcode log4j setting which means you cannot configure it. Its better to add
lib/logstash/inputs/kafka.rb CHANGED
@@ -114,8 +114,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
    config :session_timeout_ms, :validate => :string, :default => "30000"
    # Java Class used to deserialize the record's value
    config :value_deserializer_class, :validate => :string, :default => "org.apache.kafka.common.serialization.StringDeserializer"
-   # A list of topics to subscribe to.
-   config :topics, :validate => :array, :required => true
+   # A list of topics to subscribe to, defaults to ["logstash"].
+   config :topics, :validate => :array, :default => ["logstash"]
+   # A topic regex pattern to subscribe to.
+   # The topics configuration will be ignored when using this configuration.
+   config :topics_pattern, :validate => :string
    # Time kafka consumer will wait to receive new messages from topics
    config :poll_timeout_ms, :validate => :number, :default => 100
    # Enable SSL/TLS secured communication to Kafka broker. Note that secure communication
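For reference, a minimal sketch of how the new option surfaces to users, mirroring the hash-style construction used in this gem's integration spec further down (the topic pattern is illustrative, and a Logstash runtime is assumed):

```ruby
require "logstash/inputs/kafka"

# Subscribe by regex instead of a fixed list; per the docs above,
# the `topics` setting is ignored whenever `topics_pattern` is set.
input = LogStash::Inputs::Kafka.new(
  "topics_pattern" => "logstash_topic_.*",  # illustrative pattern
  "codec"          => "plain"
)
```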
@@ -129,6 +132,14 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
    config :ssl_keystore_location, :validate => :path
    # If client authentication is required, this setting stores the keystore password
    config :ssl_keystore_password, :validate => :password
+   # Option to add Kafka metadata like topic, message size to the event.
+   # This will add a field named `kafka` to the logstash event containing the following attributes:
+   # `topic`: The topic this message is associated with
+   # `consumer_group`: The consumer group used to read in this event
+   # `partition`: The partition this message is associated with
+   # `offset`: The offset from the partition this message is associated with
+   # `key`: A ByteBuffer containing the message key
+   config :decorate_events, :validate => :boolean, :default => false


    public
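A hedged sketch of consuming one decorated event, condensed from the integration spec at the bottom of this diff (which drives the plugin the same way, with no explicit register call); it assumes a reachable broker and that the illustrative topic already holds messages:

```ruby
require "logstash/inputs/kafka"

input = LogStash::Inputs::Kafka.new(
  "topics"          => ["logstash_topic_plain"],  # illustrative topic
  "decorate_events" => true
)
queue = Queue.new
Thread.new { input.run(queue) }

event = queue.pop                     # blocks until the first event arrives
event.get("kafka")["topic"]           # => "logstash_topic_plain"
event.get("kafka")["consumer_group"]  # => the configured group_id
event.get("kafka")["offset"]          # => offset within the source partition
```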
@@ -152,12 +163,25 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
    def thread_runner(logstash_queue, consumer)
      Thread.new do
        begin
-         consumer.subscribe(topics);
+         unless @topics_pattern.nil?
+           nooplistener = org.apache.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener.new
+           pattern = java.util.regex.Pattern.compile(@topics_pattern)
+           consumer.subscribe(pattern, nooplistener)
+         else
+           consumer.subscribe(topics);
+         end
          while !stop?
            records = consumer.poll(poll_timeout_ms);
            for record in records do
              @codec.decode(record.value.to_s) do |event|
                decorate(event)
+               if @decorate_events
+                 event.set("[kafka][topic]", record.topic)
+                 event.set("[kafka][consumer_group]", @group_id)
+                 event.set("[kafka][partition]", record.partition)
+                 event.set("[kafka][offset]", record.offset)
+                 event.set("[kafka][key]", record.key)
+               end
                logstash_queue << event
              end
            end
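The pattern branch goes straight to the Java client API. A standalone JRuby sketch of the same calls, assuming kafka-clients 0.9.0.1 on the classpath and a broker at localhost:9092 (both assumptions for illustration, not part of the diff):

```ruby
require "java"

# Minimal consumer properties; values here are illustrative.
props = java.util.Properties.new
props.put("bootstrap.servers", "localhost:9092")
props.put("group.id", "example-group")
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")

consumer = org.apache.kafka.clients.consumer.KafkaConsumer.new(props)
pattern  = java.util.regex.Pattern.compile("logstash_topic_.*")
listener = org.apache.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener.new
consumer.subscribe(pattern, listener)  # regex subscription; `topics` is ignored on this path
```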
logstash-input-kafka.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|

    s.name = 'logstash-input-kafka'
-   s.version = '3.0.3'
+   s.version = '3.1.0'
    s.licenses = ['Apache License (2.0)']
    s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
    s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
spec/integration/inputs/kafka_spec.rb CHANGED
@@ -3,58 +3,97 @@ require "logstash/devutils/rspec/spec_helper"
  require "logstash/inputs/kafka"
  require "digest"

- describe "input/kafka", :integration => true do
-   let(:partition3_config) { { 'topics' => ['topic3'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-   let(:snappy_config) { { 'topics' => ['snappy_topic'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-   let(:lz4_config) { { 'topics' => ['lz4_topic'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-
-   let(:tries) { 60 }
+ # Please run kafka_test_setup.sh prior to executing this integration test.
+ describe "inputs/kafka", :integration => true do
+   # Group ids to make sure that the consumers get all the logs.
+   let(:group_id_1) {rand(36**8).to_s(36)}
+   let(:group_id_2) {rand(36**8).to_s(36)}
+   let(:group_id_3) {rand(36**8).to_s(36)}
+   let(:plain_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+   let(:snappy_config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+   let(:lz4_config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+   let(:pattern_config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_2, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
+   let(:decorate_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_3, 'auto_offset_reset' => 'earliest', 'decorate_events' => true} }
+   let(:timeout_seconds) { 120 }
    let(:num_events) { 103 }
-
-   def thread_it(kafka_input, queue)
-     Thread.new do
-       begin
-         kafka_input.run(queue)
+
+   describe "#kafka-topics" do
+     def thread_it(kafka_input, queue)
+       Thread.new do
+         begin
+           kafka_input.run(queue)
+         end
        end
      end
+
+     it "should consume all messages from plain 3-partition topic" do
+       kafka_input = LogStash::Inputs::Kafka.new(plain_config)
+       queue = Array.new
+       t = thread_it(kafka_input, queue)
+       t.run
+       wait(timeout_seconds).for { queue.length }.to eq(num_events)
+       expect(queue.length).to eq(num_events)
+     end
+
+     it "should consume all messages from snappy 3-partition topic" do
+       kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
+       queue = Array.new
+       t = thread_it(kafka_input, queue)
+       t.run
+       wait(timeout_seconds).for { queue.length }.to eq(num_events)
+       expect(queue.length).to eq(num_events)
+     end
+
+     it "should consume all messages from lz4 3-partition topic" do
+       kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
+       queue = Array.new
+       t = thread_it(kafka_input, queue)
+       t.run
+       wait(timeout_seconds).for { queue.length }.to eq(num_events)
+       expect(queue.length).to eq(num_events)
+     end
+
    end
-
-   def wait_for_events(queue, num_events)
-     begin
-       timeout(30) do
-         until queue.length == num_events do
-           sleep 1
-           next
+
+   describe "#kafka-topics-pattern" do
+
+     def thread_it(kafka_input, queue)
+       Thread.new do
+         begin
+           kafka_input.run(queue)
          end
        end
      end
-   end
-
-   it "should consume all messages from 3-partition topic" do
-     kafka_input = LogStash::Inputs::Kafka.new(partition3_config)
-     queue = Array.new
-     t = thread_it(kafka_input, queue)
-     t.run
-     wait_for_events(queue, num_events)
-     expect(queue.size).to eq(num_events)
-   end
-
-   it "should consume all messages from snappy 3-partition topic" do
-     kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
-     queue = Array.new
-     t = thread_it(kafka_input, queue)
-     t.run
-     wait_for_events(queue, num_events)
-     expect(queue.size).to eq(num_events)
+
+     it "should consume all messages from all 3 topics" do
+       kafka_input = LogStash::Inputs::Kafka.new(pattern_config)
+       queue = Array.new
+       t = thread_it(kafka_input, queue)
+       t.run
+       wait(timeout_seconds).for { queue.length }.to eq(3*num_events)
+       expect(queue.length).to eq(3*num_events)
+     end
    end

-   it "should consume all messages from lz4 3-partition topic" do
-     kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
-     queue = Array.new
-     t = thread_it(kafka_input, queue)
-     t.run
-     wait_for_events(queue, num_events)
-     expect(queue.size).to eq(num_events)
+   describe "#kafka-decorate" do
+     def thread_it(kafka_input, queue)
+       Thread.new do
+         begin
+           kafka_input.run(queue)
+         end
+       end
+     end
+
+     it "should show the right topic and group name in decorated kafka section" do
+       kafka_input = LogStash::Inputs::Kafka.new(decorate_config)
+       queue = Queue.new
+       t = thread_it(kafka_input, queue)
+       t.run
+       wait(timeout_seconds).for { queue.length }.to eq(num_events)
+       expect(queue.length).to eq(num_events)
+       event = queue.shift
+       expect(event.get("kafka")["topic"]).to eq("logstash_topic_plain")
+       expect(event.get("kafka")["consumer_group"]).to eq(group_id_3)
+     end
    end
-
  end
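One spec detail worth a note: the random group ids above come from converting a random integer to base 36, which yields a short token unique enough per test run. A quick illustrative evaluation (the output shown is an example, not deterministic):

```ruby
rand(36**8).to_s(36)  # => e.g. "k3j9x2ab" — up to 8 base-36 characters
```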
spec/unit/inputs/kafka_spec.rb CHANGED
@@ -16,7 +16,7 @@ class MockConsumer
      raise org.apache.kafka.common.errors.WakeupException.new
    else
      10.times.map do
-       org.apache.kafka.clients.consumer.ConsumerRecord.new("test", 0, 0, "key", "value")
+       org.apache.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "key", "value")
      end
    end
@@ -30,7 +30,7 @@ class MockConsumer
  end

  describe LogStash::Inputs::Kafka do
-   let(:config) { { 'topics' => ['test'], 'consumer_threads' => 4 } }
+   let(:config) { { 'topics' => ['logstash'], 'consumer_threads' => 4 } }
    subject { LogStash::Inputs::Kafka.new(config) }

    it "should register" do
metadata CHANGED
@@ -1,19 +1,19 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-kafka
  version: !ruby/object:Gem::Version
-   version: 3.0.3
+   version: 3.1.0
  platform: ruby
  authors:
  - Elasticsearch
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-06-03 00:00:00.000000000 Z
+ date: 2016-11-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: 0.3.2
    name: jar-dependencies
@@ -21,13 +21,13 @@ dependencies:
    type: :development
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: 0.3.2
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: '1.0'
    name: logstash-core-plugin-api
@@ -35,13 +35,13 @@ dependencies:
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ~>
+     - - "~>"
        - !ruby/object:Gem::Version
          version: '1.0'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - '>='
+     - - ">="
        - !ruby/object:Gem::Version
          version: '0'
    name: logstash-codec-json
@@ -49,13 +49,13 @@ dependencies:
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - '>='
+     - - ">="
        - !ruby/object:Gem::Version
          version: '0'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - '>='
+     - - ">="
        - !ruby/object:Gem::Version
          version: '0'
    name: logstash-codec-plain
@@ -63,16 +63,16 @@ dependencies:
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - '>='
+     - - ">="
        - !ruby/object:Gem::Version
          version: '0'
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - '>='
+     - - ">="
        - !ruby/object:Gem::Version
          version: 0.0.22
-     - - <
+     - - "<"
        - !ruby/object:Gem::Version
          version: 0.1.0
    name: stud
@@ -80,16 +80,16 @@ dependencies:
    type: :runtime
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - '>='
+     - - ">="
        - !ruby/object:Gem::Version
          version: 0.0.22
-     - - <
+     - - "<"
        - !ruby/object:Gem::Version
          version: 0.1.0
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - '>='
+     - - ">="
        - !ruby/object:Gem::Version
          version: '0'
    name: logstash-devutils
@@ -97,7 +97,7 @@ dependencies:
    type: :development
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - '>='
+     - - ">="
        - !ruby/object:Gem::Version
          version: '0'
  description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
@@ -118,12 +118,6 @@ files:
  - logstash-input-kafka.gemspec
  - spec/integration/inputs/kafka_spec.rb
  - spec/unit/inputs/kafka_spec.rb
- - vendor/jar-dependencies/runtime-jars/kafka-clients-0.9.0.1.jar
- - vendor/jar-dependencies/runtime-jars/log4j-1.2.17.jar
- - vendor/jar-dependencies/runtime-jars/lz4-1.2.0.jar
- - vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.6.jar
- - vendor/jar-dependencies/runtime-jars/slf4j-log4j12-1.7.13.jar
- - vendor/jar-dependencies/runtime-jars/snappy-java-1.1.1.7.jar
  homepage: http://www.elastic.co/guide/en/logstash/current/index.html
  licenses:
  - Apache License (2.0)
@@ -136,19 +130,19 @@ require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
-   - - '>='
+   - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
-   - - '>='
+   - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  requirements:
  - jar 'org.apache.kafka:kafka-clients', '0.9.0.1'
  - jar 'org.slf4j:slf4j-log4j12', '1.7.13'
  rubyforge_project:
- rubygems_version: 2.4.5
+ rubygems_version: 2.4.8
  signing_key:
  specification_version: 4
  summary: This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker