logstash-input-kafka 4.1.1 → 4.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: a42988d8cbbaab2c7c76ab5dc51e73fafc8007d5
-  data.tar.gz: 531ac5d2c767ce9ad6df4edc08cffa69fcefc6b2
+  metadata.gz: 392af7e1646f486c6f81b5ed7dd2206335be8287
+  data.tar.gz: 75f322acc9c088a79a6e87c51349237d6bff91ed
 SHA512:
-  metadata.gz: 2a73db8d884a53a6958d664282948a0e2ff8c330b5b62915738c469e94f55c7d77540e3b2a1f49aabb39b3cc3ccc036503b9ea0d888c111739db30021158eac7
-  data.tar.gz: 524190a231cd0fa95356b80f38eed6d3bef1229c9bfdd8b7118e12ac753a7044338fbc8abe903671aefe2514fd8f8e55ecae200ed30893370f5e1d144d35545f
+  metadata.gz: 2541883711e5a2fdb1acf216fdf1f25a2af1ed266c84293a869805f3e7e418c2d6a162e5f57cece82dbadf63256d8edf8271be4b6e489e93381f7f8ca8ad3a93
+  data.tar.gz: 6b2fc56fbafe56be7c8a891f44b1c83dea8759e49856b8fc94da6e1378edcf69a7bd7e1b1fddbfaf2e8a333674d97a165fe9b151efd08b1168081cc35aef1be0
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+## 4.2.0
+ - Added `topics_pattern` config, which allows consuming from multiple topics matching a regex pattern
+ - Restored event decoration, which adds Kafka metadata to the event
+
 ## 4.1.1
  - fix: vendor aliasing issue when publishing
lib/logstash/inputs/kafka.rb CHANGED
@@ -114,8 +114,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   config :session_timeout_ms, :validate => :string, :default => "30000"
   # Java Class used to deserialize the record's value
   config :value_deserializer_class, :validate => :string, :default => "org.apache.kafka.common.serialization.StringDeserializer"
-  # A list of topics to subscribe to.
-  config :topics, :validate => :array, :required => true
+  # A list of topics to subscribe to, defaults to ["logstash"].
+  config :topics, :validate => :array, :default => ["logstash"]
+  # A topic regex pattern to subscribe to.
+  # The topics configuration will be ignored when using this configuration.
+  config :topics_pattern, :validate => :string
   # Time kafka consumer will wait to receive new messages from topics
   config :poll_timeout_ms, :validate => :number, :default => 100
   # Enable SSL/TLS secured communication to Kafka broker. Note that secure communication
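For context, the new option can be exercised the same way the integration specs below do, by passing a settings hash to the plugin. A minimal sketch, assuming a JRuby/Logstash environment with this plugin installed; the pattern and group id here are hypothetical:

    require "logstash/inputs/kafka"

    # When `topics_pattern` is set, `topics` is ignored and the consumer
    # subscribes to every topic matching the regex.
    input = LogStash::Inputs::Kafka.new(
      "topics_pattern" => "app_logs_.*",
      "group_id"       => "example_group"
    )
    input.register
    queue = Queue.new
    input.run(queue)  # blocks; decoded events are pushed onto `queue`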
@@ -129,6 +132,14 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   config :ssl_keystore_location, :validate => :path
   # If client authentication is required, this setting stores the keystore password
   config :ssl_keystore_password, :validate => :password
+  # Option to add Kafka metadata like topic, message size to the event.
+  # This will add a field named `kafka` to the logstash event containing the following attributes:
+  #   `topic`: The topic this message is associated with
+  #   `consumer_group`: The consumer group used to read in this event
+  #   `partition`: The partition this message is associated with
+  #   `offset`: The offset from the partition this message is associated with
+  #   `key`: A ByteBuffer containing the message key
+  config :decorate_events, :validate => :boolean, :default => false
 
 
   public
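To show what `decorate_events` produces, the snippet below sets the same `[kafka][...]` field references by hand on an event. A sketch only; the values are invented:

    require "logstash/event"

    event = LogStash::Event.new
    # Logstash field-reference syntax: "[kafka][topic]" writes into a nested
    # `kafka` hash on the event, which is why the spec below can read it back
    # with event.get("kafka")["topic"].
    event.set("[kafka][topic]", "app_logs_1")
    event.set("[kafka][partition]", 0)
    event.set("[kafka][offset]", 42)
    event.get("kafka")  # => {"topic"=>"app_logs_1", "partition"=>0, "offset"=>42}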
@@ -156,12 +167,25 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   def thread_runner(logstash_queue, consumer)
     Thread.new do
       begin
-        consumer.subscribe(topics);
+        unless @topics_pattern.nil?
+          nooplistener = org.apache.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener.new
+          pattern = java.util.regex.Pattern.compile(@topics_pattern)
+          consumer.subscribe(pattern, nooplistener)
+        else
+          consumer.subscribe(topics);
+        end
         while !stop?
           records = consumer.poll(poll_timeout_ms);
           for record in records do
             @codec.decode(record.value.to_s) do |event|
               decorate(event)
+              if @decorate_events
+                event.set("[kafka][topic]", record.topic)
+                event.set("[kafka][consumer_group]", @group_id)
+                event.set("[kafka][partition]", record.partition)
+                event.set("[kafka][offset]", record.offset)
+                event.set("[kafka][key]", record.key)
+              end
               logstash_queue << event
             end
           end
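A note on the rebalance listener above: the Kafka 0.10 consumer API only offers pattern subscription as subscribe(Pattern, ConsumerRebalanceListener), with no single-argument overload, so the plugin passes a NoOpConsumerRebalanceListener since it has no rebalance handling of its own to perform. Topics created later that match the pattern are picked up on the consumer's next metadata refresh.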
logstash-input-kafka.gemspec CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-kafka'
-  s.version = '4.1.1'
+  s.version = '4.2.0'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
spec/integration/inputs/kafka_spec.rb CHANGED
@@ -2,59 +2,99 @@
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/inputs/kafka"
 require "digest"
+require "rspec/wait"
 
-describe "input/kafka", :integration => true do
-  let(:partition3_config) { { 'topics' => ['topic3'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-  let(:snappy_config) { { 'topics' => ['snappy_topic'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-  let(:lz4_config) { { 'topics' => ['lz4_topic'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-
-  let(:tries) { 60 }
+# Please run kafka_test_setup.sh prior to executing this integration test.
+describe "inputs/kafka", :integration => true do
+  # Group ids to make sure that the consumers get all the logs.
+  let(:group_id_1) {rand(36**8).to_s(36)}
+  let(:group_id_2) {rand(36**8).to_s(36)}
+  let(:group_id_3) {rand(36**8).to_s(36)}
+  let(:plain_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+  let(:snappy_config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+  let(:lz4_config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+  let(:pattern_config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_2, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
+  let(:decorate_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_3, 'auto_offset_reset' => 'earliest', 'decorate_events' => true} }
+  let(:timeout_seconds) { 120 }
   let(:num_events) { 103 }
-
-  def thread_it(kafka_input, queue)
-    Thread.new do
-      begin
-        kafka_input.run(queue)
+
+  describe "#kafka-topics" do
+    def thread_it(kafka_input, queue)
+      Thread.new do
+        begin
+          kafka_input.run(queue)
+        end
       end
     end
+
+    it "should consume all messages from plain 3-partition topic" do
+      kafka_input = LogStash::Inputs::Kafka.new(plain_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+    end
+
+    it "should consume all messages from snappy 3-partition topic" do
+      kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+    end
+
+    it "should consume all messages from lz4 3-partition topic" do
+      kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+    end
+
   end
-
-  def wait_for_events(queue, num_events)
-    begin
-      timeout(30) do
-        until queue.length == num_events do
-          sleep 1
-          next
+
+  describe "#kafka-topics-pattern" do
+
+    def thread_it(kafka_input, queue)
+      Thread.new do
+        begin
+          kafka_input.run(queue)
         end
       end
     end
-  end
-
-  it "should consume all messages from 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(partition3_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait_for_events(queue, num_events)
-    expect(queue.size).to eq(num_events)
-  end
-
-  it "should consume all messages from snappy 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait_for_events(queue, num_events)
-    expect(queue.size).to eq(num_events)
+
+    it "should consume all messages from all 3 topics" do
+      kafka_input = LogStash::Inputs::Kafka.new(pattern_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(3*num_events)
+      expect(queue.length).to eq(3*num_events)
+    end
   end
 
-  it "should consume all messages from lz4 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait_for_events(queue, num_events)
-    expect(queue.size).to eq(num_events)
+  describe "#kafka-decorate" do
+    def thread_it(kafka_input, queue)
+      Thread.new do
+        begin
+          kafka_input.run(queue)
+        end
+      end
+    end
+
+    it "should show the right topic and group name in decorated kafka section" do
+      kafka_input = LogStash::Inputs::Kafka.new(decorate_config)
+      queue = Queue.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+      event = queue.shift
+      expect(event.get("kafka")["topic"]).to eq("logstash_topic_plain")
+      expect(event.get("kafka")["consumer_group"]).to eq(group_id_3)
+    end
   end
-
 end
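The hand-rolled wait_for_events helper (a sleep-1 polling loop under a 30-second timeout) is replaced here by rspec-wait: wait(timeout_seconds).for { queue.length }.to eq(num_events) re-evaluates the block until the matcher passes or the timeout elapses, so a slow consumer fails with an ordinary expectation error instead of a Timeout::Error.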
spec/unit/inputs/kafka_spec.rb CHANGED
@@ -16,7 +16,7 @@ class MockConsumer
       raise org.apache.kafka.common.errors.WakeupException.new
     else
       10.times.map do
-        org.apache.kafka.clients.consumer.ConsumerRecord.new("test", 0, 0, "key", "value")
+        org.apache.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "key", "value")
       end
     end
   end
@@ -30,7 +30,7 @@ class MockConsumer
 end
 
 describe LogStash::Inputs::Kafka do
-  let(:config) { { 'topics' => ['test'], 'consumer_threads' => 4 } }
+  let(:config) { { 'topics' => ['logstash'], 'consumer_threads' => 4 } }
   subject { LogStash::Inputs::Kafka.new(config) }
 
   it "should register" do
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version: 4.1.1
+  version: 4.2.0
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-11-14 00:00:00.000000000 Z
+date: 2017-02-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement