logstash-input-kafka 6.2.4 → 6.2.6
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/lib/logstash/inputs/kafka.rb +9 -4
- data/logstash-input-kafka.gemspec +1 -1
- data/spec/integration/inputs/kafka_spec.rb +17 -5
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: a81f9cd12e2e3d4b62a79fdc783dbbec3c23914a
|
4
|
+
data.tar.gz: 7b3a63847c2345526d913dd33df657965faffd86
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: c0161ad18db27f054b2997df14383d9c7308e5dbaa2e0fb869ab84758ff0232a349a71c56cc28a7c89294e7b103053aaba61566719b90bba83f737c17a9832ef
|
7
|
+
data.tar.gz: 7e089f9dafe7e99833b30b20317356dd4eadc0743ebce480b33aa61dca599d4063e606a0db904bfec21f8f67e51721d6d6fda0af117a31b3fa273ed9fea6c9da
|
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,9 @@
|
|
1
|
+
## 6.2.6
|
2
|
+
- fix: Client ID is no longer reused across multiple Kafka consumer instances
|
3
|
+
|
4
|
+
## 6.2.5
|
5
|
+
- Fix a bug where consumer was not correctly setup when `SASL_SSL` option was specified.
|
6
|
+
|
1
7
|
## 6.2.4
|
2
8
|
- Make error reporting more clear when connection fails
|
3
9
|
|
@@ -220,7 +220,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
|
|
220
220
|
|
221
221
|
public
|
222
222
|
def run(logstash_queue)
|
223
|
-
@runner_consumers = consumer_threads.times.map { create_consumer }
|
223
|
+
@runner_consumers = consumer_threads.times.map { |i| create_consumer("#{client_id}-#{i}") }
|
224
224
|
@runner_threads = @runner_consumers.map { |consumer| thread_runner(logstash_queue, consumer) }
|
225
225
|
@runner_threads.each { |t| t.join }
|
226
226
|
end # def run
|
@@ -230,6 +230,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
|
|
230
230
|
@runner_consumers.each { |c| c.wakeup }
|
231
231
|
end
|
232
232
|
|
233
|
+
public
|
234
|
+
def kafka_consumers
|
235
|
+
@runner_consumers
|
236
|
+
end
|
237
|
+
|
233
238
|
private
|
234
239
|
def thread_runner(logstash_queue, consumer)
|
235
240
|
Thread.new do
|
@@ -266,7 +271,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
|
|
266
271
|
end
|
267
272
|
|
268
273
|
private
|
269
|
-
def create_consumer
|
274
|
+
def create_consumer(client_id)
|
270
275
|
begin
|
271
276
|
props = java.util.Properties.new
|
272
277
|
kafka = org.apache.kafka.clients.consumer.ConsumerConfig
|
@@ -305,8 +310,8 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
|
|
305
310
|
elsif security_protocol == "SASL_PLAINTEXT"
|
306
311
|
set_sasl_config(props)
|
307
312
|
elsif security_protocol == "SASL_SSL"
|
308
|
-
set_trustore_keystore_config
|
309
|
-
set_sasl_config
|
313
|
+
set_trustore_keystore_config(props)
|
314
|
+
set_sasl_config(props)
|
310
315
|
end
|
311
316
|
|
312
317
|
org.apache.kafka.clients.consumer.KafkaConsumer.new(props)
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'logstash-input-kafka'
|
3
|
-
s.version = '6.2.4'
|
3
|
+
s.version = '6.2.6'
|
4
4
|
s.licenses = ['Apache License (2.0)']
|
5
5
|
s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
|
6
6
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
@@ -10,7 +10,9 @@ describe "inputs/kafka", :integration => true do
|
|
10
10
|
let(:group_id_1) {rand(36**8).to_s(36)}
|
11
11
|
let(:group_id_2) {rand(36**8).to_s(36)}
|
12
12
|
let(:group_id_3) {rand(36**8).to_s(36)}
|
13
|
+
let(:group_id_4) {rand(36**8).to_s(36)}
|
13
14
|
let(:plain_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
|
15
|
+
let(:multi_consumer_config) { plain_config.merge({"group_id" => group_id_4, "client_id" => "spec", "consumer_threads" => 3}) }
|
14
16
|
let(:snappy_config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
|
15
17
|
let(:lz4_config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
|
16
18
|
let(:pattern_config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_2, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
|
@@ -53,11 +55,21 @@ describe "inputs/kafka", :integration => true do
|
|
53
55
|
wait(timeout_seconds).for { queue.length }.to eq(num_events)
|
54
56
|
expect(queue.length).to eq(num_events)
|
55
57
|
end
|
56
|
-
|
58
|
+
|
59
|
+
it "should consumer all messages with multiple consumers" do
|
60
|
+
kafka_input = LogStash::Inputs::Kafka.new(multi_consumer_config)
|
61
|
+
queue = Array.new
|
62
|
+
t = thread_it(kafka_input, queue)
|
63
|
+
t.run
|
64
|
+
wait(timeout_seconds).for { queue.length }.to eq(num_events)
|
65
|
+
expect(queue.length).to eq(num_events)
|
66
|
+
kafka_input.kafka_consumers.each_with_index do |consumer, i|
|
67
|
+
expect(consumer.metrics.keys.first.tags["client-id"]).to eq("spec-#{i}")
|
68
|
+
end
|
69
|
+
end
|
57
70
|
end
|
58
71
|
|
59
72
|
describe "#kafka-topics-pattern" do
|
60
|
-
|
61
73
|
def thread_it(kafka_input, queue)
|
62
74
|
Thread.new do
|
63
75
|
begin
|
@@ -65,7 +77,7 @@ describe "inputs/kafka", :integration => true do
|
|
65
77
|
end
|
66
78
|
end
|
67
79
|
end
|
68
|
-
|
80
|
+
|
69
81
|
it "should consume all messages from all 3 topics" do
|
70
82
|
kafka_input = LogStash::Inputs::Kafka.new(pattern_config)
|
71
83
|
queue = Array.new
|
@@ -73,7 +85,7 @@ describe "inputs/kafka", :integration => true do
|
|
73
85
|
t.run
|
74
86
|
wait(timeout_seconds).for { queue.length }.to eq(3*num_events)
|
75
87
|
expect(queue.length).to eq(3*num_events)
|
76
|
-
end
|
88
|
+
end
|
77
89
|
end
|
78
90
|
|
79
91
|
describe "#kafka-decorate" do
|
@@ -84,7 +96,7 @@ describe "inputs/kafka", :integration => true do
|
|
84
96
|
end
|
85
97
|
end
|
86
98
|
end
|
87
|
-
|
99
|
+
|
88
100
|
it "should show the right topic and group name in decorated kafka section" do
|
89
101
|
kafka_input = LogStash::Inputs::Kafka.new(decorate_config)
|
90
102
|
queue = Queue.new
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-input-kafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 6.2.4
|
4
|
+
version: 6.2.6
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Elasticsearch
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2017-01-
|
11
|
+
date: 2017-01-25 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|