logstash-input-kafka 5.1.4 → 5.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/lib/logstash/inputs/kafka.rb +9 -4
- data/logstash-input-kafka.gemspec +1 -1
- data/spec/integration/inputs/kafka_spec.rb +17 -5
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 69bf688e6da796bc836086907a8511aefa3a4a8d
+  data.tar.gz: 41980f6d7375bf9daae7fba455cd089a83bde427
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6ea54547d2e8beaefd0a8a67ac7a29fd08a35c27710a8ee48da70102c0a5e20dcaeda23c1993483334cf0728730ec763ed74cc1110bf0e193b1def045b4f63a0
+  data.tar.gz: a6783693e93d7fde277c062929ded62298a7bf37b0f82fd3419051f5421426e72a7a5977e989e169b8cd0630a1939055ad7c1fa3869c2e6e8b7da69f24187861
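Both digests change with the new release because the gem's metadata.gz and data.tar.gz members were rebuilt. A .gem file is a plain tar archive containing those two members (plus checksums.yaml.gz), so a locally downloaded copy can be checked against the SHA-512 values above; a minimal Ruby sketch, assuming the members have already been extracted with tar -xf logstash-input-kafka-5.1.6.gem into the current directory:

# Hedged sketch: compare a locally extracted data.tar.gz against the
# SHA-512 recorded in checksums.yaml (the file path is an assumption).
require 'digest'

puts Digest::SHA512.file('data.tar.gz').hexdigest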
data/CHANGELOG.md
CHANGED
data/lib/logstash/inputs/kafka.rb
CHANGED
@@ -209,7 +209,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 
   public
   def run(logstash_queue)
-    @runner_consumers = consumer_threads.times.map {
+    @runner_consumers = consumer_threads.times.map { |i| create_consumer("#{client_id}-#{i}") }
     @runner_threads = @runner_consumers.map { |consumer| thread_runner(logstash_queue, consumer) }
     @runner_threads.each { |t| t.join }
   end # def run
@@ -219,6 +219,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
     @runner_consumers.each { |c| c.wakeup }
   end
 
+  public
+  def kafka_consumers
+    @runner_consumers
+  end
+
   private
   def thread_runner(logstash_queue, consumer)
     Thread.new do
@@ -255,7 +260,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   end
 
   private
-  def create_consumer
+  def create_consumer(client_id)
    begin
      props = java.util.Properties.new
      kafka = org.apache.kafka.clients.consumer.ConsumerConfig
@@ -292,8 +297,8 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
      elsif security_protocol == "SASL_PLAINTEXT"
        set_sasl_config(props)
      elsif security_protocol == "SASL_SSL"
-       set_trustore_keystore_config
-       set_sasl_config
+       set_trustore_keystore_config(props)
+       set_sasl_config(props)
      end
 
      org.apache.kafka.clients.consumer.KafkaConsumer.new(props)
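Taken together, these hunks give each consumer thread its own Kafka client id (the plugin's client_id with a thread index appended), pass that id through create_consumer, and fix the SASL_SSL branch, which previously called set_trustore_keystore_config and set_sasl_config without the props object they are meant to populate. A minimal sketch of the client-id naming, in plain Ruby with illustrative values:

# Illustrative only: mirrors the "#{client_id}-#{i}" naming now used in run().
client_id = 'logstash'   # stands in for the plugin's `client_id` setting
consumer_threads = 3     # stands in for the plugin's `consumer_threads` setting

per_thread_ids = consumer_threads.times.map { |i| "#{client_id}-#{i}" }
# per_thread_ids == ["logstash-0", "logstash-1", "logstash-2"]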
data/logstash-input-kafka.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-kafka'
-  s.version = '5.1.
+  s.version = '5.1.6'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
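The only functional change here is the version bump to 5.1.6. For installations that pin plugin versions through Logstash's own Gemfile instead of bin/logstash-plugin, the corresponding entry would look roughly like this (a sketch; the exact Gemfile location depends on the Logstash install):

# Hedged sketch of a Gemfile line pinning the updated plugin release.
gem 'logstash-input-kafka', '5.1.6'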
data/spec/integration/inputs/kafka_spec.rb
CHANGED
@@ -10,7 +10,9 @@ describe "inputs/kafka", :integration => true do
   let(:group_id_1) {rand(36**8).to_s(36)}
   let(:group_id_2) {rand(36**8).to_s(36)}
   let(:group_id_3) {rand(36**8).to_s(36)}
+  let(:group_id_4) {rand(36**8).to_s(36)}
   let(:plain_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
+  let(:multi_consumer_config) { plain_config.merge({"group_id" => group_id_4, "client_id" => "spec", "consumer_threads" => 3}) }
   let(:snappy_config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
   let(:lz4_config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
   let(:pattern_config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_2, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
@@ -53,11 +55,21 @@ describe "inputs/kafka", :integration => true do
       wait(timeout_seconds).for { queue.length }.to eq(num_events)
       expect(queue.length).to eq(num_events)
     end
-
+
+    it "should consumer all messages with multiple consumers" do
+      kafka_input = LogStash::Inputs::Kafka.new(multi_consumer_config)
+      queue = Array.new
+      t = thread_it(kafka_input, queue)
+      t.run
+      wait(timeout_seconds).for { queue.length }.to eq(num_events)
+      expect(queue.length).to eq(num_events)
+      kafka_input.kafka_consumers.each_with_index do |consumer, i|
+        expect(consumer.metrics.keys.first.tags["client-id"]).to eq("spec-#{i}")
+      end
+    end
   end
 
   describe "#kafka-topics-pattern" do
-
     def thread_it(kafka_input, queue)
       Thread.new do
         begin
@@ -65,7 +77,7 @@ describe "inputs/kafka", :integration => true do
         end
       end
     end
-
+
     it "should consume all messages from all 3 topics" do
       kafka_input = LogStash::Inputs::Kafka.new(pattern_config)
       queue = Array.new
@@ -73,7 +85,7 @@ describe "inputs/kafka", :integration => true do
       t.run
       wait(timeout_seconds).for { queue.length }.to eq(3*num_events)
       expect(queue.length).to eq(3*num_events)
-    end
+    end
   end
 
   describe "#kafka-decorate" do
@@ -84,7 +96,7 @@ describe "inputs/kafka", :integration => true do
         end
       end
     end
-
+
     it "should show the right topic and group name in decorated kafka section" do
       kafka_input = LogStash::Inputs::Kafka.new(decorate_config)
       queue = Queue.new
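The new integration case starts the plugin with three consumer threads and a client_id of "spec", then uses the kafka_consumers accessor added in kafka.rb to assert that each underlying KafkaConsumer reports client-id spec-0, spec-1, spec-2 in its metrics. The same accessor can be used for ad-hoc inspection outside the test; a rough sketch, where kafka_input is assumed to be an already started LogStash::Inputs::Kafka instance:

# Hedged sketch, not part of the package: read per-thread client ids through
# the new public accessor once `run` has created the consumers.
kafka_input.kafka_consumers.each_with_index do |consumer, i|
  puts consumer.metrics.keys.first.tags['client-id']   # e.g. "spec-0", "spec-1", ...
end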
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version: 5.1.
+  version: 5.1.6
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-01-
+date: 2017-01-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement