logstash-input-kafka 3.0.0.beta2 → 3.0.0.beta3
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -1
- data/lib/logstash/inputs/kafka.rb +4 -4
- data/logstash-input-kafka.gemspec +2 -2
- data/spec/integration/inputs/kafka_spec.rb +32 -13
- data/spec/unit/inputs/kafka_spec.rb +1 -18
- data/vendor/jar-dependencies/runtime-jars/{kafka-clients-0.9.0.0.jar → kafka-clients-0.9.0.1.jar} +0 -0
- metadata +5 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f8d3e1a5d3ae1030b4c2401e92627d68079c764b
+  data.tar.gz: b5f497bf850e4b983ae618c060b1625ca489da76
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a6b759fe203404ca0cb633496fbd8e9ec17016836f06c1fa7a001445cfd7b6fe20e616368e1259c87739a94ac0f51f2a4462f26f4493e56f90132b1d35c80d35
+  data.tar.gz: 09049bef85ba6b173fbe6d55b2f807c54af1c3365cd0d56a562f6859470088bf3b162e7f19dbcf77b3c3c579541dcf08bc1a039aedef6f2099519b61f5008859
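These checksums cover the two archives packed inside the .gem file. A minimal verification sketch, assuming metadata.gz and data.tar.gz have already been extracted from the .gem archive (e.g. with tar) into the same directory as checksums.yaml; this is not part of the gem itself:

```ruby
# Minimal sketch (not part of the gem): re-verify the SHA1 entries in
# checksums.yaml against the extracted archives in the current directory.
require "digest"
require "yaml"

checksums = YAML.load_file("checksums.yaml")
%w[metadata.gz data.tar.gz].each do |name|
  actual   = Digest::SHA1.file(name).hexdigest
  expected = checksums["SHA1"][name]
  puts "#{name}: #{actual == expected ? 'ok' : 'mismatch'}"
end
```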
data/CHANGELOG.md
CHANGED
@@ -1,9 +1,12 @@
 # 3.0.0.beta2
  - Added SSL/TLS connection support to Kafka
+ - Breaking: Changed default codec to plain instead of SSL. Json codec is really slow when used
+   with inputs because inputs by default are single threaded. This makes it a bad
+   first user experience. Plain codec is a much better default.
 
 # 3.0.0.beta1
  - Refactor to use new Java based consumer, bypassing jruby-kafka
- - Change configuration to match Kafka's configuration. This version is not backward compatible
+ - Breaking: Change configuration to match Kafka's configuration. This version is not backward compatible
 
 ## 2.0.0
  - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully,
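Because the default codec is now plain, a pipeline that relied on the old json decoding has to ask for it explicitly. A minimal sketch in the style of the spec files; the topic name is a placeholder, not something taken from this diff:

```ruby
# Minimal sketch, mirroring the spec files: the codec must now be requested
# explicitly if json decoding is still wanted. 'logs' is a placeholder topic.
require "logstash/inputs/kafka"

config = {
  'topics' => ['logs'],  # placeholder topic name
  'codec'  => 'json'     # opt back in to json; the default is now 'plain'
}
kafka_input = LogStash::Inputs::Kafka.new(config)
```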
data/lib/logstash/inputs/kafka.rb
CHANGED
@@ -32,7 +32,7 @@ require 'logstash-input-kafka_jars.rb'
 class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   config_name 'kafka'
 
-  default :codec, '
+  default :codec, 'plain'
 
   # The frequency in milliseconds that the consumer offsets are committed to Kafka.
   config :auto_commit_interval_ms, :validate => :string, :default => "10"
@@ -59,6 +59,9 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   config :client_id, :validate => :string, :default => "logstash"
   # Close idle connections after the number of milliseconds specified by this config.
   config :connections_max_idle_ms, :validate => :string
+  # Ideally you should have as many threads as the number of partitions for a perfect
+  # balance — more threads than partitions means that some threads will be idle
+  config :consumer_threads, :validate => :number, :default => 1
   # If true, periodically commit to Kafka the offsets of messages already returned by the consumer.
   # This committed offset will be used when the process fails as the position from
   # which the consumption will begin.
@@ -111,9 +114,6 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
   config :session_timeout_ms, :validate => :string, :default => "30000"
   # Java Class used to deserialize the record's value
   config :value_deserializer_class, :validate => :string, :default => "org.apache.kafka.common.serialization.StringDeserializer"
-  # Ideally you should have as many threads as the number of partitions for a perfect
-  # balance — more threads than partitions means that some threads will be idle
-  config :consumer_threads, :validate => :number, :default => 1
   # A list of topics to subscribe to.
   config :topics, :validate => :array, :required => true
   # Time kafka consumer will wait to receive new messages from topics
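The relocated consumer_threads option is easiest to read next to an example. A minimal sketch in the spec-file style, assuming a hypothetical topic with three partitions (names are placeholders):

```ruby
# Minimal sketch assuming a hypothetical topic with three partitions: one
# consumer thread per partition keeps every thread busy; a fourth thread
# would simply sit idle, as the comment in kafka.rb warns.
require "logstash/inputs/kafka"

config = {
  'topics'           => ['topic3'],  # placeholder 3-partition topic
  'consumer_threads' => 3            # match the number of partitions
}
kafka_input = LogStash::Inputs::Kafka.new(config)
```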
data/logstash-input-kafka.gemspec
CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-kafka'
-  s.version = '3.0.0.beta2'
+  s.version = '3.0.0.beta3'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -20,7 +20,7 @@ Gem::Specification.new do |s|
   s.metadata = { 'logstash_plugin' => 'true', 'group' => 'input'}
 
   # Gem dependencies
-  s.add_runtime_dependency 'logstash-core', ">= 2.0.0
+  s.add_runtime_dependency 'logstash-core', ">= 2.0.0", "< 3.0.0"
   s.add_runtime_dependency 'logstash-codec-json'
   s.add_runtime_dependency 'logstash-codec-plain'
   s.add_runtime_dependency 'stud', '>= 0.0.22', '< 0.1.0'
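The widened logstash-core constraint is an ordinary open range. A small sketch of how RubyGems evaluates it; the versions probed here are arbitrary examples, not values from the gem:

```ruby
# Small sketch of how RubyGems evaluates the runtime constraint declared in
# the gemspec; the probed versions are arbitrary examples.
require "rubygems"

req = Gem::Requirement.new(">= 2.0.0", "< 3.0.0")
req.satisfied_by?(Gem::Version.new("2.2.0"))  # => true
req.satisfied_by?(Gem::Version.new("3.0.0"))  # => false
```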
data/spec/integration/inputs/kafka_spec.rb
CHANGED
@@ -1,21 +1,40 @@
 # encoding: utf-8
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/inputs/kafka"
+require "digest"
 
 describe "input/kafka", :integration => true do
-  … (old lines 6-16 not captured in this view)
-    1000.times do |i|
-      producer.send(org.apache.kafka.clients.producer.ProducerRecord("test", i.to_s, i.to_s))
+  let(:partition3_config) { { 'topics' => ['topic3'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
+
+  let(:tries) { 60 }
+  let(:num_events) { 103 }
+
+  def wait_until_count(queue)
+    num_tries = tries
+    while (num_tries > 0)
+      break if queue.size == num_events
+      num_tries -= 1
+      sleep 1
     end
   end
+
+  def thread_it(kafka_input, queue)
+    Thread.new do
+      begin
+        kafka_input.run(queue)
+      end
+    end
+  end
+
+  it "should consume all messages from 3-partition topic" do
+    kafka_input = LogStash::Inputs::Kafka.new(partition3_config)
+    queue = Array.new
+    t = thread_it(kafka_input, queue)
+    t.run
+
+    wait_until_count(queue)
+
+    expect(queue.size).to eq(num_events)
+  end
+
 end
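The new integration spec polls until 103 events show up on topic3, but this diff does not show how those events are produced. A hypothetical seeding sketch, loosely based on the removed producer code, assuming the Kafka Java client bundled with the plugin is on the JRuby load path and that a broker is listening on the placeholder address below:

```ruby
# Hypothetical seeding sketch, not shown in this diff: publish num_events
# records to topic3 with the bundled Kafka Java client from JRuby.
require 'logstash-input-kafka_jars.rb'
java_import 'org.apache.kafka.clients.producer.KafkaProducer'
java_import 'org.apache.kafka.clients.producer.ProducerRecord'

props = java.util.Properties.new
props.put('bootstrap.servers', 'localhost:9092')  # placeholder broker address
props.put('key.serializer',   'org.apache.kafka.common.serialization.StringSerializer')
props.put('value.serializer', 'org.apache.kafka.common.serialization.StringSerializer')

producer = KafkaProducer.new(props)
103.times do |i|                                   # num_events in the spec
  producer.send(ProducerRecord.new('topic3', i.to_s, i.to_s))
end
producer.close
```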
data/spec/unit/inputs/kafka_spec.rb
CHANGED
@@ -30,27 +30,10 @@ class MockConsumer
 end
 
 describe LogStash::Inputs::Kafka do
-  let(:config) { { 'topics' => ['test'], '
+  let(:config) { { 'topics' => ['test'], 'consumer_threads' => 4 } }
   subject { LogStash::Inputs::Kafka.new(config) }
 
   it "should register" do
     expect {subject.register}.to_not raise_error
   end
-
-  it "should run" do
-    expect(subject).to receive(:new_consumer) do
-      MockConsumer.new
-    end.exactly(4).times
-
-    subject.register
-    q = Queue.new
-    Thread.new do
-      while q.size < 13
-      end
-      subject.do_stop
-    end
-    subject.run(q)
-
-    expect(q.size).to eq(40)
-  end
 end
data/vendor/jar-dependencies/runtime-jars/{kafka-clients-0.9.0.0.jar → kafka-clients-0.9.0.1.jar}
RENAMED
Binary file
metadata
CHANGED
@@ -1,21 +1,21 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version: 3.0.0.beta2
+  version: 3.0.0.beta3
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-03-10 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
       - !ruby/object:Gem::Version
-        version: 2.0.0
+        version: 2.0.0
     - - <
       - !ruby/object:Gem::Version
         version: 3.0.0
@@ -26,7 +26,7 @@ dependencies:
     requirements:
     - - '>='
      - !ruby/object:Gem::Version
-        version: 2.0.0
+        version: 2.0.0
     - - <
       - !ruby/object:Gem::Version
         version: 3.0.0
@@ -110,7 +110,7 @@ files:
 - logstash-input-kafka.gemspec
 - spec/integration/inputs/kafka_spec.rb
 - spec/unit/inputs/kafka_spec.rb
-- vendor/jar-dependencies/runtime-jars/kafka-clients-0.9.0.0.jar
+- vendor/jar-dependencies/runtime-jars/kafka-clients-0.9.0.1.jar
 - vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.13.jar
 - vendor/jar-dependencies/runtime-jars/slf4j-noop-1.7.13.jar
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html