logstash-input-kafka 3.1.0 → 4.0.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -15
- data/lib/logstash/inputs/kafka.rb +5 -28
- data/logstash-input-kafka.gemspec +5 -3
- data/spec/integration/inputs/kafka_spec.rb +44 -83
- data/spec/unit/inputs/kafka_spec.rb +2 -2
- data/vendor/jar-dependencies/runtime-jars/kafka-clients-0.9.0.1.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/log4j-1.2.17.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/lz4-1.2.0.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.6.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/slf4j-log4j12-1.7.13.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/snappy-java-1.1.1.7.jar +0 -0
- metadata +28 -22
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: abc3fc0897258da0fa0b331447bfb1f8a0d464cb
+  data.tar.gz: 0dd2bfc124413da3c415b8e87db660a155a5c8b4
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 615fa1e2452d73baf95aadf248aa46961a1d9d2ba7c217c0ea1fd3323d74250b8135fb478a9262601166212f622ebf78a94a9bb88547f7bf0c6ffb465f9433ed
+  data.tar.gz: 78a16c4a6f7ad5037e8357e24a0638929d09484fa2df32e71ae5d568a93be5c75018557f64756969751782fff00674fd31809727825465dee61749f980e44ddf
data/CHANGELOG.md
CHANGED
@@ -1,17 +1,7 @@
-##
--
--
--
-- add :topics_pattern configuration option
-
-## 3.0.3
-- Revert back to not configuring log4j here in this plugin. This setup method used
-  will hardcode log4j setting which means you cannot configure it. Its better to add
-  log4j.properties in LS_HOME/bin, add it to classpath to silence warnings.
-- Update auto_commit_interval_ms to default to 5000
-
-## 3.0.2
-- Support for Kafka 0.9 for LS 2.x
+## 4.0.0
+- Republish all the gems under jruby.
+- Update the plugin to the version 2.0 of the plugin api, this change is required for Logstash 5.0 compatibility. See https://github.com/elastic/logstash/issues/5141
+- Support for Kafka 0.9 for LS 5.x
 
 ## 3.0.0.beta7
 - Fix Log4j warnings by setting up the logger
@@ -57,4 +47,3 @@
 - Plugins were updated to follow the new shutdown semantic, this mainly allows Logstash to instruct input plugins to terminate gracefully,
   instead of using Thread.raise on the plugins' threads. Ref: https://github.com/elastic/logstash/pull/3895
 - Dependency on logstash-core update to 2.0
-
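For context on the "version 2.0 of the plugin api" entry above: under that api (the one Logstash 5.x ships), plugins address event fields through get/set accessors, the same style used by the `event.set("[kafka][...]", ...)` calls that appear in the input diff below. A minimal, hypothetical Ruby sketch (field names are examples only, not part of this gem's code):

    require "logstash/event"

    # Illustrative only: plugin api 2.0 style field access.
    event = LogStash::Event.new("message" => "hello")
    event.set("[kafka][topic]", "logstash_topic_plain")   # write a nested field
    event.get("[kafka][topic]")                           # => "logstash_topic_plain"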
data/lib/logstash/inputs/kafka.rb
CHANGED
@@ -35,7 +35,7 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 default :codec, 'plain'
 
 # The frequency in milliseconds that the consumer offsets are committed to Kafka.
-config :auto_commit_interval_ms, :validate => :string, :default => "
+config :auto_commit_interval_ms, :validate => :string, :default => "10"
 # What to do when there is no initial offset in Kafka or if an offset is out of range:
 #
 # * earliest: automatically reset the offset to the earliest offset
@@ -114,11 +114,8 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 config :session_timeout_ms, :validate => :string, :default => "30000"
 # Java Class used to deserialize the record's value
 config :value_deserializer_class, :validate => :string, :default => "org.apache.kafka.common.serialization.StringDeserializer"
-# A list of topics to subscribe to
-config :topics, :validate => :array, :
-# A topic regex pattern to subscribe to.
-# The topics configuration will be ignored when using this configuration.
-config :topics_pattern, :validate => :string
+# A list of topics to subscribe to.
+config :topics, :validate => :array, :required => true
 # Time kafka consumer will wait to receive new messages from topics
 config :poll_timeout_ms, :validate => :number, :default => 100
 # Enable SSL/TLS secured communication to Kafka broker. Note that secure communication
@@ -132,18 +129,11 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 config :ssl_keystore_location, :validate => :path
 # If client authentication is required, this setting stores the keystore password
 config :ssl_keystore_password, :validate => :password
-# Option to add Kafka metadata like topic, message size to the event.
-# This will add a field named `kafka` to the logstash event containing the following attributes:
-# `topic`: The topic this message is associated with
-# `consumer_group`: The consumer group used to read in this event
-# `partition`: The partition this message is associated with
-# `offset`: The offset from the partition this message is associated with
-# `key`: A ByteBuffer containing the message key
-config :decorate_events, :validate => :boolean, :default => false
 
 
 public
 def register
+  LogStash::Logger.setup_log4j(@logger)
   @runner_threads = []
 end # def register
 
@@ -163,25 +153,12 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
 def thread_runner(logstash_queue, consumer)
   Thread.new do
     begin
-
-      nooplistener = org.apache.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener.new
-      pattern = java.util.regex.Pattern.compile(@topics_pattern)
-      consumer.subscribe(pattern, nooplistener)
-    else
-      consumer.subscribe(topics);
-    end
+      consumer.subscribe(topics);
       while !stop?
         records = consumer.poll(poll_timeout_ms);
         for record in records do
           @codec.decode(record.value.to_s) do |event|
             decorate(event)
-            if @decorate_events
-              event.set("[kafka][topic]", record.topic)
-              event.set("[kafka][consumer_group]", @group_id)
-              event.set("[kafka][partition]", record.partition)
-              event.set("[kafka][offset]", record.offset)
-              event.set("[kafka][key]", record.key)
-            end
             logstash_queue << event
           end
         end
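The net effect of the input changes above: `topics` is now a required array, `topics_pattern` and `decorate_events` are gone, and the consumer always calls `consumer.subscribe(topics)`. A minimal sketch of driving the 4.0.0 input from Ruby, modeled on the integration spec further down; the topic name and in-memory queue are illustrative assumptions, not taken from the diff:

    require "logstash/inputs/kafka"

    # Assumed topic name; any existing Kafka topic would do.
    config = { 'topics' => ['logstash_topic_plain'],  # 'topics' is :required => true in 4.0.0
               'codec'  => 'plain' }                  # 'topics_pattern' / 'decorate_events' are no longer recognized
    kafka_input = LogStash::Inputs::Kafka.new(config)
    kafka_input.register
    queue = Array.new
    Thread.new { kafka_input.run(queue) }             # decoded events are pushed onto the queue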
data/logstash-input-kafka.gemspec
CHANGED
@@ -1,10 +1,10 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-kafka'
-  s.version = '
+  s.version = '4.0.0'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker'
-  s.description = "This gem is a
+  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
   s.authors = ['Elasticsearch']
   s.email = 'info@elastic.co'
   s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
@@ -25,9 +25,11 @@ Gem::Specification.new do |s|
   s.add_development_dependency 'jar-dependencies', '~> 0.3.2'
 
   # Gem dependencies
-  s.add_runtime_dependency "logstash-core-plugin-api", "~>
+  s.add_runtime_dependency "logstash-core-plugin-api", "~> 2.0"
   s.add_runtime_dependency 'logstash-codec-json'
   s.add_runtime_dependency 'logstash-codec-plain'
   s.add_runtime_dependency 'stud', '>= 0.0.22', '< 0.1.0'
+
   s.add_development_dependency 'logstash-devutils'
 end
+
data/spec/integration/inputs/kafka_spec.rb
CHANGED
@@ -3,97 +3,58 @@ require "logstash/devutils/rspec/spec_helper"
 require "logstash/inputs/kafka"
 require "digest"
 
-
-
-
-  let(:
-
-  let(:
-  let(:plain_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
-  let(:snappy_config) { { 'topics' => ['logstash_topic_snappy'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
-  let(:lz4_config) { { 'topics' => ['logstash_topic_lz4'], 'codec' => 'plain', 'group_id' => group_id_1, 'auto_offset_reset' => 'earliest'} }
-  let(:pattern_config) { { 'topics_pattern' => 'logstash_topic_.*', 'group_id' => group_id_2, 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
-  let(:decorate_config) { { 'topics' => ['logstash_topic_plain'], 'codec' => 'plain', 'group_id' => group_id_3, 'auto_offset_reset' => 'earliest', 'decorate_events' => true} }
-  let(:timeout_seconds) { 120 }
+describe "input/kafka", :integration => true do
+  let(:partition3_config) { { 'topics' => ['topic3'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
+  let(:snappy_config) { { 'topics' => ['snappy_topic'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
+  let(:lz4_config) { { 'topics' => ['lz4_topic'], 'codec' => 'plain', 'auto_offset_reset' => 'earliest'} }
+
+  let(:tries) { 60 }
   let(:num_events) { 103 }
-
-
-
-
-
-      kafka_input.run(queue)
-    end
+
+  def thread_it(kafka_input, queue)
+    Thread.new do
+      begin
+        kafka_input.run(queue)
       end
     end
-
-  it "should consume all messages from plain 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(plain_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait(timeout_seconds).for { queue.length }.to eq(num_events)
-    expect(queue.length).to eq(num_events)
-  end
-
-  it "should consume all messages from snappy 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait(timeout_seconds).for { queue.length }.to eq(num_events)
-    expect(queue.length).to eq(num_events)
-  end
-
-  it "should consume all messages from lz4 3-partition topic" do
-    kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
-    queue = Array.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait(timeout_seconds).for { queue.length }.to eq(num_events)
-    expect(queue.length).to eq(num_events)
-  end
-
   end
-
-
-
-
-
-
-
+
+  def wait_for_events(queue, num_events)
+    begin
+      timeout(30) do
+        until queue.length == num_events do
+          sleep 1
+          next
        end
      end
    end
-
-
-
-
-
-
-
-
-
+  end
+
+  it "should consume all messages from 3-partition topic" do
+    kafka_input = LogStash::Inputs::Kafka.new(partition3_config)
+    queue = Array.new
+    t = thread_it(kafka_input, queue)
+    t.run
+    wait_for_events(queue, num_events)
+    expect(queue.size).to eq(num_events)
+  end
+
+  it "should consume all messages from snappy 3-partition topic" do
+    kafka_input = LogStash::Inputs::Kafka.new(snappy_config)
+    queue = Array.new
+    t = thread_it(kafka_input, queue)
+    t.run
+    wait_for_events(queue, num_events)
+    expect(queue.size).to eq(num_events)
   end
 
-
-
-
-
-
-
-
-  end
-
-  it "should show the right topic and group name in decorated kafka section" do
-    kafka_input = LogStash::Inputs::Kafka.new(decorate_config)
-    queue = Queue.new
-    t = thread_it(kafka_input, queue)
-    t.run
-    wait(timeout_seconds).for { queue.length }.to eq(num_events)
-    expect(queue.length).to eq(num_events)
-    event = queue.shift
-    expect(event.get("kafka")["topic"]).to eq("logstash_topic_plain")
-    expect(event.get("kafka")["consumer_group"]).to eq(group_id_3)
-  end
+  it "should consume all messages from lz4 3-partition topic" do
+    kafka_input = LogStash::Inputs::Kafka.new(lz4_config)
+    queue = Array.new
+    t = thread_it(kafka_input, queue)
+    t.run
+    wait_for_events(queue, num_events)
+    expect(queue.size).to eq(num_events)
   end
+
 end
data/spec/unit/inputs/kafka_spec.rb
CHANGED
@@ -16,7 +16,7 @@ class MockConsumer
 raise org.apache.kafka.common.errors.WakeupException.new
 else
   10.times.map do
-    org.apache.kafka.clients.consumer.ConsumerRecord.new("
+    org.apache.kafka.clients.consumer.ConsumerRecord.new("test", 0, 0, "key", "value")
   end
 end
 end
@@ -30,7 +30,7 @@ class MockConsumer
 end
 
 describe LogStash::Inputs::Kafka do
-  let(:config) { { 'topics' => ['
+  let(:config) { { 'topics' => ['test'], 'consumer_threads' => 4 } }
   subject { LogStash::Inputs::Kafka.new(config) }
 
   it "should register" do
data/vendor/jar-dependencies/runtime-jars/kafka-clients-0.9.0.1.jar: Binary file
data/vendor/jar-dependencies/runtime-jars/log4j-1.2.17.jar: Binary file
data/vendor/jar-dependencies/runtime-jars/lz4-1.2.0.jar: Binary file
data/vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.6.jar: Binary file
data/vendor/jar-dependencies/runtime-jars/slf4j-log4j12-1.7.13.jar: Binary file
data/vendor/jar-dependencies/runtime-jars/snappy-java-1.1.1.7.jar: Binary file
metadata
CHANGED
@@ -1,19 +1,19 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-kafka
 version: !ruby/object:Gem::Version
-  version:
+  version: 4.0.0
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-05-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ~>
     - !ruby/object:Gem::Version
       version: 0.3.2
   name: jar-dependencies
@@ -21,27 +21,27 @@ dependencies:
   type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ~>
     - !ruby/object:Gem::Version
       version: 0.3.2
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ~>
     - !ruby/object:Gem::Version
-      version: '
+      version: '2.0'
   name: logstash-core-plugin-api
   prerelease: false
   type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - ~>
     - !ruby/object:Gem::Version
-      version: '
+      version: '2.0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
     - !ruby/object:Gem::Version
       version: '0'
   name: logstash-codec-json
@@ -49,13 +49,13 @@ dependencies:
   type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
     - !ruby/object:Gem::Version
       version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
     - !ruby/object:Gem::Version
       version: '0'
   name: logstash-codec-plain
@@ -63,16 +63,16 @@ dependencies:
   type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
     - !ruby/object:Gem::Version
       version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
     - !ruby/object:Gem::Version
       version: 0.0.22
-    - -
+    - - <
     - !ruby/object:Gem::Version
       version: 0.1.0
   name: stud
@@ -80,16 +80,16 @@ dependencies:
   type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
     - !ruby/object:Gem::Version
       version: 0.0.22
-    - -
+    - - <
     - !ruby/object:Gem::Version
       version: 0.1.0
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
     - !ruby/object:Gem::Version
       version: '0'
   name: logstash-devutils
@@ -97,10 +97,10 @@ dependencies:
   type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - -
+    - - '>='
     - !ruby/object:Gem::Version
       version: '0'
-description: This gem is a
+description: This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program
 email: info@elastic.co
 executables: []
 extensions: []
@@ -118,6 +118,12 @@ files:
 - logstash-input-kafka.gemspec
 - spec/integration/inputs/kafka_spec.rb
 - spec/unit/inputs/kafka_spec.rb
+- vendor/jar-dependencies/runtime-jars/kafka-clients-0.9.0.1.jar
+- vendor/jar-dependencies/runtime-jars/log4j-1.2.17.jar
+- vendor/jar-dependencies/runtime-jars/lz4-1.2.0.jar
+- vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.6.jar
+- vendor/jar-dependencies/runtime-jars/slf4j-log4j12-1.7.13.jar
+- vendor/jar-dependencies/runtime-jars/snappy-java-1.1.1.7.jar
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html
 licenses:
 - Apache License (2.0)
@@ -130,19 +136,19 @@ require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - '>='
   - !ruby/object:Gem::Version
     version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - -
+  - - '>='
   - !ruby/object:Gem::Version
     version: '0'
 requirements:
 - jar 'org.apache.kafka:kafka-clients', '0.9.0.1'
 - jar 'org.slf4j:slf4j-log4j12', '1.7.13'
 rubyforge_project:
-rubygems_version: 2.4.
+rubygems_version: 2.4.5
 signing_key:
 specification_version: 4
 summary: This input will read events from a Kafka topic. It uses the high level consumer API provided by Kafka to read messages from the broker