logstash-integration-kafka 10.0.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
spec/integration/outputs/kafka_spec.rb ADDED
@@ -0,0 +1,194 @@
+ # encoding: utf-8
+
+ require "logstash/devutils/rspec/spec_helper"
+ require 'logstash/outputs/kafka'
+ require 'json'
+ require 'poseidon'
+
+ describe "outputs/kafka", :integration => true do
+   let(:kafka_host) { 'localhost' }
+   let(:kafka_port) { 9092 }
+   let(:num_events) { 10 }
+   let(:base_config) { {'client_id' => 'kafkaoutputspec'} }
+   let(:event) { LogStash::Event.new({'message' => '183.60.215.50 - - [11/Sep/2014:22:00:00 +0000] "GET /scripts/netcat-webserver HTTP/1.1" 200 182 "-" "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)"', '@timestamp' => LogStash::Timestamp.at(0) }) }
+
+
+   context 'when outputting messages serialized as String' do
+     let(:test_topic) { 'logstash_integration_topic1' }
+     let(:num_events) { 3 }
+     let(:consumer) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 0, :earliest_offset)
+     end
+     subject do
+       consumer.fetch
+     end
+
+     before :each do
+       config = base_config.merge({"topic_id" => test_topic})
+       load_kafka_data(config)
+     end
+
+     it 'should have data integrity' do
+       expect(subject.size).to eq(num_events)
+       subject.each do |m|
+         expect(m.value).to eq(event.to_s)
+       end
+     end
+
+   end
+
+   context 'when outputting messages serialized as Byte Array' do
+     let(:test_topic) { 'topic1b' }
+     let(:num_events) { 3 }
+     let(:consumer) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 0, :earliest_offset)
+     end
+     subject do
+       consumer.fetch
+     end
+
+     before :each do
+       config = base_config.merge(
+         {
+           "topic_id" => test_topic,
+           "value_serializer" => 'org.apache.kafka.common.serialization.ByteArraySerializer'
+         }
+       )
+       load_kafka_data(config)
+     end
+
+     it 'should have data integrity' do
+       expect(subject.size).to eq(num_events)
+       subject.each do |m|
+         expect(m.value).to eq(event.to_s)
+       end
+     end
+
+   end
+
+   context 'when setting message_key' do
+     let(:num_events) { 10 }
+     let(:test_topic) { 'logstash_integration_topic2' }
+     let!(:consumer0) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 0, :earliest_offset)
+     end
+     let!(:consumer1) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 1, :earliest_offset)
+     end
+
+     before :each do
+       config = base_config.merge({"topic_id" => test_topic, "message_key" => "static_key"})
+       load_kafka_data(config)
+     end
+
+     it 'should send all events to one partition' do
+       expect(consumer0.fetch.size == num_events || consumer1.fetch.size == num_events).to be true
+     end
+   end
+
+   context 'when using gzip compression' do
+     let(:test_topic) { 'logstash_integration_gzip_topic' }
+     let!(:consumer) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 0, :earliest_offset)
+     end
+     subject do
+       consumer.fetch
+     end
+
+     before :each do
+       config = base_config.merge({"topic_id" => test_topic, "compression_type" => "gzip"})
+       load_kafka_data(config)
+     end
+
+     it 'should have data integrity' do
+       expect(subject.size).to eq(num_events)
+       subject.each do |m|
+         expect(m.value).to eq(event.to_s)
+       end
+     end
+   end
+
+   context 'when using snappy compression' do
+     let(:test_topic) { 'logstash_integration_snappy_topic' }
+     let!(:consumer) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 0, :earliest_offset)
+     end
+     subject do
+       consumer.fetch
+     end
+
+     before :each do
+       config = base_config.merge({"topic_id" => test_topic, "compression_type" => "snappy"})
+       load_kafka_data(config)
+     end
+
+     it 'should have data integrity' do
+       expect(subject.size).to eq(num_events)
+       subject.each do |m|
+         expect(m.value).to eq(event.to_s)
+       end
+     end
+   end
+
+   context 'when using LZ4 compression' do
+     let(:test_topic) { 'logstash_integration_lz4_topic' }
+
+     before :each do
+       config = base_config.merge({"topic_id" => test_topic, "compression_type" => "lz4"})
+       load_kafka_data(config)
+     end
+   end
+
+   context 'when using multi partition topic' do
+     let(:num_events) { 10 }
+     let(:test_topic) { 'logstash_integration_topic3' }
+     let!(:consumer0) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 0, :earliest_offset)
+     end
+     let!(:consumer1) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 1, :earliest_offset)
+     end
+
+     let!(:consumer2) do
+       Poseidon::PartitionConsumer.new("my_test_consumer", kafka_host, kafka_port,
+                                       test_topic, 2, :earliest_offset)
+     end
+
+     before :each do
+       config = base_config.merge({"topic_id" => test_topic})
+       load_kafka_data(config)
+     end
+
+     it 'should distribute events to all partitions' do
+       consumer0_records = consumer0.fetch
+       consumer1_records = consumer1.fetch
+       consumer2_records = consumer2.fetch
+
+       expect(consumer0_records.size > 1 &&
+              consumer1_records.size > 1 &&
+              consumer2_records.size > 1).to be true
+
+       all_records = consumer0_records + consumer1_records + consumer2_records
+       expect(all_records.size).to eq(num_events)
+       all_records.each do |m|
+         expect(m.value).to eq(event.to_s)
+       end
+     end
+   end
+
+   def load_kafka_data(config)
+     kafka = LogStash::Outputs::Kafka.new(config)
+     kafka.register
+     kafka.multi_receive(num_events.times.collect { event })
+     kafka.close
+   end
+
+ end
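
The round trip these integration examples drive can also be sketched outside RSpec. A minimal sketch, assuming JRuby, a broker on localhost:9092, and a hypothetical topic name; it reuses only calls the spec itself makes (register, multi_receive, close, and Poseidon::PartitionConsumer#fetch):

    # Minimal sketch (not part of the gem): write a few events with the output
    # plugin, then read them back with Poseidon, mirroring load_kafka_data above.
    require 'logstash/outputs/kafka'
    require 'logstash/event'
    require 'poseidon'

    topic = 'smoke_test_topic'                        # hypothetical topic name
    event = LogStash::Event.new('message' => 'hello kafka')

    output = LogStash::Outputs::Kafka.new('topic_id' => topic, 'client_id' => 'smoke_test')
    output.register
    output.multi_receive(Array.new(3) { event })      # same API the specs exercise
    output.close

    consumer = Poseidon::PartitionConsumer.new('smoke_test_consumer', 'localhost', 9092,
                                               topic, 0, :earliest_offset)
    consumer.fetch.each { |m| puts m.value }          # each value is event.to_s
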
spec/unit/inputs/kafka_spec.rb ADDED
@@ -0,0 +1,39 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/inputs/kafka"
+ require "concurrent"
+
+ class MockConsumer
+   def initialize
+     @wake = Concurrent::AtomicBoolean.new(false)
+   end
+
+   def subscribe(topics)
+   end
+
+   def poll(ms)
+     if @wake.value
+       raise org.apache.kafka.common.errors.WakeupException.new
+     else
+       10.times.map do
+         org.apache.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "key", "value")
+       end
+     end
+   end
+
+   def close
+   end
+
+   def wakeup
+     @wake.make_true
+   end
+ end
+
+ describe LogStash::Inputs::Kafka do
+   let(:config) { { 'topics' => ['logstash'], 'consumer_threads' => 4 } }
+   subject { LogStash::Inputs::Kafka.new(config) }
+
+   it "should register" do
+     expect {subject.register}.to_not raise_error
+   end
+ end
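
The MockConsumer above models the Kafka consumer shutdown protocol: poll returns record batches until wakeup flips the flag, after which the next poll raises WakeupException. A minimal sketch of that flow, driving the mock directly (JRuby only, assuming the kafka-clients jar is on the classpath):

    consumer = MockConsumer.new
    consumer.subscribe(["logstash"])

    batch = consumer.poll(100)        # a batch of 10 stubbed ConsumerRecords
    puts batch.size                   # => 10

    consumer.wakeup                   # normally called from another thread to stop polling
    begin
      consumer.poll(100)              # now raises because the wake flag is set
    rescue org.apache.kafka.common.errors.WakeupException
      consumer.close
    end
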
spec/unit/outputs/kafka_spec.rb ADDED
@@ -0,0 +1,192 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require 'logstash/outputs/kafka'
+ require 'json'
+
+ describe "outputs/kafka" do
+   let (:simple_kafka_config) {{'topic_id' => 'test'}}
+   let (:event) { LogStash::Event.new({'message' => 'hello', 'topic_name' => 'my_topic', 'host' => '172.0.0.1',
+                                       '@timestamp' => LogStash::Timestamp.now}) }
+
+   context 'when initializing' do
+     it "should register" do
+       output = LogStash::Plugin.lookup("output", "kafka").new(simple_kafka_config)
+       expect {output.register}.to_not raise_error
+     end
+
+     it 'should populate kafka config with default values' do
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+       insist {kafka.bootstrap_servers} == 'localhost:9092'
+       insist {kafka.topic_id} == 'test'
+       insist {kafka.key_serializer} == 'org.apache.kafka.common.serialization.StringSerializer'
+     end
+   end
+
+   context 'when outputting messages' do
+     it 'should send logstash event to kafka broker' do
+       expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+         .with(an_instance_of(org.apache.kafka.clients.producer.ProducerRecord)).and_call_original
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+       kafka.register
+       kafka.multi_receive([event])
+     end
+
+     it 'should support Event#sprintf placeholders in topic_id' do
+       topic_field = 'topic_name'
+       expect(org.apache.kafka.clients.producer.ProducerRecord).to receive(:new)
+         .with("my_topic", event.to_s).and_call_original
+       expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send).and_call_original
+       kafka = LogStash::Outputs::Kafka.new({'topic_id' => "%{#{topic_field}}"})
+       kafka.register
+       kafka.multi_receive([event])
+     end
+
+     it 'should support field referenced message_keys' do
+       expect(org.apache.kafka.clients.producer.ProducerRecord).to receive(:new)
+         .with("test", "172.0.0.1", event.to_s).and_call_original
+       expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send).and_call_original
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge({"message_key" => "%{host}"}))
+       kafka.register
+       kafka.multi_receive([event])
+     end
+
+     it 'should raise config error when truststore location is not set and ssl is enabled' do
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("security_protocol" => "SSL"))
+       expect { kafka.register }.to raise_error(LogStash::ConfigurationError, /ssl_truststore_location must be set when SSL is enabled/)
+     end
+   end
+
+   context "when KafkaProducer#send() raises an exception" do
+     let(:failcount) { (rand * 10).to_i }
+     let(:sendcount) { failcount + 1 }
+
+     let(:exception_classes) { [
+       org.apache.kafka.common.errors.TimeoutException,
+       org.apache.kafka.common.errors.InterruptException,
+       org.apache.kafka.common.errors.SerializationException
+     ] }
+
+     before do
+       count = 0
+       expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+         .exactly(sendcount).times
+         .and_wrap_original do |m, *args|
+         if count < failcount # fail 'failcount' times in a row.
+           count += 1
+           # Pick an exception at random
+           raise exception_classes.shuffle.first.new("injected exception for testing")
+         else
+           m.call(*args) # call original
+         end
+       end
+     end
+
+     it "should retry until successful" do
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+       kafka.register
+       kafka.multi_receive([event])
+     end
+   end
+
+   context "when a send fails" do
+     context "and the default retries behavior is used" do
+       # Fail this many times and then finally succeed.
+       let(:failcount) { (rand * 10).to_i }
+
+       # Expect KafkaProducer.send() to get called again after every failure, plus the successful one.
+       let(:sendcount) { failcount + 1 }
+
+       it "should retry until successful" do
+         count = 0;
+
+         expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+           .exactly(sendcount).times
+           .and_wrap_original do |m, *args|
+           if count < failcount
+             count += 1
+             # inject some failures.
+
+             # Return a custom Future that will raise an exception to simulate a Kafka send() problem.
+             future = java.util.concurrent.FutureTask.new { raise "Failed" }
+             future.run
+             future
+           else
+             m.call(*args)
+           end
+         end
+         kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+         kafka.register
+         kafka.multi_receive([event])
+       end
+     end
+
+     context 'when retries is 0' do
+       let(:retries) { 0 }
+       let(:max_sends) { 1 }
+
+       it "should only send once" do
+         expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+           .once
+           .and_wrap_original do |m, *args|
+           # Always fail.
+           future = java.util.concurrent.FutureTask.new { raise "Failed" }
+           future.run
+           future
+         end
+         kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+         kafka.register
+         kafka.multi_receive([event])
+       end
+
+       it 'should not sleep' do
+         expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+           .once
+           .and_wrap_original do |m, *args|
+           # Always fail.
+           future = java.util.concurrent.FutureTask.new { raise "Failed" }
+           future.run
+           future
+         end
+
+         kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+         expect(kafka).not_to receive(:sleep).with(anything)
+         kafka.register
+         kafka.multi_receive([event])
+       end
+     end
+
+     context "and when retries is set by the user" do
+       let(:retries) { (rand * 10).to_i }
+       let(:max_sends) { retries + 1 }
+
+       it "should give up after retries are exhausted" do
+         expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+           .at_most(max_sends).times
+           .and_wrap_original do |m, *args|
+           # Always fail.
+           future = java.util.concurrent.FutureTask.new { raise "Failed" }
+           future.run
+           future
+         end
+         kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+         kafka.register
+         kafka.multi_receive([event])
+       end
+
+       it 'should only sleep retries number of times' do
+         expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+           .at_most(max_sends)
+           .and_wrap_original do |m, *args|
+           # Always fail.
+           future = java.util.concurrent.FutureTask.new { raise "Failed" }
+           future.run
+           future
+         end
+         kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+         expect(kafka).to receive(:sleep).exactly(retries).times
+         kafka.register
+         kafka.multi_receive([event])
+       end
+     end
+   end
+ end
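
The retry examples above all reuse the same stub: a java.util.concurrent.FutureTask whose body raises, so once run the future is already failed and get rethrows the stored error as an ExecutionException, which is what pushes the output into its retry path. A minimal JRuby sketch of that stub in isolation:

    future = java.util.concurrent.FutureTask.new { raise "Failed" }
    future.run                         # the task body raises; the failure is stored in the future
    begin
      future.get                       # rethrows the stored failure
    rescue java.util.concurrent.ExecutionException => e
      puts "send future failed: #{e.cause}"
    end
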
metadata ADDED
@@ -0,0 +1,228 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-integration-kafka
+ version: !ruby/object:Gem::Version
+   version: 10.0.0
+ platform: java
+ authors:
+ - Elastic
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2019-10-15 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.3.12
+   name: jar-dependencies
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 0.3.12
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+   name: logstash-core-plugin-api
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 6.5.0
+   name: logstash-core
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 6.5.0
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: logstash-codec-json
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: logstash-codec-plain
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 0.0.22
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: 0.1.0
+   name: stud
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 0.0.22
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: 0.1.0
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: logstash-devutils
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: rspec-wait
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: poseidon
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: snappy
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: This gem is a Logstash plugin required to be installed on top of the
+   Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+   gem is not a stand-alone program.
+ email: info@elastic.co
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - CHANGELOG.md
+ - CONTRIBUTORS
+ - DEVELOPER.md
+ - Gemfile
+ - LICENSE
+ - NOTICE.TXT
+ - README.md
+ - docs/index.asciidoc
+ - docs/input-kafka.asciidoc
+ - docs/output-kafka.asciidoc
+ - lib/logstash-integration-kafka_jars.rb
+ - lib/logstash/inputs/kafka.rb
+ - lib/logstash/outputs/kafka.rb
+ - logstash-integration-kafka.gemspec
+ - spec/integration/inputs/kafka_spec.rb
+ - spec/integration/outputs/kafka_spec.rb
+ - spec/unit/inputs/kafka_spec.rb
+ - spec/unit/outputs/kafka_spec.rb
+ - vendor/jar-dependencies/com/github/luben/zstd-jni/1.4.2-1/zstd-jni-1.4.2-1.jar
+ - vendor/jar-dependencies/org/apache/kafka/kafka-clients/2.3.0/kafka-clients-2.3.0.jar
+ - vendor/jar-dependencies/org/lz4/lz4-java/1.6.0/lz4-java-1.6.0.jar
+ - vendor/jar-dependencies/org/slf4j/slf4j-api/1.7.26/slf4j-api-1.7.26.jar
+ - vendor/jar-dependencies/org/xerial/snappy/snappy-java/1.1.7.3/snappy-java-1.1.7.3.jar
+ homepage: http://www.elastic.co/guide/en/logstash/current/index.html
+ licenses:
+ - Apache-2.0
+ metadata:
+   logstash_plugin: 'true'
+   logstash_group: integration
+   integration_plugins: logstash-input-kafka,logstash-output-kafka
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ - vendor/jar-dependencies
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.6.14
+ signing_key:
+ specification_version: 4
+ summary: Integration with Kafka - input and output plugins
+ test_files:
+ - spec/integration/inputs/kafka_spec.rb
+ - spec/integration/outputs/kafka_spec.rb
+ - spec/unit/inputs/kafka_spec.rb
+ - spec/unit/outputs/kafka_spec.rb
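
As the description field states, the gem is installed on top of Logstash with $LS_HOME/bin/logstash-plugin install logstash-integration-kafka. When working from source it can also be referenced from a Gemfile; a sketch, with an illustrative version constraint:

    # Gemfile (sketch; the constraint below is illustrative, not prescribed by the gemspec)
    source 'https://rubygems.org'

    gem 'logstash-integration-kafka', '~> 10.0'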