logstash-integration-kafka 10.1.0-java → 10.5.1-java
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +20 -0
- data/CONTRIBUTORS +1 -0
- data/docs/index.asciidoc +7 -2
- data/docs/input-kafka.asciidoc +124 -81
- data/docs/output-kafka.asciidoc +69 -27
- data/lib/logstash/inputs/kafka.rb +61 -51
- data/lib/logstash/outputs/kafka.rb +48 -31
- data/logstash-integration-kafka.gemspec +1 -1
- data/spec/unit/inputs/kafka_spec.rb +50 -0
- data/spec/unit/outputs/kafka_spec.rb +40 -8
- metadata +2 -2
data/logstash-integration-kafka.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-kafka'
-  s.version = '10.1.0'
+  s.version = '10.5.1'
   s.licenses = ['Apache-2.0']
   s.summary = "Integration with Kafka - input and output plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/unit/inputs/kafka_spec.rb
CHANGED
@@ -48,4 +48,54 @@ describe LogStash::Inputs::Kafka do
       expect( subject.send(:create_consumer, 'sample_client-0') ).to be kafka_client
     end
   end
+
+  context 'string integer config' do
+    let(:config) { super().merge('session_timeout_ms' => '25000', 'max_poll_interval_ms' => '345000') }
+
+    it "sets integer values" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('session.timeout.ms' => '25000', 'max.poll.interval.ms' => '345000')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-1') ).to be kafka_client
+    end
+  end
+
+  context 'integer config' do
+    let(:config) { super().merge('session_timeout_ms' => 25200, 'max_poll_interval_ms' => 123_000) }
+
+    it "sets integer values" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('session.timeout.ms' => '25200', 'max.poll.interval.ms' => '123000')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-2') ).to be kafka_client
+    end
+  end
+
+  context 'string boolean config' do
+    let(:config) { super().merge('enable_auto_commit' => 'false', 'check_crcs' => 'true') }
+
+    it "sets parameters" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('enable.auto.commit' => 'false', 'check.crcs' => 'true')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-3') ).to be kafka_client
+      expect( subject.enable_auto_commit ).to be false
+    end
+  end
+
+  context 'boolean config' do
+    let(:config) { super().merge('enable_auto_commit' => true, 'check_crcs' => false) }
+
+    it "sets parameters" do
+      expect(org.apache.kafka.clients.consumer.KafkaConsumer).
+          to receive(:new).with(hash_including('enable.auto.commit' => 'true', 'check.crcs' => 'false')).
+          and_return kafka_client = double('kafka-consumer')
+
+      expect( subject.send(:create_consumer, 'sample_client-4') ).to be kafka_client
+      expect( subject.enable_auto_commit ).to be true
+    end
+  end
 end
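The 50 new input-spec lines pin down config type coercion: `session_timeout_ms`, `max_poll_interval_ms`, `enable_auto_commit`, and `check_crcs` are now accepted as native integers and booleans as well as strings, and either form must surface as the same string-valued entry in the consumer properties, since Kafka's Java client reads every property as a string. A minimal JRuby sketch of that normalization; the helper name `normalize_consumer_props` is hypothetical (the plugin builds its properties inside `create_consumer`):

```ruby
# Hypothetical helper (not the plugin's actual code) showing the coercion the
# specs assert: kafka-clients accepts every property as a string, so integer
# and boolean Logstash values are stringified before reaching the consumer.
def normalize_consumer_props(config)
  props = java.util.Properties.new
  { 'session_timeout_ms'   => 'session.timeout.ms',
    'max_poll_interval_ms' => 'max.poll.interval.ms',
    'enable_auto_commit'   => 'enable.auto.commit',
    'check_crcs'           => 'check.crcs' }.each do |logstash_key, kafka_key|
    value = config[logstash_key]
    # 25_000 and '25000' both become "25000"; true and 'true' both become "true"
    props.put(kafka_key, value.to_s) unless value.nil?
  end
  props
end
```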
data/spec/unit/outputs/kafka_spec.rb
CHANGED
@@ -56,14 +56,15 @@ describe "outputs/kafka" do
     end
   end

-  context "when KafkaProducer#send() raises an exception" do
+  context "when KafkaProducer#send() raises a retriable exception" do
     let(:failcount) { (rand * 10).to_i }
     let(:sendcount) { failcount + 1 }

     let(:exception_classes) { [
       org.apache.kafka.common.errors.TimeoutException,
+      org.apache.kafka.common.errors.DisconnectException,
+      org.apache.kafka.common.errors.CoordinatorNotAvailableException,
       org.apache.kafka.common.errors.InterruptException,
-      org.apache.kafka.common.errors.SerializationException
     ] }

     before do
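The exceptions injected here (`TimeoutException`, `DisconnectException`, `CoordinatorNotAvailableException`, and the pre-existing `InterruptException`) all subclass `org.apache.kafka.common.errors.RetriableException`, while the removed `SerializationException` does not, so this context now asserts retry behavior only for errors the producer can meaningfully retry. A hedged sketch of the retry shape being tested; `send_with_retries` is an illustrative name, not the plugin's actual method:

```ruby
# Illustrative only -- the plugin's real retry logic is more involved and also
# handles failed futures. Retry a synchronous send failure only while Kafka
# marks the error as retriable.
def send_with_retries(producer, record, max_retries)
  attempts = 0
  begin
    producer.send(record) # the spec's stub raises directly from here
  rescue org.apache.kafka.common.errors.RetriableException => e
    raise e if attempts >= max_retries # retries exhausted: surface the error
    attempts += 1
    sleep(0.1) # brief backoff between attempts
    retry
  end
end
```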
@@ -88,6 +89,37 @@ describe "outputs/kafka" do
     end
   end

+  context "when KafkaProducer#send() raises a non-retriable exception" do
+    let(:failcount) { (rand * 10).to_i }
+
+    let(:exception_classes) { [
+      org.apache.kafka.common.errors.SerializationException,
+      org.apache.kafka.common.errors.RecordTooLargeException,
+      org.apache.kafka.common.errors.InvalidTopicException
+    ] }
+
+    before do
+      count = 0
+      expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+          .exactly(1).times
+          .and_wrap_original do |m, *args|
+        if count < failcount # fail 'failcount' times in a row.
+          count += 1
+          # Pick an exception at random
+          raise exception_classes.shuffle.first.new("injected exception for testing")
+        else
+          m.call(*args) # call original
+        end
+      end
+    end
+
+    it "should not retry" do
+      kafka = LogStash::Outputs::Kafka.new(simple_kafka_config)
+      kafka.register
+      kafka.multi_receive([event])
+    end
+  end
+
   context "when a send fails" do
     context "and the default retries behavior is used" do
       # Fail this many times and then finally succeed.
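The complementary assertion: `send` is expected exactly once because `SerializationException`, `RecordTooLargeException`, and `InvalidTopicException` do not descend from `RetriableException`, and resending an unserializable or oversized record would fail identically. Under JRuby the hierarchy is visible directly on the proxy classes (assuming kafka-clients is on the classpath):

```ruby
retriable = org.apache.kafka.common.errors.RetriableException

org.apache.kafka.common.errors.TimeoutException.ancestors.include?(retriable)
# => true  -- transient broker-side condition, worth retrying
org.apache.kafka.common.errors.SerializationException.ancestors.include?(retriable)
# => false -- the same record would fail the same way every time
```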
@@ -107,7 +139,7 @@ describe "outputs/kafka" do
           # inject some failures.

           # Return a custom Future that will raise an exception to simulate a Kafka send() problem.
-          future = java.util.concurrent.FutureTask.new { raise "Failed" }
+          future = java.util.concurrent.FutureTask.new { raise org.apache.kafka.common.errors.TimeoutException.new("Failed") }
           future.run
           future
         else
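This fixture change (repeated in the three hunks that follow) swaps a bare Ruby `RuntimeError` for a real Kafka `TimeoutException`, so the simulated future now fails with an exception class the output's retriable/non-retriable handling actually recognizes. The `FutureTask` trick in isolation, runnable under JRuby with kafka-clients on the classpath:

```ruby
# FutureTask runs its block when #run is called; an exception raised inside is
# captured and re-thrown by #get wrapped in an ExecutionException -- the same
# way a failed KafkaProducer#send future behaves.
future = java.util.concurrent.FutureTask.new do
  raise org.apache.kafka.common.errors.TimeoutException.new("Failed")
end
future.run # executes the block; the exception is stored, not raised here

begin
  future.get
rescue java.util.concurrent.ExecutionException => e
  puts e.cause # => org.apache.kafka.common.errors.TimeoutException: Failed
end
```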
@@ -129,7 +161,7 @@ describe "outputs/kafka" do
          .once
          .and_wrap_original do |m, *args|
          # Always fail.
-         future = java.util.concurrent.FutureTask.new { raise "Failed" }
+         future = java.util.concurrent.FutureTask.new { raise org.apache.kafka.common.errors.TimeoutException.new("Failed") }
          future.run
          future
        end
@@ -143,7 +175,7 @@ describe "outputs/kafka" do
          .once
          .and_wrap_original do |m, *args|
          # Always fail.
-         future = java.util.concurrent.FutureTask.new { raise "Failed" }
+         future = java.util.concurrent.FutureTask.new { raise org.apache.kafka.common.errors.TimeoutException.new("Failed") }
          future.run
          future
        end
@@ -164,7 +196,7 @@ describe "outputs/kafka" do
          .at_most(max_sends).times
          .and_wrap_original do |m, *args|
          # Always fail.
-         future = java.util.concurrent.FutureTask.new { raise "Failed" }
+         future = java.util.concurrent.FutureTask.new { raise org.apache.kafka.common.errors.TimeoutException.new("Failed") }
          future.run
          future
        end
@@ -175,10 +207,10 @@ describe "outputs/kafka" do

     it 'should only sleep retries number of times' do
       expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
-        .at_most(max_sends)
+        .at_most(max_sends).times
         .and_wrap_original do |m, *args|
         # Always fail.
-        future = java.util.concurrent.FutureTask.new { raise "Failed" }
+        future = java.util.concurrent.FutureTask.new { raise org.apache.kafka.common.errors.TimeoutException.new("Failed") }
         future.run
         future
       end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 10.1.0
+  version: 10.5.1
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-
+date: 2020-08-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement