logstash-output-kafka 7.3.1 → 7.3.2
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/lib/logstash/outputs/kafka.rb +9 -7
- data/logstash-output-kafka.gemspec +1 -1
- data/spec/unit/outputs/kafka_spec.rb +51 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a10de2fd3b875b0349bfd81afdc25808cc90c90599868f176e73e3758614821d
+  data.tar.gz: b3682d3204b4b5304a78b6a64e2f2300f8890ef80805c82cd507999f94514f8d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e1555e10e79b349cab59d256c353718bd415a58ebcf2c698437da45fd14a92c07cff21e1122cf349e768d3d6883f74532a363ee19d0d27a094dd3e73b2a62275
+  data.tar.gz: 2163e214ab7306f8fd92176e0e72b6d7d792c79831fcd1eb64a305bd1c7d2d5e28cf0f3b1e2d0d24a8e774b86b2c7be43bc32eeb60aee86903ee7648e1b936ac
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,6 @@
+## 7.3.2
+ - Fixed issue with unnecessary sleep after retries exhausted [#216](https://github.com/logstash-plugins/logstash-output-kafka/pull/216)
+
 ## 7.3.1
  - Added support for kafka property `ssl.endpoint.identification.algorithm` [#213](https://github.com/logstash-plugins/logstash-output-kafka/pull/213)
 
data/lib/logstash/outputs/kafka.rb
CHANGED
@@ -231,7 +231,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
   end
 
   def retrying_send(batch)
-    remaining = @retries
+    remaining = @retries
 
     while batch.any?
       if !remaining.nil?
@@ -281,13 +281,15 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
       break if failures.empty?
 
       # Otherwise, retry with any failed transmissions
-
-
-
-
-
+      if remaining.nil? || remaining >= 0
+        delay = @retry_backoff_ms / 1000.0
+        logger.info("Sending batch to Kafka failed. Will retry after a delay.", :batch_size => batch.size,
+                    :failures => failures.size,
+                    :sleep => delay)
+        batch = failures
+        sleep(delay)
+      end
     end
-
   end
 
   def close
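Taken together, these two hunks make the backoff sleep conditional: once `remaining` drops below zero the loop exits without the trailing `sleep(delay)` that 7.3.1 performed even after the last permitted retry. The following stand-alone Ruby sketch is illustrative only, not the plugin's code; `send_one` is a hypothetical stand-in for the producer send, and the keyword arguments mirror the plugin's `retries` / `retry_backoff_ms` options.

# Minimal sketch of the guarded retry loop introduced above (illustrative only).
# retries: nil  => retry failed messages indefinitely, sleeping between attempts
# retries: 0    => attempt the batch once and return without sleeping
def retrying_send_sketch(batch, retries: nil, retry_backoff_ms: 100, &send_one)
  remaining = retries
  while batch.any?
    unless remaining.nil?
      break if remaining < 0            # retries exhausted: give up, no final sleep
      remaining -= 1
    end
    failures = batch.reject { |msg| send_one.call(msg) }
    break if failures.empty?
    if remaining.nil? || remaining >= 0
      batch = failures
      sleep(retry_backoff_ms / 1000.0)  # back off only when another attempt follows
    end
  end
end

# With "retries" => 0 (as in the new specs) a permanently failing send is
# attempted exactly once and the method returns without sleeping:
retrying_send_sketch(%w[a b c], retries: 0) { |_msg| false }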
data/logstash-output-kafka.gemspec
CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-output-kafka'
-  s.version = '7.3.1'
+  s.version = '7.3.2'
   s.licenses = ['Apache-2.0']
   s.summary = "Writes events to a Kafka topic"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/unit/outputs/kafka_spec.rb
CHANGED
@@ -49,7 +49,7 @@ describe "outputs/kafka" do
     kafka.register
     kafka.multi_receive([event])
   end
-
+
   it 'should raise config error when truststore location is not set and ssl is enabled' do
     kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("security_protocol" => "SSL"))
     expect { kafka.register }.to raise_error(LogStash::ConfigurationError, /ssl_truststore_location must be set when SSL is enabled/)
@@ -120,6 +120,41 @@ describe "outputs/kafka" do
     end
   end
 
+  context 'when retries is 0' do
+    let(:retries) { 0 }
+    let(:max_sends) { 1 }
+
+    it "should should only send once" do
+      expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+        .once
+        .and_wrap_original do |m, *args|
+        # Always fail.
+        future = java.util.concurrent.FutureTask.new { raise "Failed" }
+        future.run
+        future
+      end
+      kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+      kafka.register
+      kafka.multi_receive([event])
+    end
+
+    it 'should not sleep' do
+      expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+        .once
+        .and_wrap_original do |m, *args|
+        # Always fail.
+        future = java.util.concurrent.FutureTask.new { raise "Failed" }
+        future.run
+        future
+      end
+
+      kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+      expect(kafka).not_to receive(:sleep).with(anything)
+      kafka.register
+      kafka.multi_receive([event])
+    end
+  end
+
   context "and when retries is set by the user" do
     let(:retries) { (rand * 10).to_i }
     let(:max_sends) { retries + 1 }
@@ -137,6 +172,21 @@ describe "outputs/kafka" do
       kafka.register
       kafka.multi_receive([event])
     end
+
+    it 'should only sleep retries number of times' do
+      expect_any_instance_of(org.apache.kafka.clients.producer.KafkaProducer).to receive(:send)
+        .at_most(max_sends)
+        .and_wrap_original do |m, *args|
+        # Always fail.
+        future = java.util.concurrent.FutureTask.new { raise "Failed" }
+        future.run
+        future
+      end
+      kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+      expect(kafka).to receive(:sleep).exactly(retries).times
+      kafka.register
+      kafka.multi_receive([event])
+    end
     end
   end
 end
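The new examples simulate a producer failure without a broker by returning a FutureTask that has already completed exceptionally, so that when the plugin later asks the future for its result it sees a failure, much as it would for a rejected send. As a minimal sketch of that trick in isolation (assuming JRuby, which the plugin runs on; the rescue is only there to show the error surfacing):

# A FutureTask whose body raises; after run, get() reports the failure.
failed_future = java.util.concurrent.FutureTask.new { raise "Failed" }
failed_future.run
begin
  failed_future.get
rescue java.util.concurrent.ExecutionException => e
  puts "send failed: #{e.cause}"  # surfaces the raise from the task body
end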
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-kafka
 version: !ruby/object:Gem::Version
-  version: 7.3.1
+  version: 7.3.2
 platform: ruby
 authors:
 - Elasticsearch
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2019-02-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement