logstash-integration-kafka 11.3.3-java → 11.3.4-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/docs/output-kafka.asciidoc +2 -2
- data/lib/logstash/outputs/kafka.rb +3 -3
- data/logstash-integration-kafka.gemspec +1 -1
- data/spec/unit/outputs/kafka_spec.rb +20 -0
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fb2e5a937c705523e56611e75be5b76453c42b6d5ed241cf86f990251aa673c7
+  data.tar.gz: 379bad770fdc823d19d676041247949f55f51f2f77fe267c7be5fc9f67141053
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 320e5ee1e26113cceb2692c2ec6852f8c9b41adbcba9609e7e917116040c85196c91a0051fcc6a40213e97092c1f3ea4507930dc6b9f7bbf9501dd1c2caaab35
+  data.tar.gz: ab200d48d0d3713f14ab97095d5b56467e79b6c4c22ef23bb32cf26eb7e80f1497565d4b6577d31373bae472bd2955a8dbb59fcd89674c20c3ab2acbe01842bb
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,6 @@
+## 11.3.4
+  - Fix "retries" and "value_serializer" error handling in output plugin [#160](https://github.com/logstash-plugins/logstash-integration-kafka/pull/160)
+
 ## 11.3.3
   - Fix "Can't modify frozen string" error when record value is `nil` (tombstones) [#155](https://github.com/logstash-plugins/logstash-integration-kafka/pull/155)
data/docs/output-kafka.asciidoc
CHANGED
@@ -184,7 +184,7 @@ resolved and expanded into a list of canonical names.
 [NOTE]
 ====
 Starting from Kafka 3 `default` value for `client.dns.lookup` value has been removed.
-If explicitly configured it
+If not explicitly configured it defaults to `use_all_dns_ips`.
 ====

 [id="plugins-{type}s-{plugin}-client_id"]
@@ -348,7 +348,7 @@ retries are exhausted.
 * There is no default value for this setting.

 The default retry behavior is to retry until successful. To prevent data loss,
-
+changing this setting is discouraged.

 If you choose to set `retries`, a value greater than zero will cause the
 client to only retry a fixed number of times. This will result in data loss
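To make the corrected `retries` guidance concrete, here is a minimal Ruby sketch, assuming the gem is installed and using a placeholder topic name (not taken from the docs), of the two documented modes:

    require "logstash/outputs/kafka"

    # Default: `retries` unset, so the Kafka client retries until successful (no data loss).
    safe = LogStash::Outputs::Kafka.new("topic_id" => "events")

    # Finite retries: as the register hunk below shows, registering this plugin logs a
    # LOSE DATA warning, because sends are abandoned once the configured attempts are exhausted.
    risky = LogStash::Outputs::Kafka.new("topic_id" => "events", "retries" => 3)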
data/lib/logstash/outputs/kafka.rb
CHANGED
@@ -185,7 +185,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base

     if !@retries.nil?
       if @retries < 0
-        raise ConfigurationError, "A negative retry count (#{@retries}) is not valid. Must be a value >= 0"
+        raise LogStash::ConfigurationError, "A negative retry count (#{@retries}) is not valid. Must be a value >= 0"
       end

       logger.warn("Kafka output is configured with finite retry. This instructs Logstash to LOSE DATA after a set number of send attempts fails. If you do not want to lose data if Kafka is down, then you must remove the retry setting.", :retries => @retries)
@@ -193,7 +193,6 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base

     reassign_dns_lookup

-    @producer = create_producer
     if value_serializer == 'org.apache.kafka.common.serialization.StringSerializer'
       @codec.on_event do |event, data|
         write_to_kafka(event, data)
@@ -203,8 +202,9 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
         write_to_kafka(event, data.to_java_bytes)
       end
     else
-      raise ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
+      raise LogStash::ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
     end
+    @producer = create_producer
   end

   def prepare(record)
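Two things change in this hunk. First, both `raise` sites now qualify the error class: with a compact class definition (`class LogStash::Outputs::Kafka < LogStash::Outputs::Base`), Ruby's lexical constant lookup never searches the enclosing `LogStash` module, so the bare `ConfigurationError` resolved to nothing and surfaced as a `NameError`. A self-contained sketch with stub classes (a hypothetical `Demo` output, not the plugin's real code) showing the failure mode:

    module LogStash
      class ConfigurationError < StandardError; end
      module Outputs; class Base; end; end
    end

    # Compact definition, mirroring the plugin: lexical scope is only the Demo class.
    class LogStash::Outputs::Demo < LogStash::Outputs::Base
      def register
        # Lookup tries Demo, then its ancestors, then Object, but never the
        # enclosing LogStash module, so this line raises NameError instead.
        raise ConfigurationError, "bad setting"
      end
    end

    begin
      LogStash::Outputs::Demo.new.register
    rescue NameError => e
      puts e.class  # NameError, not the intended LogStash::ConfigurationError
    end

Second, `@producer = create_producer` moves below the serializer check, so an invalid `value_serializer` now fails fast instead of first creating a producer that is never used.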
data/logstash-integration-kafka.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-kafka'
-  s.version = '11.3.3'
+  s.version = '11.3.4'
   s.licenses = ['Apache-2.0']
   s.summary = "Integration with Kafka - input and output plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
data/spec/unit/outputs/kafka_spec.rb
CHANGED
@@ -221,6 +221,26 @@ describe "outputs/kafka" do
       kafka.multi_receive([event])
     end
   end
+    context 'when retries is -1' do
+      let(:retries) { -1 }
+
+      it "should raise a Configuration error" do
+        kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+        expect { kafka.register }.to raise_error(LogStash::ConfigurationError)
+      end
+    end
+  end
+
+  describe "value_serializer" do
+    let(:output) { LogStash::Plugin.lookup("output", "kafka").new(config) }
+
+    context "when a random string is set" do
+      let(:config) { { "topic_id" => "random", "value_serializer" => "test_string" } }
+
+      it "raises a ConfigurationError" do
+        expect { output.register }.to raise_error(LogStash::ConfigurationError)
+      end
+    end
   end

   context 'when ssl endpoint identification disabled' do
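The new examples call `register` directly and assert on the qualified error class. A quick manual check mirroring them (the config values are the spec's placeholders), runnable in a console where the plugin is installed:

    require "logstash/outputs/kafka"

    out = LogStash::Plugin.lookup("output", "kafka").new(
      "topic_id" => "random", "value_serializer" => "test_string"
    )
    begin
      out.register
    rescue LogStash::ConfigurationError => e
      puts "rejected as expected: #{e.message}"
    end

Before this release the same call surfaced a `NameError`, which is why `raise_error(LogStash::ConfigurationError)` could not previously pass.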
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-kafka
 version: !ruby/object:Gem::Version
-  version: 11.3.3
+  version: 11.3.4
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-
+date: 2024-02-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement