logstash-integration-kafka 11.3.2-java → 11.3.4-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: cd3536a455fdf7a64f5882e81c0648022d572660d4dc45841249374c1cd7e406
-   data.tar.gz: 26281f8584e29961625c8e385cfd599fd43444cbedd84d191b5542f603d00701
+   metadata.gz: fb2e5a937c705523e56611e75be5b76453c42b6d5ed241cf86f990251aa673c7
+   data.tar.gz: 379bad770fdc823d19d676041247949f55f51f2f77fe267c7be5fc9f67141053
  SHA512:
-   metadata.gz: 226f3c894edb182246d36bc283f7fc8d5d8471d9d411b98054a3770f7e414ca321ddbe401012c53ff5bbf4fd74da0340193153ebf7204a74e03802dc8c3df8ad
-   data.tar.gz: 12a5454b826df30697e4f505e96893687202b8cd17536599a5832a518653274218a278d0139c3150f7c48806e823251f91f3b583ee9b60f828104b5e052fea1b
+   metadata.gz: 320e5ee1e26113cceb2692c2ec6852f8c9b41adbcba9609e7e917116040c85196c91a0051fcc6a40213e97092c1f3ea4507930dc6b9f7bbf9501dd1c2caaab35
+   data.tar.gz: ab200d48d0d3713f14ab97095d5b56467e79b6c4c22ef23bb32cf26eb7e80f1497565d4b6577d31373bae472bd2955a8dbb59fcd89674c20c3ab2acbe01842bb
data/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
+ ## 11.3.4
+ - Fix "retries" and "value_serializer" error handling in output plugin [#160](https://github.com/logstash-plugins/logstash-integration-kafka/pull/160)
+
+ ## 11.3.3
+ - Fix "Can't modify frozen string" error when record value is `nil` (tombstones) [#155](https://github.com/logstash-plugins/logstash-integration-kafka/pull/155)
+
  ## 11.3.2
- - Fix: update Avro library [#150](https://api.github.com/repos/logstash-plugins/logstash-integration-kafka/pulls/150)
+ - Fix: update Avro library [#150](https://github.com/logstash-plugins/logstash-integration-kafka/pull/150)

  ## 11.3.1
  - Fix: update snappy dependency [#148](https://github.com/logstash-plugins/logstash-integration-kafka/pull/148)
@@ -184,7 +184,7 @@ resolved and expanded into a list of canonical names.
  [NOTE]
  ====
  Starting from Kafka 3 `default` value for `client.dns.lookup` value has been removed.
- If explicitly configured it fallbacks to `use_all_dns_ips`.
+ If not explicitly configured it defaults to `use_all_dns_ips`.
  ====

  [id="plugins-{type}s-{plugin}-client_id"]
@@ -348,7 +348,7 @@ retries are exhausted.
  * There is no default value for this setting.

  The default retry behavior is to retry until successful. To prevent data loss,
- the use of this setting is discouraged.
+ changing this setting is discouraged.

  If you choose to set `retries`, a value greater than zero will cause the
  client to only retry a fixed number of times. This will result in data loss
@@ -356,7 +356,8 @@ class LogStash::Inputs::Kafka < LogStash::Inputs::Base
    end

    def handle_record(record, codec_instance, queue)
-     codec_instance.decode(record.value.to_s) do |event|
+       # use + since .to_s on nil/boolean returns a frozen string since ruby 2.7
+       codec_instance.decode(+record.value.to_s) do |event|
        decorate(event)
        maybe_set_metadata(event, record)
        queue << event
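Editor's note on the unary plus: since Ruby 2.7, `to_s` on `nil`, `true`, and `false` returns a frozen string (https://bugs.ruby-lang.org/issues/16150), and a Kafka tombstone delivers `record.value` as `nil`, so a codec that mutates its input hit "can't modify frozen String". A quick illustration in plain Ruby, independent of the plugin:

  # Since Ruby 2.7, nil.to_s / true.to_s / false.to_s return frozen strings.
  nil.to_s.frozen?              # => true
  # String#+@ returns the receiver if it is mutable, or an unfrozen copy if
  # it is frozen, so the decoded payload is always safe to mutate in place.
  (+nil.to_s).frozen?           # => false
  (+"already mutable").frozen?  # => false (same object, no copy made)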
@@ -185,7 +185,7 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base

    if !@retries.nil?
      if @retries < 0
-       raise ConfigurationError, "A negative retry count (#{@retries}) is not valid. Must be a value >= 0"
+       raise LogStash::ConfigurationError, "A negative retry count (#{@retries}) is not valid. Must be a value >= 0"
      end

      logger.warn("Kafka output is configured with finite retry. This instructs Logstash to LOSE DATA after a set number of send attempts fails. If you do not want to lose data if Kafka is down, then you must remove the retry setting.", :retries => @retries)
@@ -193,7 +193,6 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base

    reassign_dns_lookup

-   @producer = create_producer
    if value_serializer == 'org.apache.kafka.common.serialization.StringSerializer'
      @codec.on_event do |event, data|
        write_to_kafka(event, data)
@@ -203,8 +202,9 @@ class LogStash::Outputs::Kafka < LogStash::Outputs::Base
        write_to_kafka(event, data.to_java_bytes)
      end
    else
-     raise ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
+     raise LogStash::ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
    end
+   @producer = create_producer
  end

  def prepare(record)
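Editor's note: this reordering is the other half of the `register` fix. `create_producer` previously ran before the `value_serializer` check, so an invalid serializer could leave a live producer behind while registration failed; now a `LogStash::ConfigurationError` aborts before any connection is opened. A condensed sketch of the resulting flow; the two helpers are hypothetical stand-ins for the inline checks above, not plugin methods:

  def register
    # 1. Validate settings first; any failure raises LogStash::ConfigurationError
    #    before Kafka is touched.
    validate_retries!   # hypothetical: the @retries >= 0 check
    setup_serializer!   # hypothetical: the value_serializer branch
    # 2. Only a fully validated configuration reaches the broker.
    @producer = create_producer
  end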
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
    s.name = 'logstash-integration-kafka'
-   s.version = '11.3.2'
+   s.version = '11.3.4'
    s.licenses = ['Apache-2.0']
    s.summary = "Integration with Kafka - input and output plugins"
    s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline "+
@@ -301,7 +301,7 @@ def consume_messages(config, queue: Queue.new, timeout:, event_count:)
  end


- describe "schema registry connection options" do
+ describe "schema registry connection options", :integration => true do
    schema_registry = Manticore::Client.new
    before (:all) do
      shutdown_schema_registry
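Editor's note: tagging the block with `:integration => true` keeps these examples out of plain unit-test runs, since they need a live Schema Registry. With standard RSpec tag filtering they are selected or excluded explicitly:

  # Run only the integration-tagged examples (requires Kafka + Schema Registry up):
  #   bundle exec rspec --tag integration
  # Exclude them, as in a typical unit-test run:
  #   bundle exec rspec --tag ~integration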
@@ -134,6 +134,21 @@ describe LogStash::Inputs::Kafka do
    it 'should set the consumer thread name' do
      expect(subject.instance_variable_get('@runner_threads').first.get_name).to eq("kafka-input-worker-test-0")
    end
+
+   context 'with records value frozen' do
+     # boolean, module name & nil .to_s are frozen by default (https://bugs.ruby-lang.org/issues/16150)
+     let(:payload) do [
+       org.apache.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "nil", nil),
+       org.apache.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "true", true),
+       org.apache.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "false", false),
+       org.apache.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "frozen", "".freeze)
+       ]
+     end
+
+     it "should process events" do
+       expect(q.size).to eq(4)
+     end
+   end
  end

  context 'when errors are encountered during poll' do
@@ -221,6 +221,26 @@ describe "outputs/kafka" do
        kafka.multi_receive([event])
      end
    end
+   context 'when retries is -1' do
+     let(:retries) { -1 }
+
+     it "should raise a Configuration error" do
+       kafka = LogStash::Outputs::Kafka.new(simple_kafka_config.merge("retries" => retries))
+       expect { kafka.register }.to raise_error(LogStash::ConfigurationError)
+     end
+   end
+ end
+
+ describe "value_serializer" do
+   let(:output) { LogStash::Plugin.lookup("output", "kafka").new(config) }
+
+   context "when a random string is set" do
+     let(:config) { { "topic_id" => "random", "value_serializer" => "test_string" } }
+
+     it "raises a ConfigurationError" do
+       expect { output.register }.to raise_error(LogStash::ConfigurationError)
+     end
+   end
  end

  context 'when ssl endpoint identification disabled' do
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-kafka
  version: !ruby/object:Gem::Version
-   version: 11.3.2
+   version: 11.3.4
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-10-16 00:00:00.000000000 Z
+ date: 2024-02-06 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement