logstash-output-elasticsearch 12.0.6-java → 12.1.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: ed4936a4ade53b1bfabb21a6a217a2587ee38235a3219be044ca59a664fd25a1
- data.tar.gz: c06797095dd77c26bd7c70d1d851fcc50ca8e2b4db35485e9f5c018941e54cd2
+ metadata.gz: 2dc84ff1bfa23ba7aac5eea0cf36fc94fc0d3112878fda04fe2bf934fd42ccb8
+ data.tar.gz: b926a70cab6ba1d3415208512ee71fddf79ce8e46e9ab81d59354a1f79dd0cb5
  SHA512:
- metadata.gz: 163f1e9cd1127527b6ce207f18f451c6dfd7510c6f752739908d6804c3fa70b2fced78b22668f6388d02a92d9969dd7f47f079e0180abacf8f0305d56ad14441
- data.tar.gz: 77dc391a2d5ef77f76d07a52b6f00375eb6b9ffb9c8dbb731c849b2bffd8fb25f819c3070a729a9f1c72e7ba4ff845359ba5109aa2e5eb1242461c4b0cab5719
+ metadata.gz: a3f18999365521054239b3d8316134dfb7c33e7f486fbe8e0de2467f25c127eebb42404b8ad649bb1aa2395464bdb1324b4006104ae031a73648e724093afa46
+ data.tar.gz: ab7ee85a0bd5cbee9e18dd0cd2f26be93d1da3857d206948d5a4b392066bfcad51b0ab0ebc3eb5c6418388782855d81b7584e3ec9fb8cc6cb6c71a913657b5ce
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+ ## 12.1.0
+ - Add drop_error_types config option to not retry after certain error types [#1228](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1228)
+
+ ## 12.0.7
+ - Support both encoded and non-encoded api-key formats in the plugin configuration [#1223](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1223)
+
  ## 12.0.6
  - Add headers reporting uncompressed size and doc count for bulk requests [#1217](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1217)

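To illustrate the 12.0.7 change: the `api_key` option now accepts either the raw `id:api_key` pair or its Base64-encoded form. Both lines below configure the same credential; the values are placeholders, not real keys:

    output {
      elasticsearch {
        hosts   => ["https://localhost:9200"]
        api_key => "example_id:example_api_key"              # raw id:api_key pair (placeholder)
        # api_key => "ZXhhbXBsZV9pZDpleGFtcGxlX2FwaV9rZXk="  # the same placeholder pair, Base64-encoded
      }
    }
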
data/docs/index.asciidoc CHANGED
@@ -303,12 +303,17 @@ single request.
  ==== DNS Caching

  This plugin uses the JVM to lookup DNS entries and is subject to the value of
- https://docs.oracle.com/javase/7/docs/technotes/guides/net/properties.html[networkaddress.cache.ttl],
- a global setting for the JVM.
+ https://docs.oracle.com/en/java/javase/21/docs/api/java.base/java/net/doc-files/net-properties.html#address-cache-heading[Address Cache settings]
+ such as `networkaddress.cache.ttl` and `networkaddress.cache.negative.ttl`, which are global settings for the JVM.

  As an example, to set your DNS TTL to 1 second you would set
  the `LS_JAVA_OPTS` environment variable to `-Dnetworkaddress.cache.ttl=1`.

+ The default value for `networkaddress.cache.ttl` depends on the JVM implementation;
+ for the JDK bundled with Logstash it is 30 seconds.
+ The `networkaddress.cache.negative.ttl` setting, which controls how long Java caches
+ the result of failed DNS lookups, defaults to 10 seconds.
+
  Keep in mind that a connection with keepalive enabled will
  not reevaluate its DNS value while the keepalive is in effect.

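For illustration, both cache settings can be pinned the same way, e.g. by setting `LS_JAVA_OPTS` to `-Dnetworkaddress.cache.ttl=30 -Dnetworkaddress.cache.negative.ttl=5`; the values shown here are illustrative, not recommendations.
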
@@ -368,6 +373,7 @@ Please check out <<plugins-{type}s-{plugin}-obsolete-options>> for details.
  | <<plugins-{type}s-{plugin}-doc_as_upsert>> |<<boolean,boolean>>|No
  | <<plugins-{type}s-{plugin}-document_id>> |<<string,string>>|No
  | <<plugins-{type}s-{plugin}-document_type>> |<<string,string>>|No
+ | <<plugins-{type}s-{plugin}-drop_error_types>> |<<array,array>>|No
  | <<plugins-{type}s-{plugin}-ecs_compatibility>> | <<string,string>>|No
  | <<plugins-{type}s-{plugin}-failure_type_logging_whitelist>> |<<array,array>>|No
  | <<plugins-{type}s-{plugin}-healthcheck_path>> |<<string,string>>|No
@@ -639,6 +645,24 @@ If you don't set a value for this option:
  - for elasticsearch clusters 8.x: no value will be used;
  - for elasticsearch clusters 7.x: the value of '_doc' will be used.

+ [id="plugins-{type}s-{plugin}-drop_error_types"]
+ ===== `drop_error_types`
+
+ * Value type is <<array,array>>
+ * Default value is `[]`
+
+ Lists the set of error types for which individual bulk request actions will not be retried. Unless an individual (document-level) action returns 409 or an error type from this list, failures will be retried indefinitely.
+ A warning message will be logged indicating that the action failed, unless the error type is
+ listed in the <<plugins-{type}s-{plugin}-silence_errors_in_log>> config option.
+ Note that the events are not added to the Dead Letter Queue (DLQ), regardless of whether it is enabled.
+
+ [source,ruby]
+ output {
+   elasticsearch {
+     drop_error_types => ["index_closed_exception"]
+   }
+ }
+
  [id="plugins-{type}s-{plugin}-ecs_compatibility"]
  ===== `ecs_compatibility`

@@ -188,25 +188,37 @@ module LogStash; module Outputs; class ElasticSearch;
  def self.setup_api_key(logger, params)
  api_key = params["api_key"]

- return {} unless (api_key && api_key.value)
+ return {} unless (api_key&.value)

- { "Authorization" => "ApiKey " + Base64.strict_encode64(api_key.value) }
- end
+ value = is_base64?(api_key.value) ? api_key.value : Base64.strict_encode64(api_key.value)

- private
- def self.dedup_slashes(url)
- url.gsub(/\/+/, "/")
+ { "Authorization" => "ApiKey #{value}" }
  end

- # Set a `filter_path` query parameter if it is not already set to be
- # `filter_path=errors,items.*.error,items.*.status` to reduce the payload between Logstash and Elasticsearch
- def self.resolve_filter_path(url)
- return url if url.match?(/(?:[&|?])filter_path=/)
- ("#{url}#{query_param_separator(url)}filter_path=errors,items.*.error,items.*.status")
- end
+ class << self
+ private
+ def dedup_slashes(url)
+ url.gsub(/\/+/, "/")
+ end
+
+ # Set a `filter_path` query parameter if it is not already set to be
+ # `filter_path=errors,items.*.error,items.*.status` to reduce the payload between Logstash and Elasticsearch
+ def resolve_filter_path(url)
+ return url if url.match?(/(?:[&|?])filter_path=/)
+ ("#{url}#{query_param_separator(url)}filter_path=errors,items.*.error,items.*.status")
+ end

- def self.query_param_separator(url)
- url.match?(/\?[^\s#]+/) ? '&' : '?'
+ def query_param_separator(url)
+ url.match?(/\?[^\s#]+/) ? '&' : '?'
+ end
+
+ def is_base64?(string)
+ begin
+ string == Base64.strict_encode64(Base64.strict_decode64(string))
+ rescue ArgumentError
+ false
+ end
+ end
  end
  end
  end; end; end
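The new private helper treats an api_key value as already encoded only when a strict decode/encode round trip reproduces the input exactly. A standalone sketch of that round-trip idea (the method name `already_base64?` is illustrative, not the plugin's API):

    require "base64"

    # Returns true only when decoding and re-encoding yields the same string,
    # i.e. the input is already valid, canonical Base64.
    def already_base64?(string)
      string == Base64.strict_encode64(Base64.strict_decode64(string))
    rescue ArgumentError
      false
    end

    already_base64?("aWQ6YXBpLWtleQ==") # => true  ("id:api-key", encoded)
    already_base64?("id:api-key")       # => false (':' and '-' are not in the Base64 alphabet)
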
@@ -204,6 +204,10 @@ module LogStash; module PluginMixins; module ElasticSearch
  # if enabled, failed index name interpolation events go into dead letter queue.
  :dlq_on_failed_indexname_interpolation => { :validate => :boolean, :default => true },

+ # Failures on actions from a bulk request will not be retried for these error types; the events will be dropped.
+ # The events won't be added to the DLQ either.
+ :drop_error_types => { :validate => :string, :list => true, :default => [] },
+
  # Obsolete Settings
  :ssl => { :obsolete => "Set 'ssl_enabled' instead." },
  :ssl_certificate_verification => { :obsolete => "Set 'ssl_verification_mode' instead." },
@@ -278,16 +278,18 @@ module LogStash; module PluginMixins; module ElasticSearch

  status = action_props["status"]
  error = action_props["error"]
+ type = error["type"] if error
  action = actions[idx]

- # Retry logic: If it is success, we move on. If it is a failure, we have 3 paths:
+ # Retry logic: If it is success, we move on. If it is a failure, we have the following paths:
  # - For 409, we log and drop. there is nothing we can do
+ # - For any error types set in the 'drop_error_types' config, log and drop.
  # - For a mapping error, we send to dead letter queue for a human to intervene at a later point.
  # - For everything else there's mastercard. Yep, and we retry indefinitely. This should fix #572 and other transient network issues
  if DOC_SUCCESS_CODES.include?(status)
  @document_level_metrics.increment(:successes)
  next
- elsif DOC_CONFLICT_CODE == status
+ elsif DOC_CONFLICT_CODE == status || @drop_error_types.include?(type)
  @document_level_metrics.increment(:non_retryable_failures)
  @logger.warn "Failed action", status: status, action: action, response: response if log_failure_type?(error)
  next
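In pipeline terms, the new drop path is driven entirely by configuration. A hypothetical snippet combining it with the existing silence_errors_in_log option (host and error-type values are placeholders):

    output {
      elasticsearch {
        hosts                 => ["localhost:9200"]
        drop_error_types      => ["index_closed_exception"]
        # also keep the dropped actions out of the warn log:
        silence_errors_in_log => ["index_closed_exception"]
      }
    }
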
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-output-elasticsearch'
- s.version = '12.0.6'
+ s.version = '12.1.0'
  s.licenses = ['apache-2.0']
  s.summary = "Stores logs in Elasticsearch"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -5,7 +5,10 @@ describe "metrics", :integration => true do
  require "logstash/outputs/elasticsearch"
  settings = {
  "manage_template" => false,
- "hosts" => "#{get_host_port()}"
+ "hosts" => "#{get_host_port()}",
+ # write data to a random non templated index to ensure the bulk partially fails
+ # don't use streams like "logs-*" as those have failure stores enabled, causing the bulk to succeed instead
+ "index" => "custom_index_#{rand(10000)}"
  }
  plugin = LogStash::Outputs::ElasticSearch.new(settings)
  end
@@ -26,6 +26,44 @@ describe LogStash::Outputs::ElasticSearch::HttpClientBuilder do
  end
  end

+ describe "auth setup with api-key" do
+ let(:klass) { LogStash::Outputs::ElasticSearch::HttpClientBuilder }
+
+ context "when api-key is not encoded (id:api-key)" do
+ let(:api_key) { "id:api-key" }
+ let(:api_key_secured) do
+ secured = double("api_key")
+ allow(secured).to receive(:value).and_return(api_key)
+ secured
+ end
+ let(:options) { { "api_key" => api_key_secured } }
+ let(:logger) { double("logger") }
+ let(:api_key_header) { klass.setup_api_key(logger, options) }
+
+ it "returns the correct encoded api-key header" do
+ expected = "ApiKey #{Base64.strict_encode64(api_key)}"
+ expect(api_key_header["Authorization"]).to eql(expected)
+ end
+ end
+
+ context "when api-key is already encoded" do
+ let(:api_key) { Base64.strict_encode64("id:api-key") }
+ let(:api_key_secured) do
+ secured = double("api_key")
+ allow(secured).to receive(:value).and_return(api_key)
+ secured
+ end
+ let(:options) { { "api_key" => api_key_secured } }
+ let(:logger) { double("logger") }
+ let(:api_key_header) { klass.setup_api_key(logger, options) }
+
+ it "returns the api-key header as is" do
+ expected = "ApiKey #{api_key}"
+ expect(api_key_header["Authorization"]).to eql(expected)
+ end
+ end
+ end
+
  describe "customizing action paths" do
  let(:hosts) { [ ::LogStash::Util::SafeURI.new("http://localhost:9200") ] }
  let(:options) { {"hosts" => hosts } }
@@ -1499,6 +1499,80 @@ describe LogStash::Outputs::ElasticSearch do
  end
  end

+ describe 'drop_error_types' do
+
+ let(:error_type) { 'index_closed_exception' }
+
+ let(:options) { super().merge('drop_error_types' => [error_type]) }
+
+ let(:events) { [ LogStash::Event.new("foo" => "bar") ] }
+
+ let(:dlq_writer) { subject.instance_variable_get(:@dlq_writer) }
+
+ let(:error_code) { 403 }
+
+ let(:event_action_tuples) { subject.map_events(events) }
+
+ let(:bulk_response) do
+ {
+ "took"=>1, "ingest_took"=>11, "errors"=>true, "items"=>
+ [{
+ "index"=>{"_index"=>"bar", "_type"=>"_doc", "_id"=>'bar', "status" => error_code,
+ "error"=>{"type" => error_type, "reason" => "TEST" }
+ }
+ }]
+ }
+ end
+
+ before(:each) do
+ allow(subject.client).to receive(:bulk_send).and_return(bulk_response)
+ end
+
+ context 'DLQ is enabled' do
+
+ let(:options) { super().merge("dlq_custom_codes" => [403]) }
+
+ it 'does not write the event to the DLQ' do
+ expect(dlq_writer).not_to receive(:write)
+ subject.send(:submit, event_action_tuples)
+ end
+ end
+
+ context 'DLQ is not enabled' do
+
+ before(:each) do
+ allow(subject).to receive(:dlq_enabled?).and_return(false)
+ end
+
+ it 'does not retry indexing the event' do
+ expect(subject).to receive(:submit).with(event_action_tuples).once.and_call_original
+ subject.send(:retrying_submit, event_action_tuples)
+ end
+ end
+
+ context 'the error type is not in `silence_errors_in_log`' do
+
+ it 'logs the error' do
+ expect(subject.logger).to receive(:warn).with(a_string_including("Failed action"), anything)
+ subject.send(:submit, event_action_tuples)
+ end
+ end
+
+ context 'the error type is in `silence_errors_in_log`' do
+
+ let(:options) { super().merge('silence_errors_in_log' => [error_type]) }
+
+ before(:each) do
+ # ensure that neither warn nor info is called on the logger by using a test double
+ subject.instance_variable_set("@logger", double('logger'))
+ end
+
+ it 'does not log the error' do
+ subject.send(:submit, event_action_tuples)
+ end
+ end
+ end
+
  describe "custom headers" do
  let(:manticore_options) { subject.client.pool.adapter.manticore.instance_variable_get(:@options) }

metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-elasticsearch
  version: !ruby/object:Gem::Version
- version: 12.0.6
+ version: 12.1.0
  platform: java
  authors:
  - Elastic
  bindir: bin
  cert_chain: []
- date: 2025-07-29 00:00:00.000000000 Z
+ date: 2025-10-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: manticore