logstash-output-elasticsearch 11.22.11-java → 11.22.12-java

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 73430889fa821b0bd24a49c0278bd9be2f2107459025f2f31a3eb8a3d70f8261
- data.tar.gz: 7848e77db848b7bac6faff43460b5206163380e6a4184510744f12f058ff6aa1
+ metadata.gz: b9a7b78791991372bdd4cbeb0f1d6c5f6d9f7a4db88b599ae090f09ecf548377
+ data.tar.gz: 169225b31802647c4e00070e465d24dc0222ad3d1095fdb47c37db3cab64048e
  SHA512:
- metadata.gz: 1541f0f7be5499378824a52628ade21c70a3e8370c94d12824e363deb654a47a600ffbee9d68bb77eb3cc087ae4fd07ff9b7e7b4494ccbd41c458eeccf44cb34
- data.tar.gz: 2d4ef863c9c5c6051b62005488bb7f7872302d56430cc12be78e4df4e5e3db4a8a912e9ba7d7188defb4d0ea80d31db262a4ede46a43554d9b69f779ea7d4284
+ metadata.gz: 79ebc09c5483def4e6104b90f01bca4780f499cacc4c598e19e773900f9f7dde048242f5302f26798aefbf9e85e84555f23b9d3cbce1d862292c25723d1dc10f
+ data.tar.gz: 369dc07aa0a5474cd34feda727013fc49a7013c39ef0d28ccc586e6eaf268a37097dc650636d4720d89bd0b017ea37a9d15d9036290a80848432c0526c443748
data/CHANGELOG.md CHANGED
@@ -1,3 +1,5 @@
+ ## 11.22.12
+ - Properly handle http code 413 (Payload Too Large) [#1199](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1199)
  ## 11.22.11
  - Remove irrelevant log warning about elastic stack version [#1202](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1202)
 
data/docs/index.asciidoc CHANGED
@@ -196,7 +196,22 @@ This plugin uses the Elasticsearch bulk API to optimize its imports into Elastic
  either partial or total failures. The bulk API sends batches of requests to an HTTP endpoint. Error codes for the HTTP
  request are handled differently than error codes for individual documents.
 
- HTTP requests to the bulk API are expected to return a 200 response code. All other response codes are retried indefinitely.
+
+ HTTP requests to the bulk API are expected to return a 200 response code. All other response codes are retried indefinitely,
+ including 413 (Payload Too Large) responses.
+
+ If you want to handle large payloads differently, you can configure 413 responses to go to the Dead Letter Queue instead:
+
+ [source,ruby]
+ -----
+ output {
+   elasticsearch {
+     hosts => ["localhost:9200"]
+     dlq_custom_codes => [413] # Send 413 errors to DLQ instead of retrying
+   }
+ }
+ -----
+
+ This will capture oversized payloads in the DLQ for analysis rather than retrying them.
 
  The following document errors are handled as follows:
 
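For events that do land in the DLQ, they can later be read back for inspection with the `dead_letter_queue` input plugin. The following is a minimal, hypothetical companion pipeline (not part of the diffed docs); the DLQ path and pipeline id are assumptions and should be adjusted to the local installation:

[source,ruby]
-----
input {
  dead_letter_queue {
    path => "/usr/share/logstash/data/dead_letter_queue"  # assumed path.data location; adjust to your install
    pipeline_id => "main"                                  # pipeline whose DLQ should be read
    commit_offsets => true                                 # remember the read position across restarts
  }
}
output {
  stdout { codec => rubydebug { metadata => true } }       # metadata includes the DLQ failure reason
}
-----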
@@ -76,11 +76,8 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError.new(e, request_uri_as_string)
  end
 
- # 404s are excluded because they are valid codes in the case of
- # template installation. We might need a better story around this later
- # but for our current purposes this is correct
  code = resp.code
- if code < 200 || code > 299 && code != 404
+ if code < 200 || code > 299 # assume anything not 2xx is an error that the layer above needs to interpret
  raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(code, request_uri, body, resp.body)
  end
 
@@ -253,13 +253,11 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  def health_check_request(url)
  logger.debug("Running health check to see if an Elasticsearch connection is working",
  :healthcheck_url => url.sanitized.to_s, :path => @healthcheck_path)
- begin
- response = perform_request_to_url(url, :head, @healthcheck_path)
- return response, nil
- rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
- logger.warn("Health check failed", code: e.response_code, url: e.url, message: e.message)
- return nil, e
- end
+ response = perform_request_to_url(url, :head, @healthcheck_path)
+ return response, nil
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+ logger.warn("Health check failed", code: e.response_code, url: e.url, message: e.message)
+ return nil, e
  end
 
  def healthcheck!(register_phase = true)
@@ -312,13 +310,11 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
  end
 
  def get_root_path(url, params={})
- begin
- resp = perform_request_to_url(url, :get, ROOT_URI_PATH, params)
- return resp, nil
- rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
- logger.warn("Elasticsearch main endpoint returns #{e.response_code}", message: e.message, body: e.response_body)
- return nil, e
- end
+ resp = perform_request_to_url(url, :get, ROOT_URI_PATH, params)
+ return resp, nil
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+ logger.warn("Elasticsearch main endpoint returns #{e.response_code}", message: e.message, body: e.response_body)
+ return nil, e
  end
 
  def test_serverless_connection(url, root_response)
@@ -182,22 +182,20 @@ module LogStash; module Outputs; class ElasticSearch;
  def bulk_send(body_stream, batch_actions)
  params = compression_level? ? {:headers => {"Content-Encoding" => "gzip"}} : {}
 
- response = @pool.post(@bulk_path, params, body_stream.string)
-
- @bulk_response_metrics.increment(response.code.to_s)
-
- case response.code
- when 200 # OK
- LogStash::Json.load(response.body)
- when 413 # Payload Too Large
+ begin
+ response = @pool.post(@bulk_path, params, body_stream.string)
+ @bulk_response_metrics.increment(response.code.to_s)
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+ @bulk_response_metrics.increment(e.response_code.to_s)
+ raise e unless e.response_code == 413
+ # special handling for 413, treat it as a document level issue
  logger.warn("Bulk request rejected: `413 Payload Too Large`", :action_count => batch_actions.size, :content_length => body_stream.size)
- emulate_batch_error_response(batch_actions, response.code, 'payload_too_large')
- else
- url = ::LogStash::Util::SafeURI.new(response.final_url)
- raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
- response.code, url, body_stream.to_s, response.body
- )
+ return emulate_batch_error_response(batch_actions, 413, 'payload_too_large')
+ rescue => e # it may be a network issue instead, re-raise
+ raise e
  end
+
+ LogStash::Json.load(response.body)
  end
 
  def emulate_batch_error_response(actions, http_code, reason)
@@ -411,6 +409,9 @@ module LogStash; module Outputs; class ElasticSearch;
  def exists?(path, use_get=false)
  response = use_get ? @pool.get(path) : @pool.head(path)
  response.code >= 200 && response.code <= 299
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+ return false if e.response_code == 404
+ raise e
  end
 
  def template_exists?(template_endpoint, name)
@@ -421,6 +422,8 @@ module LogStash; module Outputs; class ElasticSearch;
  path = "#{template_endpoint}/#{name}"
  logger.info("Installing Elasticsearch template", name: name)
  @pool.put(path, nil, LogStash::Json.dump(template))
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+ raise e unless e.response_code == 404
  end
 
  # ILM methods
@@ -432,17 +435,15 @@ module LogStash; module Outputs; class ElasticSearch;
 
  # Create a new rollover alias
  def rollover_alias_put(alias_name, alias_definition)
- begin
- @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
- logger.info("Created rollover alias", name: alias_name)
- # If the rollover alias already exists, ignore the error that comes back from Elasticsearch
- rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
- if e.response_code == 400
- logger.info("Rollover alias already exists, skipping", name: alias_name)
- return
- end
- raise e
+ @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
+ logger.info("Created rollover alias", name: alias_name)
+ # If the rollover alias already exists, ignore the error that comes back from Elasticsearch
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+ if e.response_code == 400
+ logger.info("Rollover alias already exists, skipping", name: alias_name)
+ return
  end
+ raise e
  end
 
  def get_xpack_info
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-output-elasticsearch'
- s.version = '11.22.11'
+ s.version = '11.22.12'
  s.licenses = ['apache-2.0']
  s.summary = "Stores logs in Elasticsearch"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -915,7 +915,12 @@ describe LogStash::Outputs::ElasticSearch do
  allow(elasticsearch_output_instance.client.pool).to receive(:post) do |path, params, body|
  if body.length > max_bytes
  max_bytes *= 2 # ensure a successful retry
- double("Response", :code => 413, :body => "")
+ raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
+ 413,
+ "test-url",
+ body,
+ ""
+ )
  else
  double("Response", :code => 200, :body => '{"errors":false,"items":[{"index":{"status":200,"result":"created"}}]}')
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-elasticsearch
  version: !ruby/object:Gem::Version
- version: 11.22.11
+ version: 11.22.12
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2025-01-15 00:00:00.000000000 Z
+ date: 2025-01-28 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement