logstash-output-elasticsearch 11.22.11-java → 11.22.13-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30) hide show
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +6 -0
  3. data/docs/index.asciidoc +16 -1
  4. data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +1 -4
  5. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +10 -14
  6. data/lib/logstash/outputs/elasticsearch/http_client.rb +42 -29
  7. data/logstash-output-elasticsearch.gemspec +1 -1
  8. data/spec/es_spec_helper.rb +34 -22
  9. data/spec/fixtures/test_certs/GENERATED_AT +1 -1
  10. data/spec/fixtures/test_certs/ca.crt +30 -27
  11. data/spec/fixtures/test_certs/ca.der.sha256 +1 -1
  12. data/spec/fixtures/test_certs/renew.sh +2 -3
  13. data/spec/fixtures/test_certs/test.crt +29 -28
  14. data/spec/fixtures/test_certs/test.der.sha256 +1 -1
  15. data/spec/fixtures/test_certs/test.p12 +0 -0
  16. data/spec/integration/outputs/compressed_indexing_spec.rb +3 -1
  17. data/spec/integration/outputs/delete_spec.rb +4 -4
  18. data/spec/integration/outputs/ilm_spec.rb +18 -12
  19. data/spec/integration/outputs/index_spec.rb +12 -2
  20. data/spec/integration/outputs/index_version_spec.rb +7 -7
  21. data/spec/integration/outputs/painless_update_spec.rb +11 -10
  22. data/spec/integration/outputs/unsupported_actions_spec.rb +15 -10
  23. data/spec/integration/outputs/update_spec.rb +11 -9
  24. data/spec/spec_helper.rb +8 -0
  25. data/spec/unit/outputs/elasticsearch/http_client_spec.rb +77 -0
  26. data/spec/unit/outputs/elasticsearch_spec.rb +7 -2
  27. data/spec/unit/outputs/error_whitelist_spec.rb +0 -1
  28. metadata +2 -6
  29. data/spec/support/elasticsearch/api/actions/get_alias.rb +0 -18
  30. data/spec/support/elasticsearch/api/actions/put_alias.rb +0 -24
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 73430889fa821b0bd24a49c0278bd9be2f2107459025f2f31a3eb8a3d70f8261
4
- data.tar.gz: 7848e77db848b7bac6faff43460b5206163380e6a4184510744f12f058ff6aa1
3
+ metadata.gz: db023d3f7c10e52c9a7fdd9e2480ccd0f988c977357d86b3980d0698a21f5b10
4
+ data.tar.gz: f7d2ca8aa8dbb5acf848d5c20cca9911a6530fc5296d9765cc26df96df7554c0
5
5
  SHA512:
6
- metadata.gz: 1541f0f7be5499378824a52628ade21c70a3e8370c94d12824e363deb654a47a600ffbee9d68bb77eb3cc087ae4fd07ff9b7e7b4494ccbd41c458eeccf44cb34
7
- data.tar.gz: 2d4ef863c9c5c6051b62005488bb7f7872302d56430cc12be78e4df4e5e3db4a8a912e9ba7d7188defb4d0ea80d31db262a4ede46a43554d9b69f779ea7d4284
6
+ metadata.gz: cd23a9523896c06623ec91a33ec820363af6df32149bf0d445ab1218e5b344668398b09cbcdc5b88b39efa4771d79f9de52f80d968105d991ce369cbafd634bc
7
+ data.tar.gz: e63f6a22b9eb7f308c164ba9e4d63f6685305f82d9e1ef9d4e86d7c6fedb1d19d9de4fd6949b82d534f5745c33d75ec35ae6b036fcc3b8efc2f41977a6948e07
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
1
+ ## 11.22.13
2
+ - Add headers reporting uncompressed size and doc count for bulk requests [#1217](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1217)
3
+
4
+ ## 11.22.12
5
+ - Properly handle http code 413 (Payload Too Large) [#1199](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1199)
6
+
1
7
  ## 11.22.11
2
8
  - Remove irrelevant log warning about elastic stack version [#1202](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1202)
3
9
 
data/docs/index.asciidoc CHANGED
@@ -196,7 +196,22 @@ This plugin uses the Elasticsearch bulk API to optimize its imports into Elastic
196
196
  either partial or total failures. The bulk API sends batches of requests to an HTTP endpoint. Error codes for the HTTP
197
197
  request are handled differently than error codes for individual documents.
198
198
 
199
- HTTP requests to the bulk API are expected to return a 200 response code. All other response codes are retried indefinitely.
199
+
200
+ HTTP requests to the bulk API are expected to return a 200 response code. All other response codes are retried indefinitely,
201
+ including 413 (Payload Too Large) responses.
202
+
203
+ If you want to handle large payloads differently, you can configure 413 responses to go to the Dead Letter Queue instead:
204
+
205
+ [source,ruby]
206
+ -----
207
+ output {
208
+ elasticsearch {
209
+ hosts => ["localhost:9200"]
210
+ dlq_custom_codes => [413] # Send 413 errors to DLQ instead of retrying
211
+ }
+ }
212
+ -----
213
+
214
+ This will capture oversized payloads in the DLQ for analysis rather than retrying them.
200
215
 
201
216
  The following document errors are handled as follows:
202
217
 
@@ -76,11 +76,8 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
76
76
  raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError.new(e, request_uri_as_string)
77
77
  end
78
78
 
79
- # 404s are excluded because they are valid codes in the case of
80
- # template installation. We might need a better story around this later
81
- # but for our current purposes this is correct
82
79
  code = resp.code
83
- if code < 200 || code > 299 && code != 404
80
+ if code < 200 || code > 299 # assume anything not 2xx is an error that the layer above needs to interpret
84
81
  raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(code, request_uri, body, resp.body)
85
82
  end
86
83
 
@@ -253,13 +253,11 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
253
253
  def health_check_request(url)
254
254
  logger.debug("Running health check to see if an Elasticsearch connection is working",
255
255
  :healthcheck_url => url.sanitized.to_s, :path => @healthcheck_path)
256
- begin
257
- response = perform_request_to_url(url, :head, @healthcheck_path)
258
- return response, nil
259
- rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
260
- logger.warn("Health check failed", code: e.response_code, url: e.url, message: e.message)
261
- return nil, e
262
- end
256
+ response = perform_request_to_url(url, :head, @healthcheck_path)
257
+ return response, nil
258
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
259
+ logger.warn("Health check failed", code: e.response_code, url: e.url, message: e.message)
260
+ return nil, e
263
261
  end
264
262
 
265
263
  def healthcheck!(register_phase = true)
@@ -312,13 +310,11 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
312
310
  end
313
311
 
314
312
  def get_root_path(url, params={})
315
- begin
316
- resp = perform_request_to_url(url, :get, ROOT_URI_PATH, params)
317
- return resp, nil
318
- rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
319
- logger.warn("Elasticsearch main endpoint returns #{e.response_code}", message: e.message, body: e.response_body)
320
- return nil, e
321
- end
313
+ resp = perform_request_to_url(url, :get, ROOT_URI_PATH, params)
314
+ return resp, nil
315
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
316
+ logger.warn("Elasticsearch main endpoint returns #{e.response_code}", message: e.message, body: e.response_body)
317
+ return nil, e
322
318
  end
323
319
 
324
320
  def test_serverless_connection(url, root_response)
@@ -21,7 +21,8 @@ module LogStash; module Outputs; class ElasticSearch;
21
21
  # We wound up agreeing that a number greater than 10 MiB and less than 100MiB
22
22
  # made sense. We picked one on the lowish side to not use too much heap.
23
23
  TARGET_BULK_BYTES = 20 * 1024 * 1024 # 20MiB
24
-
24
+ EVENT_COUNT_HEADER = "X-Elastic-Event-Count".freeze
25
+ UNCOMPRESSED_LENGTH_HEADER = "X-Elastic-Uncompressed-Request-Length".freeze
25
26
 
26
27
  class HttpClient
27
28
  attr_reader :client, :options, :logger, :pool, :action_count, :recv_count
@@ -143,7 +144,11 @@ module LogStash; module Outputs; class ElasticSearch;
143
144
  :payload_size => stream_writer.pos,
144
145
  :content_length => body_stream.size,
145
146
  :batch_offset => (index + 1 - batch_actions.size))
146
- bulk_responses << bulk_send(body_stream, batch_actions)
147
+ headers = {
148
+ EVENT_COUNT_HEADER => batch_actions.size.to_s,
149
+ UNCOMPRESSED_LENGTH_HEADER => stream_writer.pos.to_s
150
+ }
151
+ bulk_responses << bulk_send(body_stream, batch_actions, headers)
147
152
  body_stream.truncate(0) && body_stream.seek(0)
148
153
  stream_writer = gzip_writer(body_stream) if compression_level?
149
154
  batch_actions.clear
@@ -159,7 +164,14 @@ module LogStash; module Outputs; class ElasticSearch;
159
164
  :payload_size => stream_writer.pos,
160
165
  :content_length => body_stream.size,
161
166
  :batch_offset => (actions.size - batch_actions.size))
162
- bulk_responses << bulk_send(body_stream, batch_actions) if body_stream.size > 0
167
+
168
+ if body_stream.size > 0
169
+ headers = {
170
+ EVENT_COUNT_HEADER => batch_actions.size.to_s,
171
+ UNCOMPRESSED_LENGTH_HEADER => stream_writer.pos.to_s
172
+ }
173
+ bulk_responses << bulk_send(body_stream, batch_actions, headers)
174
+ end
163
175
 
164
176
  body_stream.close unless compression_level?
165
177
  join_bulk_responses(bulk_responses)
@@ -179,25 +191,23 @@ module LogStash; module Outputs; class ElasticSearch;
179
191
  }
180
192
  end
181
193
 
182
- def bulk_send(body_stream, batch_actions)
183
- params = compression_level? ? {:headers => {"Content-Encoding" => "gzip"}} : {}
184
-
185
- response = @pool.post(@bulk_path, params, body_stream.string)
194
+ def bulk_send(body_stream, batch_actions, headers = {})
195
+ params = compression_level? ? {:headers => headers.merge("Content-Encoding" => "gzip") } : { :headers => headers }
186
196
 
187
- @bulk_response_metrics.increment(response.code.to_s)
188
-
189
- case response.code
190
- when 200 # OK
191
- LogStash::Json.load(response.body)
192
- when 413 # Payload Too Large
197
+ begin
198
+ response = @pool.post(@bulk_path, params, body_stream.string)
199
+ @bulk_response_metrics.increment(response.code.to_s)
200
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
201
+ @bulk_response_metrics.increment(e.response_code.to_s)
202
+ raise e unless e.response_code == 413
203
+ # special handling for 413, treat it as a document level issue
193
204
  logger.warn("Bulk request rejected: `413 Payload Too Large`", :action_count => batch_actions.size, :content_length => body_stream.size)
194
- emulate_batch_error_response(batch_actions, response.code, 'payload_too_large')
195
- else
196
- url = ::LogStash::Util::SafeURI.new(response.final_url)
197
- raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
198
- response.code, url, body_stream.to_s, response.body
199
- )
205
+ return emulate_batch_error_response(batch_actions, 413, 'payload_too_large')
206
+ rescue => e # it may be a network issue instead, re-raise
207
+ raise e
200
208
  end
209
+
210
+ LogStash::Json.load(response.body)
201
211
  end
202
212
 
203
213
  def emulate_batch_error_response(actions, http_code, reason)
@@ -411,6 +421,9 @@ module LogStash; module Outputs; class ElasticSearch;
411
421
  def exists?(path, use_get=false)
412
422
  response = use_get ? @pool.get(path) : @pool.head(path)
413
423
  response.code >= 200 && response.code <= 299
424
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
425
+ return false if e.response_code == 404
426
+ raise e
414
427
  end
415
428
 
416
429
  def template_exists?(template_endpoint, name)
@@ -421,6 +434,8 @@ module LogStash; module Outputs; class ElasticSearch;
421
434
  path = "#{template_endpoint}/#{name}"
422
435
  logger.info("Installing Elasticsearch template", name: name)
423
436
  @pool.put(path, nil, LogStash::Json.dump(template))
437
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
438
+ raise e unless e.response_code == 404
424
439
  end
425
440
 
426
441
  # ILM methods
@@ -432,17 +447,15 @@ module LogStash; module Outputs; class ElasticSearch;
432
447
 
433
448
  # Create a new rollover alias
434
449
  def rollover_alias_put(alias_name, alias_definition)
435
- begin
436
- @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
437
- logger.info("Created rollover alias", name: alias_name)
438
- # If the rollover alias already exists, ignore the error that comes back from Elasticsearch
439
- rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
440
- if e.response_code == 400
441
- logger.info("Rollover alias already exists, skipping", name: alias_name)
442
- return
443
- end
444
- raise e
450
+ @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
451
+ logger.info("Created rollover alias", name: alias_name)
452
+ # If the rollover alias already exists, ignore the error that comes back from Elasticsearch
453
+ rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
454
+ if e.response_code == 400
455
+ logger.info("Rollover alias already exists, skipping", name: alias_name)
456
+ return
445
457
  end
458
+ raise e
446
459
  end
447
460
 
448
461
  def get_xpack_info
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-output-elasticsearch'
3
- s.version = '11.22.11'
3
+ s.version = '11.22.13'
4
4
  s.licenses = ['apache-2.0']
5
5
  s.summary = "Stores logs in Elasticsearch"
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -1,15 +1,18 @@
1
1
  require_relative './spec_helper'
2
2
 
3
3
  require 'elasticsearch'
4
- require_relative "support/elasticsearch/api/actions/delete_ilm_policy"
5
- require_relative "support/elasticsearch/api/actions/get_alias"
6
- require_relative "support/elasticsearch/api/actions/put_alias"
7
- require_relative "support/elasticsearch/api/actions/get_ilm_policy"
8
- require_relative "support/elasticsearch/api/actions/put_ilm_policy"
9
4
 
10
5
  require 'json'
11
6
  require 'cabin'
12
7
 
8
+ # remove this condition and support package once plugin starts consuming elasticsearch-ruby v8 client
9
+ # in elasticsearch-ruby v7, ILM APIs were in a separate xpack gem; in v8 they are available directly on the client
10
+ unless elastic_ruby_v8_client_available?
11
+ require_relative "support/elasticsearch/api/actions/delete_ilm_policy"
12
+ require_relative "support/elasticsearch/api/actions/get_ilm_policy"
13
+ require_relative "support/elasticsearch/api/actions/put_ilm_policy"
14
+ end
15
+
13
16
  module ESHelper
14
17
  def get_host_port
15
18
  if ENV["INTEGRATION"] == "true"
@@ -20,8 +23,12 @@ module ESHelper
20
23
  end
21
24
 
22
25
  def get_client
23
- Elasticsearch::Client.new(:hosts => [get_host_port]).tap do |client|
24
- allow(client).to receive(:verify_elasticsearch).and_return(true) # bypass client side version checking
26
+ if elastic_ruby_v8_client_available?
27
+ Elasticsearch::Client.new(:hosts => [get_host_port])
28
+ else
29
+ Elasticsearch::Client.new(:hosts => [get_host_port]).tap do |client|
30
+ allow(client).to receive(:verify_elasticsearch).and_return(true) # bypass client side version checking
31
+ end
25
32
  end
26
33
  end
27
34
 
@@ -128,31 +135,36 @@ module ESHelper
128
135
  end
129
136
 
130
137
  def get_policy(client, policy_name)
131
- client.get_ilm_policy(name: policy_name)
138
+ if elastic_ruby_v8_client_available?
139
+ client.index_lifecycle_management.get_lifecycle(policy: policy_name)
140
+ else
141
+ client.get_ilm_policy(name: policy_name)
142
+ end
132
143
  end
133
144
 
134
145
  def put_policy(client, policy_name, policy)
135
- client.put_ilm_policy({:name => policy_name, :body=> policy})
136
- end
137
-
138
- def put_alias(client, the_alias, index)
139
- body = {
140
- "aliases" => {
141
- index => {
142
- "is_write_index"=> true
143
- }
144
- }
145
- }
146
- client.put_alias({name: the_alias, body: body})
146
+ if elastic_ruby_v8_client_available?
147
+ client.index_lifecycle_management.put_lifecycle({:policy => policy_name, :body=> policy})
148
+ else
149
+ client.put_ilm_policy({:name => policy_name, :body=> policy})
150
+ end
147
151
  end
148
152
 
149
153
  def clean_ilm(client)
150
- client.get_ilm_policy.each_key { |key| client.delete_ilm_policy(name: key) if key =~ /logstash-policy/ }
154
+ if elastic_ruby_v8_client_available?
155
+ client.index_lifecycle_management.get_lifecycle.each_key { |key| client.index_lifecycle_management.delete_lifecycle(policy: key) if key =~ /logstash-policy/ }
156
+ else
157
+ client.get_ilm_policy.each_key { |key| client.delete_ilm_policy(name: key) if key =~ /logstash-policy/ }
158
+ end
151
159
  end
152
160
 
153
161
  def supports_ilm?(client)
154
162
  begin
155
- client.get_ilm_policy
163
+ if elastic_ruby_v8_client_available?
164
+ client.index_lifecycle_management.get_lifecycle
165
+ else
166
+ client.get_ilm_policy
167
+ end
156
168
  true
157
169
  rescue
158
170
  false
@@ -1 +1 @@
1
- 2024-06-25T21:50:58+01:00
1
+ 2025-07-22T11:15:03+01:00
@@ -1,29 +1,32 @@
1
1
  -----BEGIN CERTIFICATE-----
2
- MIIFDDCCAvQCAQEwDQYJKoZIhvcNAQELBQAwTDELMAkGA1UEBhMCUFQxCzAJBgNV
3
- BAgMAk5BMQ8wDQYDVQQHDAZMaXNib24xDjAMBgNVBAoMBU15TGFiMQ8wDQYDVQQD
4
- DAZSb290Q0EwHhcNMjQwNjI1MjA1MDU4WhcNMjUwNjI1MjA1MDU4WjBMMQswCQYD
5
- VQQGEwJQVDELMAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UECgwF
6
- TXlMYWIxDzANBgNVBAMMBlJvb3RDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCC
7
- AgoCggIBAMtTMqAWuH17b9XqPa5L3HNqgnZ958+gvcOt7Q/sOEvcDQJgkzZ+Gywh
8
- 5er5JF2iomYOHiD5JncYr4YmRQKuYfD6B1WI5FuQthD/OlA1/RHqtbY27J33SaO6
9
- 6ro8gal7vjHrXKQkefVYRwdfO6DqqbhV6L4sMiy8FzQ55TMpoM35cWuvoAMxvSQq
10
- GZ4pYYKnfNSGhzHvssfNS1xu/Lwb7Vju4jPhp+43BkGwEimI5km7jNC1nwjiHtxD
11
- sY/s93AKa/vLktXKUK5nA3jjJOhAbRTVnbOAgxFt0YbX98xW/aUqscgBUVs9J/My
12
- TRMwVKJ7Vsmth1PdJQksUASuzESlSPl09dMjTQ+MXzJDt0JvX8SIJPmbBng78MSa
13
- CUhpOZiii1l2mBfPWejx20I/SMCUNmzbwm2w9JD50Jv2iX4l4ge4H1CIK1/orW1p
14
- dY9xPL0uKYm6ADsDC0B8sGgNMBXeB6aLojY1/ITwmmfpfk9c/yWPfC7stHgCYRAv
15
- 5MfGAsmv0/ya5VrWQGBJkFiYy1pon6nxUjCbgn0RABojRoGdhhY3QDipgwmSgFZx
16
- r064RFr1bt/Ml3MJmPf535mSwPdk/j/zw4IZTvlmwKW3FyMDhwYL/zX7J0c6MzMP
17
- LEdi73Qjzmr3ENIrir4O86wNz81YRfYkg9ZX8yKJK9LBAUrYCjJ3AgMBAAEwDQYJ
18
- KoZIhvcNAQELBQADggIBABym9LMyS9W9lvpcH4OK1YLfBPJwrhZ+4keiriY4zWOo
19
- pB+v2Q35neMMXSlTDpeIwPdMkqsh8VZprOWURF80JGvpJ6fBfi05rCDWp/ol1ZKi
20
- snCA+dE2zDK7Z3+F0MbakT5oBi5WgkXSvRvlJEJ/gBD7WC1wq0kxCMK+M5w2RPAT
21
- nnV/iozNBkwExxyJA7BpS6F/v0XjwK7fm5Kpql7zKlh5piZ2IVU0B60Sqskcb2mU
22
- 90+1r9T06ekIW/Iz1jd5RWYziu0nbmDeKeKvGAICNU+evYXW+/5kKecMLuEvDCgS
23
- ssbt/Hb510uLHhxfhN4SbvBl2zADsLC+2arf2ATIwD8ZXDDs04ayBsejV0ZwVrTZ
24
- ExKqAys+B3tuIHGRqL5VukdmH6g6oJziYueohPBCOuSOzDd0FhppF4uXZS8DReSg
25
- KieO2ZYfiA1gVRiY6jPx+r7J9I5kSS1gwr/e3zHJHa79ijMB1SSIswQUmgSMkwGh
26
- sNyDNI9ZxgJan3v7kVargMt2LiNcXvVyTzPSYSXcY7SoebfpMprVIG7vZ9TZf+Uu
27
- FQeOfxdLFuGTnpFrYmvOD3OIKfODlY5t+TNICg7A3eTUXeJPcdBBnuVCiQU6TCB5
28
- H+69K5w54Q6a70sHZU1IWsGT8XtbUizPNQky+LAFsE/5oUnCqtypeEu4srcZK53x
2
+ MIIFdTCCA12gAwIBAgIUDITbsLT9hKser0ZzBZsxqgaZdWswDQYJKoZIhvcNAQEL
3
+ BQAwSjELMAkGA1UEBhMCUFQxCzAJBgNVBAgMAk5BMQ8wDQYDVQQHDAZMaXNib24x
4
+ DjAMBgNVBAoMBU15TGFiMQ0wCwYDVQQDDARyb290MB4XDTI1MDcyMjEwMTUwM1oX
5
+ DTM1MDcyMDEwMTUwM1owSjELMAkGA1UEBhMCUFQxCzAJBgNVBAgMAk5BMQ8wDQYD
6
+ VQQHDAZMaXNib24xDjAMBgNVBAoMBU15TGFiMQ0wCwYDVQQDDARyb290MIICIjAN
7
+ BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAy1MyoBa4fXtv1eo9rkvcc2qCdn3n
8
+ z6C9w63tD+w4S9wNAmCTNn4bLCHl6vkkXaKiZg4eIPkmdxivhiZFAq5h8PoHVYjk
9
+ W5C2EP86UDX9Eeq1tjbsnfdJo7rqujyBqXu+MetcpCR59VhHB187oOqpuFXoviwy
10
+ LLwXNDnlMymgzflxa6+gAzG9JCoZnilhgqd81IaHMe+yx81LXG78vBvtWO7iM+Gn
11
+ 7jcGQbASKYjmSbuM0LWfCOIe3EOxj+z3cApr+8uS1cpQrmcDeOMk6EBtFNWds4CD
12
+ EW3Rhtf3zFb9pSqxyAFRWz0n8zJNEzBUontWya2HU90lCSxQBK7MRKVI+XT10yNN
13
+ D4xfMkO3Qm9fxIgk+ZsGeDvwxJoJSGk5mKKLWXaYF89Z6PHbQj9IwJQ2bNvCbbD0
14
+ kPnQm/aJfiXiB7gfUIgrX+itbWl1j3E8vS4piboAOwMLQHywaA0wFd4HpouiNjX8
15
+ hPCaZ+l+T1z/JY98Luy0eAJhEC/kx8YCya/T/JrlWtZAYEmQWJjLWmifqfFSMJuC
16
+ fREAGiNGgZ2GFjdAOKmDCZKAVnGvTrhEWvVu38yXcwmY9/nfmZLA92T+P/PDghlO
17
+ +WbApbcXIwOHBgv/NfsnRzozMw8sR2LvdCPOavcQ0iuKvg7zrA3PzVhF9iSD1lfz
18
+ Iokr0sEBStgKMncCAwEAAaNTMFEwHQYDVR0OBBYEFKFadJx46upif1BrhYZ0iu8o
19
+ 2z8rMB8GA1UdIwQYMBaAFKFadJx46upif1BrhYZ0iu8o2z8rMA8GA1UdEwEB/wQF
20
+ MAMBAf8wDQYJKoZIhvcNAQELBQADggIBAJi4FwYJz/RotoUpfrLZFf69RoI01Fje
21
+ 8ITt8SR1Dx/1GTPEuqVVfx0EYtOoH6Gg3FwgSQ9GHRDIa1vkHY5S+FUSOW3pCoZE
22
+ /kaLu9bmFxn+GntghvQEor+LzODuZKLXupaGcu1tA4fzyuI4jglVD2sGZtLk//CT
23
+ Hd4tOWXo5k1Fj0jMnJq+2Htr8yBeSAO5ZNsvtAjOUU6pfDEwL9bgRzlKKFQQMUYo
24
+ 6x1FvRDRXWjpzB/H+OSqOaoNLEB9FfEl8I7nn6uTenr5WxjPAOpwjZl9ObB/95xM
25
+ p91abKbLQLev5I8npM9G3C/n01l3IzRs7DNHqGJTZO7frGhicD7/jNa+tkSioeJ2
26
+ fIMqgDOvQE+gMxs19zw1tsI3+kqX7+ptTkU4Lan5V5ZKGfU8xtcVIlyRk5/yDUI5
27
+ 1dfQVubs6z07s6De2qa92LFz9l8sT6QuVer+c/wPPhBdMwbzcHyUJIBjFaBpxH86
28
+ F7Mr5Zr/+qcbHglAHow1lBqdZzimqGd1koqFRat/pFUFh0iqktMmpl+ZUCjyoQEX
29
+ 93j8aMU2UQjYM8NJDE2aRculo9OEoqERYFM2m3nHvrtE7iZgddryLNH7ZmC1EquX
30
+ MhZJ26GuZ2U4b9dAX858WTv0q1EF5S8KObMlxMU7IDk+cWlSD+puWliwfUKoTR/4
31
+ JErSfjCSaRqh
29
32
  -----END CERTIFICATE-----
@@ -1 +1 @@
1
- 8b23238088af65cbae6ee9c23821068d896ec1dad081e2a1035ff70866943247
1
+ d403930d5296f1515aadd3f730757e7719188b63a276687a3475128b746e4340
@@ -3,8 +3,7 @@
3
3
  set -e
4
4
  cd "$(dirname "$0")"
5
5
 
6
- openssl x509 -x509toreq -in ca.crt -signkey ca.key -out ca.csr
7
- openssl x509 -req -days 365 -in ca.csr -set_serial 0x01 -signkey ca.key -out ca.crt && rm ca.csr
6
+ openssl req -x509 -new -nodes -key ca.key -subj "/C=PT/ST=NA/L=Lisbon/O=MyLab/CN=root" -sha256 -days 3650 -out ca.crt
8
7
  openssl x509 -in ca.crt -outform der | sha256sum | awk '{print $1}' > ca.der.sha256
9
8
 
10
9
  openssl x509 -x509toreq -in test.crt -signkey test.key -out test.csr
@@ -13,4 +12,4 @@ openssl x509 -in test.crt -outform der | sha256sum | awk '{print $1}' > test.der
13
12
  openssl pkcs12 -export -inkey test.key -in test.crt -passout "pass:1234567890" -out test.p12
14
13
 
15
14
  # output ISO8601 timestamp to file
16
- date -Iseconds > GENERATED_AT
15
+ date -Iseconds > GENERATED_AT
@@ -1,30 +1,31 @@
1
1
  -----BEGIN CERTIFICATE-----
2
- MIIFEzCCAvsCAQEwDQYJKoZIhvcNAQELBQAwTDELMAkGA1UEBhMCUFQxCzAJBgNV
3
- BAgMAk5BMQ8wDQYDVQQHDAZMaXNib24xDjAMBgNVBAoMBU15TGFiMQ8wDQYDVQQD
4
- DAZSb290Q0EwHhcNMjQwNjI1MjA1MDU4WhcNMjUwNjI1MjA1MDU4WjBTMQswCQYD
5
- VQQGEwJQVDELMAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UECgwF
6
- TXlMYWIxFjAUBgNVBAMMDWVsYXN0aWNzZWFyY2gwggIiMA0GCSqGSIb3DQEBAQUA
7
- A4ICDwAwggIKAoICAQDGIT9szzhN5HvZ2nivnCDzVfdYbbqBhgEbPppWPyFcV0r2
8
- rtmWfeK5EEdsTS/Ey4owTceOplPpAp4svF+a/i1/bHhqnQYYU4f7Qic4fDAszLdi
9
- SIo0o1csNvIogm/P+uvSzE6eZRZUSmo49dY5SKSJt6Pjh6lM2MHEjsPKIKdAN57w
10
- EN90q4IZv6AHE9rphqxcmF1k+j5xmhCUS1EJ+y7hyZ0S7Hghdgp/0cxSu/7YlVYy
11
- JpkIlQd3RPXzEf6VSYjtr9Ajp1rhvv2611q0CB5NALg/KR3OiMPYmTg5HAKOdweN
12
- am76nG3VxTeV3y+LW/pZAbi4qAl+4/c0eOGsL7o/YSn7qhThU1AWS9kY1WxTCrKR
13
- h58rUGRfmvpnOR99xvR4jz942RNiY61pTmsvo+iJspTII3GZhwIGlHtxE9Rn50lW
14
- QcDuDDHfObWhzb4rS55BERIwDUqD1LgCRd0ikRxPSvI1AM4cl35b4DTaDLcnM6EO
15
- fy+QTYsgNoftU1PI1onDQ7ZdfgrTrIBFQQRwOqfyB4bB2zWVj62LSDvZoYYicNUe
16
- cqyE1542WNKzmyE8Mrf3uknN2J6EH7EhmiyRBtGg3NEQCwIYM4/kWPNPOtkSjsn3
17
- cNbMNUZiSnQn/nTs4T8g6b2rrwsay/FGUE83AbPqqcTlp2RUVnjbC8KA5+iV1wID
18
- AQABMA0GCSqGSIb3DQEBCwUAA4ICAQAlB7YFw7e1pzYz55Kqa91juTWP5XiCn59n
19
- J0YKM++vdHqy224HZb9jGtJjZ+0Wod4cwiOVWm+5hLs4IrzfGuXFZEFx/VWP3SDq
20
- 4F3IJJXQkc7jSNrL6IR92xRDSB+yFZZI6FFsnaKMT2fZELndPVFnH+oclc8ZZoyz
21
- 2H/r1CT4yYx7YclAWUqq8Ci3J82qUeeM8Xj9fzGFKy6oCoRsApQb4qb4DoQ1TbZC
22
- b8gWxHj8l4izul1MtTzSkoMb0Ot50vMoT69m1hDz5H4wF6KuAZUAgM9LQWNHJCkt
23
- hlOXvqFTHF+y+bvK+hGs976xViq3HA45M3+5Psv0+fdoHgYQJvd23yt8CM0rGfv3
24
- P+34HlLCW+FdWiazmo+tl5YmtGs6pYuAEp2z5pmUO2l2CutFmv4xBOvXF+rZOzxY
25
- Q0ackJtflnDC/Tlq2qAldY3Oa8nyI3UIaMUcqHemwm5KpDjc0XF2J1qCoSrMxD8+
26
- L8HdvUYlh3DIFgJIG1DlTtfQO+RwrVi9+NBBGAsforla9HJDO/POiv7O9hED71u+
27
- pev8flmULeisMeYqeiL55jyS/+45VaF7t36FMyiP3zXANwbHZMvzVobEsXAuzPOt
28
- pVNo/EpszrdBe9JWt1GrFLY9c14FmWG8cAWpcwRH0ofhJPPvEB7usFVWCSduOAbA
29
- Zytzb+8iSw==
2
+ MIIFWjCCA0KgAwIBAgIBATANBgkqhkiG9w0BAQsFADBKMQswCQYDVQQGEwJQVDEL
3
+ MAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UECgwFTXlMYWIxDTAL
4
+ BgNVBAMMBHJvb3QwHhcNMjUwNzIyMTAxNTAzWhcNMjYwNzIyMTAxNTAzWjBTMQsw
5
+ CQYDVQQGEwJQVDELMAkGA1UECAwCTkExDzANBgNVBAcMBkxpc2JvbjEOMAwGA1UE
6
+ CgwFTXlMYWIxFjAUBgNVBAMMDWVsYXN0aWNzZWFyY2gwggIiMA0GCSqGSIb3DQEB
7
+ AQUAA4ICDwAwggIKAoICAQDGIT9szzhN5HvZ2nivnCDzVfdYbbqBhgEbPppWPyFc
8
+ V0r2rtmWfeK5EEdsTS/Ey4owTceOplPpAp4svF+a/i1/bHhqnQYYU4f7Qic4fDAs
9
+ zLdiSIo0o1csNvIogm/P+uvSzE6eZRZUSmo49dY5SKSJt6Pjh6lM2MHEjsPKIKdA
10
+ N57wEN90q4IZv6AHE9rphqxcmF1k+j5xmhCUS1EJ+y7hyZ0S7Hghdgp/0cxSu/7Y
11
+ lVYyJpkIlQd3RPXzEf6VSYjtr9Ajp1rhvv2611q0CB5NALg/KR3OiMPYmTg5HAKO
12
+ dweNam76nG3VxTeV3y+LW/pZAbi4qAl+4/c0eOGsL7o/YSn7qhThU1AWS9kY1WxT
13
+ CrKRh58rUGRfmvpnOR99xvR4jz942RNiY61pTmsvo+iJspTII3GZhwIGlHtxE9Rn
14
+ 50lWQcDuDDHfObWhzb4rS55BERIwDUqD1LgCRd0ikRxPSvI1AM4cl35b4DTaDLcn
15
+ M6EOfy+QTYsgNoftU1PI1onDQ7ZdfgrTrIBFQQRwOqfyB4bB2zWVj62LSDvZoYYi
16
+ cNUecqyE1542WNKzmyE8Mrf3uknN2J6EH7EhmiyRBtGg3NEQCwIYM4/kWPNPOtkS
17
+ jsn3cNbMNUZiSnQn/nTs4T8g6b2rrwsay/FGUE83AbPqqcTlp2RUVnjbC8KA5+iV
18
+ 1wIDAQABo0IwQDAdBgNVHQ4EFgQUb789MhsOk89lMWwSwBss1TLXDFAwHwYDVR0j
19
+ BBgwFoAUoVp0nHjq6mJ/UGuFhnSK7yjbPyswDQYJKoZIhvcNAQELBQADggIBAI+G
20
+ NKZ3s3m+/R4mH3M84gGWPE1joC2bLavYYLZjKnZv18o6fHX0IW/8v5hd5Df3SP5u
21
+ vhjC88bewiKVHldqkC6ju9rbZxQynhFZGXbN9zLvFMZGkfRH5vB2Y13ZWBdWhq5L
22
+ cRxpRk6WlwaSy0Ed4F12u9ERmhMOtSZhqAnNJBeVraOHeGlcMZXJdZkeyxkdcZ4y
23
+ YJcrI8Da0dMxILgIuc9ZCynAItRAjMw1/3wjlx0Cyxif10ct+EFiP6Zv/gzoo05v
24
+ tNeqOCrxAqAcwrS1u4q/KAKySiEIyxyU1nEI/g53nALwoQhFsRVqVXNAoy7xu37y
25
+ o+lvs98rkq/NkkbBvRBPdcF/BYNtesRxKja/QAEvslyZfyICL9oqsuPPEB2nHtXa
26
+ mWntT2NaXyr1FWCxHaXfZQOxSwco3vTk7HLuNug2wxIc/hewkLlk5NCRkAYfTlan
27
+ gLhZ3vBej4oA8cdpODMb8SrYhqKTeX8E+ulHVS0paY0kszAGK2x2kHqRGNXUlfoB
28
+ Ax0etGudHhgtTCAmUgJDyQNLkvBKHYQJ2V/Wv/xej7wXKkACNKlRORl8zcnbVErd
29
+ GM/ibfqNIPIo8dP2EDycSV6vIICqkxpCZZObNjfgKa0UN03qYi7xREhhEehXgU8H
30
+ IO9w2pG7ReiO2E+bLIs0Zh1+2IwlM1EM/eqbq+Gi
30
31
  -----END CERTIFICATE-----
@@ -1 +1 @@
1
- 80329a197063dea8cf7905d10d221648bbdbc05b8fb1d4c2e384b831bc6590df
1
+ 386ae6ef809d20ddfcc7ca68f480e82007c031b365c86cc58922cf1bd7238f89
Binary file
@@ -36,7 +36,9 @@ end
36
36
  {
37
37
  "Content-Encoding" => "gzip",
38
38
  "Content-Type" => "application/json",
39
- 'x-elastic-product-origin' => 'logstash-output-elasticsearch'
39
+ 'x-elastic-product-origin' => 'logstash-output-elasticsearch',
40
+ 'X-Elastic-Event-Count' => anything,
41
+ 'X-Elastic-Uncompressed-Request-Length' => anything,
40
42
  }
41
43
  }
42
44
 
@@ -39,12 +39,12 @@ describe "Versioned delete", :integration => true do
39
39
  it "should ignore non-monotonic external version updates" do
40
40
  id = "ev2"
41
41
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
42
- r = es.get(:index => 'logstash-delete', :type => doc_type, :id => id, :refresh => true)
42
+ r = es.get(:index => 'logstash-delete', :id => id, :refresh => true)
43
43
  expect(r['_version']).to eq(99)
44
44
  expect(r['_source']['message']).to eq('foo')
45
45
 
46
46
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 98)])
47
- r2 = es.get(:index => 'logstash-delete', :type => doc_type, :id => id, :refresh => true)
47
+ r2 = es.get(:index => 'logstash-delete', :id => id, :refresh => true)
48
48
  expect(r2['_version']).to eq(99)
49
49
  expect(r2['_source']['message']).to eq('foo')
50
50
  end
@@ -52,12 +52,12 @@ describe "Versioned delete", :integration => true do
52
52
  it "should commit monotonic external version updates" do
53
53
  id = "ev3"
54
54
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "index", "message" => "foo", "my_version" => 99)])
55
- r = es.get(:index => 'logstash-delete', :type => doc_type, :id => id, :refresh => true)
55
+ r = es.get(:index => 'logstash-delete', :id => id, :refresh => true)
56
56
  expect(r['_version']).to eq(99)
57
57
  expect(r['_source']['message']).to eq('foo')
58
58
 
59
59
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_action" => "delete", "message" => "foo", "my_version" => 100)])
60
- expect { es.get(:index => 'logstash-delete', :type => doc_type, :id => id, :refresh => true) }.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
60
+ expect { es.get(:index => 'logstash-delete', :id => id, :refresh => true) }.to raise_error(get_expected_error_class)
61
61
  end
62
62
  end
63
63
  end
@@ -102,7 +102,7 @@ shared_examples_for 'an ILM disabled Logstash' do
102
102
  it 'should not install the default policy' do
103
103
  subject.register
104
104
  sleep(1)
105
- expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
105
+ expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
106
106
  end
107
107
 
108
108
  it 'should not write the ILM settings into the template' do
@@ -282,12 +282,12 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
282
282
  subject.register
283
283
  sleep(1)
284
284
  expect(@es.indices.exists_alias(name: "logstash")).to be_truthy
285
- expect(@es.get_alias(name: "logstash")).to include("logstash-000001")
285
+ expect(@es.indices.get_alias(name: "logstash")).to include("logstash-000001")
286
286
  end
287
287
  end
288
288
 
289
289
  it 'should install it if it is not present' do
290
- expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
290
+ expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
291
291
  subject.register
292
292
  sleep(1)
293
293
  expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.not_to raise_error
@@ -298,7 +298,7 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
298
298
  subject.register
299
299
  sleep(1)
300
300
  expect(@es.indices.exists_alias(name: "logstash")).to be_truthy
301
- expect(@es.get_alias(name: "logstash")).to include("logstash-#{todays_date}-000001")
301
+ expect(@es.indices.get_alias(name: "logstash")).to include("logstash-#{todays_date}-000001")
302
302
  end
303
303
 
304
304
  it 'should ingest into a single index' do
@@ -340,14 +340,14 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
340
340
  let (:policy) { small_max_doc_policy }
341
341
 
342
342
  before do
343
- expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
343
+ expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
344
344
  put_policy(@es,ilm_policy_name, policy)
345
345
  end
346
346
 
347
347
  it 'should not install the default policy if it is not used' do
348
348
  subject.register
349
349
  sleep(1)
350
- expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
350
+ expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
351
351
  end
352
352
  end
353
353
 
@@ -357,14 +357,14 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
357
357
  let (:policy) { max_age_policy("1d") }
358
358
 
359
359
  before do
360
- expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
360
+ expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
361
361
  put_policy(@es,ilm_policy_name, policy)
362
362
  end
363
363
 
364
364
  it 'should not install the default policy if it is not used' do
365
365
  subject.register
366
366
  sleep(1)
367
- expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
367
+ expect{get_policy(@es, LogStash::Outputs::ElasticSearch::DEFAULT_POLICY)}.to raise_error(get_expected_error_class)
368
368
  end
369
369
  end
370
370
 
@@ -374,7 +374,7 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
374
374
  subject.register
375
375
  sleep(1)
376
376
  expect(@es.indices.exists_alias(name: expected_index)).to be_truthy
377
- expect(@es.get_alias(name: expected_index)).to include("#{expected_index}-#{todays_date}-000001")
377
+ expect(@es.indices.get_alias(name: expected_index)).to include("#{expected_index}-#{todays_date}-000001")
378
378
  end
379
379
 
380
380
  it 'should write the ILM settings into the template' do
@@ -443,17 +443,18 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
443
443
  subject.register
444
444
  sleep(1)
445
445
  expect(@es.indices.exists_alias(name: ilm_rollover_alias)).to be_truthy
446
- expect(@es.get_alias(name: ilm_rollover_alias)).to include("#{ilm_rollover_alias}-#{todays_date}-000001")
446
+ expect(@es.indices.get_alias(name: ilm_rollover_alias)).to include("#{ilm_rollover_alias}-#{todays_date}-000001")
447
447
  end
448
448
 
449
449
  context 'when the custom rollover alias already exists' do
450
450
  it 'should ignore the already exists error' do
451
451
  expect(@es.indices.exists_alias(name: ilm_rollover_alias)).to be_falsey
452
- put_alias(@es, "#{ilm_rollover_alias}-#{todays_date}-000001", ilm_rollover_alias)
452
+ @es.indices.create(index: "#{ilm_rollover_alias}-#{todays_date}-000001")
453
+ @es.indices.put_alias(name: ilm_rollover_alias, index: "#{ilm_rollover_alias}-#{todays_date}-000001")
453
454
  expect(@es.indices.exists_alias(name: ilm_rollover_alias)).to be_truthy
454
455
  subject.register
455
456
  sleep(1)
456
- expect(@es.get_alias(name: ilm_rollover_alias)).to include("#{ilm_rollover_alias}-#{todays_date}-000001")
457
+ expect(@es.indices.get_alias(name: ilm_rollover_alias)).to include("#{ilm_rollover_alias}-#{todays_date}-000001")
457
458
  end
458
459
 
459
460
  end
@@ -532,3 +533,8 @@ describe 'Elasticsearch has index lifecycle management enabled', :integration =>
532
533
  end
533
534
 
534
535
  end
536
+
537
+ def get_expected_error_class
538
+ return Elastic::Transport::Transport::Errors::NotFound if elastic_ruby_v8_client_available?
539
+ Elasticsearch::Transport::Transport::Errors::NotFound
540
+ end
@@ -215,12 +215,22 @@ describe "indexing" do
215
215
 
216
216
  it "sets the correct content-type header" do
217
217
  expected_manticore_opts = {
218
- :headers => {"Content-Type" => "application/json", 'x-elastic-product-origin' => 'logstash-output-elasticsearch'},
218
+ :headers => {
219
+ "Content-Type" => "application/json",
220
+ 'x-elastic-product-origin' => 'logstash-output-elasticsearch',
221
+ 'X-Elastic-Event-Count' => anything,
222
+ 'X-Elastic-Uncompressed-Request-Length' => anything
223
+ },
219
224
  :body => anything
220
225
  }
221
226
  if secure
222
227
  expected_manticore_opts = {
223
- :headers => {"Content-Type" => "application/json", 'x-elastic-product-origin' => 'logstash-output-elasticsearch'},
228
+ :headers => {
229
+ "Content-Type" => "application/json",
230
+ 'x-elastic-product-origin' => 'logstash-output-elasticsearch',
231
+ 'X-Elastic-Event-Count' => anything,
232
+ 'X-Elastic-Uncompressed-Request-Length' => anything
233
+ },
224
234
  :body => anything,
225
235
  :auth => {
226
236
  :user => user,
@@ -36,11 +36,11 @@ describe "Versioned indexing", :integration => true do
36
36
 
37
37
  it "should default to ES version" do
38
38
  subject.multi_receive([LogStash::Event.new("my_id" => "123", "message" => "foo")])
39
- r = es.get(:index => 'logstash-index', :type => doc_type, :id => "123", :refresh => true)
39
+ r = es.get(:index => 'logstash-index', :id => '123', :refresh => true)
40
40
  expect(r["_version"]).to eq(1)
41
41
  expect(r["_source"]["message"]).to eq('foo')
42
42
  subject.multi_receive([LogStash::Event.new("my_id" => "123", "message" => "foobar")])
43
- r2 = es.get(:index => 'logstash-index', :type => doc_type, :id => "123", :refresh => true)
43
+ r2 = es.get(:index => 'logstash-index', :id => '123', :refresh => true)
44
44
  expect(r2["_version"]).to eq(2)
45
45
  expect(r2["_source"]["message"]).to eq('foobar')
46
46
  end
@@ -63,7 +63,7 @@ describe "Versioned indexing", :integration => true do
63
63
  it "should respect the external version" do
64
64
  id = "ev1"
65
65
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
66
- r = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
66
+ r = es.get(:index => 'logstash-index', :id => id, :refresh => true)
67
67
  expect(r["_version"]).to eq(99)
68
68
  expect(r["_source"]["message"]).to eq('foo')
69
69
  end
@@ -71,12 +71,12 @@ describe "Versioned indexing", :integration => true do
71
71
  it "should ignore non-monotonic external version updates" do
72
72
  id = "ev2"
73
73
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
74
- r = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
74
+ r = es.get(:index => 'logstash-index', :id => id, :refresh => true)
75
75
  expect(r["_version"]).to eq(99)
76
76
  expect(r["_source"]["message"]).to eq('foo')
77
77
 
78
78
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "98", "message" => "foo")])
79
- r2 = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
79
+ r2 = es.get(:index => 'logstash-index', :id => id, :refresh => true)
80
80
  expect(r2["_version"]).to eq(99)
81
81
  expect(r2["_source"]["message"]).to eq('foo')
82
82
  end
@@ -84,12 +84,12 @@ describe "Versioned indexing", :integration => true do
84
84
  it "should commit monotonic external version updates" do
85
85
  id = "ev3"
86
86
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
87
- r = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
87
+ r = es.get(:index => 'logstash-index', :id => id, :refresh => true)
88
88
  expect(r["_version"]).to eq(99)
89
89
  expect(r["_source"]["message"]).to eq('foo')
90
90
 
91
91
  subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "100", "message" => "foo")])
92
- r2 = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
92
+ r2 = es.get(:index => 'logstash-index', :id => id, :refresh => true)
93
93
  expect(r2["_version"]).to eq(100)
94
94
  expect(r2["_source"]["message"]).to eq('foo')
95
95
  end
@@ -22,11 +22,12 @@ describe "Update actions using painless scripts", :integration => true, :update_
22
22
  # This can fail if there are no indexes, ignore failure.
23
23
  @es.indices.delete(:index => "*") rescue nil
24
24
  @es.index(
25
- :index => 'logstash-update',
26
- :type => doc_type,
27
- :id => "123",
28
- :body => { :message => 'Test', :counter => 1 }
29
- )
25
+ {
26
+ :index => 'logstash-update',
27
+ :id => '123',
28
+ :body => { :message => 'Test', :counter => 1 },
29
+ :refresh => true
30
+ })
30
31
  @es.indices.refresh
31
32
  end
32
33
 
@@ -46,7 +47,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
46
47
  subject = get_es_output(plugin_parameters)
47
48
  subject.register
48
49
  subject.multi_receive([LogStash::Event.new("count" => 4 )])
49
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
50
+ r = @es.get(:index => 'logstash-update', :id => "123", :refresh => true)
50
51
  expect(r["_source"]["counter"]).to eq(5)
51
52
  end
52
53
  end
@@ -57,7 +58,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
57
58
  subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' })
58
59
  subject.register
59
60
  subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
60
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
61
+ r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true)
61
62
  expect(r["_source"]["message"]).to eq('upsert message')
62
63
  end
63
64
 
@@ -65,7 +66,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
65
66
  subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true })
66
67
  subject.register
67
68
  subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
68
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
69
+ r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true)
69
70
  expect(r["_source"]["message"]).to eq('sample message here')
70
71
  end
71
72
 
@@ -82,7 +83,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
82
83
  subject.register
83
84
 
84
85
  subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
85
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
86
+ r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true)
86
87
  expect(r["_source"]["message"]).to eq('upsert message')
87
88
  end
88
89
 
@@ -91,7 +92,7 @@ describe "Update actions using painless scripts", :integration => true, :update_
91
92
  subject.register
92
93
  subject.multi_receive([LogStash::Event.new("counter" => 1)])
93
94
  @es.indices.refresh
94
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
95
+ r = @es.get(:index => 'logstash-update', :id => "456", :refresh => true)
95
96
  expect(r["_source"]["counter"]).to eq(1)
96
97
  end
97
98
  end
@@ -27,16 +27,21 @@ describe "Unsupported actions testing...", :integration => true do
27
27
  @es.indices.delete(:index => "*") rescue nil
28
28
  # index single doc for update purpose
29
29
  @es.index(
30
- :index => INDEX,
31
- :type => doc_type,
32
- :id => "2",
33
- :body => { :message => 'Test to doc indexing', :counter => 1 }
30
+ {
31
+ :index => INDEX,
32
+ :id => '2',
33
+ :body => { :message => 'Test to doc indexing', :counter => 1 },
34
+ :refresh => true
35
+ }
34
36
  )
37
+
35
38
  @es.index(
36
- :index => INDEX,
37
- :type => doc_type,
38
- :id => "3",
39
- :body => { :message => 'Test to doc deletion', :counter => 2 }
39
+ {
40
+ :index => INDEX,
41
+ :id => '3',
42
+ :body => { :message => 'Test to doc deletion', :counter => 2 },
43
+ :refresh => true
44
+ }
40
45
  )
41
46
  @es.indices.refresh
42
47
  end
@@ -63,12 +68,12 @@ describe "Unsupported actions testing...", :integration => true do
63
68
  rejected_events = events.select { |event| !index_or_update.call(event) }
64
69
 
65
70
  indexed_events.each do |event|
66
- response = @es.get(:index => INDEX, :type => doc_type, :id => event.get("doc_id"), :refresh => true)
71
+ response = @es.get(:index => INDEX, :id => event.get("doc_id"), :refresh => true)
67
72
  expect(response['_source']['message']).to eq(event.get("message"))
68
73
  end
69
74
 
70
75
  rejected_events.each do |event|
71
- expect {@es.get(:index => INDEX, :type => doc_type, :id => event.get("doc_id"), :refresh => true)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
76
+ expect {@es.get(:index => INDEX, :id => event.get("doc_id"), :refresh => true)}.to raise_error(get_expected_error_class)
72
77
  end
73
78
  end
74
79
  end
@@ -22,10 +22,12 @@ describe "Update actions without scripts", :integration => true do
22
22
  # This can fail if there are no indexes, ignore failure.
23
23
  @es.indices.delete(:index => "*") rescue nil
24
24
  @es.index(
25
- :index => 'logstash-update',
26
- :type => doc_type,
27
- :id => "123",
28
- :body => { :message => 'Test', :counter => 1 }
25
+ {
26
+ :index => 'logstash-update',
27
+ :id => '123',
28
+ :body => { :message => 'Test', :counter => 1 },
29
+ :refresh => true
30
+ }
29
31
  )
30
32
  @es.indices.refresh
31
33
  end
@@ -40,14 +42,14 @@ describe "Update actions without scripts", :integration => true do
40
42
  subject = get_es_output({ 'document_id' => "456" } )
41
43
  subject.register
42
44
  subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
43
- expect {@es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
45
+ expect {@es.get(:index => 'logstash-update', :id => '456', :refresh => true)}.to raise_error(get_expected_error_class)
44
46
  end
45
47
 
46
48
  it "should update existing document" do
47
49
  subject = get_es_output({ 'document_id' => "123" })
48
50
  subject.register
49
51
  subject.multi_receive([LogStash::Event.new("message" => "updated message here")])
50
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
52
+ r = @es.get(:index => 'logstash-update', :id => '123', :refresh => true)
51
53
  expect(r["_source"]["message"]).to eq('updated message here')
52
54
  end
53
55
 
@@ -57,7 +59,7 @@ describe "Update actions without scripts", :integration => true do
57
59
  subject = get_es_output({ 'document_id' => "123" })
58
60
  subject.register
59
61
  subject.multi_receive([LogStash::Event.new("data" => "updated message here", "message" => "foo")])
60
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "123", :refresh => true)
62
+ r = @es.get(:index => 'logstash-update', :id => '123', :refresh => true)
61
63
  expect(r["_source"]["data"]).to eq('updated message here')
62
64
  expect(r["_source"]["message"]).to eq('foo')
63
65
  end
@@ -94,7 +96,7 @@ describe "Update actions without scripts", :integration => true do
94
96
  subject = get_es_output({ 'document_id' => "456", 'upsert' => '{"message": "upsert message"}' })
95
97
  subject.register
96
98
  subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
97
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
99
+ r = @es.get(:index => 'logstash-update', :id => '456', :refresh => true)
98
100
  expect(r["_source"]["message"]).to eq('upsert message')
99
101
  end
100
102
 
@@ -102,7 +104,7 @@ describe "Update actions without scripts", :integration => true do
102
104
  subject = get_es_output({ 'document_id' => "456", 'doc_as_upsert' => true })
103
105
  subject.register
104
106
  subject.multi_receive([LogStash::Event.new("message" => "sample message here")])
105
- r = @es.get(:index => 'logstash-update', :type => doc_type, :id => "456", :refresh => true)
107
+ r = @es.get(:index => 'logstash-update', :id => '456', :refresh => true)
106
108
  expect(r["_source"]["message"]).to eq('sample message here')
107
109
  end
108
110
 
data/spec/spec_helper.rb CHANGED
@@ -8,3 +8,11 @@ end
8
8
  RSpec.configure do |config|
9
9
  config.include LogStash::Outputs::ElasticSearch::SpecHelper
10
10
  end
11
+
12
+ # remove once plugin starts consuming elasticsearch-ruby v8 client
13
+ def elastic_ruby_v8_client_available?
14
+ Elasticsearch::Transport
15
+ false
16
+ rescue NameError # NameError: uninitialized constant Elasticsearch::Transport if Elastic Ruby client is not available
17
+ true
18
+ end
@@ -270,6 +270,83 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
270
270
 
271
271
  end
272
272
  end
273
+ context "the 'user-agent' header" do
274
+ let(:pool) { double("pool") }
275
+ let(:compression_level) { 6 }
276
+ let(:base_options) { super().merge( :client_settings => {:compression_level => compression_level}) }
277
+ let(:actions) { [
278
+ ["index", {:_id=>nil, :_index=>"logstash"}, {"message_1"=> message_1}],
279
+ ["index", {:_id=>nil, :_index=>"logstash"}, {"message_2"=> message_2}],
280
+ ["index", {:_id=>nil, :_index=>"logstash"}, {"message_3"=> message_3}],
281
+ ]}
282
+ let(:message_1) { "hello" }
283
+ let(:message_2_size) { 1_000 }
284
+ let(:message_2) { SecureRandom.alphanumeric(message_2_size / 2 ) * 2 }
285
+ let(:message_3_size) { 1_000 }
286
+ let(:message_3) { "m" * message_3_size }
287
+ let(:messages_size) { message_1.size + message_2.size + message_3.size }
288
+ let(:action_overhead) { 42 + 16 + 2 } # header plus doc key size plus new line overhead per action
289
+
290
+ let(:response) do
291
+ response = double("response")
292
+ allow(response).to receive(:code).and_return(response)
293
+ allow(response).to receive(:body).and_return({"errors" => false}.to_json)
294
+ response
295
+ end
296
+
297
+ before(:each) do
298
+ subject.instance_variable_set("@pool", pool)
299
+ end
300
+
301
+ it "carries bulk request's uncompressed size" do
302
+ expect(pool).to receive(:post) do |path, params, body|
303
+ headers = params.fetch(:headers, {})
304
+ expect(headers["X-Elastic-Event-Count"]).to eq("3")
305
+ expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (messages_size + (action_overhead * 3)).to_s
306
+ end.and_return(response)
307
+
308
+ subject.send(:bulk, actions)
309
+ end
310
+ context "without compression" do
311
+ let(:compression_level) { 0 }
312
+ it "carries bulk request's uncompressed size" do
313
+ expect(pool).to receive(:post) do |path, params, body|
314
+ headers = params.fetch(:headers, {})
315
+ expect(headers["X-Elastic-Event-Count"]).to eq("3")
316
+ expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (messages_size + (action_overhead * 3)).to_s
317
+ end.and_return(response)
318
+ subject.send(:bulk, actions)
319
+ end
320
+ end
321
+
322
+ context "with compressed messages over 20MB" do
323
+ let(:message_2_size) { 21_000_000 }
324
+ it "carries bulk request's uncompressed size" do
325
+ # only the first, tiny, message is sent first
326
+ expect(pool).to receive(:post) do |path, params, body|
327
+ headers = params.fetch(:headers, {})
328
+ expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (message_1.size + action_overhead).to_s
329
+ expect(headers["X-Elastic-Event-Count"]).to eq("1")
330
+ end.and_return(response)
331
+
332
+ # huge message_2 is sent afterwards alone
333
+ expect(pool).to receive(:post) do |path, params, body|
334
+ headers = params.fetch(:headers, {})
335
+ expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (message_2.size + action_overhead).to_s
336
+ expect(headers["X-Elastic-Event-Count"]).to eq("1")
337
+ end.and_return(response)
338
+
339
+ # finally medium message_3 is sent alone as well
340
+ expect(pool).to receive(:post) do |path, params, body|
341
+ headers = params.fetch(:headers, {})
342
+ expect(headers["X-Elastic-Uncompressed-Request-Length"]).to eq (message_3.size + action_overhead).to_s
343
+ expect(headers["X-Elastic-Event-Count"]).to eq("1")
344
+ end.and_return(response)
345
+
346
+ subject.send(:bulk, actions)
347
+ end
348
+ end
349
+ end
273
350
  end
274
351
 
275
352
  describe "sniffing" do
@@ -777,7 +777,7 @@ describe LogStash::Outputs::ElasticSearch do
777
777
  end
778
778
 
779
779
  before(:each) do
780
- allow(subject.client).to receive(:bulk_send).with(instance_of(StringIO), instance_of(Array)) do |stream, actions|
780
+ allow(subject.client).to receive(:bulk_send).with(instance_of(StringIO), instance_of(Array), instance_of(Hash)) do |stream, actions, headers|
781
781
  expect( stream.string ).to include '"foo":"bar1"'
782
782
  expect( stream.string ).to include '"foo":"bar2"'
783
783
  end.and_return(bulk_response, {"errors"=>false}) # let's make it go away (second call) to not retry indefinitely
@@ -915,7 +915,12 @@ describe LogStash::Outputs::ElasticSearch do
915
915
  allow(elasticsearch_output_instance.client.pool).to receive(:post) do |path, params, body|
916
916
  if body.length > max_bytes
917
917
  max_bytes *= 2 # ensure a successful retry
918
- double("Response", :code => 413, :body => "")
918
+ raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
919
+ 413,
920
+ "test-url",
921
+ body,
922
+ ""
923
+ )
919
924
  else
920
925
  double("Response", :code => 200, :body => '{"errors":false,"items":[{"index":{"status":200,"result":"created"}}]}')
921
926
  end
@@ -4,7 +4,6 @@ require_relative "../../../spec/es_spec_helper"
4
4
  describe "whitelisting error types in expected behavior" do
5
5
  let(:template) { '{"template" : "not important, will be updated by :index"}' }
6
6
  let(:event1) { LogStash::Event.new("somevalue" => 100, "@timestamp" => "2014-11-17T20:37:17.223Z") }
7
- let(:action1) { ["index", {:_id=>1, :routing=>nil, :_index=>"logstash-2014.11.17", :_type=> doc_type }, event1] }
8
7
  let(:settings) { {"manage_template" => true, "index" => "logstash-2014.11.17", "template_overwrite" => true, "hosts" => get_host_port() } }
9
8
 
10
9
  subject { LogStash::Outputs::ElasticSearch.new(settings) }
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-elasticsearch
3
3
  version: !ruby/object:Gem::Version
4
- version: 11.22.11
4
+ version: 11.22.13
5
5
  platform: java
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2025-01-15 00:00:00.000000000 Z
11
+ date: 2025-08-01 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -326,9 +326,7 @@ files:
326
326
  - spec/integration/outputs/update_spec.rb
327
327
  - spec/spec_helper.rb
328
328
  - spec/support/elasticsearch/api/actions/delete_ilm_policy.rb
329
- - spec/support/elasticsearch/api/actions/get_alias.rb
330
329
  - spec/support/elasticsearch/api/actions/get_ilm_policy.rb
331
- - spec/support/elasticsearch/api/actions/put_alias.rb
332
330
  - spec/support/elasticsearch/api/actions/put_ilm_policy.rb
333
331
  - spec/unit/http_client_builder_spec.rb
334
332
  - spec/unit/outputs/elasticsearch/data_stream_support_spec.rb
@@ -415,9 +413,7 @@ test_files:
415
413
  - spec/integration/outputs/update_spec.rb
416
414
  - spec/spec_helper.rb
417
415
  - spec/support/elasticsearch/api/actions/delete_ilm_policy.rb
418
- - spec/support/elasticsearch/api/actions/get_alias.rb
419
416
  - spec/support/elasticsearch/api/actions/get_ilm_policy.rb
420
- - spec/support/elasticsearch/api/actions/put_alias.rb
421
417
  - spec/support/elasticsearch/api/actions/put_ilm_policy.rb
422
418
  - spec/unit/http_client_builder_spec.rb
423
419
  - spec/unit/outputs/elasticsearch/data_stream_support_spec.rb
@@ -1,18 +0,0 @@
1
- # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
2
- # or more contributor license agreements. Licensed under the Elastic License;
3
- # you may not use this file except in compliance with the Elastic License.
4
-
5
- module Elasticsearch
6
- module API
7
- module Actions
8
-
9
- # Retrieve the list of index lifecycle management policies
10
- def get_alias(arguments={})
11
- method = HTTP_GET
12
- path = Utils.__pathify '_alias', Utils.__escape(arguments[:name])
13
- params = {}
14
- perform_request(method, path, params, nil).body
15
- end
16
- end
17
- end
18
- end
@@ -1,24 +0,0 @@
1
- # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
2
- # or more contributor license agreements. Licensed under the Elastic License;
3
- # you may not use this file except in compliance with the Elastic License.
4
-
5
- module Elasticsearch
6
- module API
7
- module Actions
8
-
9
- # @option arguments [String] :name The name of the alias (*Required*)
10
- # @option arguments [Hash] :The alias definition(*Required*)
11
-
12
- def put_alias(arguments={})
13
- raise ArgumentError, "Required argument 'name' missing" unless arguments[:name]
14
- raise ArgumentError, "Required argument 'body' missing" unless arguments[:body]
15
- method = HTTP_PUT
16
- path = Utils.__pathify Utils.__escape(arguments[:name])
17
-
18
- params = Utils.__validate_and_extract_params arguments
19
- body = arguments[:body]
20
- perform_request(method, path, params, body.to_json).body
21
- end
22
- end
23
- end
24
- end