logstash-output-elasticsearch 11.18.0-java → 11.20.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: b42642e174e8a6f0bf30c67cbdf25e27cc8197f2aeb56cb2268fb11f65426a03
4
- data.tar.gz: 26ca32e908ef3d42ec4281259ea9a6bf31746031b12c14ad08fbfd189c069c3b
3
+ metadata.gz: d6658ce0c72fc994e737d823dc03e53abf84d00c2692450173684fa9fc8a5865
4
+ data.tar.gz: 466cc891690d71b25df8992e93c7df1b64a09ab68b8e109fe4c4e8cf0faf25dc
5
5
  SHA512:
6
- metadata.gz: dd0b5731beb34c4e331a8ea52252c4b53af5d8c62ffb97c106eab961709d0d884a02a228d48a236eee635629d23a6c2243528a20e3f1f7368f609e232f26d7f7
7
- data.tar.gz: 79e980b61c3bdd1b3339f1f03eccff4c52fe596fc34e6069fac1239a5c67e17357d118a95d9536bc1937f930f1c60f935858c236447f5b87f8517ec9b79ef53e
6
+ metadata.gz: 8116f03d6cd876a5e6997c07e3a12119c667e30061ca44bd8cdb70987d75659f4cd02bdb6bce15413c4e5c2d7fdac639aa10f95142006e85dcf2e9d248eedc85
7
+ data.tar.gz: 67a56fc6ea82742bbc2605fb09708f017d199fb39a1b59015ccd4b5ffdd8fd9a917dde68d7f2ff23448ef78eb4006af40e1521a8c754d0d38ace29a0a267109b
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
1
+ ## 11.20.0
2
+ - Changed the register to initiate pipeline shutdown upon bootstrap failure instead of simply logging the error [#1151](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1151)
3
+
4
+ ## 11.19.0
5
+ - Added `filter_path` to bulk requests to reduce the size of responses from elasticsearch [#1154](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1154)
6
+
1
7
  ## 11.18.0
2
8
  - Added request header `Elastic-Api-Version` for serverless [#1147](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1147)
3
9
 
data/docs/index.asciidoc CHANGED
@@ -423,10 +423,12 @@ Elasticsearch {ref}/security-api-create-api-key.html[Create API key API].
423
423
  ===== `bulk_path`
424
424
 
425
425
  * Value type is <<string,string>>
426
- * There is no default value for this setting.
426
+ * The default value for this setting is `/_bulk?filter_path=errors,items.*.error,items.*.status`
427
427
 
428
428
  HTTP Path to perform the _bulk requests to
429
- this defaults to a concatenation of the path parameter and "_bulk"
429
+ * This default bulk path is the concatenation of the value of the `path` parameter and `/_bulk?filter_path=errors,items.*.error,items.*.status`
430
+ * The `filter_path` query parameter is appended to the bulk path to reduce the size of the payload between Logstash and Elasticsearch. However, if a custom `filter_path` query parameter is included in the `bulk_path` setting, then that value will be used.
431
+
430
432
 
431
433
  [id="plugins-{type}s-{plugin}-ca_trusted_fingerprint"]
432
434
  ===== `ca_trusted_fingerprint`
@@ -145,12 +145,12 @@ module LogStash module Outputs class ElasticSearch
145
145
  # @note assumes to be running AFTER {after_successful_connection} completed, due ES version checks
146
146
  # @return [Gem::Version] if ES supports DS nil (or raise) otherwise
147
147
  def assert_es_version_supports_data_streams
148
- fail 'no last_es_version' unless last_es_version # assert - should not happen
148
+ raise LogStash::ConfigurationError, 'no last_es_version' unless last_es_version # assert - should not happen
149
149
  es_version = ::Gem::Version.create(last_es_version)
150
150
  if es_version < ::Gem::Version.create(DATA_STREAMS_ORIGIN_ES_VERSION)
151
151
  @logger.error "Elasticsearch version does not support data streams, Logstash might end up writing to an index", es_version: es_version.version
152
152
  # NOTE: when switching to synchronous check from register, this should be a ConfigurationError
153
- raise LogStash::Error, "A data_stream configuration is only supported since Elasticsearch #{DATA_STREAMS_ORIGIN_ES_VERSION} " +
153
+ raise LogStash::ConfigurationError, "A data_stream configuration is only supported since Elasticsearch #{DATA_STREAMS_ORIGIN_ES_VERSION} " +
154
154
  "(detected version #{es_version.version}), please upgrade your cluster"
155
155
  end
156
156
  es_version # return truthy
@@ -28,7 +28,12 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
28
28
  @response_code == 403
29
29
  end
30
30
 
31
+ def too_many_requests?
32
+ @response_code == 429
33
+ end
34
+
31
35
  end
36
+
32
37
  class HostUnreachableError < Error;
33
38
  attr_reader :original_error, :url
34
39
 
@@ -69,7 +74,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
69
74
  @adapter = adapter
70
75
  @metric = options[:metric]
71
76
  @initial_urls = initial_urls
72
-
77
+
73
78
  raise ArgumentError, "No URL Normalizer specified!" unless options[:url_normalizer]
74
79
  @url_normalizer = options[:url_normalizer]
75
80
  DEFAULT_OPTIONS.merge(options).tap do |merged|
@@ -159,7 +164,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
159
164
  :error_message => e.message,
160
165
  :class => e.class.name,
161
166
  :backtrace => e.backtrace
162
- )
167
+ )
163
168
  end
164
169
  end
165
170
  end
@@ -197,11 +202,11 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
197
202
  sniff(nodes)
198
203
  end
199
204
  end
200
-
205
+
201
206
  def major_version(version_string)
202
207
  version_string.split('.').first.to_i
203
208
  end
204
-
209
+
205
210
  def sniff(nodes)
206
211
  nodes.map do |id,info|
207
212
  # Skip master-only nodes
@@ -360,7 +365,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
360
365
 
361
366
  def update_urls(new_urls)
362
367
  return if new_urls.nil?
363
-
368
+
364
369
  # Normalize URLs
365
370
  new_urls = new_urls.map(&method(:normalize_url))
366
371
 
@@ -388,14 +393,14 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
388
393
  if state_changes[:removed].size > 0 || state_changes[:added].size > 0
389
394
  logger.info? && logger.info("Elasticsearch pool URLs updated", :changes => state_changes)
390
395
  end
391
-
396
+
392
397
  # Run an inline healthcheck anytime URLs are updated
393
398
  # This guarantees that during startup / post-startup
394
399
  # sniffing we don't have idle periods waiting for the
395
400
  # periodic sniffer to allow new hosts to come online
396
- healthcheck!
401
+ healthcheck!
397
402
  end
398
-
403
+
399
404
  def size
400
405
  @state_mutex.synchronize { @url_info.size }
401
406
  end
@@ -177,6 +177,7 @@ module LogStash; module Outputs; class ElasticSearch;
177
177
 
178
178
  def bulk_send(body_stream, batch_actions)
179
179
  params = compression_level? ? {:headers => {"Content-Encoding" => "gzip"}} : {}
180
+
180
181
  response = @pool.post(@bulk_path, params, body_stream.string)
181
182
 
182
183
  @bulk_response_metrics.increment(response.code.to_s)
@@ -33,9 +33,9 @@ module LogStash; module Outputs; class ElasticSearch;
33
33
  end
34
34
 
35
35
  common_options[:bulk_path] = if params["bulk_path"]
36
- dedup_slashes("/#{params["bulk_path"]}")
36
+ resolve_filter_path(dedup_slashes("/#{params["bulk_path"]}"))
37
37
  else
38
- dedup_slashes("/#{params["path"]}/_bulk")
38
+ resolve_filter_path(dedup_slashes("/#{params["path"]}/_bulk"))
39
39
  end
40
40
 
41
41
  common_options[:sniffing_path] = if params["sniffing_path"]
@@ -197,5 +197,16 @@ module LogStash; module Outputs; class ElasticSearch;
197
197
  def self.dedup_slashes(url)
198
198
  url.gsub(/\/+/, "/")
199
199
  end
200
+
201
+ # Set a `filter_path` query parameter if it is not already set to be
202
+ # `filter_path=errors,items.*.error,items.*.status` to reduce the payload between Logstash and Elasticsearch
203
+ def self.resolve_filter_path(url)
204
+ return url if url.match?(/(?:[&|?])filter_path=/)
205
+ ("#{url}#{query_param_separator(url)}filter_path=errors,items.*.error,items.*.status")
206
+ end
207
+
208
+ def self.query_param_separator(url)
209
+ url.match?(/\?[^\s#]+/) ? '&' : '?'
210
+ end
200
211
  end
201
212
  end; end; end
@@ -37,7 +37,7 @@ module LogStash; module Outputs; class ElasticSearch
37
37
  template_path = default_template_path(es_major_version, ecs_compatibility)
38
38
  read_template_file(template_path)
39
39
  rescue => e
40
- fail "Failed to load default template for Elasticsearch v#{es_major_version} with ECS #{ecs_compatibility}; caused by: #{e.inspect}"
40
+ raise LogStash::ConfigurationError, "Failed to load default template for Elasticsearch v#{es_major_version} with ECS #{ecs_compatibility}; caused by: #{e.inspect}"
41
41
  end
42
42
 
43
43
  def self.install(client, template_endpoint, template_name, template, template_overwrite)
@@ -99,9 +99,11 @@ module LogStash; module Outputs; class ElasticSearch
99
99
  end
100
100
 
101
101
  def self.read_template_file(template_path)
102
- raise ArgumentError, "Template file '#{template_path}' could not be found" unless ::File.exists?(template_path)
102
+ raise LogStash::ConfigurationError, "Template file '#{template_path}' could not be found" unless ::File.exists?(template_path)
103
103
  template_data = ::IO.read(template_path)
104
104
  LogStash::Json.load(template_data)
105
+ rescue => e
106
+ raise LogStash::ConfigurationError, "Failed to load template file '#{template_path}': #{e.message}"
105
107
  end
106
108
 
107
109
  def self.template_endpoint(plugin)
@@ -322,12 +322,29 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
322
322
  @bulk_request_metrics = metric.namespace(:bulk_requests)
323
323
  @document_level_metrics = metric.namespace(:documents)
324
324
 
325
+ @shutdown_from_finish_register = Concurrent::AtomicBoolean.new(false)
325
326
  @after_successful_connection_thread = after_successful_connection do
326
327
  begin
327
328
  finish_register
328
329
  true # thread.value
330
+ rescue LogStash::ConfigurationError, LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
331
+ return e if pipeline_shutdown_requested?
332
+
333
+ # retry when 429
334
+ @logger.debug("Received a 429 status code during registration. Retrying..") && retry if too_many_requests?(e)
335
+
336
+ # shut down pipeline
337
+ if execution_context&.agent.respond_to?(:stop_pipeline)
338
+ details = { message: e.message, exception: e.class }
339
+ details[:backtrace] = e.backtrace if @logger.debug?
340
+ @logger.error("Failed to bootstrap. Pipeline \"#{execution_context.pipeline_id}\" is going to shut down", details)
341
+
342
+ @shutdown_from_finish_register.make_true
343
+ execution_context.agent.stop_pipeline(execution_context.pipeline_id)
344
+ end
345
+
346
+ e
329
347
  rescue => e
330
- # we do not want to halt the thread with an exception as that has consequences for LS
331
348
  e # thread.value
332
349
  ensure
333
350
  @after_successful_connection_done.make_true
@@ -450,7 +467,11 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
450
467
  private
451
468
 
452
469
  def stop_after_successful_connection_thread
453
- @after_successful_connection_thread.join unless @after_successful_connection_thread.nil?
470
+ # avoid deadlock when finish_register calling execution_context.agent.stop_pipeline
471
+ # stop_pipeline triggers plugin close and the plugin close waits for after_successful_connection_thread to join
472
+ return if @shutdown_from_finish_register&.true?
473
+
474
+ @after_successful_connection_thread.join if @after_successful_connection_thread&.alive?
454
475
  end
455
476
 
456
477
  # Convert the event into a 3-tuple of action, params and event hash
@@ -599,6 +620,7 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
599
620
  details = { message: e.message, exception: e.class, backtrace: e.backtrace }
600
621
  details[:body] = e.response_body if e.respond_to?(:response_body)
601
622
  @logger.error("Failed to install template", details)
623
+ raise e if register_termination_error?(e)
602
624
  end
603
625
 
604
626
  def setup_ecs_compatibility_related_defaults
@@ -407,5 +407,14 @@ module LogStash; module PluginMixins; module ElasticSearch
407
407
  return val if rest_keys.empty? || val == nil
408
408
  dig_value(val, *rest_keys)
409
409
  end
410
+
411
+ def register_termination_error?(e)
412
+ e.is_a?(LogStash::ConfigurationError) || e.is_a?(LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError)
413
+ end
414
+
415
+ def too_many_requests?(e)
416
+ e.is_a?(LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError) &&
417
+ e.too_many_requests?
418
+ end
410
419
  end
411
420
  end; end; end
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-output-elasticsearch'
3
- s.version = '11.18.0'
3
+ s.version = '11.20.0'
4
4
  s.licenses = ['apache-2.0']
5
5
  s.summary = "Stores logs in Elasticsearch"
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -156,7 +156,7 @@ describe "indexing" do
156
156
  let(:config) { "not implemented" }
157
157
  let(:events) { event_count.times.map { event }.to_a }
158
158
  subject { LogStash::Outputs::ElasticSearch.new(config) }
159
-
159
+ let(:filter_path) { "filter_path=errors,items.*.error,items.*.status"}
160
160
  let(:es_url) { "http://#{get_host_port}" }
161
161
  let(:index_url) { "#{es_url}/#{index}" }
162
162
 
@@ -178,7 +178,7 @@ describe "indexing" do
178
178
  subject.do_close
179
179
  end
180
180
 
181
- shared_examples "an indexer" do |secure|
181
+ shared_examples "an indexer" do |secure, expected_path|
182
182
  before(:each) do
183
183
  host_unreachable_error_class = LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError
184
184
  allow(host_unreachable_error_class).to receive(:new).with(any_args).and_wrap_original do |m, original, url|
@@ -212,13 +212,13 @@ describe "indexing" do
212
212
  expect(doc["_index"]).to eq(index)
213
213
  end
214
214
  end
215
-
215
+
216
216
  it "sets the correct content-type header" do
217
217
  expected_manticore_opts = {:headers => {"Content-Type" => "application/json"}, :body => anything}
218
218
  if secure
219
219
  expected_manticore_opts = {
220
- :headers => {"Content-Type" => "application/json"},
221
- :body => anything,
220
+ :headers => {"Content-Type" => "application/json"},
221
+ :body => anything,
222
222
  :auth => {
223
223
  :user => user,
224
224
  :password => password,
@@ -230,6 +230,20 @@ describe "indexing" do
230
230
  and_call_original
231
231
  subject.multi_receive(events)
232
232
  end
233
+
234
+ it "sets the bulk path URL and filter path parameter correctly" do
235
+ expect(subject.client.pool.adapter.client).to receive(:send).
236
+ with(anything, expected_path != nil ? expected_path : anything, anything).at_least(:once).and_call_original
237
+ subject.multi_receive(events)
238
+ end
239
+
240
+ it "receives a filtered response" do
241
+ expect(subject.client).to receive(:join_bulk_responses).
242
+ with([{"errors"=>false, "items"=>[{"index"=>{"status"=>201}}]}]).
243
+ and_call_original
244
+ subject.multi_receive([event])
245
+ end
246
+
233
247
  end
234
248
 
235
249
  shared_examples "PKIX path failure" do
@@ -269,6 +283,65 @@ describe "indexing" do
269
283
  it_behaves_like("an indexer")
270
284
  end
271
285
 
286
+ describe "an indexer with custom bulk path", :integration => true do
287
+ let(:bulk_path) { "/_bulk?routing=true"}
288
+ let(:config) {
289
+ {
290
+ "hosts" => get_host_port,
291
+ "index" => index,
292
+ "http_compression" => false,
293
+ "bulk_path" => bulk_path
294
+ }
295
+ }
296
+ it_behaves_like("an indexer", false) do
297
+ let (:expected_path) { "#{es_url}#{bulk_path}&#{filter_path}" }
298
+ end
299
+ end
300
+
301
+ describe "an indexer with filter path as second parameter", :integration => true do
302
+ let(:bulk_path) { "/_bulk?routing=true&#{filter_path}"}
303
+ let(:config) {
304
+ {
305
+ "hosts" => get_host_port,
306
+ "index" => index,
307
+ "http_compression" => false,
308
+ "bulk_path" => bulk_path
309
+ }
310
+ }
311
+ it_behaves_like("an indexer", false) do
312
+ let (:expected_path) { "#{es_url}/#{bulk_path}" }
313
+ end
314
+ end
315
+
316
+ describe "an indexer with filter path as first parameter", :integration => true do
317
+ let(:bulk_path) { "/_bulk?#{filter_path}&routing=true"}
318
+ let(:config) {
319
+ {
320
+ "hosts" => get_host_port,
321
+ "index" => index,
322
+ "http_compression" => false,
323
+ "bulk_path" => bulk_path
324
+ }
325
+ }
326
+ it_behaves_like("an indexer", false) do
327
+ let (:expected_path) { "#{es_url}/#{bulk_path}" }
328
+ end
329
+ end
330
+
331
+ describe "an indexer with the standard bulk path", :integration => true do
332
+ let(:config) {
333
+ {
334
+ "hosts" => get_host_port,
335
+ "index" => index,
336
+ "http_compression" => false
337
+ }
338
+ }
339
+ it_behaves_like("an indexer", false) do
340
+ let (:expected_path) { "#{es_url}/_bulk?#{filter_path}" }
341
+ end
342
+
343
+ end
344
+
272
345
  describe "an indexer with no type value set (default to doc)", :integration => true do
273
346
  let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
274
347
  let(:config) {
@@ -296,7 +369,7 @@ describe "indexing" do
296
369
  "index" => index,
297
370
  "http_compression" => false
298
371
  }
299
- end
372
+ end
300
373
 
301
374
  let(:curl_opts) { "-u #{user}:#{password}" }
302
375
 
@@ -33,11 +33,11 @@ describe "elasticsearch is down on startup", :integration => true do
33
33
  end
34
34
 
35
35
  it 'should ingest events when Elasticsearch recovers before documents are sent' do
36
- allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_es_version).and_raise(
36
+ allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_root_path).with(any_args).and_raise(
37
37
  ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError.new StandardError.new("TEST: before docs are sent"), 'http://test.es/'
38
38
  )
39
39
  subject.register
40
- allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_es_version).and_return(ESHelper.es_version)
40
+ allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_root_path).with(any_args).and_call_original
41
41
  subject.multi_receive([event1, event2])
42
42
  @es.indices.refresh
43
43
  r = @es.search(index: 'logstash-*')
@@ -45,13 +45,13 @@ describe "elasticsearch is down on startup", :integration => true do
45
45
  end
46
46
 
47
47
  it 'should ingest events when Elasticsearch recovers after documents are sent' do
48
- allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_es_version).and_raise(
48
+ allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_root_path).with(any_args).and_raise(
49
49
  ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError.new StandardError.new("TEST: after docs are sent"), 'http://test.es/'
50
50
  )
51
51
  subject.register
52
52
  Thread.new do
53
53
  sleep 4
54
- allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_es_version).and_return(ESHelper.es_version)
54
+ allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:get_root_path).with(any_args).and_call_original
55
55
  end
56
56
  subject.multi_receive([event1, event2])
57
57
  @es.indices.refresh
@@ -36,7 +36,17 @@ describe LogStash::Outputs::ElasticSearch::HttpClientBuilder do
36
36
  end
37
37
  end
38
38
 
39
- describe "healthcheck_path" do
39
+ describe "bulk_path" do
40
+ let (:filter_path) {"filter_path=errors,items.*.error,items.*.status"}
41
+
42
+ shared_examples("filter_path added to bulk path appropriately") do
43
+ it "sets the bulk_path option to the expected bulk path" do
44
+ expect(described_class).to receive(:create_http_client) do |options|
45
+ expect(options[:bulk_path]).to eq(expected_bulk_path)
46
+ end
47
+ described_class.build(logger, hosts, options)
48
+ end
49
+ end
40
50
 
41
51
  context "when setting bulk_path" do
42
52
  let(:bulk_path) { "/meh" }
@@ -44,21 +54,31 @@ describe LogStash::Outputs::ElasticSearch::HttpClientBuilder do
44
54
 
45
55
  context "when using path" do
46
56
  let(:options) { super().merge("path" => "/path") }
47
- it "ignores the path setting" do
48
- expect(described_class).to receive(:create_http_client) do |options|
49
- expect(options[:bulk_path]).to eq(bulk_path)
50
- end
51
- described_class.build(logger, hosts, options)
52
- end
57
+ let(:expected_bulk_path) { "#{bulk_path}?#{filter_path}" }
58
+
59
+ it_behaves_like "filter_path added to bulk path appropriately"
60
+ end
61
+
62
+ context "when setting a filter path as first parameter" do
63
+ let (:filter_path) {"filter_path=error"}
64
+ let(:bulk_path) { "/meh?#{filter_path}&routing=true" }
65
+ let(:expected_bulk_path) { bulk_path }
66
+
67
+ it_behaves_like "filter_path added to bulk path appropriately"
68
+ end
69
+
70
+ context "when setting a filter path as second parameter" do
71
+ let (:filter_path) {"filter_path=error"}
72
+ let(:bulk_path) { "/meh?routing=true&#{filter_path}" }
73
+ let(:expected_bulk_path) { bulk_path }
74
+
75
+ it_behaves_like "filter_path added to bulk path appropriately"
53
76
  end
77
+
54
78
  context "when not using path" do
79
+ let(:expected_bulk_path) { "#{bulk_path}?#{filter_path}"}
55
80
 
56
- it "uses the bulk_path setting" do
57
- expect(described_class).to receive(:create_http_client) do |options|
58
- expect(options[:bulk_path]).to eq(bulk_path)
59
- end
60
- described_class.build(logger, hosts, options)
61
- end
81
+ it_behaves_like "filter_path added to bulk path appropriately"
62
82
  end
63
83
  end
64
84
 
@@ -66,25 +86,20 @@ describe LogStash::Outputs::ElasticSearch::HttpClientBuilder do
66
86
 
67
87
  context "when using path" do
68
88
  let(:path) { "/meh" }
89
+ let(:expected_bulk_path) { "#{path}/_bulk?#{filter_path}"}
69
90
  let(:options) { super().merge("path" => path) }
70
- it "sets bulk_path to path+_bulk" do
71
- expect(described_class).to receive(:create_http_client) do |options|
72
- expect(options[:bulk_path]).to eq("#{path}/_bulk")
73
- end
74
- described_class.build(logger, hosts, options)
75
- end
91
+
92
+ it_behaves_like "filter_path added to bulk path appropriately"
76
93
  end
77
94
 
78
95
  context "when not using path" do
79
- it "sets the bulk_path to _bulk" do
80
- expect(described_class).to receive(:create_http_client) do |options|
81
- expect(options[:bulk_path]).to eq("/_bulk")
82
- end
83
- described_class.build(logger, hosts, options)
84
- end
96
+ let(:expected_bulk_path) { "/_bulk?#{filter_path}"}
97
+
98
+ it_behaves_like "filter_path added to bulk path appropriately"
85
99
  end
86
100
  end
87
101
  end
102
+
88
103
  describe "healthcheck_path" do
89
104
  context "when setting healthcheck_path" do
90
105
  let(:healthcheck_path) { "/meh" }
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-elasticsearch
3
3
  version: !ruby/object:Gem::Version
4
- version: 11.18.0
4
+ version: 11.20.0
5
5
  platform: java
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2023-09-25 00:00:00.000000000 Z
11
+ date: 2023-11-08 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement