logstash-output-elasticsearch 10.8.1-java → 11.0.0-java

Files changed (33)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +24 -0
  3. data/README.md +1 -1
  4. data/docs/index.asciidoc +282 -114
  5. data/lib/logstash/outputs/elasticsearch.rb +125 -65
  6. data/lib/logstash/outputs/elasticsearch/data_stream_support.rb +233 -0
  7. data/lib/logstash/outputs/elasticsearch/http_client.rb +59 -21
  8. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +47 -34
  9. data/lib/logstash/outputs/elasticsearch/ilm.rb +11 -12
  10. data/lib/logstash/outputs/elasticsearch/license_checker.rb +19 -22
  11. data/lib/logstash/outputs/elasticsearch/template_manager.rb +3 -5
  12. data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb +157 -153
  13. data/lib/logstash/plugin_mixins/elasticsearch/common.rb +80 -60
  14. data/logstash-output-elasticsearch.gemspec +2 -2
  15. data/spec/es_spec_helper.rb +3 -6
  16. data/spec/integration/outputs/data_stream_spec.rb +61 -0
  17. data/spec/integration/outputs/ilm_spec.rb +22 -18
  18. data/spec/integration/outputs/ingest_pipeline_spec.rb +4 -2
  19. data/spec/integration/outputs/retry_spec.rb +14 -2
  20. data/spec/integration/outputs/sniffer_spec.rb +0 -1
  21. data/spec/spec_helper.rb +14 -0
  22. data/spec/unit/http_client_builder_spec.rb +9 -9
  23. data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +542 -0
  24. data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +1 -0
  25. data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +27 -13
  26. data/spec/unit/outputs/elasticsearch/http_client_spec.rb +59 -41
  27. data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +1 -3
  28. data/spec/unit/outputs/elasticsearch_proxy_spec.rb +4 -5
  29. data/spec/unit/outputs/elasticsearch_spec.rb +228 -38
  30. data/spec/unit/outputs/elasticsearch_ssl_spec.rb +1 -2
  31. data/spec/unit/outputs/error_whitelist_spec.rb +4 -3
  32. data/spec/unit/outputs/license_check_spec.rb +0 -16
  33. metadata +23 -16
data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb
@@ -1,5 +1,6 @@
  require "logstash/devutils/rspec/spec_helper"
  require "logstash/outputs/elasticsearch/http_client"
+ require 'cabin'

  describe LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter do
  let(:logger) { Cabin::Channel.get }
data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb
@@ -1,6 +1,6 @@
  require "logstash/devutils/rspec/spec_helper"
  require "logstash/outputs/elasticsearch/http_client"
- require "json"
+ require 'cabin'

  describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do
  let(:logger) { Cabin::Channel.get }
@@ -9,7 +9,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do
  let(:options) { {:resurrect_delay => 2, :url_normalizer => proc {|u| u}} } # Shorten the delay a bit to speed up tests
  let(:es_node_versions) { [ "0.0.0" ] }
  let(:oss) { true }
- let(:valid_license) { true }
+ let(:license_status) { 'active' }

  subject { described_class.new(logger, adapter, initial_urls, options) }

@@ -26,8 +26,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do
  allow(::Manticore::Client).to receive(:new).and_return(manticore_double)

  allow(subject).to receive(:get_es_version).with(any_args).and_return(*es_node_versions)
- allow(subject.license_checker).to receive(:oss?).and_return(oss)
- allow(subject.license_checker).to receive(:valid_es_license?).and_return(valid_license)
+ allow(subject.license_checker).to receive(:license_status).and_return(license_status)
  end

  after do
@@ -68,7 +67,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do

  context "and setting healthcheck_path" do
  let(:healthcheck_path) { "/my/health" }
- let(:options) { super.merge(:healthcheck_path => healthcheck_path) }
+ let(:options) { super().merge(:healthcheck_path => healthcheck_path) }
  it "performs the healthcheck to the healthcheck_path" do
  expect(adapter).to receive(:perform_request) do |url, method, req_path, _, _|
  expect(method).to eq(:head)
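
Note on the recurring `super` → `super()` edits in this and the following spec files: RSpec's `let` and `subject` helpers are defined via `define_method`, and a bare `super` (implicit argument passing) is rejected at runtime from such methods on current Ruby implementations, so the zero-argument form has to be written out explicitly. A minimal, self-contained illustration (not taken from the plugin):

    require 'rspec/autorun'

    RSpec.describe "super() inside let" do
      let(:options) { { base: true } }

      context "when overriding the outer definition" do
        # a bare `super` here fails with "implicit argument passing of super from
        # method defined by define_method() is not supported"
        let(:options) { super().merge(extra: true) }

        it "merges with the outer value" do
          expect(options).to eq(base: true, extra: true)
        end
      end
    end
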
@@ -130,7 +129,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do
  end

  context "when enabled" do
- let(:options) { super.merge(:sniffing => true)}
+ let(:options) { super().merge(:sniffing => true)}

  it "should start the sniffer" do
  expect(subject.sniffer_alive?).to eql(true)
@@ -247,7 +246,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do
  end

  let(:options) do
- super.merge(:license_checker => license_checker)
+ super().merge(:license_checker => license_checker)
  end

  context 'when LicenseChecker#acceptable_license? returns false' do
@@ -283,21 +282,21 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do
  let(:oss) { false }

  context "if ES doesn't return a valid license" do
- let(:valid_license) { false }
+ let(:license_status) { nil }

- it "marks the url as active" do
+ it "marks the url as dead" do
  subject.update_initial_urls
- expect(subject.alive_urls_count).to eq(1)
+ expect(subject.alive_urls_count).to eq(0)
  end

  it "logs a warning" do
- expect(subject.license_checker).to receive(:log_license_deprecation_warn).once
+ expect(subject.license_checker).to receive(:warn_no_license).once.and_call_original
  subject.update_initial_urls
  end
  end

  context "if ES returns a valid license" do
- let(:valid_license) { true }
+ let(:license_status) { 'active' }

  it "marks the url as active" do
  subject.update_initial_urls
@@ -305,7 +304,22 @@ describe LogStash::Outputs::ElasticSearch::HttpClient::Pool do
  end

  it "does not log a warning" do
- expect(subject.license_checker).to_not receive(:log_license_deprecation_warn)
+ expect(subject.license_checker).to_not receive(:warn_no_license)
+ expect(subject.license_checker).to_not receive(:warn_invalid_license)
+ subject.update_initial_urls
+ end
+ end
+
+ context "if ES returns an invalid license" do
+ let(:license_status) { 'invalid' }
+
+ it "marks the url as active" do
+ subject.update_initial_urls
+ expect(subject.alive_urls_count).to eq(1)
+ end
+
+ it "logs a warning" do
+ expect(subject.license_checker).to receive(:warn_invalid_license).and_call_original
  subject.update_initial_urls
  end
  end
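
Note: license handling in 11.0.0 moves from the old `oss?` / `valid_es_license?` pair to a single `license_status` value, with the behaviour the expectations above pin down: no license at all (nil) marks the URL dead and calls `warn_no_license`, a non-active status keeps the URL alive but calls `warn_invalid_license`, and 'active' stays silent. A self-contained sketch of that decision logic; only the method names `license_status`, `acceptable_license?`, `warn_no_license` and `warn_invalid_license` come from the diff, everything else (class shape, log messages) is an assumption:

    require 'cabin'

    # Illustrative only -- not the plugin's LicenseChecker source.
    class SketchLicenseChecker
      def initialize(logger, license_status)
        @logger = logger
        @license_status = license_status # 'active', 'invalid', ... or nil when no license was returned
      end

      attr_reader :license_status

      def acceptable_license?(url)
        case license_status
        when 'active'
          true
        when nil
          warn_no_license(url)
          false # the pool marks this URL as dead
        else # 'invalid', 'expired', ...
          warn_invalid_license(url)
          true # the URL stays alive, but a warning is logged
        end
      end

      def warn_no_license(url)
        @logger.error("could not fetch an Elasticsearch license", :url => url.to_s)
      end

      def warn_invalid_license(url)
        @logger.warn("Elasticsearch license is not active, this may lead to unexpected behaviour", :url => url.to_s, :status => license_status)
      end
    end

    SketchLicenseChecker.new(Cabin::Channel.get, 'invalid').acceptable_license?('http://localhost:9200') # => true, plus a warning
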
data/spec/unit/outputs/elasticsearch/http_client_spec.rb
@@ -1,7 +1,6 @@
- require_relative "../../../../spec/es_spec_helper"
- require "logstash/devutils/rspec/spec_helper"
+ require_relative "../../../../spec/spec_helper"
  require "logstash/outputs/elasticsearch/http_client"
- require "java"
+ require "cabin"

  describe LogStash::Outputs::ElasticSearch::HttpClient do
  let(:ssl) { nil }
@@ -48,7 +47,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
  describe "ssl" do
  context "when SSL is true" do
  let(:ssl) { true }
- let(:base_options) { super.merge(:hosts => [http_hostname_port]) }
+ let(:base_options) { super().merge(:hosts => [http_hostname_port]) }

  it "should refuse to handle an http url" do
  expect {
@@ -59,7 +58,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do

  context "when SSL is false" do
  let(:ssl) { false }
- let(:base_options) { super.merge(:hosts => [https_hostname_port]) }
+ let(:base_options) { super().merge(:hosts => [https_hostname_port]) }

  it "should refuse to handle an https url" do
  expect {
@@ -69,7 +68,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
  end

  describe "ssl is nil" do
- let(:base_options) { super.merge(:hosts => [https_hostname_port]) }
+ let(:base_options) { super().merge(:hosts => [https_hostname_port]) }
  it "should handle an ssl url correctly when SSL is nil" do
  subject
  expect(subject.host_to_url(https_hostname_port).to_s).to eq(https_hostname_port.to_s + "/")
@@ -79,14 +78,14 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do

  describe "path" do
  let(:url) { http_hostname_port_path }
- let(:base_options) { super.merge(:hosts => [url]) }
+ let(:base_options) { super().merge(:hosts => [url]) }

  it "should allow paths in a url" do
  expect(subject.host_to_url(url)).to eq(url)
  end

  context "with the path option set" do
- let(:base_options) { super.merge(:client_settings => {:path => "/otherpath"}) }
+ let(:base_options) { super().merge(:client_settings => {:path => "/otherpath"}) }

  it "should not allow paths in two places" do
  expect {
@@ -97,7 +96,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do

  context "with a path missing a leading /" do
  let(:url) { http_hostname_port }
- let(:base_options) { super.merge(:client_settings => {:path => "otherpath"}) }
+ let(:base_options) { super().merge(:client_settings => {:path => "otherpath"}) }


  it "should automatically insert a / in front of path overlays" do
@@ -204,7 +203,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
  end

  describe "#bulk" do
- subject { described_class.new(base_options) }
+ subject(:http_client) { described_class.new(base_options) }

  require "json"
  let(:message) { "hey" }
@@ -212,42 +211,61 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
  ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message}],
  ]}

- context "if a message is over TARGET_BULK_BYTES" do
- let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
- let(:message) { "a" * (target_bulk_bytes + 1) }
+ [true,false].each do |http_compression_enabled|
+ context "with `http_compression => #{http_compression_enabled}`" do

- it "should be handled properly" do
- allow(subject).to receive(:join_bulk_responses)
- expect(subject).to receive(:bulk_send).once do |data|
- expect(data.size).to be > target_bulk_bytes
+ let(:base_options) { super().merge(:client_settings => {:http_compression => http_compression_enabled}) }
+
+ before(:each) do
+ if http_compression_enabled
+ expect(http_client).to receive(:gzip_writer).at_least(:once).and_call_original
+ else
+ expect(http_client).to_not receive(:gzip_writer)
+ end
  end
- s = subject.send(:bulk, actions)
- end
- end

- context "with two messages" do
- let(:message1) { "hey" }
- let(:message2) { "you" }
- let(:actions) { [
- ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message1}],
- ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message2}],
- ]}
- it "executes one bulk_send operation" do
- allow(subject).to receive(:join_bulk_responses)
- expect(subject).to receive(:bulk_send).once
- s = subject.send(:bulk, actions)
- end
+ context "if a message is over TARGET_BULK_BYTES" do
+ let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
+ let(:message) { "a" * (target_bulk_bytes + 1) }
+
+ it "should be handled properly" do
+ allow(subject).to receive(:join_bulk_responses)
+ expect(subject).to receive(:bulk_send).once do |data|
+ if !http_compression_enabled
+ expect(data.size).to be > target_bulk_bytes
+ else
+ expect(Zlib::gunzip(data.string).size).to be > target_bulk_bytes
+ end
+ end
+ s = subject.send(:bulk, actions)
+ end
+ end
+
+ context "with two messages" do
+ let(:message1) { "hey" }
+ let(:message2) { "you" }
+ let(:actions) { [
+ ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message1}],
+ ["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message2}],
+ ]}
+ it "executes one bulk_send operation" do
+ allow(subject).to receive(:join_bulk_responses)
+ expect(subject).to receive(:bulk_send).once
+ s = subject.send(:bulk, actions)
+ end

- context "if one exceeds TARGET_BULK_BYTES" do
- let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
- let(:message1) { "a" * (target_bulk_bytes + 1) }
- it "executes two bulk_send operations" do
- allow(subject).to receive(:join_bulk_responses)
- expect(subject).to receive(:bulk_send).twice
- s = subject.send(:bulk, actions)
+ context "if one exceeds TARGET_BULK_BYTES" do
+ let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
+ let(:message1) { "a" * (target_bulk_bytes + 1) }
+ it "executes two bulk_send operations" do
+ allow(subject).to receive(:join_bulk_responses)
+ expect(subject).to receive(:bulk_send).twice
+ s = subject.send(:bulk, actions)
+ end
+ end
  end
- end
- end
+ end
+ end
  end

  describe "sniffing" do
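
Note: the reworked #bulk examples above cover both `http_compression` settings and the TARGET_BULK_BYTES splitting behaviour: one oversized message still goes out in a single `bulk_send`, an oversized message followed by a normal one is split into two, and with compression enabled the size check is applied to the payload after `Zlib::gunzip`. A rough, self-contained sketch of that batching under stated assumptions (names other than TARGET_BULK_BYTES are illustrative, the 20 MiB value matches the content_length > 20_000_000 assertion in elasticsearch_spec.rb, and the real bulk body is NDJSON action/source pairs rather than one JSON line per action):

    require 'json'
    require 'zlib'

    TARGET_BULK_BYTES = 20 * 1024 * 1024 # assumed to mirror the plugin constant

    # Illustrative batching only, not the plugin's #bulk implementation.
    def each_bulk_body(actions, http_compression: false)
      batch, batch_bytes = [], 0
      flush = lambda do
        body = batch.join
        body = Zlib.gzip(body) if http_compression # compress per batch, size bookkeeping stays uncompressed
        yield body
        batch, batch_bytes = [], 0
      end

      actions.each do |action|
        line = action.to_json << "\n"
        # flush the current batch first if adding this line would exceed the target
        flush.call if batch_bytes + line.bytesize > TARGET_BULK_BYTES && !batch.empty?
        batch << line
        batch_bytes += line.bytesize
      end
      flush.call unless batch.empty?
    end

    # e.g. each_bulk_body(actions, http_compression: true) { |body| http_post('/_bulk', body) }
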
data/spec/unit/outputs/elasticsearch/template_manager_spec.rb
@@ -1,7 +1,5 @@
  require "logstash/devutils/rspec/spec_helper"
- require "logstash/outputs/elasticsearch/http_client"
- require "java"
- require "json"
+ require "logstash/outputs/elasticsearch/template_manager"

  describe LogStash::Outputs::ElasticSearch::TemplateManager do

data/spec/unit/outputs/elasticsearch_proxy_spec.rb
@@ -1,6 +1,5 @@
- require_relative "../../../spec/es_spec_helper"
+ require_relative "../../../spec/spec_helper"
  require 'stud/temporary'
- require "logstash/outputs/elasticsearch"
  require 'manticore/client'

  describe "Proxy option" do
@@ -24,7 +23,7 @@ describe "Proxy option" do

  context "when specified as a URI" do
  shared_examples("hash conversion") do |hash|
- let(:settings) { super.merge("proxy" => proxy)}
+ let(:settings) { super().merge("proxy" => proxy)}

  it "should set the proxy to the correct hash value" do
  expect(::Manticore::Client).to have_received(:new) do |options|
@@ -71,7 +70,7 @@ describe "Proxy option" do
  end

  context "when specified as ''" do
- let(:settings) { super.merge("proxy" => "${A_MISSING_ENV_VARIABLE:}")}
+ let(:settings) { super().merge("proxy" => "${A_MISSING_ENV_VARIABLE:}")}

  it "should not send the proxy option to manticore" do
  expect { subject.register }.not_to raise_error
@@ -85,7 +84,7 @@ describe "Proxy option" do
  end

  context "when specified as invalid uri" do
- let(:settings) { super.merge("proxy" => ":")}
+ let(:settings) { super().merge("proxy" => ":")}

  it "should fail" do
  # SafeURI isn't doing the proper exception wrapping for us, we can not simply :
data/spec/unit/outputs/elasticsearch_spec.rb
@@ -1,27 +1,40 @@
- require_relative "../../../spec/es_spec_helper"
+ require_relative "../../../spec/spec_helper"
  require "base64"
  require "flores/random"
+ require 'concurrent/atomic/count_down_latch'
  require "logstash/outputs/elasticsearch"

  describe LogStash::Outputs::ElasticSearch do
- subject { described_class.new(options) }
+ subject(:elasticsearch_output_instance) { described_class.new(options) }
  let(:options) { {} }
  let(:maximum_seen_major_version) { [1,2,5,6,7,8].sample }

  let(:do_register) { true }

+ let(:stub_http_client_pool!) do
+ allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:start)
+ end
+
+ let(:after_successful_connection_thread_mock) do
+ double('after_successful_connection_thread', value: true)
+ end
+
  before(:each) do
  if do_register
- # Build the client and set mocks before calling register to avoid races.
- subject.build_client
+ stub_http_client_pool!
+
+ allow(subject).to receive(:finish_register) # stub-out thread completion (to avoid error log entries)
+
+ # emulate 'successful' ES connection on the same thread
+ allow(subject).to receive(:after_successful_connection) { |&block| block.call }.
+ and_return after_successful_connection_thread_mock
+ allow(subject).to receive(:stop_after_successful_connection_thread)
+
+ subject.register

- # Rspec mocks can't handle background threads, so... we can't use any
- allow(subject.client.pool).to receive(:start_resurrectionist)
- allow(subject.client.pool).to receive(:start_sniffer)
- allow(subject.client.pool).to receive(:healthcheck!)
  allow(subject.client).to receive(:maximum_seen_major_version).at_least(:once).and_return(maximum_seen_major_version)
  allow(subject.client).to receive(:get_xpack_info)
- subject.register
+
  subject.client.pool.adapter.manticore.respond_with(:body => "{}")
  end
  end
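
Note: registration now hands the remaining Elasticsearch setup (the stubbed `finish_register`, which judging by the stubs in this file covers template installation, cluster-uuid discovery and the ILM / data-stream checks) to a thread created via `after_successful_connection`, which is why the spec replaces that hook with an inline `block.call` and a double answering `value`. A self-contained sketch of the general pattern, not the plugin's implementation:

    require 'concurrent/atomic/count_down_latch' # same primitive the updated specs pull in

    # Generic shape of the behaviour under test: register returns immediately,
    # deferred setup runs once a connection has succeeded, and callers can wait
    # on the returned thread via #value.
    class DeferredSetup
      def initialize
        @connected = Concurrent::CountDownLatch.new(1)
      end

      def connection_succeeded!
        @connected.count_down
      end

      def after_successful_connection(&block)
        Thread.new do
          @connected.wait # park until the first successful connection
          block.call      # e.g. install templates, verify ILM / data-stream support
        end
      end
    end

    setup  = DeferredSetup.new
    thread = setup.after_successful_connection { puts "finish_register-style work runs here" }
    setup.connection_succeeded!
    thread.value # waits for the deferred work and surfaces any exception it raised
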
@@ -44,9 +57,15 @@ describe LogStash::Outputs::ElasticSearch do
  let(:manticore_urls) { subject.client.pool.urls }
  let(:manticore_url) { manticore_urls.first }

+ let(:stub_http_client_pool!) do
+ [:start_resurrectionist, :start_sniffer, :healthcheck!].each do |method|
+ allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(method)
+ end
+ end
+
  describe "getting a document type" do
  context "if document_type isn't set" do
- let(:options) { super.merge("document_type" => nil)}
+ let(:options) { super().merge("document_type" => nil)}
  context "for 7.x elasticsearch clusters" do
  let(:maximum_seen_major_version) { 7 }
  it "should return '_doc'" do
@@ -70,7 +89,7 @@ describe LogStash::Outputs::ElasticSearch do
  end

  context "with 'document type set'" do
- let(:options) { super.merge("document_type" => "bar")}
+ let(:options) { super().merge("document_type" => "bar")}
  it "should get the event type from the 'document_type' setting" do
  expect(subject.send(:get_event_type, LogStash::Event.new())).to eql("bar")
  end
@@ -80,14 +99,14 @@ describe LogStash::Outputs::ElasticSearch do
  describe "building an event action tuple" do
  context "for 7.x elasticsearch clusters" do
  let(:maximum_seen_major_version) { 7 }
- it "should include '_type'" do
+ it "should not include '_type' when 'document_type' is not explicitly defined" do
  action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
  action_params = action_tuple[1]
- expect(action_params).to include(:_type => "_doc")
+ expect(action_params).not_to include(:_type => "_doc")
  end

  context "with 'document type set'" do
- let(:options) { super.merge("document_type" => "bar")}
+ let(:options) { super().merge("document_type" => "bar")}
  it "should get the event type from the 'document_type' setting" do
  action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
  action_params = action_tuple[1]
@@ -105,7 +124,7 @@ describe LogStash::Outputs::ElasticSearch do
  end

  context "with 'document type set'" do
- let(:options) { super.merge("document_type" => "bar")}
+ let(:options) { super().merge("document_type" => "bar")}
  it "should not include '_type'" do
  action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
  action_params = action_tuple[1]
@@ -127,7 +146,7 @@ describe LogStash::Outputs::ElasticSearch do

  context "as part of a URL" do
  let(:options) {
- super.merge("hosts" => ["http://#{user}:#{password.value}@localhost:9200"])
+ super().merge("hosts" => ["http://#{user}:#{password.value}@localhost:9200"])
  }

  include_examples("an authenticated config")
@@ -135,7 +154,7 @@ describe LogStash::Outputs::ElasticSearch do

  context "as a hash option" do
  let(:options) {
- super.merge!(
+ super().merge!(
  "user" => user,
  "password" => password
  )
@@ -175,7 +194,7 @@ describe LogStash::Outputs::ElasticSearch do

  context "with extra slashes" do
  let(:path) { "/slashed-path/ "}
- let(:options) { super.merge("path" => "/some-path/") }
+ let(:options) { super().merge("path" => "/some-path/") }

  it "should properly set the path on the HTTP client without adding slashes" do
  expect(manticore_url.path).to eql(options["path"])
@@ -234,13 +253,13 @@ describe LogStash::Outputs::ElasticSearch do
  end

  describe "without a port specified" do
- let(:options) { super.merge('hosts' => 'localhost') }
+ let(:options) { super().merge('hosts' => 'localhost') }
  it "should properly set the default port (9200) on the HTTP client" do
  expect(manticore_url.port).to eql(9200)
  end
  end
  describe "with a port other than 9200 specified" do
- let(:options) { super.merge('hosts' => 'localhost:9202') }
+ let(:options) { super().merge('hosts' => 'localhost:9202') }
  it "should properly set the specified port on the HTTP client" do
  expect(manticore_url.port).to eql(9202)
  end
@@ -253,8 +272,7 @@ describe LogStash::Outputs::ElasticSearch do
  before do
  allow(subject).to receive(:retrying_submit).with(anything)
  events.each_with_index do |e,i|
- et = events_tuples[i]
- allow(subject).to receive(:event_action_tuple).with(e).and_return(et)
+ allow(subject).to receive(:event_action_tuple).with(e).and_return(events_tuples[i])
  end
  subject.multi_receive(events)
  end
@@ -265,12 +283,14 @@ describe LogStash::Outputs::ElasticSearch do
  let(:event) { ::LogStash::Event.new("foo" => "bar") }
  let(:error) do
  ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
- 429, double("url").as_null_object, double("request body"), double("response body")
+ 429, double("url").as_null_object, request_body, double("response body")
  )
  end
  let(:logger) { double("logger").as_null_object }
  let(:response) { { :errors => [], :items => [] } }

+ let(:request_body) { double(:request_body, :bytesize => 1023) }
+
  before(:each) do

  i = 0
@@ -296,6 +316,95 @@ describe LogStash::Outputs::ElasticSearch do
  expect(subject.logger).to have_received(:debug).with(/Encountered a retryable error/i, anything)
  end
  end
+
+ context "unexpected bulk response" do
+ let(:options) do
+ { "hosts" => "127.0.0.1:9999", "index" => "%{foo}", "manage_template" => false }
+ end
+
+ let(:events) { [ ::LogStash::Event.new("foo" => "bar1"), ::LogStash::Event.new("foo" => "bar2") ] }
+
+ let(:bulk_response) do
+ # shouldn't really happen but we've seen this happen - here ES returns more items than were sent
+ { "took"=>1, "ingest_took"=>9, "errors"=>true,
+ "items"=>[{"index"=>{"_index"=>"bar1", "_type"=>"_doc", "_id"=>nil, "status"=>500,
+ "error"=>{"type" => "illegal_state_exception",
+ "reason" => "pipeline with id [test-ingest] could not be loaded, caused by [ElasticsearchParseException[Error updating pipeline with id [test-ingest]]; nested: ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]; nested: IllegalArgumentException[no enrich index exists for policy with name [test-metadata1]];; ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]; nested: IllegalArgumentException[no enrich index exists for policy with name [test-metadata1]];; java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]"
+ }
+ }
+ },
+ # NOTE: this is an artificial success (usually everything fails with a 500) but even if some doc where
+ # to succeed due the unexpected reponse items we can not clearly identify which actions to retry ...
+ {"index"=>{"_index"=>"bar2", "_type"=>"_doc", "_id"=>nil, "status"=>201}},
+ {"index"=>{"_index"=>"bar2", "_type"=>"_doc", "_id"=>nil, "status"=>500,
+ "error"=>{"type" => "illegal_state_exception",
+ "reason" => "pipeline with id [test-ingest] could not be loaded, caused by [ElasticsearchParseException[Error updating pipeline with id [test-ingest]]; nested: ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]];"
+ }
+ }
+ }]
+ }
+ end
+
+ before(:each) do
+ allow(subject.client).to receive(:bulk_send).with(instance_of(StringIO), instance_of(Array)) do |stream, actions|
+ expect( stream.string ).to include '"foo":"bar1"'
+ expect( stream.string ).to include '"foo":"bar2"'
+ end.and_return(bulk_response, {"errors"=>false}) # let's make it go away (second call) to not retry indefinitely
+ end
+
+ it "should retry submit" do
+ allow(subject.logger).to receive(:error).with(/Encountered an unexpected error/i, anything)
+ allow(subject.client).to receive(:bulk).and_call_original # track count
+
+ subject.multi_receive(events)
+
+ expect(subject.client).to have_received(:bulk).twice
+ end
+
+ it "should log specific error message" do
+ expect(subject.logger).to receive(:error).with(/Encountered an unexpected error/i,
+ hash_including(:message => 'Sent 2 documents but Elasticsearch returned 3 responses (likely a bug with _bulk endpoint)'))
+
+ subject.multi_receive(events)
+ end
+ end
+ end
+
+ context '413 errors' do
+ let(:payload_size) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES + 1024 }
+ let(:event) { ::LogStash::Event.new("message" => ("a" * payload_size ) ) }
+
+ let(:logger_stub) { double("logger").as_null_object }
+
+ before(:each) do
+ allow(elasticsearch_output_instance.client).to receive(:logger).and_return(logger_stub)
+
+ allow(elasticsearch_output_instance.client).to receive(:bulk).and_call_original
+
+ max_bytes = payload_size * 3 / 4 # ensure a failure first attempt
+ allow(elasticsearch_output_instance.client.pool).to receive(:post) do |path, params, body|
+ if body.length > max_bytes
+ max_bytes *= 2 # ensure a successful retry
+ double("Response", :code => 413, :body => "")
+ else
+ double("Response", :code => 200, :body => '{"errors":false,"items":[{"index":{"status":200,"result":"created"}}]}')
+ end
+ end
+ end
+
+ it 'retries the 413 until it goes away' do
+ elasticsearch_output_instance.multi_receive([event])
+
+ expect(elasticsearch_output_instance.client).to have_received(:bulk).twice
+ end
+
+ it 'logs about payload quantity and size' do
+ elasticsearch_output_instance.multi_receive([event])
+
+ expect(logger_stub).to have_received(:warn)
+ .with(a_string_matching(/413 Payload Too Large/),
+ hash_including(:action_count => 1, :content_length => a_value > 20_000_000))
+ end
  end

  context "with timeout set" do
@@ -311,7 +420,7 @@ describe LogStash::Outputs::ElasticSearch do

  before do
  # Expect a timeout to be logged.
- expect(subject.logger).to receive(:error).with(/Attempted to send a bulk request to Elasticsearch/i, anything).at_least(:once)
+ expect(subject.logger).to receive(:error).with(/Attempted to send a bulk request/i, anything).at_least(:once)
  expect(subject.client).to receive(:bulk).at_least(:twice).and_call_original
  end

@@ -325,13 +434,14 @@ describe LogStash::Outputs::ElasticSearch do
  end

  describe "the action option" do
+
  context "with a sprintf action" do
  let(:options) { {"action" => "%{myactionfield}" } }

  let(:event) { LogStash::Event.new("myactionfield" => "update", "message" => "blah") }

  it "should interpolate the requested action value when creating an event_action_tuple" do
- expect(subject.event_action_tuple(event).first).to eql("update")
+ expect(subject.send(:event_action_tuple, event).first).to eql("update")
  end
  end

@@ -341,7 +451,7 @@ describe LogStash::Outputs::ElasticSearch do
  let(:event) { LogStash::Event.new("myactionfield" => "update", "message" => "blah") }

  it "should obtain specific action's params from event_action_tuple" do
- expect(subject.event_action_tuple(event)[1]).to include(:_upsert)
+ expect(subject.send(:event_action_tuple, event)[1]).to include(:_upsert)
  end
  end

@@ -349,6 +459,8 @@ describe LogStash::Outputs::ElasticSearch do
  let(:options) { {"action" => "SOME Garbaaage"} }
  let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call

+ before { allow(subject).to receive(:finish_register) }
+
  it "should raise a configuration error" do
  expect { subject.register }.to raise_error(LogStash::ConfigurationError)
  end
@@ -356,13 +468,14 @@ describe LogStash::Outputs::ElasticSearch do
  end

  describe "the pipeline option" do
+
  context "with a sprintf and set pipeline" do
  let(:options) { {"pipeline" => "%{pipeline}" } }

  let(:event) { LogStash::Event.new("pipeline" => "my-ingest-pipeline") }

  it "should interpolate the pipeline value and set it" do
- expect(subject.event_action_tuple(event)[1]).to include(:pipeline => "my-ingest-pipeline")
+ expect(subject.send(:event_action_tuple, event)[1]).to include(:pipeline => "my-ingest-pipeline")
  end
  end

@@ -372,7 +485,7 @@ describe LogStash::Outputs::ElasticSearch do

  let(:event) { LogStash::Event.new("pipeline" => "") }
  it "should interpolate the pipeline value but not set it because it is empty" do
- expect(subject.event_action_tuple(event)[1]).not_to include(:pipeline)
+ expect(subject.send(:event_action_tuple, event)[1]).not_to include(:pipeline)
  end
  end
  end
@@ -410,30 +523,30 @@ describe LogStash::Outputs::ElasticSearch do
  let(:options) { { 'retry_on_conflict' => num_retries } }

  context "with a regular index" do
- let(:options) { super.merge("action" => "index") }
+ let(:options) { super().merge("action" => "index") }

  it "should not set the retry_on_conflict parameter when creating an event_action_tuple" do
  allow(subject.client).to receive(:maximum_seen_major_version).and_return(maximum_seen_major_version)
- action, params, event_data = subject.event_action_tuple(event)
- expect(params).not_to include({subject.retry_on_conflict_action_name => num_retries})
+ action, params, event_data = subject.send(:event_action_tuple, event)
+ expect(params).not_to include({subject.send(:retry_on_conflict_action_name) => num_retries})
  end
  end

  context "using a plain update" do
- let(:options) { super.merge("action" => "update", "retry_on_conflict" => num_retries, "document_id" => 1) }
+ let(:options) { super().merge("action" => "update", "retry_on_conflict" => num_retries, "document_id" => 1) }

  it "should set the retry_on_conflict parameter when creating an event_action_tuple" do
- action, params, event_data = subject.event_action_tuple(event)
- expect(params).to include({subject.retry_on_conflict_action_name => num_retries})
+ action, params, event_data = subject.send(:event_action_tuple, event)
+ expect(params).to include({subject.send(:retry_on_conflict_action_name) => num_retries})
  end
  end

  context "with a sprintf action that resolves to update" do
- let(:options) { super.merge("action" => "%{myactionfield}", "retry_on_conflict" => num_retries, "document_id" => 1) }
+ let(:options) { super().merge("action" => "%{myactionfield}", "retry_on_conflict" => num_retries, "document_id" => 1) }

  it "should set the retry_on_conflict parameter when creating an event_action_tuple" do
- action, params, event_data = subject.event_action_tuple(event)
- expect(params).to include({subject.retry_on_conflict_action_name => num_retries})
+ action, params, event_data = subject.send(:event_action_tuple, event)
+ expect(params).to include({subject.send(:retry_on_conflict_action_name) => num_retries})
  expect(action).to eq("update")
  end
  end
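
Note: the switch to `subject.send(:event_action_tuple, ...)` and `subject.send(:retry_on_conflict_action_name)` suggests these helpers are no longer part of the public API in 11.0.0. For context, the retry-on-conflict parameter name is version dependent; a hedged sketch of the mapping (the plugin's exact cutoff and implementation may differ):

    # The bulk API parameter was renamed around the Elasticsearch 7.x line:
    # `retry_on_conflict` replaced the older underscore-prefixed `_retry_on_conflict`.
    def retry_on_conflict_action_name(maximum_seen_major_version)
      maximum_seen_major_version >= 7 ? :retry_on_conflict : :_retry_on_conflict
    end
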
@@ -463,6 +576,8 @@ describe LogStash::Outputs::ElasticSearch do
  let(:do_register) { false }

  before :each do
+ allow(subject).to receive(:finish_register)
+
  allow(::Manticore::Client).to receive(:new).with(any_args).and_call_original
  end

@@ -486,6 +601,12 @@ describe LogStash::Outputs::ElasticSearch do
  let(:custom_parameters_hash) { { "id" => 1, "name" => "logstash" } }
  let(:custom_parameters_query) { custom_parameters_hash.map {|k,v| "#{k}=#{v}" }.join("&") }

+ let(:stub_http_client_pool!) do
+ [:start_resurrectionist, :start_sniffer, :healthcheck!].each do |method|
+ allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(method)
+ end
+ end
+
  context "using non-url hosts" do

  let(:options) {
@@ -692,7 +813,7 @@ describe LogStash::Outputs::ElasticSearch do
  end

  describe "custom headers" do
- let(:manticore_options) { subject.client.pool.adapter.manticore.instance_variable_get(:@options) }
+ let(:manticore_options) { subject.client.pool.adapter.manticore.instance_variable_get(:@options) }

  context "when set" do
  let(:headers) { { "X-Thing" => "Test" } }
@@ -765,6 +886,75 @@ describe LogStash::Outputs::ElasticSearch do
  end
  end

+ describe "post-register ES setup" do
+ let(:do_register) { false }
+ let(:es_version) { '7.10.0' } # DS default on LS 8.x
+ let(:options) { { 'hosts' => '127.0.0.1:9999' } }
+ let(:logger) { subject.logger }
+
+ before do
+ allow(logger).to receive(:error) # expect tracking
+
+ allow(subject).to receive(:last_es_version).and_return es_version
+ # make successful_connection? return true:
+ allow(subject).to receive(:maximum_seen_major_version).and_return Integer(es_version.split('.').first)
+ allow(subject).to receive(:stop_after_successful_connection_thread)
+ end
+
+ it "logs inability to retrieve uuid" do
+ allow(subject).to receive(:install_template)
+ allow(subject).to receive(:ilm_in_use?).and_return nil
+ subject.register
+ subject.send :wait_for_successful_connection
+
+ expect(logger).to have_received(:error).with(/Unable to retrieve Elasticsearch cluster uuid/i, anything)
+ end if LOGSTASH_VERSION >= '7.0.0'
+
+ it "logs template install failure" do
+ allow(subject).to receive(:discover_cluster_uuid)
+ allow(subject).to receive(:ilm_in_use?).and_return nil
+ subject.register
+ subject.send :wait_for_successful_connection
+
+ expect(logger).to have_received(:error).with(/Failed to install template/i, anything)
+ end
+
+ context 'error raised' do
+
+ let(:es_version) { '7.8.0' }
+ let(:options) { super().merge('data_stream' => 'true') }
+ let(:latch) { Concurrent::CountDownLatch.new }
+
+ before do
+ allow(subject).to receive(:install_template)
+ allow(subject).to receive(:discover_cluster_uuid)
+ allow(subject).to receive(:ilm_in_use?).and_return nil
+ # executes from the after_successful_connection thread :
+ allow(subject).to receive(:finish_register) { latch.wait }.and_call_original
+ subject.register
+ end
+
+ it 'keeps logging on multi_receive' do
+ allow(subject).to receive(:retrying_submit)
+ latch.count_down; sleep(1.0)
+
+ expect_logged_error = lambda do |count|
+ expect(logger).to have_received(:error).with(
+ /Elasticsearch setup did not complete normally, please review previously logged errors/i,
+ hash_including(message: 'A data_stream configuration is only supported since Elasticsearch 7.9.0 (detected version 7.8.0), please upgrade your cluster')
+ ).exactly(count).times
+ end
+
+ subject.multi_receive [ LogStash::Event.new('foo' => 1) ]
+ expect_logged_error.call(1)
+
+ subject.multi_receive [ LogStash::Event.new('foo' => 2) ]
+ expect_logged_error.call(2)
+ end
+
+ end
+ end
+
  @private

  def stub_manticore_client!(manticore_double = nil)
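
Note: several of the new examples above revolve around the data-stream support added in 11.0.0 (data_stream_support.rb), which rejects clusters older than Elasticsearch 7.9.0 with the exact message the spec asserts. A self-contained sketch of that version guard; the constant and method names are assumptions, and the plugin presumably surfaces this as a configuration error rather than a bare raise:

    DATA_STREAMS_MIN_ES_VERSION = '7.9.0' # threshold taken from the message asserted above

    def assert_data_stream_supported!(es_version)
      return if Gem::Version.new(es_version) >= Gem::Version.new(DATA_STREAMS_MIN_ES_VERSION)
      # a plain raise keeps the sketch standalone
      raise "A data_stream configuration is only supported since Elasticsearch #{DATA_STREAMS_MIN_ES_VERSION} " \
            "(detected version #{es_version}), please upgrade your cluster"
    end

    assert_data_stream_supported!('7.8.0') rescue puts $!.message # prints the message the spec expects
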