logstash-output-elasticsearch-test 11.16.0-x86_64-linux
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +649 -0
- data/CONTRIBUTORS +34 -0
- data/Gemfile +16 -0
- data/LICENSE +202 -0
- data/NOTICE.TXT +5 -0
- data/README.md +106 -0
- data/docs/index.asciidoc +1369 -0
- data/lib/logstash/outputs/elasticsearch/data_stream_support.rb +282 -0
- data/lib/logstash/outputs/elasticsearch/default-ilm-policy.json +14 -0
- data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +155 -0
- data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +534 -0
- data/lib/logstash/outputs/elasticsearch/http_client.rb +497 -0
- data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +201 -0
- data/lib/logstash/outputs/elasticsearch/ilm.rb +92 -0
- data/lib/logstash/outputs/elasticsearch/license_checker.rb +52 -0
- data/lib/logstash/outputs/elasticsearch/template_manager.rb +131 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-6x.json +45 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-7x.json +44 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-8x.json +50 -0
- data/lib/logstash/outputs/elasticsearch.rb +699 -0
- data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb +237 -0
- data/lib/logstash/plugin_mixins/elasticsearch/common.rb +409 -0
- data/lib/logstash/plugin_mixins/elasticsearch/noop_license_checker.rb +9 -0
- data/logstash-output-elasticsearch.gemspec +40 -0
- data/spec/es_spec_helper.rb +225 -0
- data/spec/fixtures/_nodes/6x.json +81 -0
- data/spec/fixtures/_nodes/7x.json +92 -0
- data/spec/fixtures/htpasswd +2 -0
- data/spec/fixtures/license_check/active.json +16 -0
- data/spec/fixtures/license_check/inactive.json +5 -0
- data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
- data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
- data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
- data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
- data/spec/fixtures/template-with-policy-es6x.json +48 -0
- data/spec/fixtures/template-with-policy-es7x.json +45 -0
- data/spec/fixtures/template-with-policy-es8x.json +50 -0
- data/spec/fixtures/test_certs/ca.crt +29 -0
- data/spec/fixtures/test_certs/ca.der.sha256 +1 -0
- data/spec/fixtures/test_certs/ca.key +51 -0
- data/spec/fixtures/test_certs/renew.sh +13 -0
- data/spec/fixtures/test_certs/test.crt +30 -0
- data/spec/fixtures/test_certs/test.der.sha256 +1 -0
- data/spec/fixtures/test_certs/test.key +51 -0
- data/spec/fixtures/test_certs/test.p12 +0 -0
- data/spec/fixtures/test_certs/test_invalid.crt +36 -0
- data/spec/fixtures/test_certs/test_invalid.key +51 -0
- data/spec/fixtures/test_certs/test_invalid.p12 +0 -0
- data/spec/fixtures/test_certs/test_self_signed.crt +32 -0
- data/spec/fixtures/test_certs/test_self_signed.key +54 -0
- data/spec/fixtures/test_certs/test_self_signed.p12 +0 -0
- data/spec/integration/outputs/compressed_indexing_spec.rb +70 -0
- data/spec/integration/outputs/create_spec.rb +67 -0
- data/spec/integration/outputs/data_stream_spec.rb +68 -0
- data/spec/integration/outputs/delete_spec.rb +63 -0
- data/spec/integration/outputs/ilm_spec.rb +534 -0
- data/spec/integration/outputs/index_spec.rb +421 -0
- data/spec/integration/outputs/index_version_spec.rb +98 -0
- data/spec/integration/outputs/ingest_pipeline_spec.rb +75 -0
- data/spec/integration/outputs/metrics_spec.rb +66 -0
- data/spec/integration/outputs/no_es_on_startup_spec.rb +78 -0
- data/spec/integration/outputs/painless_update_spec.rb +99 -0
- data/spec/integration/outputs/parent_spec.rb +94 -0
- data/spec/integration/outputs/retry_spec.rb +182 -0
- data/spec/integration/outputs/routing_spec.rb +61 -0
- data/spec/integration/outputs/sniffer_spec.rb +94 -0
- data/spec/integration/outputs/templates_spec.rb +133 -0
- data/spec/integration/outputs/unsupported_actions_spec.rb +75 -0
- data/spec/integration/outputs/update_spec.rb +114 -0
- data/spec/spec_helper.rb +10 -0
- data/spec/support/elasticsearch/api/actions/delete_ilm_policy.rb +19 -0
- data/spec/support/elasticsearch/api/actions/get_alias.rb +18 -0
- data/spec/support/elasticsearch/api/actions/get_ilm_policy.rb +18 -0
- data/spec/support/elasticsearch/api/actions/put_alias.rb +24 -0
- data/spec/support/elasticsearch/api/actions/put_ilm_policy.rb +25 -0
- data/spec/unit/http_client_builder_spec.rb +185 -0
- data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +612 -0
- data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +151 -0
- data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +501 -0
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +339 -0
- data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +189 -0
- data/spec/unit/outputs/elasticsearch_proxy_spec.rb +103 -0
- data/spec/unit/outputs/elasticsearch_spec.rb +1573 -0
- data/spec/unit/outputs/elasticsearch_ssl_spec.rb +197 -0
- data/spec/unit/outputs/error_whitelist_spec.rb +56 -0
- data/spec/unit/outputs/license_check_spec.rb +57 -0
- metadata +423 -0
- data/spec/unit/outputs/elasticsearch_spec.rb
@@ -0,0 +1,1573 @@
require_relative "../../../spec/spec_helper"
require "base64"
require "flores/random"
require 'concurrent/atomic/count_down_latch'
require "logstash/outputs/elasticsearch"
require 'logstash/plugin_mixins/ecs_compatibility_support/spec_helper'
require 'rspec/collection_matchers'

describe LogStash::Outputs::ElasticSearch do
  subject(:elasticsearch_output_instance) { described_class.new(options) }
  let(:options) { {} }
  let(:maximum_seen_major_version) { [6,7,8].sample }

  let(:do_register) { true }

  let(:stub_http_client_pool!) do
    allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(:start)
  end

  let(:spy_http_client_builder!) do
    allow(described_class::HttpClientBuilder).to receive(:build).with(any_args).and_call_original
  end

  let(:after_successful_connection_thread_mock) do
    double('after_successful_connection_thread', value: true)
  end

  before(:each) do
    if do_register
      spy_http_client_builder!
      stub_http_client_pool!

      allow(subject).to receive(:finish_register) # stub-out thread completion (to avoid error log entries)

      # emulate 'successful' ES connection on the same thread
      allow(subject).to receive(:after_successful_connection) { |&block| block.call }.
          and_return after_successful_connection_thread_mock
      allow(subject).to receive(:stop_after_successful_connection_thread)

      subject.register

      allow(subject.client).to receive(:maximum_seen_major_version).at_least(:once).and_return(maximum_seen_major_version)
      allow(subject.client).to receive(:get_xpack_info)

      subject.client.pool.adapter.manticore.respond_with(:body => "{}")
    end
  end

  after(:each) do
    subject.close
  end

  context "check aborting of a batch" do
    context "on an unreachable ES instance" do
      let(:events) { [ ::LogStash::Event.new("foo" => "bar1"), ::LogStash::Event.new("foo" => "bar2") ] }

      let(:shutdown_value) { true }

      let(:logger) { double("logger") }

      let(:never_ending) { Thread.new { sleep 1 while true } }

      let(:do_register) { false }

      before(:each) do
        spy_http_client_builder!
        stub_http_client_pool!

        allow(subject).to receive(:finish_register) # stub-out thread completion (to avoid error log entries)

        # emulate 'failed' ES connection, which sleeps forever
        allow(subject).to receive(:after_successful_connection) { |&block| never_ending }
        allow(subject).to receive(:stop_after_successful_connection_thread)

        subject.register

        allow(subject.client).to receive(:maximum_seen_major_version).at_least(:once).and_return(maximum_seen_major_version)
        allow(subject.client).to receive(:get_xpack_info)

        subject.client.pool.adapter.manticore.respond_with(:body => "{}")

        allow(subject).to receive(:logger).and_return(logger)
        allow(logger).to receive(:info)

        allow(subject).to receive(:pipeline_shutdown_requested?) do
          shutdown_value
        end
      end

      it "the #multi_receive abort while waiting on unreachable and a shutdown is requested" do
        expect { subject.multi_receive(events) }.to raise_error(org.logstash.execution.AbortedBatchException)
        expect(logger).to have_received(:info).with(/Aborting the batch due to shutdown request while waiting for connections to become live/i)
      end
    end

    context "on a reachable ES instance" do
      let(:events) { [ ::LogStash::Event.new("foo" => "bar1"), ::LogStash::Event.new("foo" => "bar2") ] }

      let(:logger) { double("logger") }

      before(:each) do
        allow(subject).to receive(:logger).and_return(logger)
        allow(logger).to receive(:info)

        allow(subject).to receive(:pipeline_shutdown_requested?).and_return(true)
        allow(subject).to receive(:retrying_submit)
      end

      it "the #multi_receive doesn't abort when waiting for a connection on alive ES and a shutdown is requested" do
        subject.multi_receive(events)
        expect(logger).to_not have_received(:info).with(/Aborting the batch due to shutdown request while waiting for connections to become live/i)
      end
    end

    context "when a connected ES becomes unreachable" do
      # let(:error) do
      #   ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
      #     429, double("url").as_null_object, request_body, double("response body")
      #   )
      # end

      shared_examples 'raise an abort error' do
        let(:options) {
          {
            "index" => "my-index",
            "hosts" => ["localhost","localhost:9202"],
            "path" => "some-path",
            "manage_template" => false
          }
        }

        let(:manticore_urls) { subject.client.pool.urls }
        let(:manticore_url) { manticore_urls.first }

        let(:stub_http_client_pool!) do
          [:start_resurrectionist, :start_sniffer, :healthcheck!].each do |method|
            allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(method)
          end
        end

        let(:event) { ::LogStash::Event.new("foo" => "bar") }

        let(:logger) { double("logger").as_null_object }
        let(:response) { { :errors => [], :items => [] } }

        let(:request_body) { double(:request_body, :bytesize => 1023) }

        before(:each) do
          bulk_param = [["index", anything, event.to_hash]]

          allow(subject).to receive(:logger).and_return(logger)

          # fail consistently for ever
          allow(subject.client).to receive(:bulk).with(bulk_param).and_raise(error)
        end

        it "should exit the retry with an abort exception if shutdown is requested" do
          # trigger the shutdown signal
          allow(subject).to receive(:pipeline_shutdown_requested?) { true }

          # execute in another thread because it blocks in a retry loop until the shutdown is triggered
          th = Thread.new do
            subject.multi_receive([event])
          rescue org.logstash.execution.AbortedBatchException => e
            # return exception's class so that it can be verified when retrieving the thread's value
            e.class
          end

          expect(th.value).to eq(org.logstash.execution.AbortedBatchException)
        end
      end

      context "with 429 error" do
        let(:error) do
          ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
            429, double("url").as_null_object, request_body, double("response body")
          )
        end

        it_behaves_like 'raise an abort error'
      end

      context "with 'no connections' error" do
        let(:error) { ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::NoConnectionAvailableError.new }

        it_behaves_like 'raise an abort error'
      end
    end
  end if LOGSTASH_VERSION >= '8.8'

  context "with an active instance" do
    let(:options) {
      {
        "index" => "my-index",
        "hosts" => ["localhost","localhost:9202"],
        "path" => "some-path",
        "manage_template" => false
      }
    }

    let(:manticore_urls) { subject.client.pool.urls }
    let(:manticore_url) { manticore_urls.first }

    let(:stub_http_client_pool!) do
      [:start_resurrectionist, :start_sniffer, :healthcheck!].each do |method|
        allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(method)
      end
    end

    describe "getting a document type" do
      context "if document_type isn't set" do
        let(:options) { super().merge("document_type" => nil)}
        context "for 7.x elasticsearch clusters" do
          let(:maximum_seen_major_version) { 7 }
          it "should return '_doc'" do
            expect(subject.send(:get_event_type, LogStash::Event.new("type" => "foo"))).to eql("_doc")
          end
        end

        context "for 6.x elasticsearch clusters" do
          let(:maximum_seen_major_version) { 6 }
          it "should return 'doc'" do
            expect(subject.send(:get_event_type, LogStash::Event.new("type" => "foo"))).to eql("doc")
          end
        end
      end

      context "with 'document type set'" do
        let(:options) { super().merge("document_type" => "bar")}
        it "should get the event type from the 'document_type' setting" do
          expect(subject.send(:get_event_type, LogStash::Event.new())).to eql("bar")
        end
      end
    end

    describe "building an event action tuple" do
      context "for 7.x elasticsearch clusters" do
        let(:maximum_seen_major_version) { 7 }
        it "should not include '_type' when 'document_type' is not explicitly defined" do
          action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
          action_params = action_tuple[1]
          expect(action_params).not_to include(:_type => "_doc")
        end

        context "with 'document type set'" do
          let(:options) { super().merge("document_type" => "bar")}
          it "should get the event type from the 'document_type' setting" do
            action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
            action_params = action_tuple[1]
            expect(action_params).to include(:_type => "bar")
          end
        end
      end

      context "for 8.x elasticsearch clusters" do
        let(:maximum_seen_major_version) { 8 }
        it "should not include '_type'" do
          action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
          action_params = action_tuple[1]
          expect(action_params).not_to include(:_type)
        end

        context "with 'document type set'" do
          let(:options) { super().merge("document_type" => "bar")}
          it "should not include '_type'" do
            action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
            action_params = action_tuple[1]
            expect(action_params).not_to include(:_type)
          end
        end
      end
    end

    describe "with auth" do
      let(:user) { "myuser" }
      let(:password) { ::LogStash::Util::Password.new("mypassword") }

      shared_examples "an authenticated config" do
        it "should set the URL auth correctly" do
          expect(manticore_url.user).to eq user
        end
      end

      context "as part of a URL" do
        let(:options) {
          super().merge("hosts" => ["http://#{user}:#{password.value}@localhost:9200"])
        }

        include_examples("an authenticated config")
      end

      context "as a hash option" do
        let(:options) {
          super().merge!(
            "user" => user,
            "password" => password
          )
        }

        include_examples("an authenticated config")
      end

      context 'cloud_auth also set' do
        let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
        let(:options) { { "user" => user, "password" => password, "cloud_auth" => "elastic:my-passwd-00" } }

        it "should fail" do
          expect { subject.register }.to raise_error LogStash::ConfigurationError, /Multiple authentication options are specified/
        end
      end

      context 'api_key also set' do
        let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
        let(:options) { { "user" => user, "password" => password, "api_key" => "some_key" } }

        it "should fail" do
          expect { subject.register }.to raise_error LogStash::ConfigurationError, /Multiple authentication options are specified/
        end
      end

    end

    describe "with path" do
      it "should properly create a URI with the path" do
        expect(subject.path).to eql(options["path"])
      end

      it "should properly set the path on the HTTP client adding slashes" do
        expect(manticore_url.path).to eql("/" + options["path"] + "/")
      end

      context "with extra slashes" do
        let(:path) { "/slashed-path/ "}
        let(:options) { super().merge("path" => "/some-path/") }

        it "should properly set the path on the HTTP client without adding slashes" do
          expect(manticore_url.path).to eql(options["path"])
        end
      end

      context "with a URI based path" do
        let(:options) do
          o = super()
          o.delete("path")
          o["hosts"] = ["http://localhost:9200/mypath/"]
          o
        end
        let(:client_host_path) { manticore_url.path }

        it "should initialize without error" do
          expect { subject }.not_to raise_error
        end

        it "should use the URI path" do
          expect(client_host_path).to eql("/mypath/")
        end

        context "with a path option but no URL path" do
          let(:options) do
            o = super()
            o["path"] = "/override/"
            o["hosts"] = ["http://localhost:9200"]
            o
          end

          it "should initialize without error" do
            expect { subject }.not_to raise_error
          end

          it "should use the option path" do
            expect(client_host_path).to eql("/override/")
          end
        end

        # If you specify the path in two spots that is an error!
        context "with a path option and a URL path" do
          let(:do_register) { false } # Register will fail
          let(:options) do
            o = super()
            o["path"] = "/override"
            o["hosts"] = ["http://localhost:9200/mypath/"]
            o
          end

          it "should initialize with an error" do
            expect { subject.register }.to raise_error(LogStash::ConfigurationError)
          end
        end
      end
    end

    describe "without a port specified" do
      let(:options) { super().merge('hosts' => 'localhost') }
      it "should properly set the default port (9200) on the HTTP client" do
        expect(manticore_url.port).to eql(9200)
      end
    end
    describe "with a port other than 9200 specified" do
      let(:options) { super().merge('hosts' => 'localhost:9202') }
      it "should properly set the specified port on the HTTP client" do
        expect(manticore_url.port).to eql(9202)
      end
    end

    describe "when 'dlq_custom_codes'" do
      let(:options) { super().merge('dlq_custom_codes' => [404]) }
      let(:do_register) { false }

      context "contains already defined codes" do
        it "should raise a configuration error" do
          expect{ subject.register }.to raise_error(LogStash::ConfigurationError, /are already defined as standard DLQ error codes/)
        end
      end

      context "is configured but DLQ is not enabled" do
        it "raise a configuration error" do
          allow(subject).to receive(:dlq_enabled?).and_return(false)
          expect{ subject.register }.to raise_error(LogStash::ConfigurationError, /configured while DLQ is not enabled/)
        end
      end
    end if LOGSTASH_VERSION > '7.0'

    describe "#multi_receive" do
      let(:events) { [double("one"), double("two"), double("three")] }
      let(:events_tuples) { [double("one t"), double("two t"), double("three t")] }

      before do
        allow(subject).to receive(:retrying_submit).with(anything)
        events.each_with_index do |e,i|
          allow(subject).to receive(:event_action_tuple).with(e).and_return(events_tuples[i])
        end
        subject.multi_receive(events)
      end

    end

    context "429 errors" do
      let(:event) { ::LogStash::Event.new("foo" => "bar") }
      let(:error) do
        ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
          429, double("url").as_null_object, request_body, double("response body")
        )
      end
      let(:logger) { double("logger").as_null_object }
      let(:response) { { :errors => [], :items => [] } }

      let(:request_body) { double(:request_body, :bytesize => 1023) }

      before(:each) do

        i = 0
        bulk_param = [["index", anything, event.to_hash]]

        allow(subject).to receive(:logger).and_return(logger)

        # Fail the first time bulk is called, succeed the next time
        allow(subject.client).to receive(:bulk).with(bulk_param) do
          i += 1
          if i == 1
            raise error
          end
        end.and_return(response)
        subject.multi_receive([event])
      end

      it "should retry the 429 till it goes away" do
        expect(subject.client).to have_received(:bulk).twice
      end

      it "should log a debug message" do
        expect(subject.logger).to have_received(:debug).with(/Encountered a retryable error/i, anything)
      end
    end

    context "unexpected bulk response" do
      let(:options) do
        { "hosts" => "127.0.0.1:9999", "index" => "%{foo}", "manage_template" => false }
      end

      let(:events) { [ ::LogStash::Event.new("foo" => "bar1"), ::LogStash::Event.new("foo" => "bar2") ] }

      let(:bulk_response) do
        # shouldn't really happen but we've seen this happen - here ES returns more items than were sent
        { "took"=>1, "ingest_took"=>9, "errors"=>true,
          "items"=>[{"index"=>{"_index"=>"bar1", "_type"=>"_doc", "_id"=>nil, "status"=>500,
                      "error"=>{"type" => "illegal_state_exception",
                        "reason" => "pipeline with id [test-ingest] could not be loaded, caused by [ElasticsearchParseException[Error updating pipeline with id [test-ingest]]; nested: ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]; nested: IllegalArgumentException[no enrich index exists for policy with name [test-metadata1]];; ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]; nested: IllegalArgumentException[no enrich index exists for policy with name [test-metadata1]];; java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]"
                      }
                     }
                    },
                    # NOTE: this is an artificial success (usually everything fails with a 500) but even if some doc where
                    # to succeed due the unexpected response items we can not clearly identify which actions to retry ...
                    {"index"=>{"_index"=>"bar2", "_type"=>"_doc", "_id"=>nil, "status"=>201}},
                    {"index"=>{"_index"=>"bar2", "_type"=>"_doc", "_id"=>nil, "status"=>500,
                      "error"=>{"type" => "illegal_state_exception",
                        "reason" => "pipeline with id [test-ingest] could not be loaded, caused by [ElasticsearchParseException[Error updating pipeline with id [test-ingest]]; nested: ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]];"
                      }
                     }
                    }]
        }
      end

      before(:each) do
        allow(subject.client).to receive(:bulk_send).with(instance_of(StringIO), instance_of(Array)) do |stream, actions|
          expect( stream.string ).to include '"foo":"bar1"'
          expect( stream.string ).to include '"foo":"bar2"'
        end.and_return(bulk_response, {"errors"=>false}) # let's make it go away (second call) to not retry indefinitely
      end

      it "should retry submit" do
        allow(subject.logger).to receive(:error).with(/Encountered an unexpected error/i, anything)
        allow(subject.client).to receive(:bulk).and_call_original # track count

        subject.multi_receive(events)

        expect(subject.client).to have_received(:bulk).twice
      end

      it "should log specific error message" do
        expect(subject.logger).to receive(:error).with(/Encountered an unexpected error/i,
                                                       hash_including(:message => 'Sent 2 documents but Elasticsearch returned 3 responses (likely a bug with _bulk endpoint)'))

        subject.multi_receive(events)
      end
    end

    context "unsupported actions" do
      let(:options) { super().merge("index" => "logstash", "action" => "%{action_field}") }

      context "with multiple valid actions with one trailing invalid action" do
        let(:events) {[
          LogStash::Event.new("action_field" => "index", "id" => 1, "message"=> "hello"),
          LogStash::Event.new("action_field" => "index", "id" => 2, "message"=> "hi"),
          LogStash::Event.new("action_field" => "index", "id" => 3, "message"=> "bye"),
          LogStash::Event.new("action_field" => "unsupported_action", "id" => 4, "message"=> "world!")
        ]}
        it "rejects unsupported actions" do
          event_result = subject.send(:safe_interpolation_map_events, events)
          expect(event_result.successful_events).to have_exactly(3).items
          event_result.successful_events.each do |action, _|
            expect(action).to_not eql("unsupported_action")
          end
          expect(event_result.event_mapping_errors).to have_exactly(1).items
          event_result.event_mapping_errors.each do |event_mapping_error|
            expect(event_mapping_error.message).to eql("Elasticsearch doesn't support [unsupported_action] action")
          end
        end
      end

      context "with one leading invalid action followed by multiple valid actions" do
        let(:events) {[
          LogStash::Event.new("action_field" => "unsupported_action", "id" => 1, "message"=> "world!"),
          LogStash::Event.new("action_field" => "index", "id" => 2, "message"=> "hello"),
          LogStash::Event.new("action_field" => "index", "id" => 3, "message"=> "hi"),
          LogStash::Event.new("action_field" => "index", "id" => 4, "message"=> "bye")
        ]}
        it "rejects unsupported actions" do
          event_result = subject.send(:safe_interpolation_map_events, events)
          expect(event_result.successful_events).to have_exactly(3).items
          event_result.successful_events.each do |action, _|
            expect(action).to_not eql("unsupported_action")
          end
          expect(event_result.event_mapping_errors).to have_exactly(1).items
          event_result.event_mapping_errors.each do |event_mapping_error|
            expect(event_mapping_error.message).to eql("Elasticsearch doesn't support [unsupported_action] action")
          end
        end
      end

      context "with batch of multiple invalid actions and no valid actions" do
        let(:events) {[
          LogStash::Event.new("action_field" => "unsupported_action1", "id" => 1, "message"=> "world!"),
          LogStash::Event.new("action_field" => "unsupported_action2", "id" => 2, "message"=> "hello"),
          LogStash::Event.new("action_field" => "unsupported_action3", "id" => 3, "message"=> "hi"),
          LogStash::Event.new("action_field" => "unsupported_action4", "id" => 4, "message"=> "bye")
        ]}
        it "rejects unsupported actions" do
          event_result = subject.send(:safe_interpolation_map_events, events)
          expect(event_result.successful_events).to have(:no).items
          event_result.successful_events.each do |action, _|
            expect(action).to_not eql("unsupported_action")
          end
          expect(event_result.event_mapping_errors).to have_exactly(4).items
          event_result.event_mapping_errors.each do |event_mapping_error|
            expect(event_mapping_error.message).to include "Elasticsearch doesn't support"
          end
        end
      end

      context "with batch of intermixed valid and invalid actions" do
        let(:events) {[
          LogStash::Event.new("action_field" => "index", "id" => 1, "message"=> "world!"),
          LogStash::Event.new("action_field" => "unsupported_action2", "id" => 2, "message"=> "hello"),
          LogStash::Event.new("action_field" => "unsupported_action3", "id" => 3, "message"=> "hi"),
          LogStash::Event.new("action_field" => "index", "id" => 4, "message"=> "bye")
        ]}
        it "rejects unsupported actions" do
          event_result = subject.send(:safe_interpolation_map_events, events)
          expect(event_result.successful_events).to have_exactly(2).items
          expect(event_result.event_mapping_errors).to have_exactly(2).items
          event_result.event_mapping_errors.each do |event_mapping_error|
            expect(event_mapping_error.message).to include "Elasticsearch doesn't support"
          end
        end
      end

      context "with batch of exactly one action that is invalid" do
        let(:events) {[
          LogStash::Event.new("action_field" => "index", "id" => 1, "message"=> "world!"),
          LogStash::Event.new("action_field" => "index", "id" => 2, "message"=> "hello"),
          LogStash::Event.new("action_field" => "unsupported_action3", "id" => 3, "message"=> "hi"),
          LogStash::Event.new("action_field" => "index", "id" => 4, "message"=> "bye")
        ]}
        it "rejects unsupported action" do
          event_result = subject.send(:safe_interpolation_map_events, events)
          expect(event_result.successful_events).to have_exactly(3).items
          expect(event_result.event_mapping_errors).to have_exactly(1).items
          event_result.event_mapping_errors.each do |event_mapping_error|
            expect(event_mapping_error.message).to eql("Elasticsearch doesn't support [unsupported_action3] action")
          end
        end
      end
    end
  end

  context '413 errors' do
    let(:payload_size) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES + 1024 }
    let(:event) { ::LogStash::Event.new("message" => ("a" * payload_size ) ) }

    let(:logger_stub) { double("logger").as_null_object }

    before(:each) do
      allow(elasticsearch_output_instance.client).to receive(:logger).and_return(logger_stub)

      allow(elasticsearch_output_instance.client).to receive(:bulk).and_call_original

      max_bytes = payload_size * 3 / 4 # ensure a failure first attempt
      allow(elasticsearch_output_instance.client.pool).to receive(:post) do |path, params, body|
        if body.length > max_bytes
          max_bytes *= 2 # ensure a successful retry
          double("Response", :code => 413, :body => "")
        else
          double("Response", :code => 200, :body => '{"errors":false,"items":[{"index":{"status":200,"result":"created"}}]}')
        end
      end
    end

    it 'retries the 413 until it goes away' do
      elasticsearch_output_instance.multi_receive([event])

      expect(elasticsearch_output_instance.client).to have_received(:bulk).twice
    end

    it 'logs about payload quantity and size' do
      elasticsearch_output_instance.multi_receive([event])

      expect(logger_stub).to have_received(:warn)
                               .with(a_string_matching(/413 Payload Too Large/),
                                     hash_including(:action_count => 1, :content_length => a_value > 20_000_000))
    end
  end

  context "with timeout set" do
    let(:listener) { Flores::Random.tcp_listener }
    let(:port) { listener[2] }
    let(:options) do
      {
        "manage_template" => false,
        "hosts" => "localhost:#{port}",
        "timeout" => 0.1, # fast timeout
      }
    end

    before do
      # Expect a timeout to be logged.
      expect(subject.logger).to receive(:error).with(/Attempted to send a bulk request/i, anything).at_least(:once)
      expect(subject.client).to receive(:bulk).at_least(:twice).and_call_original
    end

    it "should fail after the timeout" do
      #pending("This is tricky now that we do healthchecks on instantiation")
      Thread.new { subject.multi_receive([LogStash::Event.new]) }

      # Allow the timeout to occur
      sleep 6
    end
  end

  describe "the action option" do

    context "with a sprintf action" do
      let(:options) { {"action" => "%{myactionfield}" } }

      let(:event) { LogStash::Event.new("myactionfield" => "update", "message" => "blah") }

      it "should interpolate the requested action value when creating an event_action_tuple" do
        expect(subject.send(:event_action_tuple, event).first).to eql("update")
      end
    end

    context "with a sprintf action equals to update" do
      let(:options) { {"action" => "%{myactionfield}", "upsert" => '{"message": "some text"}' } }

      let(:event) { LogStash::Event.new("myactionfield" => "update", "message" => "blah") }

      it "should obtain specific action's params from event_action_tuple" do
        expect(subject.send(:event_action_tuple, event)[1]).to include(:_upsert)
      end
    end

    context "with an invalid action" do
      let(:options) { {"action" => "SOME Garbaaage"} }
      let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call

      before { allow(subject).to receive(:finish_register) }

      it "should raise a configuration error" do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError)
      end
    end
  end

  describe "the pipeline option" do

    context "with a sprintf and set pipeline" do
      let(:options) { {"pipeline" => "%{pipeline}" } }

      let(:event) { LogStash::Event.new("pipeline" => "my-ingest-pipeline") }

      it "interpolate the pipeline value and set it" do
        expect(subject.send(:event_action_tuple, event)[1]).to include(:pipeline => "my-ingest-pipeline")
      end
    end

    context "with a sprintf and empty pipeline" do
      let(:options) { {"pipeline" => "%{pipeline}" } }

      let(:event) { LogStash::Event.new("pipeline" => "") }

      it "interpolates the pipeline value but not set it because it is empty" do
        expect(subject.send(:event_action_tuple, event)[1]).not_to include(:pipeline)
      end
    end

    context "with both pipeline and target_ingest_pipeline" do
      let(:options) { {"pipeline" => "%{pipeline}" } }
      let(:event) { LogStash::Event.new({"pipeline" => "my-ingest-pipeline", "[@metadata][target_ingest_pipeline]" => "meta-ingest-pipeline"}) }

      it "interpolates the plugin's pipeline value" do
        expect(subject.send(:event_action_tuple, event)[1]).to include(:pipeline => "my-ingest-pipeline")
      end

      context "when the plugin's `pipeline` is constant" do
        let(:options) { super().merge("pipeline" => "my-constant-pipeline") }
        it "uses plugin's pipeline value" do
          expect(subject.send(:event_action_tuple, event)[1]).to include(:pipeline => "my-constant-pipeline")
        end
      end

      context "when the plugin's `pipeline` includes an unresolvable sprintf placeholder" do
        let(:options) { super().merge("pipeline" => "reference-%{unset}-field") }
        it "does not use the target_ingest_pipeline" do
          # when sprintf doesn't resolve a placeholder, the behaviour of our `pipeline` is UNSPECIFIED.
          # here we only validate that the presence of the magic field does not
          # override an explicitly-configured pipeline.
          expect(subject.send(:event_action_tuple, event)[1]).to_not include(:pipeline => "my-ingest-pipeline")
        end
      end
    end

    context "with empty pipeline and target_ingest_pipeline" do
      let(:options) { {"pipeline" => "%{pipeline}" } }
      let(:event) { LogStash::Event.new({"pipeline" => "", "[@metadata][target_ingest_pipeline]" => "meta-ingest-pipeline"}) }

      it "interpolates the pipeline value but not set it because pipeline is empty" do
        expect(subject.send(:event_action_tuple, event)[1]).not_to include(:pipeline)
      end
    end

    context "with target_ingest_pipeline" do
      let(:event) { LogStash::Event.new({"pipeline" => "", "@metadata" => {"target_ingest_pipeline" => "meta-ingest-pipeline"}}) }

      it "interpolates the target_ingest_pipeline value and set it" do
        expect(subject.send(:event_action_tuple, event)[1]).to include(:pipeline => "meta-ingest-pipeline")
      end
    end

    context "with empty target_ingest_pipeline" do
      let(:event) { LogStash::Event.new({"pipeline" => "", "@metadata" => {"host" => "elastic"}}) }

      it "does not set pipeline" do
        expect(subject.send(:event_action_tuple, event)[1]).not_to include(:pipeline)
      end
    end

    context "with empty pipeline and empty target_ingest_pipeline" do
      let(:event) { LogStash::Event.new }

      it "does not set pipeline" do
        expect(subject.send(:event_action_tuple, event)[1]).not_to include(:pipeline)
      end
    end
  end

  describe "the manage_template option" do
    context "with data stream enabled" do
      let(:options) { {"data_stream" => "true", "data_stream_type" => "logs" } }
      let(:do_register) { true }

      it "should default to false" do
        expect(subject).to have_attributes(manage_template: false)
      end
    end

    context "with data stream disabled" do
      let(:options) { {"data_stream" => "false", "index" => "logs" } }
      let(:do_register) { true }

      it "should default to true" do
        expect(subject).to have_attributes(manage_template: true)
      end
    end
  end

  describe "SSL end to end" do
    let(:do_register) { false } # skip the register in the global before block, as is called here.

    before(:each) do
      stub_manticore_client!
      subject.register
    end

    shared_examples("an encrypted client connection") do
      it "should enable SSL in manticore" do
        expect(subject.client.pool.urls.map(&:scheme).uniq).to eql(['https'])
      end
    end

    context "With the 'ssl_enabled' option" do
      let(:options) { {"ssl_enabled" => true}}

      include_examples("an encrypted client connection")
    end

    context "With an https host" do
      let(:options) { {"hosts" => "https://localhost"} }
      include_examples("an encrypted client connection")
    end
  end

  describe "SSL deprecated settings" do
    let(:base_options) { {"ssl" => "true"} }

    context "with client certificate" do
      let(:do_register) { true }
      let(:cacert) { Stud::Temporary.file.path }
      let(:options) { base_options.merge(
        "cacert" => cacert,
        "ssl_certificate_verification" => false
      ) }

      after :each do
        File.delete(cacert)
      end

      it "should map new configs into params" do
        expect(subject.params).to match hash_including(
          "ssl_enabled" => true,
          "ssl_verification_mode" => "none",
          "ssl_certificate_authorities" => [cacert]
        )
      end

      it "should set new configs variables" do
        expect(subject.instance_variable_get(:@ssl_enabled)).to eql(true)
        expect(subject.instance_variable_get(:@ssl_verification_mode)).to eql("none")
        expect(subject.instance_variable_get(:@ssl_certificate_authorities)).to eql([cacert])
      end
    end

    context "with java stores" do
      let(:do_register) { true }
      let(:keystore) { Stud::Temporary.file.path }
      let(:truststore) { Stud::Temporary.file.path }
      let(:options) { base_options.merge(
        "keystore" => keystore,
        "keystore_password" => "keystore",
        "truststore" => truststore,
        "truststore_password" => "truststore",
        "ssl_certificate_verification" => true
      ) }

      let(:spy_http_client_builder!) do
        allow(described_class::HttpClientBuilder).to receive(:build).with(any_args).and_call_original
        allow(described_class::HttpClientBuilder).to receive(:setup_ssl).with(any_args).and_return({})
      end

      after :each do
        File.delete(keystore)
        File.delete(truststore)
      end

      it "should map new configs into params" do
        expect(subject.params).to match hash_including(
          "ssl_enabled" => true,
          "ssl_keystore_path" => keystore,
          "ssl_truststore_path" => truststore,
          "ssl_verification_mode" => "full"
        )

        expect(subject.params["ssl_keystore_password"].value).to eql("keystore")
        expect(subject.params["ssl_truststore_password"].value).to eql("truststore")
      end

      it "should set new configs variables" do
        expect(subject.instance_variable_get(:@ssl_enabled)).to eql(true)
        expect(subject.instance_variable_get(:@ssl_keystore_path)).to eql(keystore)
        expect(subject.instance_variable_get(:@ssl_keystore_password).value).to eql("keystore")
        expect(subject.instance_variable_get(:@ssl_truststore_path)).to eql(truststore)
        expect(subject.instance_variable_get(:@ssl_truststore_password).value).to eql("truststore")
        expect(subject.instance_variable_get(:@ssl_verification_mode)).to eql("full")
      end
    end
  end
|
925
|
+
|
926
|
+
describe "retry_on_conflict" do
|
927
|
+
let(:num_retries) { 123 }
|
928
|
+
let(:event) { LogStash::Event.new("myactionfield" => "update", "message" => "blah") }
|
929
|
+
let(:options) { { 'retry_on_conflict' => num_retries } }
|
930
|
+
|
931
|
+
context "with a regular index" do
|
932
|
+
let(:options) { super().merge("action" => "index") }
|
933
|
+
|
934
|
+
it "should not set the retry_on_conflict parameter when creating an event_action_tuple" do
|
935
|
+
allow(subject.client).to receive(:maximum_seen_major_version).and_return(maximum_seen_major_version)
|
936
|
+
action, params, event_data = subject.send(:event_action_tuple, event)
|
937
|
+
expect(params).not_to include({subject.send(:retry_on_conflict_action_name) => num_retries})
|
938
|
+
end
|
939
|
+
end
|
940
|
+
|
941
|
+
context "using a plain update" do
|
942
|
+
let(:options) { super().merge("action" => "update", "retry_on_conflict" => num_retries, "document_id" => 1) }
|
943
|
+
|
944
|
+
it "should set the retry_on_conflict parameter when creating an event_action_tuple" do
|
945
|
+
action, params, event_data = subject.send(:event_action_tuple, event)
|
946
|
+
expect(params).to include({subject.send(:retry_on_conflict_action_name) => num_retries})
|
947
|
+
end
|
948
|
+
end
|
949
|
+
|
950
|
+
context "with a sprintf action that resolves to update" do
|
951
|
+
let(:options) { super().merge("action" => "%{myactionfield}", "retry_on_conflict" => num_retries, "document_id" => 1) }
|
952
|
+
|
953
|
+
it "should set the retry_on_conflict parameter when creating an event_action_tuple" do
|
954
|
+
action, params, event_data = subject.send(:event_action_tuple, event)
|
955
|
+
expect(params).to include({subject.send(:retry_on_conflict_action_name) => num_retries})
|
956
|
+
expect(action).to eq("update")
|
957
|
+
end
|
958
|
+
end
|
959
|
+
end
|
960
|
+
|
961
|
+
describe "sleep interval calculation" do
|
962
|
+
let(:retry_max_interval) { 64 }
|
963
|
+
let(:options) { { "retry_max_interval" => retry_max_interval } }
|
964
|
+
|
965
|
+
it "should double the given value" do
|
966
|
+
expect(subject.next_sleep_interval(2)).to eql(4)
|
967
|
+
expect(subject.next_sleep_interval(32)).to eql(64)
|
968
|
+
end
|
969
|
+
|
970
|
+
it "should not increase the value past the max retry interval" do
|
971
|
+
sleep_interval = 2
|
972
|
+
100.times do
|
973
|
+
sleep_interval = subject.next_sleep_interval(sleep_interval)
|
974
|
+
expect(sleep_interval).to be <= retry_max_interval
|
975
|
+
end
|
976
|
+
end
|
977
|
+
end
|
978
|
+
|
979
|
+
describe "stale connection check" do
|
980
|
+
let(:validate_after_inactivity) { 123 }
|
981
|
+
let(:options) { { "validate_after_inactivity" => validate_after_inactivity } }
|
982
|
+
let(:do_register) { false }
|
983
|
+
|
984
|
+
before :each do
|
985
|
+
allow(subject).to receive(:finish_register)
|
986
|
+
|
987
|
+
allow(::Manticore::Client).to receive(:new).with(any_args).and_call_original
|
988
|
+
end
|
989
|
+
|
990
|
+
after :each do
|
991
|
+
subject.close
|
992
|
+
end
|
993
|
+
|
994
|
+
it "should set the correct http client option for 'validate_after_inactivity'" do
|
995
|
+
subject.register
|
996
|
+
expect(::Manticore::Client).to have_received(:new) do |options|
|
997
|
+
expect(options[:check_connection_timeout]).to eq(validate_after_inactivity)
|
998
|
+
end
|
999
|
+
end
|
1000
|
+
end
|
1001
|
+
|
1002
|
+
describe "custom parameters" do
|
1003
|
+
|
1004
|
+
let(:manticore_urls) { subject.client.pool.urls }
|
1005
|
+
let(:manticore_url) { manticore_urls.first }
|
1006
|
+
|
1007
|
+
let(:custom_parameters_hash) { { "id" => 1, "name" => "logstash" } }
|
1008
|
+
let(:custom_parameters_query) { custom_parameters_hash.map {|k,v| "#{k}=#{v}" }.join("&") }
|
1009
|
+
|
1010
|
+
let(:stub_http_client_pool!) do
|
1011
|
+
[:start_resurrectionist, :start_sniffer, :healthcheck!].each do |method|
|
1012
|
+
allow_any_instance_of(LogStash::Outputs::ElasticSearch::HttpClient::Pool).to receive(method)
|
1013
|
+
end
|
1014
|
+
end
|
1015
|
+
|
1016
|
+
context "using non-url hosts" do
|
1017
|
+
|
1018
|
+
let(:options) {
|
1019
|
+
{
|
1020
|
+
"index" => "my-index",
|
1021
|
+
"hosts" => ["localhost:9202"],
|
1022
|
+
"path" => "some-path",
|
1023
|
+
"parameters" => custom_parameters_hash
|
1024
|
+
}
|
1025
|
+
}
|
1026
|
+
|
1027
|
+
it "creates a URI with the added parameters" do
|
1028
|
+
expect(subject.parameters).to eql(custom_parameters_hash)
|
1029
|
+
end
|
1030
|
+
|
1031
|
+
it "sets the query string on the HTTP client" do
|
1032
|
+
expect(manticore_url.query).to eql(custom_parameters_query)
|
1033
|
+
end
|
1034
|
+
end
|
1035
|
+
|
1036
|
+
context "using url hosts" do
|
1037
|
+
|
1038
|
+
context "with embedded query parameters" do
|
1039
|
+
let(:options) {
|
1040
|
+
{ "hosts" => ["http://localhost:9202/path?#{custom_parameters_query}"] }
|
1041
|
+
}
|
1042
|
+
|
1043
|
+
it "sets the query string on the HTTP client" do
|
1044
|
+
expect(manticore_url.query).to eql(custom_parameters_query)
|
1045
|
+
end
|
1046
|
+
end
|
1047
|
+
|
1048
|
+
context "with explicit query parameters" do
|
1049
|
+
let(:options) {
|
1050
|
+
{
|
1051
|
+
"hosts" => ["http://localhost:9202/path"],
|
1052
|
+
"parameters" => custom_parameters_hash
|
1053
|
+
}
|
1054
|
+
}
|
1055
|
+
|
1056
|
+
it "sets the query string on the HTTP client" do
|
1057
|
+
expect(manticore_url.query).to eql(custom_parameters_query)
|
1058
|
+
end
|
1059
|
+
end
|
1060
|
+
|
1061
|
+
context "with explicit query parameters and existing url parameters" do
|
1062
|
+
let(:existing_query_string) { "existing=param" }
|
1063
|
+
let(:options) {
|
1064
|
+
{
|
1065
|
+
"hosts" => ["http://localhost:9202/path?#{existing_query_string}"],
|
1066
|
+
"parameters" => custom_parameters_hash
|
1067
|
+
}
|
1068
|
+
}
|
1069
|
+
|
1070
|
+
it "keeps the existing query string" do
|
1071
|
+
expect(manticore_url.query).to include(existing_query_string)
|
1072
|
+
end
|
1073
|
+
|
1074
|
+
it "includes the new query string" do
|
1075
|
+
expect(manticore_url.query).to include(custom_parameters_query)
|
1076
|
+
end
|
1077
|
+
|
1078
|
+
it "appends the new query string to the existing one" do
|
1079
|
+
expect(manticore_url.query).to eql("#{existing_query_string}&#{custom_parameters_query}")
|
1080
|
+
end
|
1081
|
+
end
|
1082
|
+
end
|
1083
|
+
end
|
1084
|
+
|
1085
|
+
describe "cloud.id" do
|
1086
|
+
let(:do_register) { false }
|
1087
|
+
|
1088
|
+
let(:valid_cloud_id) do
|
1089
|
+
'sample:dXMtY2VudHJhbDEuZ2NwLmNsb3VkLmVzLmlvJGFjMzFlYmI5MDI0MTc3MzE1NzA0M2MzNGZkMjZmZDQ2OjkyNDMkYTRjMDYyMzBlNDhjOGZjZTdiZTg4YTA3NGEzYmIzZTA6OTI0NA=='
|
1090
|
+
end
|
1091
|
+
|
1092
|
+
let(:options) { { 'cloud_id' => valid_cloud_id } }
|
1093
|
+
|
1094
|
+
before(:each) do
|
1095
|
+
stub_manticore_client!
|
1096
|
+
end
|
1097
|
+
|
1098
|
+
it "should set host(s)" do
|
1099
|
+
subject.register
|
1100
|
+
es_url = subject.client.pool.urls.first
|
1101
|
+
expect( es_url.to_s ).to eql('https://ac31ebb90241773157043c34fd26fd46.us-central1.gcp.cloud.es.io:9243/')
|
1102
|
+
end
|
1103
|
+
|
1104
|
+
context 'invalid' do
|
1105
|
+
let(:options) { { 'cloud_id' => 'invalid:dXMtY2VudHJhbDEuZ2NwLmNsb3VkLmVzLmlv' } }
|
1106
|
+
|
1107
|
+
it "should fail" do
|
1108
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /cloud_id.*? is invalid/
|
1109
|
+
end
|
1110
|
+
end
|
1111
|
+
|
1112
|
+
context 'hosts also set' do
|
1113
|
+
let(:options) { { 'cloud_id' => valid_cloud_id, 'hosts' => [ 'localhost' ] } }
|
1114
|
+
|
1115
|
+
it "should fail" do
|
1116
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /cloud_id and hosts/
|
1117
|
+
end
|
1118
|
+
end
|
1119
|
+
end if LOGSTASH_VERSION > '6.0'
|
1120
|
+
|
1121
|
+
describe "cloud.auth" do
|
1122
|
+
let(:do_register) { false }
|
1123
|
+
|
1124
|
+
let(:options) { { 'cloud_auth' => LogStash::Util::Password.new('elastic:my-passwd-00') } }
|
1125
|
+
|
1126
|
+
before(:each) do
|
1127
|
+
stub_manticore_client!
|
1128
|
+
end
|
1129
|
+
|
1130
|
+
it "should set host(s)" do
|
1131
|
+
subject.register
|
1132
|
+
es_url = subject.client.pool.urls.first
|
1133
|
+
expect( es_url.user ).to eql('elastic')
|
1134
|
+
expect( es_url.password ).to eql('my-passwd-00')
|
1135
|
+
end
|
1136
|
+
|
1137
|
+
context 'invalid' do
|
1138
|
+
let(:options) { { 'cloud_auth' => 'invalid-format' } }
|
1139
|
+
|
1140
|
+
it "should fail" do
|
1141
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /cloud_auth.*? format/
|
1142
|
+
end
|
1143
|
+
end
|
1144
|
+
|
1145
|
+
context 'user also set' do
|
1146
|
+
let(:options) { { 'cloud_auth' => 'elastic:my-passwd-00', 'user' => 'another' } }
|
1147
|
+
|
1148
|
+
it "should fail" do
|
1149
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /Multiple authentication options are specified/
|
1150
|
+
end
|
1151
|
+
end
|
1152
|
+
|
1153
|
+
context 'api_key also set' do
|
1154
|
+
let(:options) { { 'cloud_auth' => 'elastic:my-passwd-00', 'api_key' => 'some_key' } }
|
1155
|
+
|
1156
|
+
it "should fail" do
|
1157
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /Multiple authentication options are specified/
|
1158
|
+
end
|
1159
|
+
end
|
1160
|
+
end if LOGSTASH_VERSION > '6.0'
|
1161
|
+
|
1162
|
+
  context 'handling elasticsearch document-level status meant for the DLQ' do
    let(:es_api_action) { "CUSTOM_ACTION" }
    let(:es_api_params) { Hash['_index' => 'MY_INDEX'] }

    let(:options) { { "manage_template" => false, "data_stream" => 'false' } }
    let(:action) { LogStash::Outputs::ElasticSearch::EventActionTuple.new(es_api_action, es_api_params, LogStash::Event.new("foo" => "bar")) }

    let(:logger) { double('logger').as_null_object }
    before(:each) { subject.instance_variable_set(:@logger, logger) }

    context 'when @dlq_writer is nil' do
      before { subject.instance_variable_set '@dlq_writer', nil }

      context 'resorting to previous behaviour of logging the error' do
        context 'getting an invalid_index_name_exception' do
          it 'should log at ERROR level' do
            # logger = double("logger").as_null_object
            # subject.instance_variable_set(:@logger, logger)

            mock_response = { 'index' => { 'error' => { 'type' => 'invalid_index_name_exception' } } }
            subject.handle_dlq_response("Could not index event to Elasticsearch.", action, :some_status, mock_response)

            expect(logger).to have_received(:error).with(a_string_including("Could not index event to Elasticsearch"),
                                                         a_hash_including(:status => :some_status,
                                                                          :action => [es_api_action, es_api_params, action.event.to_hash],
                                                                          :response => mock_response))
          end
        end

        context 'when getting any other exception' do
          it 'should log at WARN level' do
            # logger = double("logger").as_null_object
            # subject.instance_variable_set(:@logger, logger)

            mock_response = { 'index' => { 'error' => { 'type' => 'illegal_argument_exception' } } }
            subject.handle_dlq_response("Could not index event to Elasticsearch.", action, :some_status, mock_response)

            expect(logger).to have_received(:warn).with(a_string_including("Could not index event to Elasticsearch"),
                                                        a_hash_including(:status => :some_status,
                                                                         :action => [es_api_action, es_api_params, action.event.to_hash],
                                                                         :response => mock_response))
          end
        end

        context 'when the response does not include [error]' do
          it 'should not fail, but just log a warning' do
            # logger = double("logger").as_null_object
            # subject.instance_variable_set(:@logger, logger)

            mock_response = { 'index' => {} }
            expect do
              subject.handle_dlq_response("Could not index event to Elasticsearch.", action, :some_status, mock_response)
            end.to_not raise_error

            expect(logger).to have_received(:warn).with(a_string_including("Could not index event to Elasticsearch"),
                                                        a_hash_including(:status => :some_status,
                                                                         :action => [es_api_action, es_api_params, action.event.to_hash],
                                                                         :response => mock_response))
          end
        end
      end
    end

    # DLQ writer always nil, no matter what I try here. So mocking it all the way
    context 'when DLQ is enabled' do
      let(:dlq_writer) { double('DLQ writer') }
      before { subject.instance_variable_set('@dlq_writer', dlq_writer) }

      # Note: This is not quite the desired behaviour.
      # We should still log when sending to the DLQ.
      # This shall be solved by another issue, however: logstash-output-elasticsearch#772
      it 'should send the event to the DLQ instead, and not log' do
        event = LogStash::Event.new("foo" => "bar")
        expect(dlq_writer).to receive(:write).once.with(event, /Could not index/)
        mock_response = { 'index' => { 'error' => { 'type' => 'illegal_argument_exception' } } }
        action = LogStash::Outputs::ElasticSearch::EventActionTuple.new(:action, :params, event)
        subject.handle_dlq_response("Could not index event to Elasticsearch.", action, 404, mock_response)

        expect(logger).to_not have_received(:warn).with(a_string_including("Could not index event to Elasticsearch"))
      end
    end

    context 'with error response status' do

      let(:options) { super().merge 'document_id' => '%{foo}' }

      let(:events) { [ LogStash::Event.new("foo" => "bar") ] }

      let(:dlq_writer) { subject.instance_variable_get(:@dlq_writer) }

      let(:error_code) { 400 }

      let(:bulk_response) do
        {
          "took"=>1, "ingest_took"=>11, "errors"=>true, "items"=>
            [{
              "index"=>{"_index"=>"bar", "_type"=>"_doc", "_id"=>'bar', "status" => error_code,
                "error"=>{"type" => "illegal_argument_exception", "reason" => "TEST" }
              }
            }]
        }
      end

      before(:each) do
        allow(subject.client).to receive(:bulk_send).and_return(bulk_response)
      end

      shared_examples "should write event to DLQ" do
        it "should write event to DLQ" do
          expect(dlq_writer).to receive(:write).and_wrap_original do |method, *args|
            expect( args.size ).to eql 2

            event, reason = *args
            expect( event ).to be_a LogStash::Event
            expect( event ).to be events.first
            expect( reason ).to start_with "Could not index event to Elasticsearch. status: #{error_code}, action: [\"index\""
            expect( reason ).to match /_id=>"bar".*"foo"=>"bar".*response:.*"reason"=>"TEST"/

            method.call(*args) # won't hurt to call LogStash::Util::DummyDeadLetterQueueWriter
          end.once

          event_action_tuples = subject.map_events(events)
          subject.send(:submit, event_action_tuples)
        end
      end

      context "is one of the predefined codes" do
        include_examples "should write event to DLQ"
      end

      context "when user customized dlq_custom_codes option" do
        let(:error_code) { 403 }
        let(:options) { super().merge 'dlq_custom_codes' => [error_code] }

        include_examples "should write event to DLQ"
      end

    end if LOGSTASH_VERSION > '7.0'
  end

describe "custom headers" do
|
1303
|
+
let(:manticore_options) { subject.client.pool.adapter.manticore.instance_variable_get(:@options) }
|
1304
|
+
|
1305
|
+
context "when set" do
|
1306
|
+
let(:headers) { { "X-Thing" => "Test" } }
|
1307
|
+
let(:options) { { "custom_headers" => headers } }
|
1308
|
+
it "should use the custom headers in the adapter options" do
|
1309
|
+
expect(manticore_options[:headers]).to eq(headers)
|
1310
|
+
end
|
1311
|
+
end
|
1312
|
+
|
1313
|
+
context "when not set" do
|
1314
|
+
it "should have no headers" do
|
1315
|
+
expect(manticore_options[:headers]).to be_empty
|
1316
|
+
end
|
1317
|
+
end
|
1318
|
+
end
|
1319
|
+
|
1320
|
+
describe "API key" do
|
1321
|
+
let(:manticore_options) { subject.client.pool.adapter.manticore.instance_variable_get(:@options) }
|
1322
|
+
let(:api_key) { "some_id:some_api_key" }
|
1323
|
+
let(:base64_api_key) { "ApiKey c29tZV9pZDpzb21lX2FwaV9rZXk=" }
|
1324
|
+
|
1325
|
+
shared_examples 'secure api-key authenticated client' do
|
1326
|
+
let(:do_register) { true }
|
1327
|
+
|
1328
|
+
it 'adds the appropriate Authorization header to the manticore client' do
|
1329
|
+
expect(manticore_options[:headers]).to eq({ "Authorization" => base64_api_key })
|
1330
|
+
end
|
1331
|
+
it 'is provides ssl_enabled=>true to the http client builder' do; aggregate_failures do
|
1332
|
+
expect(described_class::HttpClientBuilder).to have_received(:build).with(anything, anything, hash_including('ssl_enabled'=>true))
|
1333
|
+
end; end
|
1334
|
+
end
|
1335
|
+
|
1336
|
+
context "when set without ssl_enabled => true" do
|
1337
|
+
let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
|
1338
|
+
let(:options) { { "api_key" => api_key } }
|
1339
|
+
|
1340
|
+
it "should raise a configuration error" do
|
1341
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /requires SSL\/TLS/
|
1342
|
+
end
|
1343
|
+
|
1344
|
+
context 'with cloud_id' do
|
1345
|
+
let(:sample_cloud_id) { 'sample:dXMtY2VudHJhbDEuZ2NwLmNsb3VkLmVzLmlvJGFjMzFlYmI5MDI0MTc3MzE1NzA0M2MzNGZkMjZmZDQ2OjkyNDMkYTRjMDYyMzBlNDhjOGZjZTdiZTg4YTA3NGEzYmIzZTA6OTI0NA==' }
|
1346
|
+
let(:options) { super().merge('cloud_id' => sample_cloud_id) }
|
1347
|
+
|
1348
|
+
it_behaves_like 'secure api-key authenticated client'
|
1349
|
+
end
|
1350
|
+
end
|
1351
|
+
|
1352
|
+
context "when set without ssl_enabled specified but with an https host" do
|
1353
|
+
let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
|
1354
|
+
let(:options) { { "hosts" => ["https://some.host.com"], "api_key" => api_key } }
|
1355
|
+
|
1356
|
+
it_behaves_like 'secure api-key authenticated client'
|
1357
|
+
end
|
1358
|
+
|
1359
|
+
context "when set without ssl_enabled specified but with an http host`" do
|
1360
|
+
let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
|
1361
|
+
let(:options) { { "hosts" => ["http://some.host.com"], "api_key" => api_key } }
|
1362
|
+
|
1363
|
+
it "should raise a configuration error" do
|
1364
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /requires SSL\/TLS/
|
1365
|
+
end
|
1366
|
+
end
|
1367
|
+
|
1368
|
+
context "when set with `ssl_enabled => false`" do
|
1369
|
+
let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
|
1370
|
+
let(:options) { { "ssl_enabled" => "false", "api_key" => api_key } }
|
1371
|
+
|
1372
|
+
it "should raise a configuration error" do
|
1373
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /requires SSL\/TLS/
|
1374
|
+
end
|
1375
|
+
end
|
1376
|
+
|
1377
|
+
context "when set" do
|
1378
|
+
let(:options) { { "api_key" => ::LogStash::Util::Password.new(api_key) } }
|
1379
|
+
|
1380
|
+
context "with ssl_enabled => true" do
|
1381
|
+
let(:options) { super().merge("ssl_enabled" => true) }
|
1382
|
+
it_behaves_like 'secure api-key authenticated client'
|
1383
|
+
end
|
1384
|
+
|
1385
|
+
context "with ssl_enabled => false" do
|
1386
|
+
let(:options) { super().merge("ssl_enabled" => "false") }
|
1387
|
+
|
1388
|
+
let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
|
1389
|
+
it "should raise a configuration error" do
|
1390
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /requires SSL\/TLS/
|
1391
|
+
end
|
1392
|
+
end
|
1393
|
+
|
1394
|
+
context "without ssl_enabled specified" do
|
1395
|
+
context "with an https host" do
|
1396
|
+
let(:options) { super().merge("hosts" => ["https://some.host.com"]) }
|
1397
|
+
it_behaves_like 'secure api-key authenticated client'
|
1398
|
+
end
|
1399
|
+
context "with an http host`" do
|
1400
|
+
let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
|
1401
|
+
let(:options) { { "hosts" => ["http://some.host.com"], "api_key" => api_key } }
|
1402
|
+
|
1403
|
+
it "should raise a configuration error" do
|
1404
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /requires SSL\/TLS/
|
1405
|
+
end
|
1406
|
+
end
|
1407
|
+
end
|
1408
|
+
end
|
1409
|
+
|
1410
|
+
context "when not set" do
|
1411
|
+
it "should have no headers" do
|
1412
|
+
expect(manticore_options[:headers]).to be_empty
|
1413
|
+
end
|
1414
|
+
end
|
1415
|
+
|
1416
|
+
context 'user also set' do
|
1417
|
+
let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
|
1418
|
+
let(:options) { { "ssl_enabled" => true, "api_key" => api_key, 'user' => 'another' } }
|
1419
|
+
|
1420
|
+
it "should fail" do
|
1421
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /Multiple authentication options are specified/
|
1422
|
+
end
|
1423
|
+
end
|
1424
|
+
|
1425
|
+
context 'cloud_auth also set' do
|
1426
|
+
let(:do_register) { false } # this is what we want to test, so we disable the before(:each) call
|
1427
|
+
let(:options) { { "ssl_enabled" => true, "api_key" => api_key, 'cloud_auth' => 'foobar' } }
|
1428
|
+
|
1429
|
+
it "should fail" do
|
1430
|
+
expect { subject.register }.to raise_error LogStash::ConfigurationError, /Multiple authentication options are specified/
|
1431
|
+
end
|
1432
|
+
end
|
1433
|
+
end
|
1434
|
+
|
1435
|
+
  describe 'ECS Compatibility Support', :ecs_compatibility_support do
    [
      :disabled,
      :v1,
      :v8,
    ].each do |ecs_compatibility|
      context "When initialized with `ecs_compatibility => #{ecs_compatibility}`" do
        let(:options) { Hash.new }
        subject(:output) { described_class.new(options.merge("ecs_compatibility" => "#{ecs_compatibility}")) }
        context 'when registered' do
          before(:each) { output.register }
          it 'has the correct effective ECS compatibility setting' do
            expect(output.ecs_compatibility).to eq(ecs_compatibility)
          end
        end
      end
    end
  end

describe "post-register ES setup" do
|
1455
|
+
let(:do_register) { false }
|
1456
|
+
let(:es_version) { '7.10.0' } # DS default on LS 8.x
|
1457
|
+
let(:options) { { 'hosts' => '127.0.0.1:9999', 'data_stream' => 'false' } }
|
1458
|
+
let(:logger) { subject.logger }
|
1459
|
+
|
1460
|
+
before do
|
1461
|
+
allow(logger).to receive(:error) # expect tracking
|
1462
|
+
|
1463
|
+
allow(subject).to receive(:last_es_version).and_return es_version
|
1464
|
+
# make successful_connection? return true:
|
1465
|
+
allow(subject).to receive(:maximum_seen_major_version).and_return Integer(es_version.split('.').first)
|
1466
|
+
allow(subject).to receive(:alive_urls_count).and_return Integer(1)
|
1467
|
+
allow(subject).to receive(:stop_after_successful_connection_thread)
|
1468
|
+
end
|
1469
|
+
|
1470
|
+
it "logs inability to retrieve uuid" do
|
1471
|
+
allow(subject).to receive(:install_template)
|
1472
|
+
allow(subject).to receive(:ilm_in_use?).and_return nil
|
1473
|
+
subject.register
|
1474
|
+
subject.send :wait_for_successful_connection
|
1475
|
+
|
1476
|
+
expect(logger).to have_received(:error).with(/Unable to retrieve Elasticsearch cluster uuid/i, anything)
|
1477
|
+
end if LOGSTASH_VERSION >= '7.0.0'
|
1478
|
+
|
1479
|
+
it "logs template install failure" do
|
1480
|
+
allow(subject).to receive(:discover_cluster_uuid)
|
1481
|
+
allow(subject).to receive(:ilm_in_use?).and_return nil
|
1482
|
+
subject.register
|
1483
|
+
subject.send :wait_for_successful_connection
|
1484
|
+
|
1485
|
+
expect(logger).to have_received(:error).with(/Failed to install template/i, anything)
|
1486
|
+
end
|
1487
|
+
|
1488
|
+
    context 'error raised' do

      let(:es_version) { '7.8.0' }
      let(:options) { super().merge('data_stream' => 'true', 'ecs_compatibility' => 'v1') }
      let(:latch) { Concurrent::CountDownLatch.new }

      before do
        allow(subject).to receive(:install_template)
        allow(subject).to receive(:discover_cluster_uuid)
        allow(subject).to receive(:ilm_in_use?).and_return nil
        # executes from the after_successful_connection thread :
        allow(subject).to receive(:finish_register) { latch.wait }.and_call_original
        subject.register
      end

      it 'keeps logging on multi_receive' do
        allow(subject).to receive(:retrying_submit)
        latch.count_down; sleep(1.0)

        expect_logged_error = lambda do |count|
          expect(logger).to have_received(:error).with(
            /Elasticsearch setup did not complete normally, please review previously logged errors/i,
            hash_including(message: 'A data_stream configuration is only supported since Elasticsearch 7.9.0 (detected version 7.8.0), please upgrade your cluster')
          ).exactly(count).times
        end

        subject.multi_receive [ LogStash::Event.new('foo' => 1) ]
        expect_logged_error.call(1)

        subject.multi_receive [ LogStash::Event.new('foo' => 2) ]
        expect_logged_error.call(2)
      end

    end

    context 'during register/finish_register' do

      let(:options) { { 'hosts' => '127.0.0.1:9999', 'data_stream' => 'true' } }
      let(:es_version) { '8.7.0' } # DS default on LS 8.x

      before do
        allow(subject).to receive(:discover_cluster_uuid)
        allow(subject).to receive(:maybe_create_rollover_alias)
        allow(subject).to receive(:maybe_create_ilm_policy)
        allow(subject).to receive(:build_client)
      end

      # this test could not have been done using latches as the fix to the race
      # condition alters the order of the instructions in elasticsearch.rb
      #
      # the race condition happened when the @index was set to the datastream
      # after `ilm_in_use?` is called but before `setup_ilm`
      it 'doesn\'t have a race condition leading to resetting back to ILM' do
        ilm_in_use = subject.method(:ilm_in_use?)
        expect(subject).to receive(:ilm_in_use?) do |params|
          ret = ilm_in_use.call
          sleep 3
          ret
        end
        setup_mapper_and_target = subject.method(:setup_mapper_and_target)
        expect(subject).to receive(:setup_mapper_and_target).once do |params|
          sleep 1
          setup_mapper_and_target.call(params)
        end
        subject.register
        sleep 6
        expect(subject.index).to eq("logs-generic-default")
      end

    end
  end

  @private

  def stub_manticore_client!(manticore_double = nil)
    manticore_double ||= double("manticore #{self.inspect}")
    response_double = double("manticore response").as_null_object
    # Allow healthchecks
    allow(manticore_double).to receive(:head).with(any_args).and_return(response_double)
    allow(manticore_double).to receive(:get).with(any_args).and_return(response_double)
    allow(manticore_double).to receive(:close)

    allow(::Manticore::Client).to receive(:new).and_return(manticore_double)
  end

end