logstash-output-elasticsearch-test 11.16.0-x86_64-linux
This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +649 -0
- data/CONTRIBUTORS +34 -0
- data/Gemfile +16 -0
- data/LICENSE +202 -0
- data/NOTICE.TXT +5 -0
- data/README.md +106 -0
- data/docs/index.asciidoc +1369 -0
- data/lib/logstash/outputs/elasticsearch/data_stream_support.rb +282 -0
- data/lib/logstash/outputs/elasticsearch/default-ilm-policy.json +14 -0
- data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +155 -0
- data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +534 -0
- data/lib/logstash/outputs/elasticsearch/http_client.rb +497 -0
- data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +201 -0
- data/lib/logstash/outputs/elasticsearch/ilm.rb +92 -0
- data/lib/logstash/outputs/elasticsearch/license_checker.rb +52 -0
- data/lib/logstash/outputs/elasticsearch/template_manager.rb +131 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-6x.json +45 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-7x.json +44 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-8x.json +50 -0
- data/lib/logstash/outputs/elasticsearch.rb +699 -0
- data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb +237 -0
- data/lib/logstash/plugin_mixins/elasticsearch/common.rb +409 -0
- data/lib/logstash/plugin_mixins/elasticsearch/noop_license_checker.rb +9 -0
- data/logstash-output-elasticsearch.gemspec +40 -0
- data/spec/es_spec_helper.rb +225 -0
- data/spec/fixtures/_nodes/6x.json +81 -0
- data/spec/fixtures/_nodes/7x.json +92 -0
- data/spec/fixtures/htpasswd +2 -0
- data/spec/fixtures/license_check/active.json +16 -0
- data/spec/fixtures/license_check/inactive.json +5 -0
- data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
- data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
- data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
- data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
- data/spec/fixtures/template-with-policy-es6x.json +48 -0
- data/spec/fixtures/template-with-policy-es7x.json +45 -0
- data/spec/fixtures/template-with-policy-es8x.json +50 -0
- data/spec/fixtures/test_certs/ca.crt +29 -0
- data/spec/fixtures/test_certs/ca.der.sha256 +1 -0
- data/spec/fixtures/test_certs/ca.key +51 -0
- data/spec/fixtures/test_certs/renew.sh +13 -0
- data/spec/fixtures/test_certs/test.crt +30 -0
- data/spec/fixtures/test_certs/test.der.sha256 +1 -0
- data/spec/fixtures/test_certs/test.key +51 -0
- data/spec/fixtures/test_certs/test.p12 +0 -0
- data/spec/fixtures/test_certs/test_invalid.crt +36 -0
- data/spec/fixtures/test_certs/test_invalid.key +51 -0
- data/spec/fixtures/test_certs/test_invalid.p12 +0 -0
- data/spec/fixtures/test_certs/test_self_signed.crt +32 -0
- data/spec/fixtures/test_certs/test_self_signed.key +54 -0
- data/spec/fixtures/test_certs/test_self_signed.p12 +0 -0
- data/spec/integration/outputs/compressed_indexing_spec.rb +70 -0
- data/spec/integration/outputs/create_spec.rb +67 -0
- data/spec/integration/outputs/data_stream_spec.rb +68 -0
- data/spec/integration/outputs/delete_spec.rb +63 -0
- data/spec/integration/outputs/ilm_spec.rb +534 -0
- data/spec/integration/outputs/index_spec.rb +421 -0
- data/spec/integration/outputs/index_version_spec.rb +98 -0
- data/spec/integration/outputs/ingest_pipeline_spec.rb +75 -0
- data/spec/integration/outputs/metrics_spec.rb +66 -0
- data/spec/integration/outputs/no_es_on_startup_spec.rb +78 -0
- data/spec/integration/outputs/painless_update_spec.rb +99 -0
- data/spec/integration/outputs/parent_spec.rb +94 -0
- data/spec/integration/outputs/retry_spec.rb +182 -0
- data/spec/integration/outputs/routing_spec.rb +61 -0
- data/spec/integration/outputs/sniffer_spec.rb +94 -0
- data/spec/integration/outputs/templates_spec.rb +133 -0
- data/spec/integration/outputs/unsupported_actions_spec.rb +75 -0
- data/spec/integration/outputs/update_spec.rb +114 -0
- data/spec/spec_helper.rb +10 -0
- data/spec/support/elasticsearch/api/actions/delete_ilm_policy.rb +19 -0
- data/spec/support/elasticsearch/api/actions/get_alias.rb +18 -0
- data/spec/support/elasticsearch/api/actions/get_ilm_policy.rb +18 -0
- data/spec/support/elasticsearch/api/actions/put_alias.rb +24 -0
- data/spec/support/elasticsearch/api/actions/put_ilm_policy.rb +25 -0
- data/spec/unit/http_client_builder_spec.rb +185 -0
- data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +612 -0
- data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +151 -0
- data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +501 -0
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +339 -0
- data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +189 -0
- data/spec/unit/outputs/elasticsearch_proxy_spec.rb +103 -0
- data/spec/unit/outputs/elasticsearch_spec.rb +1573 -0
- data/spec/unit/outputs/elasticsearch_ssl_spec.rb +197 -0
- data/spec/unit/outputs/error_whitelist_spec.rb +56 -0
- data/spec/unit/outputs/license_check_spec.rb +57 -0
- metadata +423 -0
@@ -0,0 +1,421 @@
+require_relative "../../../spec/es_spec_helper"
+require "logstash/outputs/elasticsearch"
+require 'cgi'
+
+describe "TARGET_BULK_BYTES", :integration => true do
+  let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
+  let(:event_count) { 1000 }
+  let(:events) { event_count.times.map { event }.to_a }
+  let(:config) {
+    {
+      "hosts" => get_host_port,
+      "index" => index
+    }
+  }
+  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+  before do
+    subject.register
+    allow(subject.client).to receive(:bulk_send).with(any_args).and_call_original
+    subject.multi_receive(events)
+  end
+
+  describe "batches that are too large for one" do
+    let(:event) { LogStash::Event.new("message" => "a " * (((target_bulk_bytes/2) / event_count)+1)) }
+
+    it "should send in two batches" do
+      expect(subject.client).to have_received(:bulk_send).twice do |payload|
+        expect(payload.size).to be <= target_bulk_bytes
+      end
+    end
+
+    describe "batches that fit in one" do
+      # Normally you'd want to generate a request that's just 1 byte below the limit, but it's
+      # impossible to know how many bytes an event will serialize as with bulk proto overhead
+      let(:event) { LogStash::Event.new("message" => "a") }
+
+      it "should send in one batch" do
+        expect(subject.client).to have_received(:bulk_send).once do |payload|
+          expect(payload.size).to be <= target_bulk_bytes
+        end
+      end
+    end
+  end
+end
+
+def curl_and_get_json_response(url, method: :get, retrieve_err_payload: false); require 'open3'
+  cmd = "curl -s -v --show-error #{curl_opts} -X #{method.to_s.upcase} -k #{url}"
+  begin
+    out, err, status = Open3.capture3(cmd)
+  rescue Errno::ENOENT
+    fail "curl not available, make sure curl binary is installed and available on $PATH"
+  end
+
+  if status.success?
+    http_status = err.match(/< HTTP\/1.1 (\d+)/)[1] || '0' # < HTTP/1.1 200 OK\r\n
+
+    if http_status.strip[0].to_i > 2
+      error = (LogStash::Json.load(out)['error']) rescue nil
+      if error
+        if retrieve_err_payload
+          return error
+        else
+          fail "#{cmd.inspect} received an error: #{http_status}\n\n#{error.inspect}"
+        end
+      else
+        warn out
+        fail "#{cmd.inspect} unexpected response: #{http_status}\n\n#{err}"
+      end
+    end
+
+    LogStash::Json.load(out)
+  else
+    warn out
+    fail "#{cmd.inspect} process failed: #{status}\n\n#{err}"
+  end
+end
+
+describe "indexing with sprintf resolution", :integration => true do
+  let(:message) { "Hello from #{__FILE__}" }
+  let(:event) { LogStash::Event.new("message" => message, "type" => type) }
+  let (:index) { "%{[index_name]}_dynamic" }
+  let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+  let(:event_count) { 1 }
+  let(:user) { "simpleuser" }
+  let(:password) { "abc123" }
+  let(:config) do
+    {
+      "hosts" => [ get_host_port ],
+      "user" => user,
+      "password" => password,
+      "index" => index
+    }
+  end
+  let(:events) { event_count.times.map { event }.to_a }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+  let(:es_url) { "http://#{get_host_port}" }
+  let(:index_url) { "#{es_url}/#{index}" }
+
+  let(:curl_opts) { nil }
+
+  let(:es_admin) { 'admin' } # default user added in ES -> 8.x requires auth credentials for /_refresh etc
+  let(:es_admin_pass) { 'elastic' }
+
+  let(:initial_events) { [] }
+
+  let(:do_register) { true }
+
+  before do
+    subject.register if do_register
+    subject.multi_receive(initial_events) if initial_events
+  end
+
+  after do
+    subject.do_close
+  end
+
+  let(:event) { LogStash::Event.new("message" => message, "type" => type, "index_name" => "test") }
+
+  it "should index successfully when field is resolved" do
+    expected_index_name = "test_dynamic"
+    subject.multi_receive(events)
+
+    # curl_and_get_json_response "#{es_url}/_refresh", method: :post
+
+    result = curl_and_get_json_response "#{es_url}/#{expected_index_name}"
+
+    expect(result[expected_index_name]).not_to be(nil)
+  end
+
+  context "when dynamic field doesn't resolve the index_name" do
+    let(:event) { LogStash::Event.new("message" => message, "type" => type) }
+    let(:dlq_writer) { double('DLQ writer') }
+    before { subject.instance_variable_set('@dlq_writer', dlq_writer) }
+
+    it "should doesn't create an index name with unresolved placeholders" do
+      expect(dlq_writer).to receive(:write).once.with(event, a_string_including("Badly formatted index, after interpolation still contains placeholder"))
+      subject.multi_receive(events)
+
+      escaped_index_name = CGI.escape("%{[index_name]}_dynamic")
+      result = curl_and_get_json_response "#{es_url}/#{escaped_index_name}", retrieve_err_payload: true
+      expect(result["root_cause"].first()["type"]).to eq("index_not_found_exception")
+    end
+  end
+end
+
+describe "indexing" do
+  let(:message) { "Hello from #{__FILE__}" }
+  let(:event) { LogStash::Event.new("message" => message, "type" => type) }
+  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+  let(:event_count) { 1 + rand(2) }
+  let(:config) { "not implemented" }
+  let(:events) { event_count.times.map { event }.to_a }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+  let(:es_url) { "http://#{get_host_port}" }
+  let(:index_url) { "#{es_url}/#{index}" }
+
+  let(:curl_opts) { nil }
+
+  let(:es_admin) { 'admin' } # default user added in ES -> 8.x requires auth credentials for /_refresh etc
+  let(:es_admin_pass) { 'elastic' }
+
+  let(:initial_events) { [] }
+
+  let(:do_register) { true }
+
+  before do
+    subject.register if do_register
+    subject.multi_receive(initial_events) if initial_events
+  end
+
+  after do
+    subject.do_close
+  end
+
+  shared_examples "an indexer" do |secure|
+    before(:each) do
+      host_unreachable_error_class = LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError
+      allow(host_unreachable_error_class).to receive(:new).with(any_args).and_wrap_original do |m, original, url|
+        if original.message.include?("PKIX path building failed")
+          $stderr.puts "Client not connecting due to PKIX path building failure; " +
+                       "shutting plugin down to prevent infinite retries"
+          subject.close # premature shutdown to prevent infinite retry
+        end
+        m.call(original, url)
+      end
+    end
+
+    it "ships events" do
+      subject.multi_receive(events)
+
+      curl_and_get_json_response "#{es_url}/_refresh", method: :post
+
+      result = curl_and_get_json_response "#{index_url}/_count?q=*"
+      cur_count = result["count"]
+      expect(cur_count).to eq(event_count)
+
+      result = curl_and_get_json_response "#{index_url}/_search?q=*&size=1000"
+      result["hits"]["hits"].each do |doc|
+        expect(doc["_source"]["message"]).to eq(message)
+
+        if ESHelper.es_version_satisfies?("< 8")
+          expect(doc["_type"]).to eq(type)
+        else
+          expect(doc).not_to include("_type")
+        end
+        expect(doc["_index"]).to eq(index)
+      end
+    end
+
+    it "sets the correct content-type header" do
+      expected_manticore_opts = {:headers => {"Content-Type" => "application/json"}, :body => anything}
+      if secure
+        expected_manticore_opts = {
+          :headers => {"Content-Type" => "application/json"},
+          :body => anything,
+          :auth => {
+            :user => user,
+            :password => password,
+            :eager => true
+          }}
+      end
+      expect(subject.client.pool.adapter.client).to receive(:send).
+        with(anything, anything, expected_manticore_opts).at_least(:once).
+        and_call_original
+      subject.multi_receive(events)
+    end
+  end
+
+  shared_examples "PKIX path failure" do
+    let(:do_register) { false }
+    let(:host_unreachable_error_class) { LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError }
+
+    before(:each) do
+      limit_execution
+    end
+
+    let(:limit_execution) do
+      Thread.new { sleep 5; subject.close }
+    end
+
+    it 'fails to establish TLS' do
+      allow(host_unreachable_error_class).to receive(:new).with(any_args).and_call_original.at_least(:once)
+
+      subject.register
+      limit_execution.join
+
+      sleep 1
+
+      expect(host_unreachable_error_class).to have_received(:new).at_least(:once) do |original, url|
+        expect(original.message).to include("PKIX path building failed")
+      end
+    end
+  end
+
+  describe "an indexer with custom index_type", :integration => true do
+    let(:config) {
+      {
+        "hosts" => get_host_port,
+        "index" => index
+      }
+    }
+    it_behaves_like("an indexer")
+  end
+
+  describe "an indexer with no type value set (default to doc)", :integration => true do
+    let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+    let(:config) {
+      {
+        "hosts" => get_host_port,
+        "index" => index
+      }
+    }
+    it_behaves_like("an indexer")
+  end
+
+  describe "a secured indexer", :secure_integration => true do
+    let(:user) { "simpleuser" }
+    let(:password) { "abc123" }
+    let(:cacert) { "spec/fixtures/test_certs/ca.crt" }
+    let(:es_url) { "https://#{get_host_port}" }
+    let(:config) do
+      {
+        "hosts" => [ get_host_port ],
+        "user" => user,
+        "password" => password,
+        "ssl_enabled" => true,
+        "ssl_certificate_authorities" => cacert,
+        "index" => index
+      }
+    end
+
+    let(:curl_opts) { "-u #{user}:#{password}" }
+
+    if ENV['ES_SSL_KEY_INVALID'] == 'true' # test_invalid.crt (configured in ES) has SAN: DNS:localhost
+      # javax.net.ssl.SSLPeerUnverifiedException: Host name 'elasticsearch' does not match the certificate subject ...
+
+      context "when no keystore nor ca cert set and verification is disabled" do
+        let(:config) do
+          super().tap { |config| config.delete('ssl_certificate_authorities') }.merge('ssl_verification_mode' => 'none')
+        end
+
+        include_examples("an indexer", true)
+      end
+
+      context "when keystore is set and verification is disabled" do
+        let(:config) do
+          super().merge(
+            'ssl_verification_mode' => 'none',
+            'ssl_keystore_path' => 'spec/fixtures/test_certs/test.p12',
+            'ssl_keystore_password' => '1234567890'
+          )
+        end
+
+        include_examples("an indexer", true)
+      end
+
+      context "when keystore has self-signed cert and verification is disabled" do
+        let(:config) do
+          super().tap { |config| config.delete('ssl_certificate_authorities') }.merge(
+            'ssl_verification_mode' => 'none',
+            'ssl_keystore_path' => 'spec/fixtures/test_certs/test_self_signed.p12',
+            'ssl_keystore_password' => '1234567890'
+          )
+        end
+
+        include_examples("an indexer", true)
+      end
+
+    else
+
+      let(:curl_opts) { "#{super()} --tlsv1.2 --tls-max 1.3 -u #{es_admin}:#{es_admin_pass}" } # due ES 8.x we need user/password
+
+      it_behaves_like("an indexer", true)
+
+      describe "with a password requiring escaping" do
+        let(:user) { "f@ncyuser" }
+        let(:password) { "ab%12#" }
+
+        include_examples("an indexer", true)
+      end
+
+      describe "with a user/password requiring escaping in the URL" do
+        let(:config) do
+          {
+            "hosts" => ["https://#{CGI.escape(user)}:#{CGI.escape(password)}@elasticsearch:9200"],
+            "ssl_enabled" => true,
+            "ssl_certificate_authorities" => "spec/fixtures/test_certs/test.crt",
+            "index" => index
+          }
+        end
+
+        include_examples("an indexer", true)
+      end
+
+      context "without providing `ssl_certificate_authorities`" do
+        let(:config) do
+          super().tap do |c|
+            c.delete("ssl_certificate_authorities")
+          end
+        end
+
+        it_behaves_like("PKIX path failure")
+      end
+
+      if Gem::Version.new(LOGSTASH_VERSION) >= Gem::Version.new("8.3.0")
+        context "with `ca_trusted_fingerprint` instead of `ssl_certificate_authorities`" do
+          let(:config) do
+            super().tap do |c|
+              c.delete("ssl_certificate_authorities")
+              c.update("ca_trusted_fingerprint" => ca_trusted_fingerprint)
+            end
+          end
+          let(:ca_trusted_fingerprint) { File.read("spec/fixtures/test_certs/test.der.sha256").chomp }
+
+
+          it_behaves_like("an indexer", true)
+
+          context 'with an invalid `ca_trusted_fingerprint`' do
+            let(:ca_trusted_fingerprint) { super().reverse }
+
+            it_behaves_like("PKIX path failure")
+          end
+        end
+      end
+
+      context 'with enforced TLSv1.3 protocol' do
+        let(:config) { super().merge 'ssl_supported_protocols' => [ 'TLSv1.3' ] }
+
+        it_behaves_like("an indexer", true)
+      end
+
+      context 'with enforced TLSv1.2 protocol (while ES only enabled TLSv1.3)' do
+        let(:config) { super().merge 'ssl_supported_protocols' => [ 'TLSv1.2' ] }
+
+        let(:initial_events) { nil }
+
+        it "does not ship events" do
+          curl_and_get_json_response index_url, method: :put # make sure index exists
+          Thread.start { subject.multi_receive(events) } # we'll be stuck in a retry loop
+          sleep 2.5
+
+          curl_and_get_json_response "#{es_url}/_refresh", method: :post
+
+          result = curl_and_get_json_response "#{index_url}/_count?q=*"
+          cur_count = result["count"]
+          expect(cur_count).to eq(0) # ES output keeps re-trying but ends up with a
+                                     # [Manticore::ClientProtocolException] Received fatal alert: protocol_version
+        end
+
+      end if ENV['ES_SSL_SUPPORTED_PROTOCOLS'] == 'TLSv1.3'
+
+    end
+
+  end
+end
@@ -0,0 +1,98 @@
+require_relative "../../../spec/es_spec_helper"
+require "logstash/outputs/elasticsearch"
+
+describe "Versioned indexing", :integration => true do
+  require "logstash/outputs/elasticsearch"
+
+  let(:es) { get_client }
+
+  before :each do
+    # Delete all templates first.
+    # Clean ES of data before we start.
+    es.indices.delete_template(:name => "*")
+    # This can fail if there are no indexes, ignore failure.
+    es.indices.delete(:index => "*") rescue nil
+    es.indices.refresh
+  end
+
+  context "when index only" do
+    subject { LogStash::Outputs::ElasticSearch.new(settings) }
+
+    before do
+      subject.register
+    end
+
+    describe "unversioned output" do
+      let(:settings) do
+        {
+          "manage_template" => true,
+          "index" => "logstash-index",
+          "template_overwrite" => true,
+          "hosts" => get_host_port(),
+          "action" => "index",
+          "document_id" => "%{my_id}"
+        }
+      end
+
+      it "should default to ES version" do
+        subject.multi_receive([LogStash::Event.new("my_id" => "123", "message" => "foo")])
+        r = es.get(:index => 'logstash-index', :type => doc_type, :id => "123", :refresh => true)
+        expect(r["_version"]).to eq(1)
+        expect(r["_source"]["message"]).to eq('foo')
+        subject.multi_receive([LogStash::Event.new("my_id" => "123", "message" => "foobar")])
+        r2 = es.get(:index => 'logstash-index', :type => doc_type, :id => "123", :refresh => true)
+        expect(r2["_version"]).to eq(2)
+        expect(r2["_source"]["message"]).to eq('foobar')
+      end
+    end
+
+    describe "versioned output" do
+      let(:settings) do
+        {
+          "manage_template" => true,
+          "index" => "logstash-index",
+          "template_overwrite" => true,
+          "hosts" => get_host_port(),
+          "action" => "index",
+          "document_id" => "%{my_id}",
+          "version" => "%{my_version}",
+          "version_type" => "external",
+        }
+      end
+
+      it "should respect the external version" do
+        id = "ev1"
+        subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
+        r = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
+        expect(r["_version"]).to eq(99)
+        expect(r["_source"]["message"]).to eq('foo')
+      end
+
+      it "should ignore non-monotonic external version updates" do
+        id = "ev2"
+        subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
+        r = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
+        expect(r["_version"]).to eq(99)
+        expect(r["_source"]["message"]).to eq('foo')
+
+        subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "98", "message" => "foo")])
+        r2 = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
+        expect(r2["_version"]).to eq(99)
+        expect(r2["_source"]["message"]).to eq('foo')
+      end
+
+      it "should commit monotonic external version updates" do
+        id = "ev3"
+        subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "99", "message" => "foo")])
+        r = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
+        expect(r["_version"]).to eq(99)
+        expect(r["_source"]["message"]).to eq('foo')
+
+        subject.multi_receive([LogStash::Event.new("my_id" => id, "my_version" => "100", "message" => "foo")])
+        r2 = es.get(:index => 'logstash-index', :type => doc_type, :id => id, :refresh => true)
+        expect(r2["_version"]).to eq(100)
+        expect(r2["_source"]["message"]).to eq('foo')
+      end
+    end
+  end
+end
@@ -0,0 +1,75 @@
+require_relative "../../../spec/es_spec_helper"
+
+describe "Ingest pipeline execution behavior", :integration => true do
+  subject! do
+    require "logstash/outputs/elasticsearch"
+    settings = {
+      "hosts" => "#{get_host_port()}",
+      "pipeline" => "apache-logs",
+      "data_stream" => 'false',
+      "ecs_compatibility" => "disabled", # specs are tightly tied to non-ECS defaults
+    }
+    next LogStash::Outputs::ElasticSearch.new(settings)
+  end
+
+  let(:http_client) { Manticore::Client.new }
+  let(:ingest_url) { "http://#{get_host_port()}/_ingest/pipeline/apache-logs" }
+  let(:apache_logs_pipeline) { '
+    {
+      "description" : "Pipeline to parse Apache logs",
+      "processors" : [
+        {
+          "grok": {
+            "field": "message",
+            "patterns": ["%{COMBINEDAPACHELOG}"]
+          }
+        }
+      ]
+    }'
+  }
+
+  before :each do
+    # Delete all templates first.
+    require "elasticsearch"
+
+    # Clean ES of data before we start.
+    @es = get_client
+    @es.indices.delete_template(:name => "*")
+
+    # This can fail if there are no indexes, ignore failure.
+    @es.indices.delete(:index => "*") rescue nil
+
+    # delete existing ingest pipeline
+    http_client.delete(ingest_url).call
+
+    # register pipeline
+    http_client.put(ingest_url, :body => apache_logs_pipeline, :headers => {"Content-Type" => "application/json" }).call
+
+    #TODO: Use esclient
+    #@es.ingest.put_pipeline :id => 'apache_pipeline', :body => pipeline_defintion
+
+    subject.register
+    subject.multi_receive([LogStash::Event.new("message" => '183.60.215.50 - - [01/Jun/2015:18:00:00 +0000] "GET /scripts/netcat-webserver HTTP/1.1" 200 182 "-" "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)"')])
+    @es.indices.refresh
+
+    #Wait or fail until everything's indexed.
+    Stud::try(10.times) do
+      r = @es.search(index: 'logstash-*')
+      expect(r).to have_hits(1)
+      sleep(0.1)
+    end
+  end
+
+  it "indexes using the proper pipeline" do
+    results = @es.search(:index => 'logstash-*', :q => "message:\"netcat\"")
+    expect(results).to have_hits(1)
+    expect(results["hits"]["hits"][0]["_source"]["response"]).to eq("200")
+    expect(results["hits"]["hits"][0]["_source"]["bytes"]).to eq("182")
+    expect(results["hits"]["hits"][0]["_source"]["verb"]).to eq("GET")
+    expect(results["hits"]["hits"][0]["_source"]["request"]).to eq("/scripts/netcat-webserver")
+    expect(results["hits"]["hits"][0]["_source"]["auth"]).to eq("-")
+    expect(results["hits"]["hits"][0]["_source"]["ident"]).to eq("-")
+    expect(results["hits"]["hits"][0]["_source"]["clientip"]).to eq("183.60.215.50")
+    expect(results["hits"]["hits"][0]["_source"]["junkfieldaaaa"]).to eq(nil)
+  end
+end
@@ -0,0 +1,66 @@
+require_relative "../../../spec/es_spec_helper"
+
+describe "metrics", :integration => true do
+  subject! do
+    require "logstash/outputs/elasticsearch"
+    settings = {
+      "manage_template" => false,
+      "hosts" => "#{get_host_port()}"
+    }
+    plugin = LogStash::Outputs::ElasticSearch.new(settings)
+  end
+
+  let(:metric) { subject.metric }
+  let(:bulk_request_metrics) { subject.instance_variable_get(:@bulk_request_metrics) }
+  let(:document_level_metrics) { subject.instance_variable_get(:@document_level_metrics) }
+
+  before :each do
+    require "elasticsearch"
+
+    # Clean ES of data before we start.
+    @es = get_client
+    clean(@es)
+    subject.register
+  end
+
+  context "after a succesful bulk insert" do
+    let(:bulk) { [
+      LogStash::Event.new("message" => "sample message here"),
+      LogStash::Event.new("somemessage" => { "message" => "sample nested message here" }),
+      LogStash::Event.new("somevalue" => 100),
+      LogStash::Event.new("somevalue" => 10),
+      LogStash::Event.new("somevalue" => 1),
+      LogStash::Event.new("country" => "us"),
+      LogStash::Event.new("country" => "at"),
+      LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
+    ]}
+
+    it "increases successful bulk request metric" do
+      expect(bulk_request_metrics).to receive(:increment).with(:successes).once
+      subject.multi_receive(bulk)
+    end
+
+    it "increases number of successful inserted documents" do
+      expect(document_level_metrics).to receive(:increment).with(:successes, bulk.size).once
+      subject.multi_receive(bulk)
+    end
+  end
+
+  context "after a bulk insert that generates errors" do
+    let(:bulk) { [
+      LogStash::Event.new("message" => "sample message here"),
+      LogStash::Event.new("message" => { "message" => "sample nested message here" }),
+    ]}
+    it "increases bulk request with error metric" do
+      expect(bulk_request_metrics).to receive(:increment).with(:with_errors).once
+      expect(bulk_request_metrics).to_not receive(:increment).with(:successes)
+      subject.multi_receive(bulk)
+    end
+
+    it "increases number of successful and non retryable documents" do
+      expect(document_level_metrics).to receive(:increment).with(:dlq_routed).once
+      expect(document_level_metrics).to receive(:increment).with(:successes).once
+      subject.multi_receive(bulk)
+    end
+  end
+end