logstash-output-elasticsearch 10.8.0-java → 10.8.6-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +20 -0
- data/README.md +1 -1
- data/docs/index.asciidoc +157 -99
- data/lib/logstash/outputs/elasticsearch.rb +3 -1
- data/lib/logstash/outputs/elasticsearch/http_client.rb +50 -14
- data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +1 -1
- data/lib/logstash/plugin_mixins/elasticsearch/common.rb +10 -2
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/integration/outputs/ilm_spec.rb +16 -16
- data/spec/integration/outputs/retry_spec.rb +14 -2
- data/spec/unit/http_client_builder_spec.rb +9 -9
- data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +3 -3
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +57 -38
- data/spec/unit/outputs/elasticsearch_proxy_spec.rb +3 -3
- data/spec/unit/outputs/elasticsearch_spec.rb +107 -16
- data/spec/unit/outputs/error_whitelist_spec.rb +1 -1
- metadata +2 -2
data/spec/unit/outputs/elasticsearch_proxy_spec.rb
CHANGED
@@ -24,7 +24,7 @@ describe "Proxy option" do
 
   context "when specified as a URI" do
     shared_examples("hash conversion") do |hash|
-      let(:settings) { super.merge("proxy" => proxy)}
+      let(:settings) { super().merge("proxy" => proxy)}
 
       it "should set the proxy to the correct hash value" do
        expect(::Manticore::Client).to have_received(:new) do |options|
@@ -71,7 +71,7 @@ describe "Proxy option" do
   end
 
   context "when specified as ''" do
-    let(:settings) { super.merge("proxy" => "${A_MISSING_ENV_VARIABLE:}")}
+    let(:settings) { super().merge("proxy" => "${A_MISSING_ENV_VARIABLE:}")}
 
     it "should not send the proxy option to manticore" do
       expect { subject.register }.not_to raise_error
@@ -85,7 +85,7 @@ describe "Proxy option" do
   end
 
   context "when specified as invalid uri" do
-    let(:settings) { super.merge("proxy" => ":")}
+    let(:settings) { super().merge("proxy" => ":")}
 
     it "should fail" do
       # SafeURI isn't doing the proper exception wrapping for us, we can not simply :
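Nearly all of the spec changes in this release are the same mechanical edit: every `let` override that called bare `super` now calls `super()`. RSpec defines `let` helpers with `define_method`, and Ruby does not allow implicit argument passing of `super` from such methods ("implicit argument passing of super from method defined by define_method() is not supported"), so the parenthesized form is the one that keeps these overrides working on newer JRuby/RSpec combinations. A minimal standalone illustration of the pattern (a hypothetical spec, not taken from the plugin):

```ruby
# Hypothetical spec, not part of the plugin: shows why `super()` (with parentheses)
# is required when overriding a `let` from a nested context. `let` helpers are
# defined with define_method, where bare `super` raises a RuntimeError at call time.
require "rspec/autorun"

RSpec.describe "overriding a let from a nested context" do
  let(:settings) { { "hosts" => "node01" } }

  context "with a proxy configured" do
    # `super()` re-invokes the outer let(:settings) definition; bare `super` would raise.
    let(:settings) { super().merge("proxy" => "http://localhost:3128") }

    it "merges the override into the inherited settings" do
      expect(settings).to include("hosts" => "node01", "proxy" => "http://localhost:3128")
    end
  end
end
```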

data/spec/unit/outputs/elasticsearch_spec.rb
CHANGED
@@ -4,7 +4,7 @@ require "flores/random"
 require "logstash/outputs/elasticsearch"
 
 describe LogStash::Outputs::ElasticSearch do
-  subject { described_class.new(options) }
+  subject(:elasticsearch_output_instance) { described_class.new(options) }
   let(:options) { {} }
   let(:maximum_seen_major_version) { [1,2,5,6,7,8].sample }
 
@@ -46,7 +46,7 @@ describe LogStash::Outputs::ElasticSearch do
 
   describe "getting a document type" do
     context "if document_type isn't set" do
-      let(:options) { super.merge("document_type" => nil)}
+      let(:options) { super().merge("document_type" => nil)}
       context "for 7.x elasticsearch clusters" do
         let(:maximum_seen_major_version) { 7 }
         it "should return '_doc'" do
@@ -70,7 +70,7 @@ describe LogStash::Outputs::ElasticSearch do
         end
 
       context "with 'document type set'" do
-        let(:options) { super.merge("document_type" => "bar")}
+        let(:options) { super().merge("document_type" => "bar")}
         it "should get the event type from the 'document_type' setting" do
           expect(subject.send(:get_event_type, LogStash::Event.new())).to eql("bar")
         end
@@ -80,14 +80,14 @@ describe LogStash::Outputs::ElasticSearch do
   describe "building an event action tuple" do
     context "for 7.x elasticsearch clusters" do
       let(:maximum_seen_major_version) { 7 }
-      it "should include '_type'" do
+      it "should not include '_type' when 'document_type' is not explicitly defined" do
         action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
         action_params = action_tuple[1]
-        expect(action_params).to include(:_type => "_doc")
+        expect(action_params).not_to include(:_type => "_doc")
       end
 
       context "with 'document type set'" do
-        let(:options) { super.merge("document_type" => "bar")}
+        let(:options) { super().merge("document_type" => "bar")}
         it "should get the event type from the 'document_type' setting" do
           action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
           action_params = action_tuple[1]
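The hunks above pin down a behavior change rather than a test-only cleanup: against a 7.x cluster the output no longer puts `_type => "_doc"` into the bulk action parameters unless `document_type` is configured explicitly. A simplified sketch of that decision (illustrative only, not the plugin's actual `event_action_tuple`):

```ruby
# Illustrative sketch, not the plugin's implementation: only an explicitly
# configured document_type produces a :_type entry in the bulk action params
# sent to a 7.x cluster.
def bulk_action_params(index:, es_major_version:, document_id: nil, document_type: nil)
  params = { :_id => document_id, :_index => index }
  # Before this release, 7.x clusters always received :_type => "_doc"; now the
  # key is emitted only when document_type has been set by the user.
  params[:_type] = document_type if document_type && es_major_version < 8
  params
end

p bulk_action_params(index: "logs-2021.04", es_major_version: 7)
# => {:_id=>nil, :_index=>"logs-2021.04"}
p bulk_action_params(index: "logs-2021.04", es_major_version: 7, document_type: "bar")
# => {:_id=>nil, :_index=>"logs-2021.04", :_type=>"bar"}
```

The remaining hunks for this file continue below.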
@@ -105,7 +105,7 @@ describe LogStash::Outputs::ElasticSearch do
       end
 
       context "with 'document type set'" do
-        let(:options) { super.merge("document_type" => "bar")}
+        let(:options) { super().merge("document_type" => "bar")}
         it "should not include '_type'" do
           action_tuple = subject.send(:event_action_tuple, LogStash::Event.new("type" => "foo"))
           action_params = action_tuple[1]
@@ -127,7 +127,7 @@ describe LogStash::Outputs::ElasticSearch do
 
     context "as part of a URL" do
       let(:options) {
-        super.merge("hosts" => ["http://#{user}:#{password.value}@localhost:9200"])
+        super().merge("hosts" => ["http://#{user}:#{password.value}@localhost:9200"])
       }
 
       include_examples("an authenticated config")
@@ -135,7 +135,7 @@ describe LogStash::Outputs::ElasticSearch do
 
     context "as a hash option" do
       let(:options) {
-        super.merge!(
+        super().merge!(
           "user" => user,
           "password" => password
         )
@@ -175,7 +175,7 @@ describe LogStash::Outputs::ElasticSearch do
 
     context "with extra slashes" do
       let(:path) { "/slashed-path/ "}
-      let(:options) { super.merge("path" => "/some-path/") }
+      let(:options) { super().merge("path" => "/some-path/") }
 
       it "should properly set the path on the HTTP client without adding slashes" do
         expect(manticore_url.path).to eql(options["path"])
@@ -234,13 +234,13 @@ describe LogStash::Outputs::ElasticSearch do
     end
 
     describe "without a port specified" do
-      let(:options) { super.merge('hosts' => 'localhost') }
+      let(:options) { super().merge('hosts' => 'localhost') }
       it "should properly set the default port (9200) on the HTTP client" do
         expect(manticore_url.port).to eql(9200)
       end
     end
     describe "with a port other than 9200 specified" do
-      let(:options) { super.merge('hosts' => 'localhost:9202') }
+      let(:options) { super().merge('hosts' => 'localhost:9202') }
       it "should properly set the specified port on the HTTP client" do
         expect(manticore_url.port).to eql(9202)
       end
@@ -265,12 +265,14 @@ describe LogStash::Outputs::ElasticSearch do
       let(:event) { ::LogStash::Event.new("foo" => "bar") }
       let(:error) do
         ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
-          429, double("url").as_null_object,
+          429, double("url").as_null_object, request_body, double("response body")
         )
       end
       let(:logger) { double("logger").as_null_object }
       let(:response) { { :errors => [], :items => [] } }
 
+      let(:request_body) { double(:request_body, :bytesize => 1023) }
+
       before(:each) do
 
         i = 0
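The spec now builds the retryable error around a `request_body` double that responds to `bytesize`, which suggests the error raised for a bad response exposes the failed request's body so its size can be reported when the failure is logged. A rough sketch of an error object with that shape (the class and method names here are assumptions for illustration, not the plugin's `pool.rb`):

```ruby
# Assumed shape for illustration, not the plugin's pool.rb: an HTTP error that
# keeps the failed request's body around so callers can log how large it was.
class BulkRequestError < StandardError
  attr_reader :response_code, :url, :request_body, :response_body

  def initialize(response_code, url, request_body, response_body)
    @response_code = response_code
    @url = url
    @request_body = request_body
    @response_body = response_body
    super("Got response code '#{response_code}' contacting Elasticsearch at URL '#{url}'")
  end

  # e.g. a caller handling a 413 can report how many bytes were rejected
  def request_size
    request_body.bytesize
  end
end

err = BulkRequestError.new(429, "http://localhost:9200/_bulk", '{"index":{}}' * 64, "")
puts "#{err.message} (request was #{err.request_size} bytes)"
```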
@@ -296,6 +298,95 @@ describe LogStash::Outputs::ElasticSearch do
         expect(subject.logger).to have_received(:debug).with(/Encountered a retryable error/i, anything)
       end
     end
+
+    context "unexpected bulk response" do
+      let(:options) do
+        { "hosts" => "127.0.0.1:9999", "index" => "%{foo}", "manage_template" => false }
+      end
+
+      let(:events) { [ ::LogStash::Event.new("foo" => "bar1"), ::LogStash::Event.new("foo" => "bar2") ] }
+
+      let(:bulk_response) do
+        # shouldn't really happen but we've seen this happen - here ES returns more items than were sent
+        { "took"=>1, "ingest_took"=>9, "errors"=>true,
+          "items"=>[{"index"=>{"_index"=>"bar1", "_type"=>"_doc", "_id"=>nil, "status"=>500,
+                      "error"=>{"type" => "illegal_state_exception",
+                        "reason" => "pipeline with id [test-ingest] could not be loaded, caused by [ElasticsearchParseException[Error updating pipeline with id [test-ingest]]; nested: ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]; nested: IllegalArgumentException[no enrich index exists for policy with name [test-metadata1]];; ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]; nested: IllegalArgumentException[no enrich index exists for policy with name [test-metadata1]];; java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]]"
+                      }
+                     }
+                    },
+                    # NOTE: this is an artificial success (usually everything fails with a 500) but even if some doc where
+                    # to succeed due the unexpected reponse items we can not clearly identify which actions to retry ...
+                    {"index"=>{"_index"=>"bar2", "_type"=>"_doc", "_id"=>nil, "status"=>201}},
+                    {"index"=>{"_index"=>"bar2", "_type"=>"_doc", "_id"=>nil, "status"=>500,
+                      "error"=>{"type" => "illegal_state_exception",
+                        "reason" => "pipeline with id [test-ingest] could not be loaded, caused by [ElasticsearchParseException[Error updating pipeline with id [test-ingest]]; nested: ElasticsearchException[java.lang.IllegalArgumentException: no enrich index exists for policy with name [test-metadata1]];"
+                      }
+                     }
+                    }]
+        }
+      end
+
+      before(:each) do
+        allow(subject.client).to receive(:bulk_send).with(instance_of(StringIO), instance_of(Array)) do |stream, actions|
+          expect( stream.string ).to include '"foo":"bar1"'
+          expect( stream.string ).to include '"foo":"bar2"'
+        end.and_return(bulk_response, {"errors"=>false}) # let's make it go away (second call) to not retry indefinitely
+      end
+
+      it "should retry submit" do
+        allow(subject.logger).to receive(:error).with(/Encountered an unexpected error/i, anything)
+        allow(subject.client).to receive(:bulk).and_call_original # track count
+
+        subject.multi_receive(events)
+
+        expect(subject.client).to have_received(:bulk).twice
+      end
+
+      it "should log specific error message" do
+        expect(subject.logger).to receive(:error).with(/Encountered an unexpected error/i,
+          hash_including(:error_message => 'Sent 2 documents but Elasticsearch returned 3 responses (likely a bug with _bulk endpoint)'))
+
+        subject.multi_receive(events)
+      end
+    end
+  end
+
+  context '413 errors' do
+    let(:payload_size) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES + 1024 }
+    let(:event) { ::LogStash::Event.new("message" => ("a" * payload_size ) ) }
+
+    let(:logger_stub) { double("logger").as_null_object }
+
+    before(:each) do
+      allow(elasticsearch_output_instance.client).to receive(:logger).and_return(logger_stub)
+
+      allow(elasticsearch_output_instance.client).to receive(:bulk).and_call_original
+
+      max_bytes = payload_size * 3 / 4 # ensure a failure first attempt
+      allow(elasticsearch_output_instance.client.pool).to receive(:post) do |path, params, body|
+        if body.length > max_bytes
+          max_bytes *= 2 # ensure a successful retry
+          double("Response", :code => 413, :body => "")
+        else
+          double("Response", :code => 200, :body => '{"errors":false,"items":[{"index":{"status":200,"result":"created"}}]}')
+        end
+      end
+    end
+
+    it 'retries the 413 until it goes away' do
+      elasticsearch_output_instance.multi_receive([event])
+
+      expect(elasticsearch_output_instance.client).to have_received(:bulk).twice
+    end
+
+    it 'logs about payload quantity and size' do
+      elasticsearch_output_instance.multi_receive([event])
+
+      expect(logger_stub).to have_received(:warn)
+        .with(a_string_matching(/413 Payload Too Large/),
+              hash_including(:action_count => 1, :content_length => a_value > 20_000_000))
+    end
   end
 
   context "with timeout set" do
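The two contexts added above cover this release's resilience fixes: a bulk response whose item count does not match the number of actions sent is logged ("Sent 2 documents but Elasticsearch returned 3 responses ...") and the batch is retried, and a 413 Payload Too Large response is logged with the action count and content length and then retried. The 413 spec leans on `LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES`, the byte budget used when batching bulk requests; a simplified sketch of that kind of byte-budgeted chunking (illustrative only, not the plugin's `http_client.rb`):

```ruby
# Illustrative sketch, not the plugin's http_client.rb: flush serialized bulk
# actions in chunks bounded by a byte budget, the idea the 413 spec assumes via
# LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES.
TARGET_BULK_BYTES = 20 * 1024 * 1024 # assumed budget of roughly 20 MiB per request

def each_bulk_chunk(serialized_actions, target_bytes = TARGET_BULK_BYTES)
  chunk, chunk_bytes = [], 0
  serialized_actions.each do |line|
    # Flush the current chunk before it would exceed the byte budget.
    if chunk_bytes + line.bytesize > target_bytes && !chunk.empty?
      yield chunk.join("\n") + "\n"
      chunk, chunk_bytes = [], 0
    end
    chunk << line
    chunk_bytes += line.bytesize
  end
  yield chunk.join("\n") + "\n" unless chunk.empty?
end

# A single action larger than the budget still goes out on its own; the server
# may answer 413, which the output logs (action count, content length) and retries.
actions = ['{"index":{}}', '{"message":"' + "a" * 100 + '"}']
each_bulk_chunk(actions, 64) { |body| puts "POST _bulk with #{body.bytesize} bytes" }
```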
@@ -410,7 +501,7 @@ describe LogStash::Outputs::ElasticSearch do
     let(:options) { { 'retry_on_conflict' => num_retries } }
 
     context "with a regular index" do
-      let(:options) { super.merge("action" => "index") }
+      let(:options) { super().merge("action" => "index") }
 
       it "should not set the retry_on_conflict parameter when creating an event_action_tuple" do
         allow(subject.client).to receive(:maximum_seen_major_version).and_return(maximum_seen_major_version)
@@ -420,7 +511,7 @@ describe LogStash::Outputs::ElasticSearch do
     end
 
     context "using a plain update" do
-      let(:options) { super.merge("action" => "update", "retry_on_conflict" => num_retries, "document_id" => 1) }
+      let(:options) { super().merge("action" => "update", "retry_on_conflict" => num_retries, "document_id" => 1) }
 
       it "should set the retry_on_conflict parameter when creating an event_action_tuple" do
         action, params, event_data = subject.event_action_tuple(event)
@@ -429,7 +520,7 @@ describe LogStash::Outputs::ElasticSearch do
     end
 
     context "with a sprintf action that resolves to update" do
-      let(:options) { super.merge("action" => "%{myactionfield}", "retry_on_conflict" => num_retries, "document_id" => 1) }
+      let(:options) { super().merge("action" => "%{myactionfield}", "retry_on_conflict" => num_retries, "document_id" => 1) }
 
       it "should set the retry_on_conflict parameter when creating an event_action_tuple" do
         action, params, event_data = subject.event_action_tuple(event)

data/spec/unit/outputs/error_whitelist_spec.rb
CHANGED
@@ -44,7 +44,7 @@ describe "whitelisting error types in expected behavior" do
   end
 
   describe "when failure logging is disabled for docuemnt exists error" do
-    let(:settings) { super.merge("failure_type_logging_whitelist" => ["document_already_exists_exception"]) }
+    let(:settings) { super().merge("failure_type_logging_whitelist" => ["document_already_exists_exception"]) }
 
     it "should log a failure on the action" do
       expect(subject.logger).not_to have_received(:warn).with("Failed action.", anything)

metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 10.8.0
+  version: 10.8.6
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-04-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement