logstash-output-elasticsearch 11.0.0-java → 11.0.1-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/docs/index.asciidoc +1 -1
- data/lib/logstash/outputs/elasticsearch.rb +15 -1
- data/lib/logstash/outputs/elasticsearch/data_stream_support.rb +1 -1
- data/lib/logstash/plugin_mixins/elasticsearch/common.rb +2 -1
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/unit/outputs/elasticsearch_spec.rb +53 -10
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: beee28e62f0872a041adc3c0f2c8546dbca09ced5c8a8e261cf1c9d07a0e64fe
+  data.tar.gz: f3f88dbbdf310557151a2f819ea566413d9f798be38d4717261c9dc544db97f1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b43c800a66632c08a8c9c6d35367de3a127ebbf0abdfb0757cea7fb22501f4dc97e395e4b1ae2d57065c2c913a58e7f62dfa9a5cc4761c2f5c449d1f54448a4d
+  data.tar.gz: ff012fb8f3133de9f6002a35d733ea178828970f530c5c8f36239fdda605756f6d76b8a550690e2ac52aa64c551b73889453443ddc17f945a33b1f02be2abd0b
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,7 @@
+## 11.0.1
+- Fix: DLQ regression shipped in 11.0.0 [#1012](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1012)
+- [DOC] Fixed broken link in list item [#1011](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1011)
+
 ## 11.0.0
 - Feat: Data stream support [#988](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/988)
 - Refactor: reviewed logging format + restored ES (initial) setup error logging
data/docs/index.asciidoc
CHANGED
@@ -71,7 +71,7 @@ as logs, events, and metrics) and non-time series data in Elasticsearch.
 The data stream options are recommended for indexing time series datasets (such
 as logs, metrics, and events) into {es}:
 
-* <<plugins-{type}s-{plugin}-data_stream>>
+* <<plugins-{type}s-{plugin}-data_stream>>
 * <<plugins-{type}s-{plugin}-data_stream_auto_routing>>
 * <<plugins-{type}s-{plugin}-data_stream_dataset>>
 * <<plugins-{type}s-{plugin}-data_stream_namespace>>
data/lib/logstash/outputs/elasticsearch.rb
CHANGED
@@ -392,7 +392,21 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
     params[:version] = event.sprintf(@version) if @version
     params[:version_type] = event.sprintf(@version_type) if @version_type
 
-
+    EventActionTuple.new(action, params, event)
+  end
+
+  class EventActionTuple < Array # TODO: acting as an array for compatibility
+
+    def initialize(action, params, event, event_data = nil)
+      super(3)
+      self[0] = action
+      self[1] = params
+      self[2] = event_data || event.to_hash
+      @event = event
+    end
+
+    attr_reader :event
+
   end
 
   # @return Hash (initial) parameters for given event
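For context, a minimal standalone sketch of how a tuple like the one added above behaves: because `EventActionTuple` subclasses `Array`, code that indexes or destructures it as `[action, params, source]` keeps working, while the original event object stays reachable via `tuple.event` for the DLQ path. `FakeEvent` is a hypothetical stand-in for `LogStash::Event`, used only to keep the example self-contained.

```ruby
# Stand-in for LogStash::Event, for illustration only.
FakeEvent = Struct.new(:data) do
  def to_hash
    data
  end
end

# Same shape as the EventActionTuple introduced in this release (see the diff above).
class EventActionTuple < Array # acts as an array for compatibility
  def initialize(action, params, event, event_data = nil)
    super(3)
    self[0] = action
    self[1] = params
    self[2] = event_data || event.to_hash
    @event = event
  end

  attr_reader :event
end

event = FakeEvent.new({ "foo" => "bar" })
tuple = EventActionTuple.new("index", { :_id => "bar" }, event)

action, params, source = tuple       # old-style destructuring still works
puts source.inspect                  # => {"foo"=>"bar"}  (the serializable hash)
puts tuple.event.equal?(event)       # => true (original event kept for the DLQ)
```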
data/lib/logstash/outputs/elasticsearch/data_stream_support.rb
CHANGED
@@ -146,7 +146,7 @@ module LogStash module Outputs class ElasticSearch
     def data_stream_event_action_tuple(event)
       event_data = event.to_hash
       data_stream_event_sync(event_data) if data_stream_sync_fields
-
+      EventActionTuple.new('create', common_event_params(event), event, event_data)
     end
 
     DATA_STREAM_SYNC_FIELDS = [ 'type', 'dataset', 'namespace' ].freeze
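A hedged illustration of how this differs from the default path: here the third tuple element is the pre-built `event_data` hash (after the data_stream field sync), not a fresh `event.to_hash`. The `data_stream` fields merged in below are an assumption shown only to make the shape concrete; the sketch reuses `FakeEvent` and `EventActionTuple` from the example above.

```ruby
# Continues the sketch above (reuses FakeEvent and EventActionTuple).
event = FakeEvent.new({ "message" => "hello" })

# Stand-in for the hash after data_stream_event_sync has run; the exact fields
# it injects are assumed here, purely for illustration.
event_data = event.to_hash.merge(
  "data_stream" => { "type" => "logs", "dataset" => "generic", "namespace" => "default" }
)

tuple = EventActionTuple.new("create", {}, event, event_data)
puts tuple[0]                      # => "create" (the data stream path hard-codes the create action)
puts tuple[2].equal?(event_data)   # => true: the pre-synced hash is what gets serialized
puts tuple.event.equal?(event)     # => true: the original event still rides along for the DLQ
```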
data/lib/logstash/plugin_mixins/elasticsearch/common.rb
CHANGED
@@ -200,8 +200,9 @@ module LogStash; module PluginMixins; module ElasticSearch
     def handle_dlq_status(message, action, status, response)
       # To support bwc, we check if DLQ exists. otherwise we log and drop event (previous behavior)
       if @dlq_writer
+        event, action = action.event, [action[0], action[1], action[2]]
         # TODO: Change this to send a map with { :status => status, :action => action } in the future
-        @dlq_writer.write(
+        @dlq_writer.write(event, "#{message} status: #{status}, action: #{action}, response: #{response}")
       else
         if dig_value(response, 'index', 'error', 'type') == 'invalid_index_name_exception'
           level = :error
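A simplified sketch of the branch that changed, with a hypothetical stub writer in place of Logstash's real DLQ writer (the stub's name and the `write_to_dlq` helper are not part of the plugin). It shows that the object written to the queue is the original event carried by the tuple, while the flattened `[action[0], action[1], action[2]]` array only feeds the human-readable reason string. It reuses `FakeEvent` and `EventActionTuple` from the first sketch above.

```ruby
# Hypothetical stub standing in for the real DLQ writer; illustration only.
class StubDlqWriter
  attr_reader :entries

  def initialize
    @entries = []
  end

  def write(event, reason)
    @entries << [event, reason]
  end
end

# Simplified copy of the fixed branch of handle_dlq_status.
def write_to_dlq(dlq_writer, message, action, status, response)
  event, action = action.event, [action[0], action[1], action[2]]
  dlq_writer.write(event, "#{message} status: #{status}, action: #{action}, response: #{response}")
end

writer = StubDlqWriter.new
event  = FakeEvent.new({ "foo" => "bar" })
tuple  = EventActionTuple.new("index", { :_id => "bar" }, event)

write_to_dlq(writer, "Could not index event to Elasticsearch.", tuple, 400, { "reason" => "TEST" })

queued_event, reason = writer.entries.first
puts queued_event.equal?(event)   # => true: the event object itself is written to the DLQ
puts reason                       # status, the flattened action array, and the response follow the message
```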
data/spec/unit/outputs/elasticsearch_spec.rb
CHANGED
@@ -343,7 +343,7 @@ describe LogStash::Outputs::ElasticSearch do
           }
         }]
       }
-
+    end
 
     before(:each) do
       allow(subject.client).to receive(:bulk_send).with(instance_of(StringIO), instance_of(Array)) do |stream, actions|
@@ -771,9 +771,9 @@ describe LogStash::Outputs::ElasticSearch do
 
     context 'when getting any other exception' do
       it 'should log at WARN level' do
-
-        subject.instance_variable_set(:@logger,
-        expect(
+        logger = double("logger").as_null_object
+        subject.instance_variable_set(:@logger, logger)
+        expect(logger).to receive(:warn).with(/Could not index/, hash_including(:status, :action, :response))
         mock_response = { 'index' => { 'error' => { 'type' => 'illegal_argument_exception' } } }
         subject.handle_dlq_status("Could not index event to Elasticsearch.",
                                   [:action, :params, :event], :some_status, mock_response)
@@ -782,9 +782,9 @@ describe LogStash::Outputs::ElasticSearch do
 
     context 'when the response does not include [error]' do
       it 'should not fail, but just log a warning' do
-
-        subject.instance_variable_set(:@logger,
-        expect(
+        logger = double("logger").as_null_object
+        subject.instance_variable_set(:@logger, logger)
+        expect(logger).to receive(:warn).with(/Could not index/, hash_including(:status, :action, :response))
         mock_response = { 'index' => {} }
         expect do
           subject.handle_dlq_status("Could not index event to Elasticsearch.",
@@ -804,12 +804,55 @@ describe LogStash::Outputs::ElasticSearch do
       # We should still log when sending to the DLQ.
       # This shall be solved by another issue, however: logstash-output-elasticsearch#772
       it 'should send the event to the DLQ instead, and not log' do
-
+        event = LogStash::Event.new("foo" => "bar")
+        expect(dlq_writer).to receive(:write).once.with(event, /Could not index/)
         mock_response = { 'index' => { 'error' => { 'type' => 'illegal_argument_exception' } } }
-
-
+        action = LogStash::Outputs::ElasticSearch::EventActionTuple.new(:action, :params, event)
+        subject.handle_dlq_status("Could not index event to Elasticsearch.", action, 404, mock_response)
       end
     end
+
+    context 'with response status 400' do
+
+      let(:options) { super().merge 'document_id' => '%{foo}' }
+
+      let(:events) { [ LogStash::Event.new("foo" => "bar") ] }
+
+      let(:dlq_writer) { subject.instance_variable_get(:@dlq_writer) }
+
+      let(:bulk_response) do
+        {
+            "took"=>1, "ingest_took"=>11, "errors"=>true, "items"=>
+                [{
+                     "index"=>{"_index"=>"bar", "_type"=>"_doc", "_id"=>'bar', "status"=>400,
+                               "error"=>{"type" => "illegal_argument_exception", "reason" => "TEST" }
+                     }
+                 }]
+        }
+      end
+
+      before(:each) do
+        allow(subject.client).to receive(:bulk_send).and_return(bulk_response)
+      end
+
+      it "should write event to DLQ" do
+        expect(dlq_writer).to receive(:write).and_wrap_original do |method, *args|
+          expect( args.size ).to eql 2
+
+          event, reason = *args
+          expect( event ).to be_a LogStash::Event
+          expect( event ).to be events.first
+          expect( reason ).to start_with 'Could not index event to Elasticsearch. status: 400, action: ["index"'
+          expect( reason ).to match /_id=>"bar".*"foo"=>"bar".*response:.*"reason"=>"TEST"/
+
+          method.call(*args) # won't hurt to call LogStash::Util::DummyDeadLetterQueueWriter
+        end.once
+
+        event_action_tuples = subject.map_events(events)
+        subject.send(:submit, event_action_tuples)
+      end
+
+    end if LOGSTASH_VERSION > '7.0'
   end
 
   describe "custom headers" do
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 11.0.0
+  version: 11.0.1
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-04-
+date: 2021-04-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement