logstash-output-elasticsearch 11.8.0-java → 11.9.1-java
- checksums.yaml +4 -4
- data/CHANGELOG.md +9 -0
- data/README.md +1 -1
- data/docs/index.asciidoc +10 -1
- data/lib/logstash/outputs/elasticsearch/http_client.rb +4 -0
- data/lib/logstash/outputs/elasticsearch.rb +64 -3
- data/lib/logstash/plugin_mixins/elasticsearch/common.rb +15 -12
- data/logstash-output-elasticsearch.gemspec +2 -1
- data/spec/integration/outputs/index_spec.rb +101 -27
- data/spec/integration/outputs/unsupported_actions_spec.rb +75 -0
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +1 -0
- data/spec/unit/outputs/elasticsearch_spec.rb +108 -11
- metadata +18 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4dbc44fb5e78b53368adb97d43b613c4a3c843e1a1308aa6d008cb105ee4301c
+  data.tar.gz: 9ed115a974e20bda89d63d6b2bf37dc5c7c42f8f40b46badbad072db0a698683
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9df0d61b80c78cd992b46428b24836db44094d636384940e976a0a09a58245bec59def6031d8426a24d41bdb0644ac49d4e8b558ef32c684554fbf03104a148c
+  data.tar.gz: 8c63cfaa83e3acad5d864dbf2d170fcf0e9111f31e8053fc2ce2a8a4d8af696355fda7b3e7152dc0764bce8e36c3afdd432c5a344f97a2b9b4b6c6c17e6a52d9
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,12 @@
+## 11.9.1
+- Fixes a possible infinite-retry-loop that could occur when this plugin is configured with an `action` whose value contains a [sprintf-style placeholder][] that fails to be resolved for an individual event. Events in this state will be routed to the pipeline's [dead letter queue][DLQ] if it is available, or will be logged-and-dropped so that the remaining events in the batch can be processed [#1080](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1080)
+
+[sprintf-style placeholder]: https://www.elastic.co/guide/en/logstash/current/event-dependent-configuration.html#sprintf
+[DLQ]: https://www.elastic.co/guide/en/logstash/current/dead-letter-queues.html
+
+## 11.9.0
+- Feature: force unresolved dynamic index names to be sent into the DLQ. This feature could be explicitly disabled using the `dlq_on_failed_indexname_interpolation` setting [#1084](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1084)
+
 ## 11.8.0
 - Feature: Adds a new `dlq_custom_codes` option to customize DLQ codes [#1067](https://github.com/logstash-plugins/logstash-output-elasticsearch/pull/1067)
 
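For illustration only (not part of the gem diff): a minimal plain-Ruby sketch of what "fails to be resolved" means for a sprintf-style placeholder. When the referenced field is absent from an event, the literal `%{...}` text survives interpolation; the 11.9.x code detects that leftover placeholder (the same `/%{.*?}/` pattern used in the `common_event_params` change further down) and routes the event to the DLQ instead of retrying it forever. The sample index names below are hypothetical.

    UNRESOLVED_PLACEHOLDER = /%{.*?}/  # same pattern the common_event_params change below checks against

    ["logs-2022.09.21", "%{[index_name]}_dynamic"].each do |resolved_index|  # hypothetical resolved values
      if resolved_index.match(UNRESOLVED_PLACEHOLDER)
        puts "#{resolved_index.inspect} still contains a placeholder -> send event to the DLQ (or log and drop)"
      else
        puts "#{resolved_index.inspect} resolved cleanly -> index normally"
      end
    end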
data/README.md
CHANGED
@@ -19,7 +19,7 @@ Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/log
 
 ## Developing
 
-### 1. Plugin
+### 1. Plugin Development and Testing
 
 #### Code
 - To get started, you'll need JRuby with the Bundler gem installed.
data/docs/index.asciidoc
CHANGED
@@ -320,6 +320,7 @@ This plugin supports the following configuration options plus the
 | <<plugins-{type}s-{plugin}-data_stream_sync_fields>> |<<boolean,boolean>>|No
 | <<plugins-{type}s-{plugin}-data_stream_type>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-dlq_custom_codes>> |<<number,number>>|No
+| <<plugins-{type}s-{plugin}-dlq_on_failed_indexname_interpolation>> |<<boolean,boolean>>|No
 | <<plugins-{type}s-{plugin}-doc_as_upsert>> |<<boolean,boolean>>|No
 | <<plugins-{type}s-{plugin}-document_id>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-document_type>> |<<string,string>>|No
@@ -394,7 +395,7 @@ The Elasticsearch action to perform. Valid actions are:
   document if not already present. See the `doc_as_upsert` option. NOTE: This does not work and is not supported
   in Elasticsearch 1.x. Please upgrade to ES 2.x or greater to use this feature with Logstash!
 - A sprintf style string to change the action based on the content of the event. The value `%{[foo]}`
-  would use the foo field for the action
+  would use the foo field for the action. If the resolved action is not one of [`index`, `delete`, `create`, `update`], the event will not be sent to {es}; instead it will be sent to the pipeline's <<dead-letter-queues,dead letter queue (DLQ)>> if one is enabled, or logged and dropped.
 
 For more details on actions, check out the {ref}/docs-bulk.html[Elasticsearch bulk API documentation].
 
@@ -533,6 +534,14 @@ This list is an addition to the ordinary error codes considered for this feature
 It's considered a configuration error to re-use the same predefined codes for success, DLQ or conflict.
 The option accepts a list of natural numbers corresponding to HTTP errors codes.
 
+[id="plugins-{type}s-{plugin}-dlq_on_failed_indexname_interpolation"]
+===== `dlq_on_failed_indexname_interpolation`
+
+* Value type is <<boolean,boolean>>
+* Default value is `true`.
+
+If enabled, events whose index name interpolation fails are sent to the dead letter queue.
+
 [id="plugins-{type}s-{plugin}-doc_as_upsert"]
 ===== `doc_as_upsert`
 
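For illustration only: a short plain-Ruby sketch of the action check the documentation above describes. The constant name mirrors `VALID_HTTP_ACTIONS` referenced in the elasticsearch.rb change below; the resolved action values are hypothetical results of `event.sprintf(action)`.

    VALID_HTTP_ACTIONS = %w[index delete create update]  # the four actions the docs above allow

    ["index", "unsupported_action"].each do |resolved_action|  # hypothetical resolved values
      if VALID_HTTP_ACTIONS.include?(resolved_action)
        puts "#{resolved_action.inspect} is accepted and sent to Elasticsearch"
      else
        # the plugin raises UnsupportedActionError here, so the event is DLQ'd or logged and dropped
        puts "#{resolved_action.inspect} is rejected: Elasticsearch doesn't support [#{resolved_action}] action"
      end
    end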
data/lib/logstash/outputs/elasticsearch/http_client.rb
CHANGED
@@ -120,6 +120,7 @@ module LogStash; module Outputs; class ElasticSearch;
       else
         stream_writer = body_stream
       end
+
       bulk_responses = []
       batch_actions = []
       bulk_actions.each_with_index do |action, index|
@@ -142,13 +143,16 @@ module LogStash; module Outputs; class ElasticSearch;
         stream_writer.write(as_json)
         batch_actions << action
       end
+
       stream_writer.close if http_compression
+
       logger.debug("Sending final bulk request for batch.",
                    :action_count => batch_actions.size,
                    :payload_size => stream_writer.pos,
                    :content_length => body_stream.size,
                    :batch_offset => (actions.size - batch_actions.size))
       bulk_responses << bulk_send(body_stream, batch_actions) if body_stream.size > 0
+
       body_stream.close if !http_compression
       join_bulk_responses(bulk_responses)
     end
data/lib/logstash/outputs/elasticsearch.rb
CHANGED
@@ -261,6 +261,9 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
   # The option accepts a list of natural numbers corresponding to HTTP errors codes.
   config :dlq_custom_codes, :validate => :number, :list => true, :default => []
 
+  # if enabled, failed index name interpolation events go into dead letter queue.
+  config :dlq_on_failed_indexname_interpolation, :validate => :boolean, :default => true
+
   attr_reader :client
   attr_reader :default_index
   attr_reader :default_ilm_rollover_alias
@@ -362,11 +365,48 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
   # Receive an array of events and immediately attempt to index them (no buffering)
   def multi_receive(events)
     wait_for_successful_connection if @after_successful_connection_done
-    retrying_submit(map_events(events))
+    events_mapped = safe_interpolation_map_events(events)
+    retrying_submit(events_mapped.successful_events)
+    unless events_mapped.event_mapping_errors.empty?
+      handle_event_mapping_errors(events_mapped.event_mapping_errors)
+    end
+  end
+
+  # @param: Arrays of FailedEventMapping
+  private
+  def handle_event_mapping_errors(event_mapping_errors)
+    # if DLQ is enabled, log the events to provide issue insights to users.
+    if @dlq_writer
+      @logger.warn("Events could not be indexed and routing to DLQ, count: #{event_mapping_errors.size}")
+    end
+
+    event_mapping_errors.each do |event_mapping_error|
+      detailed_message = "#{event_mapping_error.message}; event: `#{event_mapping_error.event.to_hash_with_metadata}`"
+      handle_dlq_status(event_mapping_error.event, :warn, detailed_message)
+    end
+    @document_level_metrics.increment(:non_retryable_failures, event_mapping_errors.size)
   end
 
+  MapEventsResult = Struct.new(:successful_events, :event_mapping_errors)
+  FailedEventMapping = Struct.new(:event, :message)
+
+  private
+  def safe_interpolation_map_events(events)
+    successful_events = [] # list of LogStash::Outputs::ElasticSearch::EventActionTuple
+    event_mapping_errors = [] # list of FailedEventMapping
+    events.each do |event|
+      begin
+        successful_events << @event_mapper.call(event)
+      rescue EventMappingError => ie
+        event_mapping_errors << FailedEventMapping.new(event, ie.message)
+      end
+    end
+    MapEventsResult.new(successful_events, event_mapping_errors)
+  end
+
+  public
   def map_events(events)
-    events.
+    safe_interpolation_map_events(events).successful_events
   end
 
   def wait_for_successful_connection
@@ -414,6 +454,7 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
     end
 
     action = event.sprintf(@action || 'index')
+    raise UnsupportedActionError, action unless VALID_HTTP_ACTIONS.include?(action)
 
     if action == 'update'
       params[:_upsert] = LogStash::Json.load(event.sprintf(@upsert)) if @upsert != ""
@@ -441,12 +482,32 @@
 
   end
 
+  class EventMappingError < ArgumentError
+    def initialize(msg = nil)
+      super
+    end
+  end
+
+  class IndexInterpolationError < EventMappingError
+    def initialize(bad_formatted_index)
+      super("Badly formatted index, after interpolation still contains placeholder: [#{bad_formatted_index}]")
+    end
+  end
+
+  class UnsupportedActionError < EventMappingError
+    def initialize(bad_action)
+      super("Elasticsearch doesn't support [#{bad_action}] action")
+    end
+  end
+
   # @return Hash (initial) parameters for given event
   # @private shared event params factory between index and data_stream mode
   def common_event_params(event)
+    sprintf_index = @event_target.call(event)
+    raise IndexInterpolationError, sprintf_index if sprintf_index.match(/%{.*?}/) && dlq_on_failed_indexname_interpolation
     params = {
       :_id => @document_id ? event.sprintf(@document_id) : nil,
-      :_index => @event_target.call(event),
+      :_index => sprintf_index,
       routing_field_name => @routing ? event.sprintf(@routing) : nil
     }
 
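For illustration only: a self-contained sketch (plain Ruby, no Logstash classes) of the partition-then-continue pattern that `safe_interpolation_map_events` introduces above, so one bad event no longer stalls the whole batch. The mapper block and sample events are hypothetical stand-ins for `@event_mapper` and `LogStash::Event`.

    MapEventsResult    = Struct.new(:successful_events, :event_mapping_errors)
    FailedEventMapping = Struct.new(:event, :message)

    def partition_events(events)
      ok, errors = [], []
      events.each do |event|
        begin
          ok << yield(event)                      # plugin: @event_mapper.call(event)
        rescue ArgumentError => e                 # plugin rescues its EventMappingError subclasses
          errors << FailedEventMapping.new(event, e.message)
        end
      end
      MapEventsResult.new(ok, errors)
    end

    events = [{ "action_field" => "index" }, {}]  # hypothetical events; the second lacks the field
    result = partition_events(events) do |event|
      event.fetch("action_field") { raise ArgumentError, "missing action field" }
    end
    result.successful_events     #=> ["index"]               -> handed to retrying_submit
    result.event_mapping_errors  #=> [FailedEventMapping...] -> handle_event_mapping_errors / DLQ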
data/lib/logstash/plugin_mixins/elasticsearch/common.rb
CHANGED
@@ -206,19 +206,23 @@ module LogStash; module PluginMixins; module ElasticSearch
      doubled > @retry_max_interval ? @retry_max_interval : doubled
    end
 
-    def handle_dlq_status(message, action, status, response)
+    def handle_dlq_response(message, action, status, response)
+      _, action_params = action.event, [action[0], action[1], action[2]]
+
+      # TODO: Change this to send a map with { :status => status, :action => action } in the future
+      detailed_message = "#{message} status: #{status}, action: #{action_params}, response: #{response}"
+
+      log_level = dig_value(response, 'index', 'error', 'type') == 'invalid_index_name_exception' ? :error : :warn
+
+      handle_dlq_status(action.event, log_level, detailed_message)
+    end
+
+    def handle_dlq_status(event, log_level, message)
      # To support bwc, we check if DLQ exists. otherwise we log and drop event (previous behavior)
      if @dlq_writer
-        event, action = action.event, [action[0], action[1], action[2]]
-        # TODO: Change this to send a map with { :status => status, :action => action } in the future
-        @dlq_writer.write(event, "#{message} status: #{status}, action: #{action}, response: #{response}")
+        @dlq_writer.write(event, "#{message}")
      else
-        if dig_value(response, 'index', 'error', 'type') == 'invalid_index_name_exception'
-          level = :error
-        else
-          level = :warn
-        end
-        @logger.send level, message, status: status, action: action, response: response
+        @logger.send log_level, message
      end
    end
 
@@ -255,7 +259,6 @@ module LogStash; module PluginMixins; module ElasticSearch
        status = action_props["status"]
        error = action_props["error"]
        action = actions[idx]
-        action_params = action[1]
 
        # Retry logic: If it is success, we move on. If it is a failure, we have 3 paths:
        # - For 409, we log and drop. there is nothing we can do
@@ -269,7 +272,7 @@ module LogStash; module PluginMixins; module ElasticSearch
          @logger.warn "Failed action", status: status, action: action, response: response if log_failure_type?(error)
          next
        elsif @dlq_codes.include?(status)
-          handle_dlq_status("Could not index event to Elasticsearch.", action, status, response)
+          handle_dlq_response("Could not index event to Elasticsearch.", action, status, response)
          @document_level_metrics.increment(:non_retryable_failures)
          next
        else
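For illustration only: a tiny sketch of the log-level decision `handle_dlq_response` makes above. Ruby's built-in `Hash#dig` stands in here for the mixin's `dig_value` helper, and the sample responses are hypothetical bulk-response fragments.

    def dlq_log_level(response)
      # an invalid index name is an operator error worth :error; anything else stays at :warn
      response.dig('index', 'error', 'type') == 'invalid_index_name_exception' ? :error : :warn
    end

    dlq_log_level('index' => { 'error' => { 'type' => 'invalid_index_name_exception' } })  #=> :error
    dlq_log_level('index' => { 'error' => { 'type' => 'illegal_argument_exception' } })    #=> :warn
    dlq_log_level('index' => {})                                                           #=> :warn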
data/logstash-output-elasticsearch.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-elasticsearch'
-  s.version = '11.8.0'
+  s.version = '11.9.1'
   s.licenses = ['apache-2.0']
   s.summary = "Stores logs in Elasticsearch"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -33,6 +33,7 @@ Gem::Specification.new do |s|
   s.add_development_dependency 'cabin', ['~> 0.6']
   s.add_development_dependency 'webrick'
   s.add_development_dependency 'webmock'
+  s.add_development_dependency 'rspec-collection_matchers'
   # Still used in some specs, we should remove this ASAP
   s.add_development_dependency 'elasticsearch'
 end
data/spec/integration/outputs/index_spec.rb
CHANGED
@@ -1,5 +1,6 @@
 require_relative "../../../spec/es_spec_helper"
 require "logstash/outputs/elasticsearch"
+require 'cgi'
 
 describe "TARGET_BULK_BYTES", :integration => true do
   let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
@@ -45,16 +46,57 @@ describe "TARGET_BULK_BYTES", :integration => true do
   end
 end
 
-describe "indexing" do
+def curl_and_get_json_response(url, method: :get, retrieve_err_payload: false); require 'open3'
+  cmd = "curl -s -v --show-error #{curl_opts} -X #{method.to_s.upcase} -k #{url}"
+  begin
+    out, err, status = Open3.capture3(cmd)
+  rescue Errno::ENOENT
+    fail "curl not available, make sure curl binary is installed and available on $PATH"
+  end
+
+  if status.success?
+    http_status = err.match(/< HTTP\/1.1 (\d+)/)[1] || '0' # < HTTP/1.1 200 OK\r\n
+
+    if http_status.strip[0].to_i > 2
+      error = (LogStash::Json.load(out)['error']) rescue nil
+      if error
+        if retrieve_err_payload
+          return error
+        else
+          fail "#{cmd.inspect} received an error: #{http_status}\n\n#{error.inspect}"
+        end
+      else
+        warn out
+        fail "#{cmd.inspect} unexpected response: #{http_status}\n\n#{err}"
+      end
+    end
+
+    LogStash::Json.load(out)
+  else
+    warn out
+    fail "#{cmd.inspect} process failed: #{status}\n\n#{err}"
+  end
+end
+
+describe "indexing with sprintf resolution", :integration => true do
   let(:message) { "Hello from #{__FILE__}" }
   let(:event) { LogStash::Event.new("message" => message, "type" => type) }
-  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  let (:index) { "%{[index_name]}_dynamic" }
   let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
-  let(:event_count) { 1 + rand(2) }
-  let(:config) { "not implemented" }
+  let(:event_count) { 1 }
+  let(:user) { "simpleuser" }
+  let(:password) { "abc123" }
+  let(:config) do
+    {
+      "hosts" => [ get_host_port ],
+      "user" => user,
+      "password" => password,
+      "index" => index
+    }
+  end
   let(:events) { event_count.times.map { event }.to_a }
   subject { LogStash::Outputs::ElasticSearch.new(config) }
-
+
   let(:es_url) { "http://#{get_host_port}" }
   let(:index_url) { "#{es_url}/#{index}" }
 
@@ -63,33 +105,65 @@ describe "indexing" do
   let(:es_admin) { 'admin' } # default user added in ES -> 8.x requires auth credentials for /_refresh etc
   let(:es_admin_pass) { 'elastic' }
 
-
-  cmd = "curl -s -v --show-error #{curl_opts} -X #{method.to_s.upcase} -k #{url}"
-  begin
-    out, err, status = Open3.capture3(cmd)
-  rescue Errno::ENOENT
-    fail "curl not available, make sure curl binary is installed and available on $PATH"
-  end
+  let(:initial_events) { [] }
 
-
-  http_status = err.match(/< HTTP\/1.1 (\d+)/)[1] || '0' # < HTTP/1.1 200 OK\r\n
+  let(:do_register) { true }
 
-
-
-
-
-  else
-    warn out
-    fail "#{cmd.inspect} unexpected response: #{http_status}\n\n#{err}"
-  end
-  end
+  before do
+    subject.register if do_register
+    subject.multi_receive(initial_events) if initial_events
+  end
 
-
-
-
-
+  after do
+    subject.do_close
+  end
+
+  let(:event) { LogStash::Event.new("message" => message, "type" => type, "index_name" => "test") }
+
+  it "should index successfully when field is resolved" do
+    expected_index_name = "test_dynamic"
+    subject.multi_receive(events)
+
+    # curl_and_get_json_response "#{es_url}/_refresh", method: :post
+
+    result = curl_and_get_json_response "#{es_url}/#{expected_index_name}"
+
+    expect(result[expected_index_name]).not_to be(nil)
+  end
+
+  context "when dynamic field doesn't resolve the index_name" do
+    let(:event) { LogStash::Event.new("message" => message, "type" => type) }
+    let(:dlq_writer) { double('DLQ writer') }
+    before { subject.instance_variable_set('@dlq_writer', dlq_writer) }
+
+    it "should doesn't create an index name with unresolved placeholders" do
+      expect(dlq_writer).to receive(:write).once.with(event, a_string_including("Badly formatted index, after interpolation still contains placeholder"))
+      subject.multi_receive(events)
+
+      escaped_index_name = CGI.escape("%{[index_name]}_dynamic")
+      result = curl_and_get_json_response "#{es_url}/#{escaped_index_name}", retrieve_err_payload: true
+      expect(result["root_cause"].first()["type"]).to eq("index_not_found_exception")
    end
  end
+end
+
+describe "indexing" do
+  let(:message) { "Hello from #{__FILE__}" }
+  let(:event) { LogStash::Event.new("message" => message, "type" => type) }
+  let(:index) { 10.times.collect { rand(10).to_s }.join("") }
+  let(:type) { ESHelper.es_version_satisfies?("< 7") ? "doc" : "_doc" }
+  let(:event_count) { 1 + rand(2) }
+  let(:config) { "not implemented" }
+  let(:events) { event_count.times.map { event }.to_a }
+  subject { LogStash::Outputs::ElasticSearch.new(config) }
+
+  let(:es_url) { "http://#{get_host_port}" }
+  let(:index_url) { "#{es_url}/#{index}" }
+
+  let(:curl_opts) { nil }
+
+  let(:es_admin) { 'admin' } # default user added in ES -> 8.x requires auth credentials for /_refresh etc
+  let(:es_admin_pass) { 'elastic' }
 
   let(:initial_events) { [] }
 
data/spec/integration/outputs/unsupported_actions_spec.rb
ADDED
@@ -0,0 +1,75 @@
+require_relative "../../../spec/es_spec_helper"
+
+describe "Unsupported actions testing...", :integration => true do
+  require "logstash/outputs/elasticsearch"
+
+  INDEX = "logstash-unsupported-actions-rejected"
+
+  def get_es_output( options={} )
+    settings = {
+      "manage_template" => true,
+      "index" => INDEX,
+      "template_overwrite" => true,
+      "hosts" => get_host_port(),
+      "action" => "%{action_field}",
+      "document_id" => "%{doc_id}",
+      "ecs_compatibility" => "disabled"
+    }
+    LogStash::Outputs::ElasticSearch.new(settings.merge!(options))
+  end
+
+  before :each do
+    @es = get_client
+    # Delete all templates first.
+    # Clean ES of data before we start.
+    @es.indices.delete_template(:name => "*")
+    # This can fail if there are no indexes, ignore failure.
+    @es.indices.delete(:index => "*") rescue nil
+    # index single doc for update purpose
+    @es.index(
+      :index => INDEX,
+      :type => doc_type,
+      :id => "2",
+      :body => { :message => 'Test to doc indexing', :counter => 1 }
+    )
+    @es.index(
+      :index => INDEX,
+      :type => doc_type,
+      :id => "3",
+      :body => { :message => 'Test to doc deletion', :counter => 2 }
+    )
+    @es.indices.refresh
+  end
+
+  context "multiple actions include unsupported action" do
+    let(:events) {[
+      LogStash::Event.new("action_field" => "index", "doc_id" => 1, "message"=> "hello"),
+      LogStash::Event.new("action_field" => "update", "doc_id" => 2, "message"=> "hi"),
+      LogStash::Event.new("action_field" => "delete", "doc_id" => 3),
+      LogStash::Event.new("action_field" => "unsupported_action", "doc_id" => 4, "message"=> "world!")
+    ]}
+
+    it "should reject unsupported doc" do
+      subject = get_es_output
+      subject.register
+      subject.multi_receive(events)
+
+      index_or_update = proc do |event|
+        action = event.get("action_field")
+        action.eql?("index") || action.eql?("update")
+      end
+
+      indexed_events = events.select { |event| index_or_update.call(event) }
+      rejected_events = events.select { |event| !index_or_update.call(event) }
+
+      indexed_events.each do |event|
+        response = @es.get(:index => INDEX, :type => doc_type, :id => event.get("doc_id"), :refresh => true)
+        expect(response['_source']['message']).to eq(event.get("message"))
+      end
+
+      rejected_events.each do |event|
+        expect {@es.get(:index => INDEX, :type => doc_type, :id => event.get("doc_id"), :refresh => true)}.to raise_error(Elasticsearch::Transport::Transport::Errors::NotFound)
+      end
+    end
+  end
+end
data/spec/unit/outputs/elasticsearch_spec.rb
CHANGED
@@ -3,8 +3,8 @@ require "base64"
 require "flores/random"
 require 'concurrent/atomic/count_down_latch'
 require "logstash/outputs/elasticsearch"
-
 require 'logstash/plugin_mixins/ecs_compatibility_support/spec_helper'
+require 'rspec/collection_matchers'
 
 describe LogStash::Outputs::ElasticSearch do
   subject(:elasticsearch_output_instance) { described_class.new(options) }
@@ -347,7 +347,7 @@ describe LogStash::Outputs::ElasticSearch do
            }
          },
          # NOTE: this is an artificial success (usually everything fails with a 500) but even if some doc where
-          # to succeed due the unexpected
+          # to succeed due the unexpected response items we can not clearly identify which actions to retry ...
          {"index"=>{"_index"=>"bar2", "_type"=>"_doc", "_id"=>nil, "status"=>201}},
          {"index"=>{"_index"=>"bar2", "_type"=>"_doc", "_id"=>nil, "status"=>500,
                     "error"=>{"type" => "illegal_state_exception",
@@ -381,6 +381,104 @@ describe LogStash::Outputs::ElasticSearch do
      subject.multi_receive(events)
    end
  end
+
+  context "unsupported actions" do
+    let(:options) { super().merge("index" => "logstash", "action" => "%{action_field}") }
+
+    context "with multiple valid actions with one trailing invalid action" do
+      let(:events) {[
+        LogStash::Event.new("action_field" => "index", "id" => 1, "message"=> "hello"),
+        LogStash::Event.new("action_field" => "index", "id" => 2, "message"=> "hi"),
+        LogStash::Event.new("action_field" => "index", "id" => 3, "message"=> "bye"),
+        LogStash::Event.new("action_field" => "unsupported_action", "id" => 4, "message"=> "world!")
+      ]}
+      it "rejects unsupported actions" do
+        event_result = subject.send(:safe_interpolation_map_events, events)
+        expect(event_result.successful_events).to have_exactly(3).items
+        event_result.successful_events.each do |action, _|
+          expect(action).to_not eql("unsupported_action")
+        end
+        expect(event_result.event_mapping_errors).to have_exactly(1).items
+        event_result.event_mapping_errors.each do |event_mapping_error|
+          expect(event_mapping_error.message).to eql("Elasticsearch doesn't support [unsupported_action] action")
+        end
+      end
+    end
+
+    context "with one leading invalid action followed by multiple valid actions" do
+      let(:events) {[
+        LogStash::Event.new("action_field" => "unsupported_action", "id" => 1, "message"=> "world!"),
+        LogStash::Event.new("action_field" => "index", "id" => 2, "message"=> "hello"),
+        LogStash::Event.new("action_field" => "index", "id" => 3, "message"=> "hi"),
+        LogStash::Event.new("action_field" => "index", "id" => 4, "message"=> "bye")
+      ]}
+      it "rejects unsupported actions" do
+        event_result = subject.send(:safe_interpolation_map_events, events)
+        expect(event_result.successful_events).to have_exactly(3).items
+        event_result.successful_events.each do |action, _|
+          expect(action).to_not eql("unsupported_action")
+        end
+        expect(event_result.event_mapping_errors).to have_exactly(1).items
+        event_result.event_mapping_errors.each do |event_mapping_error|
+          expect(event_mapping_error.message).to eql("Elasticsearch doesn't support [unsupported_action] action")
+        end
+      end
+    end
+
+    context "with batch of multiple invalid actions and no valid actions" do
+      let(:events) {[
+        LogStash::Event.new("action_field" => "unsupported_action1", "id" => 1, "message"=> "world!"),
+        LogStash::Event.new("action_field" => "unsupported_action2", "id" => 2, "message"=> "hello"),
+        LogStash::Event.new("action_field" => "unsupported_action3", "id" => 3, "message"=> "hi"),
+        LogStash::Event.new("action_field" => "unsupported_action4", "id" => 4, "message"=> "bye")
+      ]}
+      it "rejects unsupported actions" do
+        event_result = subject.send(:safe_interpolation_map_events, events)
+        expect(event_result.successful_events).to have(:no).items
+        event_result.successful_events.each do |action, _|
+          expect(action).to_not eql("unsupported_action")
+        end
+        expect(event_result.event_mapping_errors).to have_exactly(4).items
+        event_result.event_mapping_errors.each do |event_mapping_error|
+          expect(event_mapping_error.message).to include "Elasticsearch doesn't support"
+        end
+      end
+    end
+
+    context "with batch of intermixed valid and invalid actions" do
+      let(:events) {[
+        LogStash::Event.new("action_field" => "index", "id" => 1, "message"=> "world!"),
+        LogStash::Event.new("action_field" => "unsupported_action2", "id" => 2, "message"=> "hello"),
+        LogStash::Event.new("action_field" => "unsupported_action3", "id" => 3, "message"=> "hi"),
+        LogStash::Event.new("action_field" => "index", "id" => 4, "message"=> "bye")
+      ]}
+      it "rejects unsupported actions" do
+        event_result = subject.send(:safe_interpolation_map_events, events)
+        expect(event_result.successful_events).to have_exactly(2).items
+        expect(event_result.event_mapping_errors).to have_exactly(2).items
+        event_result.event_mapping_errors.each do |event_mapping_error|
+          expect(event_mapping_error.message).to include "Elasticsearch doesn't support"
+        end
+      end
+    end
+
+    context "with batch of exactly one action that is invalid" do
+      let(:events) {[
+        LogStash::Event.new("action_field" => "index", "id" => 1, "message"=> "world!"),
+        LogStash::Event.new("action_field" => "index", "id" => 2, "message"=> "hello"),
+        LogStash::Event.new("action_field" => "unsupported_action3", "id" => 3, "message"=> "hi"),
+        LogStash::Event.new("action_field" => "index", "id" => 4, "message"=> "bye")
+      ]}
+      it "rejects unsupported action" do
+        event_result = subject.send(:safe_interpolation_map_events, events)
+        expect(event_result.successful_events).to have_exactly(3).items
+        expect(event_result.event_mapping_errors).to have_exactly(1).items
+        event_result.event_mapping_errors.each do |event_mapping_error|
+          expect(event_mapping_error.message).to eql("Elasticsearch doesn't support [unsupported_action3] action")
+        end
+      end
+    end
+  end
 end
 
 context '413 errors' do
@@ -768,17 +866,18 @@ describe LogStash::Outputs::ElasticSearch do
 
  context 'handling elasticsearch document-level status meant for the DLQ' do
    let(:options) { { "manage_template" => false } }
+    let(:action) { LogStash::Outputs::ElasticSearch::EventActionTuple.new(:action, :params, LogStash::Event.new("foo" => "bar")) }
 
    context 'when @dlq_writer is nil' do
      before { subject.instance_variable_set '@dlq_writer', nil }
+      let(:action) { LogStash::Outputs::ElasticSearch::EventActionTuple.new(:action, :params, LogStash::Event.new("foo" => "bar")) }
 
      context 'resorting to previous behaviour of logging the error' do
        context 'getting an invalid_index_name_exception' do
          it 'should log at ERROR level' do
            subject.instance_variable_set(:@logger, double("logger").as_null_object)
            mock_response = { 'index' => { 'error' => { 'type' => 'invalid_index_name_exception' } } }
-            subject.
-              [:action, :params, :event], :some_status, mock_response)
+            subject.handle_dlq_response("Could not index event to Elasticsearch.", action, :some_status, mock_response)
          end
        end
 
@@ -786,10 +885,9 @@ describe LogStash::Outputs::ElasticSearch do
          it 'should log at WARN level' do
            logger = double("logger").as_null_object
            subject.instance_variable_set(:@logger, logger)
-            expect(logger).to receive(:warn).with(
+            expect(logger).to receive(:warn).with(a_string_including "Could not index event to Elasticsearch. status: some_status, action: [:action, :params, {")
            mock_response = { 'index' => { 'error' => { 'type' => 'illegal_argument_exception' } } }
-            subject.
-              [:action, :params, :event], :some_status, mock_response)
+            subject.handle_dlq_response("Could not index event to Elasticsearch.", action, :some_status, mock_response)
          end
        end
 
@@ -797,11 +895,10 @@ describe LogStash::Outputs::ElasticSearch do
          it 'should not fail, but just log a warning' do
            logger = double("logger").as_null_object
            subject.instance_variable_set(:@logger, logger)
-            expect(logger).to receive(:warn).with(
+            expect(logger).to receive(:warn).with(a_string_including "Could not index event to Elasticsearch. status: some_status, action: [:action, :params, {")
            mock_response = { 'index' => {} }
            expect do
-              subject.
-                [:action, :params, :event], :some_status, mock_response)
+              subject.handle_dlq_response("Could not index event to Elasticsearch.", action, :some_status, mock_response)
            end.to_not raise_error
          end
        end
@@ -821,7 +918,7 @@ describe LogStash::Outputs::ElasticSearch do
        expect(dlq_writer).to receive(:write).once.with(event, /Could not index/)
        mock_response = { 'index' => { 'error' => { 'type' => 'illegal_argument_exception' } } }
        action = LogStash::Outputs::ElasticSearch::EventActionTuple.new(:action, :params, event)
-        subject.
+        subject.handle_dlq_response("Could not index event to Elasticsearch.", action, 404, mock_response)
      end
    end
 
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 11.8.0
+  version: 11.9.1
 platform: java
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-09-
+date: 2022-09-21 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -196,6 +196,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: rspec-collection_matchers
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
@@ -288,6 +302,7 @@ files:
 - spec/integration/outputs/routing_spec.rb
 - spec/integration/outputs/sniffer_spec.rb
 - spec/integration/outputs/templates_spec.rb
+- spec/integration/outputs/unsupported_actions_spec.rb
 - spec/integration/outputs/update_spec.rb
 - spec/spec_helper.rb
 - spec/support/elasticsearch/api/actions/delete_ilm_policy.rb
@@ -373,6 +388,7 @@ test_files:
 - spec/integration/outputs/routing_spec.rb
 - spec/integration/outputs/sniffer_spec.rb
 - spec/integration/outputs/templates_spec.rb
+- spec/integration/outputs/unsupported_actions_spec.rb
 - spec/integration/outputs/update_spec.rb
 - spec/spec_helper.rb
 - spec/support/elasticsearch/api/actions/delete_ilm_policy.rb