logstash-output-elasticsearch 10.8.6-java → 11.0.3-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +17 -0
- data/docs/index.asciidoc +132 -22
- data/lib/logstash/outputs/elasticsearch.rb +125 -64
- data/lib/logstash/outputs/elasticsearch/data_stream_support.rb +233 -0
- data/lib/logstash/outputs/elasticsearch/http_client.rb +9 -7
- data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +49 -62
- data/lib/logstash/outputs/elasticsearch/ilm.rb +13 -45
- data/lib/logstash/outputs/elasticsearch/license_checker.rb +26 -23
- data/lib/logstash/outputs/elasticsearch/template_manager.rb +4 -6
- data/lib/logstash/outputs/elasticsearch/templates/ecs-v1/elasticsearch-8x.json +1 -0
- data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb +157 -153
- data/lib/logstash/plugin_mixins/elasticsearch/common.rb +71 -58
- data/logstash-output-elasticsearch.gemspec +3 -3
- data/spec/es_spec_helper.rb +7 -12
- data/spec/fixtures/_nodes/{5x_6x.json → 6x.json} +5 -5
- data/spec/integration/outputs/compressed_indexing_spec.rb +47 -46
- data/spec/integration/outputs/data_stream_spec.rb +61 -0
- data/spec/integration/outputs/delete_spec.rb +49 -51
- data/spec/integration/outputs/ilm_spec.rb +236 -248
- data/spec/integration/outputs/index_spec.rb +5 -2
- data/spec/integration/outputs/index_version_spec.rb +78 -82
- data/spec/integration/outputs/ingest_pipeline_spec.rb +58 -58
- data/spec/integration/outputs/painless_update_spec.rb +74 -164
- data/spec/integration/outputs/parent_spec.rb +67 -75
- data/spec/integration/outputs/retry_spec.rb +6 -6
- data/spec/integration/outputs/sniffer_spec.rb +15 -54
- data/spec/integration/outputs/templates_spec.rb +79 -81
- data/spec/integration/outputs/update_spec.rb +99 -101
- data/spec/spec_helper.rb +10 -0
- data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +528 -0
- data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +1 -0
- data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +36 -29
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +2 -3
- data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +10 -12
- data/spec/unit/outputs/elasticsearch_proxy_spec.rb +1 -2
- data/spec/unit/outputs/elasticsearch_spec.rb +176 -41
- data/spec/unit/outputs/elasticsearch_ssl_spec.rb +1 -2
- data/spec/unit/outputs/error_whitelist_spec.rb +3 -2
- data/spec/unit/outputs/license_check_spec.rb +0 -16
- metadata +29 -36
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-2x.json +0 -95
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-5x.json +0 -46
- data/spec/fixtures/_nodes/2x_1x.json +0 -27
- data/spec/fixtures/scripts/groovy/scripted_update.groovy +0 -2
- data/spec/fixtures/scripts/groovy/scripted_update_nested.groovy +0 -2
- data/spec/fixtures/scripts/groovy/scripted_upsert.groovy +0 -2
- data/spec/integration/outputs/groovy_update_spec.rb +0 -150
- data/spec/integration/outputs/templates_5x_spec.rb +0 -98
@@ -0,0 +1,233 @@
|
|
1
|
+
module LogStash module Outputs class ElasticSearch
  # DS specific behavior/configuration.
  #
  # Mixin included into the ES output plugin; adds the `data_stream_*` settings,
  # decides (from the user's explicit config) whether events should be written to
  # an Elasticsearch data stream, and keeps the event's `data_stream` field in
  # sync with the configured type/dataset/namespace.
  module DataStreamSupport

    def self.included(base)
      # Defines whether data will be indexed into an Elasticsearch data stream,
      # `data_stream_*` settings will only be used if this setting is enabled!
      # This setting supports values `true`, `false`, and `auto`.
      # Defaults to `false` in Logstash 7.x and `auto` starting in Logstash 8.0.
      base.config :data_stream, :validate => ['true', 'false', 'auto']

      # data stream name parts: {type}-{dataset}-{namespace}
      base.config :data_stream_type, :validate => ['logs', 'metrics', 'synthetics'], :default => 'logs'
      base.config :data_stream_dataset, :validate => :dataset_identifier, :default => 'generic'
      base.config :data_stream_namespace, :validate => :namespace_identifier, :default => 'default'

      base.config :data_stream_sync_fields, :validate => :boolean, :default => true
      base.config :data_stream_auto_routing, :validate => :boolean, :default => true

      # extend the plugin class so the custom :dataset_identifier/:namespace_identifier
      # validators above are resolvable by the config mixin's validate_value
      base.extend(Validator)
    end

    # @note assumes to be running AFTER {after_successful_connection} completed, due ES version checks
    # @return [true,false] memoized result of {#check_data_stream_config!}
    def data_stream_config?
      @data_stream_config.nil? ? @data_stream_config = check_data_stream_config! : @data_stream_config
    end

    private

    # Computes the target data stream name for an event.
    # Uses the event's own `data_stream` map when auto-routing is enabled,
    # falling back to the configured parts; otherwise returns @index.
    def data_stream_name(event)
      data_stream = event.get('data_stream')
      return @index if !data_stream_auto_routing || !data_stream.is_a?(Hash)

      type = data_stream['type'] || data_stream_type
      dataset = data_stream['dataset'] || data_stream_dataset
      namespace = data_stream['namespace'] || data_stream_namespace
      "#{type}-#{dataset}-#{namespace}"
    end

    # @param params the user configuration for the ES output
    # @note LS initialized configuration (with filled defaults) won't detect as data-stream
    #       compatible, only explicit (`original_params`) config should be tested.
    # @return [TrueClass|FalseClass] whether given configuration is data-stream compatible
    # @raise [LogStash::ConfigurationError] on ambiguous or invalid DS configuration
    def check_data_stream_config!(params = original_params)
      data_stream_params = params.select { |name, _| name.start_with?('data_stream_') } # exclude data_stream =>
      invalid_data_stream_params = invalid_data_stream_params(params)

      case data_stream_explicit_value
      when false
        if data_stream_params.any?
          @logger.error "Ambiguous configuration; data stream settings must not be present when data streams is disabled (caused by: `data_stream => false`)", data_stream_params
          raise LogStash::ConfigurationError, "Ambiguous configuration, please remove data stream specific settings: #{data_stream_params.keys}"
        end
        return false
      when true
        if invalid_data_stream_params.any?
          @logger.error "Invalid data stream configuration, following parameters are not supported:", invalid_data_stream_params
          raise LogStash::ConfigurationError, "Invalid data stream configuration: #{invalid_data_stream_params.keys}"
        end
        return true
      else # data_stream => auto (or not set)
        use_data_stream = data_stream_default(data_stream_params, invalid_data_stream_params.empty?)
        if !use_data_stream && data_stream_params.any?
          # DS (auto) disabled but there's still some data-stream parameters (and no `data_stream => false`)
          @logger.error "Ambiguous configuration; data stream settings are present, but data streams are not enabled", data_stream_params
          raise LogStash::ConfigurationError, "Ambiguous configuration, please set data_stream => true " +
              "or remove data stream specific settings: #{data_stream_params.keys}"
        end
        use_data_stream
      end
    end

    # @return [true,false,nil] the user's explicit `data_stream =>` choice,
    #   nil when 'auto' or unset
    def data_stream_explicit_value
      case @data_stream
      when 'true'
        return true
      when 'false'
        return false
      else
        return nil # 'auto' or not set by user
      end
    end

    # Filters +params+ down to the settings that are NOT compatible with
    # writing to a data stream (everything a DS config does not support).
    def invalid_data_stream_params(params)
      shared_params = LogStash::PluginMixins::ElasticSearch::APIConfigs::CONFIG_PARAMS.keys.map(&:to_s)
      params.reject do |name, value|
        # NOTE: intentionally do not support explicit DS configuration like:
        #  - `index => ...` identifier provided by data_stream_xxx settings
        #  - `manage_template => false` implied by not setting the parameter
        case name
        when 'action'
          value == 'create' # DS requires the `create` action, anything else is invalid
        when 'routing', 'pipeline'
          true
        when 'data_stream'
          value.to_s == 'true'
        else
          name.start_with?('data_stream_') ||
              shared_params.include?(name) ||
                  inherited_internal_config_param?(name) # 'id', 'enabled_metric' etc
        end
      end
    end

    def inherited_internal_config_param?(name)
      self.class.superclass.get_config.key?(name.to_s) # superclass -> LogStash::Outputs::Base
    end

    # first ES release that shipped data streams
    DATA_STREAMS_ORIGIN_ES_VERSION = '7.9.0'

    # @return [Gem::Version] if ES supports DS nil (or raise) otherwise
    # @raise [LogStash::Error] when the connected cluster is older than 7.9.0
    def assert_es_version_supports_data_streams
      fail 'no last_es_version' unless last_es_version # assert - should not happen
      es_version = Gem::Version.create(last_es_version)
      if es_version < Gem::Version.create(DATA_STREAMS_ORIGIN_ES_VERSION)
        @logger.error "Elasticsearch version does not support data streams, Logstash might end up writing to an index", es_version: es_version.version
        # NOTE: when switching to synchronous check from register, this should be a ConfigurationError
        raise LogStash::Error, "A data_stream configuration is only supported since Elasticsearch #{DATA_STREAMS_ORIGIN_ES_VERSION} " +
            "(detected version #{es_version.version}), please upgrade your cluster"
      end
      es_version # return truthy
    end

    DATA_STREAMS_ENABLED_BY_DEFAULT_LS_VERSION = '8.0.0'

    # when data_stream => is either 'auto' or not set
    # @param data_stream_params [Hash] the explicit `data_stream_*` user settings
    # @param valid_data_stream_config [true,false] whether no DS-incompatible params exist
    # @return [true,false] on LS >= 8.0 DS is used whenever the config is compliant;
    #   on LS 7.x it is never auto-enabled (only warned about, for forward compatibility)
    def data_stream_default(data_stream_params, valid_data_stream_config)
      ds_default = Gem::Version.create(LOGSTASH_VERSION) >= Gem::Version.create(DATA_STREAMS_ENABLED_BY_DEFAULT_LS_VERSION)

      if ds_default # LS 8.0
        return false unless valid_data_stream_config

        @logger.debug 'Configuration is data stream compliant'
        return true
      end

      # LS 7.x
      if valid_data_stream_config && !data_stream_params.any?
        @logger.warn "Configuration is data stream compliant but due backwards compatibility Logstash 7.x will not assume " +
            "writing to a data-stream, default behavior will change on Logstash 8.0 " +
            "(set `data_stream => true/false` to disable this warning)"
      end
      false
    end

    # an {event_action_tuple} replacement when a data-stream configuration is detected
    # (always uses the `create` action, as required for data stream writes)
    def data_stream_event_action_tuple(event)
      event_data = event.to_hash
      data_stream_event_sync(event_data) if data_stream_sync_fields
      EventActionTuple.new('create', common_event_params(event), event, event_data)
    end

    DATA_STREAM_SYNC_FIELDS = [ 'type', 'dataset', 'namespace' ].freeze

    # Mutates +event_data+ so its `data_stream` sub-hash reflects the resolved
    # type/dataset/namespace; with auto-routing disabled, out-of-sync fields are
    # overwritten (with a warning) by the configured values.
    # @return [Hash] the (mutated) event_data
    def data_stream_event_sync(event_data)
      data_stream = event_data['data_stream']
      if data_stream.is_a?(Hash)
        unless data_stream_auto_routing
          sync_fields = DATA_STREAM_SYNC_FIELDS.select { |name| data_stream.key?(name) && data_stream[name] != send(:"data_stream_#{name}") }
          if sync_fields.any? # these fields will need to be overwritten
            info = sync_fields.inject({}) { |info, name| info[name] = data_stream[name]; info }
            info[:event] = event_data
            @logger.warn "Some data_stream fields are out of sync, these will be updated to reflect data-stream name", info

            # NOTE: we work directly with event.to_hash data thus fine to mutate the 'data_stream' hash
            sync_fields.each { |name| data_stream[name] = nil } # fallback to ||= bellow
          end
        end
      else
        unless data_stream.nil?
          @logger.warn "Invalid 'data_stream' field type, due fields sync will overwrite", value: data_stream, event: event_data
        end
        event_data['data_stream'] = data_stream = Hash.new
      end

      data_stream['type'] ||= data_stream_type
      data_stream['dataset'] ||= data_stream_dataset
      data_stream['namespace'] ||= data_stream_namespace

      event_data
    end

    # Extends the plugin (class-level) to resolve the custom config validators
    # declared in {DataStreamSupport.included}.
    module Validator

      # @override {LogStash::Config::Mixin::validate_value} to handle custom validators
      # @param value [Array<Object>]
      # @param validator [nil,Array,Symbol]
      # @return [Array(true,Object)]: if validation is a success, a tuple containing `true` and the coerced value
      # @return [Array(false,String)]: if validation is a failure, a tuple containing `false` and the failure reason.
      def validate_value(value, validator)
        case validator
        when :dataset_identifier then validate_dataset_identifier(value)
        when :namespace_identifier then validate_namespace_identifier(value)
        else super
        end
      end

      private

      def validate_dataset_identifier(value)
        valid, value = validate_value(value, :string)
        return false, value unless valid

        validate_identifier(value)
      end

      def validate_namespace_identifier(value)
        valid, value = validate_value(value, :string)
        return false, value unless valid

        validate_identifier(value)
      end

      # Validates a DS name part: non-empty, at most +max_size+ bytes,
      # and free of characters Elasticsearch rejects in stream names.
      def validate_identifier(value, max_size = 100)
        if value.empty?
          return false, "Invalid identifier - empty string"
        end
        if value.bytesize > max_size
          return false, "Invalid identifier - too long (#{value.bytesize} bytes)"
        end
        # cannot include \, /, *, ?, ", <, >, |, ' ' (space char), ',', #, :
        if value.match? Regexp.union(INVALID_IDENTIFIER_CHARS)
          return false, "Invalid characters detected #{INVALID_IDENTIFIER_CHARS.inspect} are not allowed"
        end
        return true, value
      end

      INVALID_IDENTIFIER_CHARS = [ '\\', '/', '*', '?', '"', '<', '>', '|', ' ', ',', '#', ':' ]
      private_constant :INVALID_IDENTIFIER_CHARS

    end

  end
end end end
|
@@ -1,6 +1,4 @@
|
|
1
1
|
require "logstash/outputs/elasticsearch"
|
2
|
-
require "cabin"
|
3
|
-
require "base64"
|
4
2
|
require 'logstash/outputs/elasticsearch/http_client/pool'
|
5
3
|
require 'logstash/outputs/elasticsearch/http_client/manticore_adapter'
|
6
4
|
require 'cgi'
|
@@ -80,12 +78,16 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
80
78
|
|
81
79
|
def template_install(name, template, force=false)
|
82
80
|
if template_exists?(name) && !force
|
83
|
-
@logger.debug("Found existing Elasticsearch template
|
81
|
+
@logger.debug("Found existing Elasticsearch template, skipping template management", name: name)
|
84
82
|
return
|
85
83
|
end
|
86
84
|
template_put(name, template)
|
87
85
|
end
|
88
86
|
|
87
|
+
def last_es_version
|
88
|
+
@pool.last_es_version
|
89
|
+
end
|
90
|
+
|
89
91
|
def maximum_seen_major_version
|
90
92
|
@pool.maximum_seen_major_version
|
91
93
|
end
|
@@ -384,7 +386,7 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
384
386
|
|
385
387
|
def template_put(name, template)
|
386
388
|
path = "#{template_endpoint}/#{name}"
|
387
|
-
logger.info("Installing
|
389
|
+
logger.info("Installing Elasticsearch template", name: name)
|
388
390
|
@pool.put(path, nil, LogStash::Json.dump(template))
|
389
391
|
end
|
390
392
|
|
@@ -401,13 +403,13 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
401
403
|
|
402
404
|
# Create a new rollover alias
|
403
405
|
def rollover_alias_put(alias_name, alias_definition)
|
404
|
-
logger.info("Creating rollover alias #{alias_name}")
|
405
406
|
begin
|
406
407
|
@pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
|
408
|
+
logger.info("Created rollover alias", name: alias_name)
|
407
409
|
# If the rollover alias already exists, ignore the error that comes back from Elasticsearch
|
408
410
|
rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
|
409
411
|
if e.response_code == 400
|
410
|
-
logger.info("Rollover
|
412
|
+
logger.info("Rollover alias already exists, skipping", name: alias_name)
|
411
413
|
return
|
412
414
|
end
|
413
415
|
raise e
|
@@ -428,7 +430,7 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
428
430
|
|
429
431
|
def ilm_policy_put(name, policy)
|
430
432
|
path = "_ilm/policy/#{name}"
|
431
|
-
logger.info("Installing ILM policy #{policy}
|
433
|
+
logger.info("Installing ILM policy #{policy}", name: name)
|
432
434
|
@pool.put(path, nil, LogStash::Json.dump(policy))
|
433
435
|
end
|
434
436
|
|
@@ -1,3 +1,4 @@
|
|
1
|
+
require "concurrent/atomic/atomic_reference"
|
1
2
|
require "logstash/plugin_mixins/elasticsearch/noop_license_checker"
|
2
3
|
|
3
4
|
module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
@@ -71,6 +72,8 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
71
72
|
@stopping = false
|
72
73
|
|
73
74
|
@license_checker = options[:license_checker] || LogStash::PluginMixins::ElasticSearch::NoopLicenseChecker::INSTANCE
|
75
|
+
|
76
|
+
@last_es_version = Concurrent::AtomicReference.new
|
74
77
|
end
|
75
78
|
|
76
79
|
def start
|
@@ -118,12 +121,6 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
118
121
|
@state_mutex.synchronize { @url_info }
|
119
122
|
end
|
120
123
|
|
121
|
-
def maximum_seen_major_version
|
122
|
-
@state_mutex.synchronize do
|
123
|
-
@maximum_seen_major_version
|
124
|
-
end
|
125
|
-
end
|
126
|
-
|
127
124
|
def urls
|
128
125
|
url_info.keys
|
129
126
|
end
|
@@ -179,15 +176,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
179
176
|
@logger.warn("Sniff returned no nodes! Will not update hosts.")
|
180
177
|
return nil
|
181
178
|
else
|
182
|
-
|
183
|
-
when 5, 6, 7, 8
|
184
|
-
sniff_5x_and_above(nodes)
|
185
|
-
when 2, 1
|
186
|
-
sniff_2x_1x(nodes)
|
187
|
-
else
|
188
|
-
@logger.warn("Could not determine version for nodes in ES cluster!")
|
189
|
-
return nil
|
190
|
-
end
|
179
|
+
sniff(nodes)
|
191
180
|
end
|
192
181
|
end
|
193
182
|
|
@@ -195,7 +184,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
195
184
|
version_string.split('.').first.to_i
|
196
185
|
end
|
197
186
|
|
198
|
-
def
|
187
|
+
def sniff(nodes)
|
199
188
|
nodes.map do |id,info|
|
200
189
|
# Skip master-only nodes
|
201
190
|
next if info["roles"] && info["roles"] == ["master"]
|
@@ -211,24 +200,6 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
211
200
|
end
|
212
201
|
end
|
213
202
|
|
214
|
-
def sniff_2x_1x(nodes)
|
215
|
-
nodes.map do |id,info|
|
216
|
-
# TODO Make sure this works with shield. Does that listed
|
217
|
-
# stuff as 'https_address?'
|
218
|
-
|
219
|
-
addr_str = info['http_address'].to_s
|
220
|
-
next unless addr_str # Skip hosts with HTTP disabled
|
221
|
-
|
222
|
-
# Only connect to nodes that serve data
|
223
|
-
# this will skip connecting to client, tribe, and master only nodes
|
224
|
-
# Note that if 'attributes' is NOT set, then that's just a regular node
|
225
|
-
# with master + data + client enabled, so we allow that
|
226
|
-
attributes = info['attributes']
|
227
|
-
next if attributes && attributes['data'] == 'false'
|
228
|
-
address_str_to_uri(addr_str)
|
229
|
-
end.compact
|
230
|
-
end
|
231
|
-
|
232
203
|
def stop_sniffer
|
233
204
|
@sniffer.join if @sniffer
|
234
205
|
end
|
@@ -252,11 +223,12 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
252
223
|
response = perform_request_to_url(url, :get, LICENSE_PATH)
|
253
224
|
LogStash::Json.load(response.body)
|
254
225
|
rescue => e
|
255
|
-
logger.error("Unable to get license information", url: url.sanitized.to_s,
|
226
|
+
logger.error("Unable to get license information", url: url.sanitized.to_s, exception: e.class, message: e.message)
|
256
227
|
{}
|
257
228
|
end
|
258
229
|
|
259
230
|
def health_check_request(url)
|
231
|
+
logger.debug("Running health check to see if an ES connection is working", url: url.sanitized.to_s, path: @healthcheck_path)
|
260
232
|
perform_request_to_url(url, :head, @healthcheck_path)
|
261
233
|
end
|
262
234
|
|
@@ -264,29 +236,20 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
264
236
|
# Try to keep locking granularity low such that we don't affect IO...
|
265
237
|
@state_mutex.synchronize { @url_info.select {|url,meta| meta[:state] != :alive } }.each do |url,meta|
|
266
238
|
begin
|
267
|
-
logger.debug("Running health check to see if an Elasticsearch connection is working",
|
268
|
-
:healthcheck_url => url, :path => @healthcheck_path)
|
269
239
|
health_check_request(url)
|
270
240
|
# If no exception was raised it must have succeeded!
|
271
|
-
logger.warn("Restored connection to ES instance", :
|
241
|
+
logger.warn("Restored connection to ES instance", url: url.sanitized.to_s)
|
272
242
|
# We reconnected to this node, check its ES version
|
273
243
|
es_version = get_es_version(url)
|
274
244
|
@state_mutex.synchronize do
|
275
245
|
meta[:version] = es_version
|
276
|
-
|
277
|
-
if !@maximum_seen_major_version
|
278
|
-
@logger.info("ES Output version determined", :es_version => major)
|
279
|
-
set_new_major_version(major)
|
280
|
-
elsif major > @maximum_seen_major_version
|
281
|
-
@logger.warn("Detected a node with a higher major version than previously observed. This could be the result of an elasticsearch cluster upgrade.", :previous_major => @maximum_seen_major_version, :new_major => major, :node_url => url.sanitized.to_s)
|
282
|
-
set_new_major_version(major)
|
283
|
-
end
|
246
|
+
set_last_es_version(es_version, url)
|
284
247
|
|
285
248
|
alive = @license_checker.appropriate_license?(self, url)
|
286
249
|
meta[:state] = alive ? :alive : :dead
|
287
250
|
end
|
288
251
|
rescue HostUnreachableError, BadResponseCodeError => e
|
289
|
-
logger.warn("Attempted to resurrect connection to dead ES instance, but got an error
|
252
|
+
logger.warn("Attempted to resurrect connection to dead ES instance, but got an error", url: url.sanitized.to_s, exception: e.class, message: e.message)
|
290
253
|
end
|
291
254
|
end
|
292
255
|
end
|
@@ -355,9 +318,7 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
355
318
|
end
|
356
319
|
|
357
320
|
if state_changes[:removed].size > 0 || state_changes[:added].size > 0
|
358
|
-
|
359
|
-
logger.info("Elasticsearch pool URLs updated", :changes => state_changes)
|
360
|
-
end
|
321
|
+
logger.info? && logger.info("Elasticsearch pool URLs updated", :changes => state_changes)
|
361
322
|
end
|
362
323
|
|
363
324
|
# Run an inline healthcheck anytime URLs are updated
|
@@ -371,10 +332,6 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
371
332
|
@state_mutex.synchronize { @url_info.size }
|
372
333
|
end
|
373
334
|
|
374
|
-
def es_versions
|
375
|
-
@state_mutex.synchronize { @url_info.size }
|
376
|
-
end
|
377
|
-
|
378
335
|
def add_url(url)
|
379
336
|
@url_info[url] ||= empty_url_meta
|
380
337
|
end
|
@@ -459,22 +416,52 @@ module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
|
459
416
|
|
460
417
|
def return_connection(url)
|
461
418
|
@state_mutex.synchronize do
|
462
|
-
|
463
|
-
|
464
|
-
end
|
419
|
+
info = @url_info[url]
|
420
|
+
info[:in_use] -= 1 if info # Guard against the condition where the connection has already been deleted
|
465
421
|
end
|
466
422
|
end
|
467
423
|
|
468
424
|
def get_es_version(url)
|
469
425
|
request = perform_request_to_url(url, :get, ROOT_URI_PATH)
|
470
|
-
LogStash::Json.load(request.body)["version"]["number"]
|
426
|
+
LogStash::Json.load(request.body)["version"]["number"] # e.g. "7.10.0"
|
427
|
+
end
|
428
|
+
|
429
|
+
def last_es_version
|
430
|
+
@last_es_version.get
|
431
|
+
end
|
432
|
+
|
433
|
+
def maximum_seen_major_version
|
434
|
+
@state_mutex.synchronize { @maximum_seen_major_version }
|
435
|
+
end
|
436
|
+
|
437
|
+
private
|
438
|
+
|
439
|
+
# @private executing within @state_mutex
|
440
|
+
def set_last_es_version(version, url)
|
441
|
+
@last_es_version.set(version)
|
442
|
+
|
443
|
+
major = major_version(version)
|
444
|
+
if @maximum_seen_major_version.nil?
|
445
|
+
@logger.info("Elasticsearch version determined (#{version})", es_version: major)
|
446
|
+
set_maximum_seen_major_version(major)
|
447
|
+
elsif major > @maximum_seen_major_version
|
448
|
+
warn_on_higher_major_version(major, url)
|
449
|
+
@maximum_seen_major_version = major
|
450
|
+
end
|
471
451
|
end
|
472
452
|
|
473
|
-
def
|
474
|
-
|
475
|
-
|
476
|
-
@logger.warn("Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type", :es_version => @maximum_seen_major_version)
|
453
|
+
def set_maximum_seen_major_version(major)
|
454
|
+
if major >= 6
|
455
|
+
@logger.warn("Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type", es_version: major)
|
477
456
|
end
|
457
|
+
@maximum_seen_major_version = major
|
478
458
|
end
|
459
|
+
|
460
|
+
def warn_on_higher_major_version(major, url)
|
461
|
+
@logger.warn("Detected a node with a higher major version than previously observed, " +
|
462
|
+
"this could be the result of an Elasticsearch cluster upgrade",
|
463
|
+
previous_major: @maximum_seen_major_version, new_major: major, node_url: url.sanitized.to_s)
|
464
|
+
end
|
465
|
+
|
479
466
|
end
|
480
467
|
end; end; end; end;
|