logstash-output-elasticsearch-test 11.16.0-x86_64-linux
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/CHANGELOG.md +649 -0
- data/CONTRIBUTORS +34 -0
- data/Gemfile +16 -0
- data/LICENSE +202 -0
- data/NOTICE.TXT +5 -0
- data/README.md +106 -0
- data/docs/index.asciidoc +1369 -0
- data/lib/logstash/outputs/elasticsearch/data_stream_support.rb +282 -0
- data/lib/logstash/outputs/elasticsearch/default-ilm-policy.json +14 -0
- data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +155 -0
- data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +534 -0
- data/lib/logstash/outputs/elasticsearch/http_client.rb +497 -0
- data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +201 -0
- data/lib/logstash/outputs/elasticsearch/ilm.rb +92 -0
- data/lib/logstash/outputs/elasticsearch/license_checker.rb +52 -0
- data/lib/logstash/outputs/elasticsearch/template_manager.rb +131 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-6x.json +45 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-7x.json +44 -0
- data/lib/logstash/outputs/elasticsearch/templates/ecs-disabled/elasticsearch-8x.json +50 -0
- data/lib/logstash/outputs/elasticsearch.rb +699 -0
- data/lib/logstash/plugin_mixins/elasticsearch/api_configs.rb +237 -0
- data/lib/logstash/plugin_mixins/elasticsearch/common.rb +409 -0
- data/lib/logstash/plugin_mixins/elasticsearch/noop_license_checker.rb +9 -0
- data/logstash-output-elasticsearch.gemspec +40 -0
- data/spec/es_spec_helper.rb +225 -0
- data/spec/fixtures/_nodes/6x.json +81 -0
- data/spec/fixtures/_nodes/7x.json +92 -0
- data/spec/fixtures/htpasswd +2 -0
- data/spec/fixtures/license_check/active.json +16 -0
- data/spec/fixtures/license_check/inactive.json +5 -0
- data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
- data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
- data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
- data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
- data/spec/fixtures/template-with-policy-es6x.json +48 -0
- data/spec/fixtures/template-with-policy-es7x.json +45 -0
- data/spec/fixtures/template-with-policy-es8x.json +50 -0
- data/spec/fixtures/test_certs/ca.crt +29 -0
- data/spec/fixtures/test_certs/ca.der.sha256 +1 -0
- data/spec/fixtures/test_certs/ca.key +51 -0
- data/spec/fixtures/test_certs/renew.sh +13 -0
- data/spec/fixtures/test_certs/test.crt +30 -0
- data/spec/fixtures/test_certs/test.der.sha256 +1 -0
- data/spec/fixtures/test_certs/test.key +51 -0
- data/spec/fixtures/test_certs/test.p12 +0 -0
- data/spec/fixtures/test_certs/test_invalid.crt +36 -0
- data/spec/fixtures/test_certs/test_invalid.key +51 -0
- data/spec/fixtures/test_certs/test_invalid.p12 +0 -0
- data/spec/fixtures/test_certs/test_self_signed.crt +32 -0
- data/spec/fixtures/test_certs/test_self_signed.key +54 -0
- data/spec/fixtures/test_certs/test_self_signed.p12 +0 -0
- data/spec/integration/outputs/compressed_indexing_spec.rb +70 -0
- data/spec/integration/outputs/create_spec.rb +67 -0
- data/spec/integration/outputs/data_stream_spec.rb +68 -0
- data/spec/integration/outputs/delete_spec.rb +63 -0
- data/spec/integration/outputs/ilm_spec.rb +534 -0
- data/spec/integration/outputs/index_spec.rb +421 -0
- data/spec/integration/outputs/index_version_spec.rb +98 -0
- data/spec/integration/outputs/ingest_pipeline_spec.rb +75 -0
- data/spec/integration/outputs/metrics_spec.rb +66 -0
- data/spec/integration/outputs/no_es_on_startup_spec.rb +78 -0
- data/spec/integration/outputs/painless_update_spec.rb +99 -0
- data/spec/integration/outputs/parent_spec.rb +94 -0
- data/spec/integration/outputs/retry_spec.rb +182 -0
- data/spec/integration/outputs/routing_spec.rb +61 -0
- data/spec/integration/outputs/sniffer_spec.rb +94 -0
- data/spec/integration/outputs/templates_spec.rb +133 -0
- data/spec/integration/outputs/unsupported_actions_spec.rb +75 -0
- data/spec/integration/outputs/update_spec.rb +114 -0
- data/spec/spec_helper.rb +10 -0
- data/spec/support/elasticsearch/api/actions/delete_ilm_policy.rb +19 -0
- data/spec/support/elasticsearch/api/actions/get_alias.rb +18 -0
- data/spec/support/elasticsearch/api/actions/get_ilm_policy.rb +18 -0
- data/spec/support/elasticsearch/api/actions/put_alias.rb +24 -0
- data/spec/support/elasticsearch/api/actions/put_ilm_policy.rb +25 -0
- data/spec/unit/http_client_builder_spec.rb +185 -0
- data/spec/unit/outputs/elasticsearch/data_stream_support_spec.rb +612 -0
- data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +151 -0
- data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +501 -0
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +339 -0
- data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +189 -0
- data/spec/unit/outputs/elasticsearch_proxy_spec.rb +103 -0
- data/spec/unit/outputs/elasticsearch_spec.rb +1573 -0
- data/spec/unit/outputs/elasticsearch_ssl_spec.rb +197 -0
- data/spec/unit/outputs/error_whitelist_spec.rb +56 -0
- data/spec/unit/outputs/license_check_spec.rb +57 -0
- metadata +423 -0
@@ -0,0 +1,534 @@
|
|
1
|
+
require "concurrent/atomic/atomic_reference"
|
2
|
+
require "logstash/plugin_mixins/elasticsearch/noop_license_checker"
|
3
|
+
|
4
|
+
module LogStash; module Outputs; class ElasticSearch; class HttpClient;
|
5
|
+
class Pool
|
6
|
+
# Raised when the pool has no live connection to hand out.
class NoConnectionAvailableError < Error; end

# Raised when Elasticsearch answered with a non-2xx HTTP status code.
# Exposes the offending URL plus request/response bodies for diagnostics.
class BadResponseCodeError < Error
  attr_reader :url, :response_code, :request_body, :response_body

  # @param response_code [Integer] HTTP status returned by the node
  # @param url [LogStash::Util::SafeURI] endpoint that was contacted
  # @param request_body [String, nil] payload that was sent, if any
  # @param response_body [String, nil] payload that came back, if any
  def initialize(response_code, url, request_body, response_body)
    super("Got response code '#{response_code}' contacting Elasticsearch at URL '#{url}'")

    @url = url
    @response_code = response_code
    @response_body = response_body
    @request_body = request_body
  end
end
|
20
|
+
# Raised when a node could not be reached at all (connect/transport level).
# Wraps the underlying exception so callers can inspect the root cause.
class HostUnreachableError < Error
  attr_reader :original_error, :url

  # @param original_error [Exception] the low-level error that occurred
  # @param url [LogStash::Util::SafeURI] node that was unreachable
  def initialize(original_error, url)
    super("Elasticsearch Unreachable: [#{url}][#{original_error.class}] #{original_error.message}")

    @url = url
    @original_error = original_error
  end
end
|
31
|
+
|
32
|
+
# Pool configuration accessors; read by the HTTP client and by the pool specs.
attr_reader :logger, :adapter, :sniffing, :sniffer_delay, :resurrect_delay, :healthcheck_path, :sniffing_path, :bulk_path
attr_reader :license_checker # license_checker is used by the pool specs

ROOT_URI_PATH = '/'.freeze
LICENSE_PATH = '/_license'.freeze

# Version bands used by #elasticsearch? to choose the product check:
# 6.x uses the response tagline, [7.0, 7.14) tagline + 'default' build
# flavor, and >= 7.14 the X-Elastic-Product response header.
VERSION_6_TO_7 = ::Gem::Requirement.new([">= 6.0.0", "< 7.0.0"])
VERSION_7_TO_7_14 = ::Gem::Requirement.new([">= 7.0.0", "< 7.14.0"])

# Defaults merged under caller-supplied options in #initialize.
DEFAULT_OPTIONS = {
  :healthcheck_path => ROOT_URI_PATH,
  :sniffing_path => "/_nodes/http",
  :bulk_path => "/_bulk",
  :scheme => 'http',
  :resurrect_delay => 5,
  :sniffing => false,
  :sniffer_delay => 10,
}.freeze

# build_flavor value reported by Elasticsearch serverless; see #serverless?.
BUILD_FLAVOUR_SERVERLESS = 'serverless'.freeze
|
52
|
+
|
53
|
+
# @param logger [Logger] plugin logger used for all pool diagnostics
# @param adapter [#perform_request, #close] HTTP adapter (e.g. the Manticore adapter)
# @param initial_urls [Array<LogStash::Util::SafeURI>] seed node URLs
# @param options [Hash] pool options; :url_normalizer is mandatory,
#   :metric and :license_checker are optional, the rest default per
#   DEFAULT_OPTIONS.
# @raise [ArgumentError] when no :url_normalizer is supplied
def initialize(logger, adapter, initial_urls=[], options={})
  @logger = logger
  @adapter = adapter
  @metric = options[:metric]
  @initial_urls = initial_urls

  raise ArgumentError, "No URL Normalizer specified!" unless options[:url_normalizer]
  @url_normalizer = options[:url_normalizer]
  DEFAULT_OPTIONS.merge(options).tap do |merged|
    @bulk_path = merged[:bulk_path]
    @sniffing_path = merged[:sniffing_path]
    @healthcheck_path = merged[:healthcheck_path]
    @resurrect_delay = merged[:resurrect_delay]
    @sniffing = merged[:sniffing]
    @sniffer_delay = merged[:sniffer_delay]
  end

  # Used for all concurrent operations in this class
  @state_mutex = Mutex.new

  # Holds metadata about all URLs
  @url_info = {}
  @stopping = false

  @license_checker = options[:license_checker] || LogStash::PluginMixins::ElasticSearch::NoopLicenseChecker::INSTANCE

  # Written by healthchecks, read concurrently by request paths; hence atomics.
  @last_es_version = Concurrent::AtomicReference.new
  @build_flavour = Concurrent::AtomicReference.new
end
|
82
|
+
|
83
|
+
# Bring the pool online: register the seed URLs (which runs an inline
# healthcheck) and start the background threads.
def start
  update_initial_urls
  start_resurrectionist
  start_sniffer if @sniffing
end

# Seed the pool with the URLs supplied at construction time.
def update_initial_urls
  update_urls(@initial_urls)
end

# Orderly shutdown: flag stop, join the background threads, wait until no
# connection is checked out, then close the adapter.
def close
  @state_mutex.synchronize { @stopping = true }

  logger.debug "Stopping sniffer"
  stop_sniffer

  logger.debug "Stopping resurrectionist"
  stop_resurrectionist

  logger.debug "Waiting for in use manticore connections"
  wait_for_in_use_connections

  logger.debug("Closing adapter #{@adapter}")
  @adapter.close
end

# Poll once per second until no URL has a positive :in_use counter.
def wait_for_in_use_connections
  until in_use_connections.empty?
    logger.info "Blocked on shutdown to in use connections #{@state_mutex.synchronize {@url_info}}"
    sleep 1
  end
end

# Metadata entries whose connections are currently checked out.
def in_use_connections
  @state_mutex.synchronize { @url_info.values.select {|v| v[:in_use] > 0 } }
end
|
119
|
+
|
120
|
+
# Number of URLs currently in the :alive state.
def alive_urls_count
  @state_mutex.synchronize { @url_info.values.select {|v| v[:state] == :alive }.count }
end

# NOTE(review): returns the live internal hash (not a copy) after taking the
# mutex; callers must treat the result as read-only.
def url_info
  @state_mutex.synchronize { @url_info }
end

# All URLs currently known to the pool.
def urls
  url_info.keys
end
|
131
|
+
|
132
|
+
# Run the given block roughly every +delay+ seconds until #close flags
# @stopping. Errors raised by the block are logged and swallowed so the
# background thread never dies.
#
# @param task_name [String] label used in warning logs
# @param delay [Numeric] minimum seconds between invocations of the block
def until_stopped(task_name, delay)
  last_done = Time.now
  until @state_mutex.synchronize { @stopping }
    begin
      now = Time.now
      if (now - last_done) >= delay
        last_done = now
        yield
      end
      sleep 1
    rescue => e
      logger.warn(
        "Error while performing #{task_name}",
        :error_message => e.message,
        :class => e.class.name,
        :backtrace => e.backtrace
      )
    end
  end
end

# Background thread that periodically re-discovers cluster nodes via sniffing.
def start_sniffer
  @sniffer = Thread.new do
    until_stopped("sniffing", sniffer_delay) do
      begin
        sniff!
      rescue NoConnectionAvailableError => e
        @state_mutex.synchronize { # Synchronize around @url_info
          logger.warn("Elasticsearch output attempted to sniff for new connections but cannot. No living connections are detected. Pool contains the following current URLs", :url_info => @url_info) }
      end
    end
  end
end

# Sniffs the cluster then updates the internal URLs
def sniff!
  update_urls(check_sniff)
end

# publish_address formats: ES 1.x wrapped them in brackets, 2.x and above
# report bare "hostname/ip:port" strings.
ES1_SNIFF_RE_URL = /\[([^\/]*)?\/?([^:]*):([0-9]+)\]/
ES2_AND_ABOVE_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
|
173
|
+
# Sniffs and returns the results. Does not update internal URLs!
|
174
|
+
# Sniffs and returns the results. Does not update internal URLs!
# Performs one GET against the sniffing endpoint, counts it in the metric,
# and returns the parsed node URLs — or nil when no nodes were reported.
def check_sniff
  _, url_meta, resp = perform_request(:get, @sniffing_path)
  @metric.increment(:sniff_requests)
  parsed = LogStash::Json.load(resp.body)
  nodes = parsed['nodes']
  unless nodes && !nodes.empty?
    @logger.warn("Sniff returned no nodes! Will not update hosts.")
    return nil
  end
  sniff(nodes)
end
|
186
|
+
|
187
|
+
# Extract the major component of a dotted version string, e.g. "8.10.4" -> 8.
# Non-numeric or empty input yields 0 (String#to_i semantics).
def major_version(version_string)
  major, = version_string.split('.')
  major.to_i
end
|
190
|
+
|
191
|
+
# Convert a sniffed 'nodes' hash into a list of SafeURIs, dropping
# master-only nodes and any node without an HTTP publish address.
def sniff(nodes)
  sniffed = []
  nodes.each do |node_id, info|
    # Skip master-only nodes
    next if info["roles"] && info["roles"] == ["master"]
    next unless info["http"]
    sniffed << address_str_to_uri(info["http"]["publish_address"])
  end
  sniffed.compact
end
|
198
|
+
|
199
|
+
# Parse a node publish_address string ("host/ip:port" or the bracketed 1.x
# form) into a SafeURI, preferring the hostname capture over the second
# segment when both are present. Returns nil when neither pattern matches.
# NOTE(review): when the first capture group is absent, matches[1] is nil and
# .empty? would raise — presumably real publish addresses always populate it;
# TODO confirm.
def address_str_to_uri(addr_str)
  matches = addr_str.match(ES1_SNIFF_RE_URL) || addr_str.match(ES2_AND_ABOVE_SNIFF_RE_URL)
  if matches
    host = matches[1].empty? ? matches[2] : matches[1]
    ::LogStash::Util::SafeURI.new("#{host}:#{matches[3]}")
  end
end
|
206
|
+
|
207
|
+
# Wait for the sniffer thread to finish, if one was ever started.
def stop_sniffer
  @sniffer.join unless @sniffer.nil?
end

# true/false when a sniffer thread exists; nil when sniffing never started.
def sniffer_alive?
  @sniffer && @sniffer.alive?
end
|
214
|
+
|
215
|
+
# Background thread that periodically retries dead connections (skipping the
# register-phase product check — see #healthcheck!).
def start_resurrectionist
  @resurrectionist = Thread.new do
    until_stopped("resurrection", @resurrect_delay) do
      healthcheck!(false)
    end
  end
end

# Retrieve ES node license information
# @param url [LogStash::Util::SafeURI] ES node URL
# @return [Hash] deserialized license document or empty Hash upon any error
def get_license(url)
  response = perform_request_to_url(url, :get, LICENSE_PATH)
  LogStash::Json.load(response.body)
rescue => e
  # Best-effort: a failed license lookup is logged, never raised.
  logger.error("Unable to get license information", url: url.sanitized.to_s, exception: e.class, message: e.message)
  {}
end
|
233
|
+
|
234
|
+
# Issue a HEAD request to the configured healthcheck path.
# Succeeds silently on any 2xx; raises BadResponseCodeError otherwise.
def health_check_request(url)
  response = perform_request_to_url(url, :head, @healthcheck_path)
  return if (200..299).cover?(response.code)
  raise BadResponseCodeError.new(response.code, url, nil, response.body)
end
|
238
|
+
|
239
|
+
# Probe every URL that is not currently :alive and promote the ones that
# respond. For each candidate: run a HEAD healthcheck, optionally verify the
# endpoint is genuine Elasticsearch (only during the initial register phase),
# then record its version/build flavour and let the license checker decide
# whether the node may be used. Unreachable nodes and bad responses are
# logged and left for the next resurrectionist pass.
#
# @param register_phase [Boolean] true on the initial registration pass;
#   false when invoked periodically from the resurrectionist thread.
def healthcheck!(register_phase = true)
  # Try to keep locking granularity low such that we don't affect IO...
  @state_mutex.synchronize { @url_info.select {|url,meta| meta[:state] != :alive } }.each do |url,meta|
    begin
      logger.debug("Running health check to see if an Elasticsearch connection is working",
                    :healthcheck_url => url.sanitized.to_s, :path => @healthcheck_path)
      health_check_request(url)

      # when called from resurrectionist skip the product check done during register phase
      if register_phase
        if !elasticsearch?(url)
          raise LogStash::ConfigurationError, "Could not connect to a compatible version of Elasticsearch"
        end
      end
      # If no exception was raised it must have succeeded!
      logger.warn("Restored connection to ES instance", url: url.sanitized.to_s)
      # We reconnected to this node, check its ES version
      # NOTE(review): get_es_version returns nil on a non-2xx response, which
      # would make the fetch below raise — presumably the healthcheck above
      # makes that unlikely; TODO confirm.
      version_info = get_es_version(url)
      es_version = version_info.fetch('number', nil)
      build_flavour = version_info.fetch('build_flavor', nil)

      if es_version.nil?
        logger.warn("Failed to retrieve Elasticsearch version data from connected endpoint, connection aborted", :url => url.sanitized.to_s)
        next
      end
      @state_mutex.synchronize do
        meta[:version] = es_version
        set_last_es_version(es_version, url)
        set_build_flavour(build_flavour)

        # Only an appropriate license makes the node usable.
        alive = @license_checker.appropriate_license?(self, url)
        meta[:state] = alive ? :alive : :dead
      end
    rescue HostUnreachableError, BadResponseCodeError => e
      logger.warn("Attempted to resurrect connection to dead ES instance, but got an error", url: url.sanitized.to_s, exception: e.class, message: e.message)
    end
  end
end
|
277
|
+
|
278
|
+
# Determine whether the endpoint at +url+ is a genuine, supported
# Elasticsearch (>= 6.0), using the product check appropriate for each band:
#   * [6.0, 7.0)  -> response tagline
#   * [7.0, 7.14) -> 'default' build flavor plus tagline
#   * >= 7.14     -> 'X-Elastic-Product: Elasticsearch' response header
# Returns false on 401/403 (credential problems are handled elsewhere) and
# on any failure to retrieve or parse version data; never raises to callers
# other than re-raising unexpected BadResponseCodeErrors.
def elasticsearch?(url)
  begin
    response = perform_request_to_url(url, :get, ROOT_URI_PATH)
  rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
    # FIX: the request raised, so the local `response` is nil here — reading
    # `response.code` raised NoMethodError and the outer rescue swallowed it,
    # turning every bad status into `false`. The status must come from the
    # exception itself so non-401/403 codes are re-raised as intended.
    return false if e.response_code == 401 || e.response_code == 403
    raise e
  end

  version_info = LogStash::Json.load(response.body)
  return false if version_info['version'].nil?

  version = ::Gem::Version.new(version_info["version"]['number'])
  return false if version < ::Gem::Version.new('6.0.0')

  if VERSION_6_TO_7.satisfied_by?(version)
    return valid_tagline?(version_info)
  elsif VERSION_7_TO_7_14.satisfied_by?(version)
    build_flavor = version_info["version"]['build_flavor']
    return false if build_flavor.nil? || build_flavor != 'default' || !valid_tagline?(version_info)
  else
    # case >= 7.14: genuine Elasticsearch stamps this header on responses
    lower_headers = response.headers.transform_keys {|key| key.to_s.downcase }
    product_header = lower_headers['x-elastic-product']
    return false if product_header != 'Elasticsearch'
  end
  return true
rescue => e
  logger.error("Unable to retrieve Elasticsearch version", url: url.sanitized.to_s, exception: e.class, message: e.message)
  false
end
|
308
|
+
|
309
|
+
# Genuine Elasticsearch reports this exact tagline in its root payload.
# @return [Boolean]
def valid_tagline?(version_info)
  "You Know, for Search" == version_info['tagline']
end
|
313
|
+
|
314
|
+
# Wait for the resurrectionist thread to finish, if one was ever started.
def stop_resurrectionist
  @resurrectionist.join unless @resurrectionist.nil?
end

# true/false when a resurrectionist thread exists; nil when never started.
def resurrectionist_alive?
  @resurrectionist && @resurrectionist.alive?
end
|
321
|
+
|
322
|
+
# Execute +method+ against +path+ on some live connection from the pool.
# @return [Array(url, url_meta, response)] so callers can attribute the
#   response to the node that produced it.
def perform_request(method, path, params={}, body=nil)
  with_connection do |url, url_meta|
    resp = perform_request_to_url(url, method, path, params, body)
    [url, url_meta, resp]
  end
end

# Convenience verb helpers (#get, #put, ...) that return only the response.
[:get, :put, :post, :delete, :patch, :head].each do |method|
  define_method(method) do |path, params={}, body=nil|
    _, _, response = perform_request(method, path, params, body)
    response
  end
end

# Low-level request against a specific URL, bypassing pool selection.
def perform_request_to_url(url, method, path, params={}, body=nil)
  @adapter.perform_request(url, method, path, params, body)
end
|
339
|
+
|
340
|
+
# Run +uri+ through the configured normalizer, enforcing that the result is
# a SafeURI (the rest of the pool relies on SafeURI semantics).
def normalize_url(uri)
  normalized = @url_normalizer.call(uri)
  unless normalized.is_a?(::LogStash::Util::SafeURI)
    raise "URL Normalizer returned a '#{normalized.class}' rather than a SafeURI! This shouldn't happen!"
  end
  normalized
end
|
347
|
+
|
348
|
+
# Replace the pool's URL set with +new_urls+ (after normalization), logging
# additions and removals, then run an inline healthcheck so new hosts become
# usable immediately. A nil argument (e.g. a failed sniff) is a no-op.
def update_urls(new_urls)
  return if new_urls.nil?

  # Normalize URLs
  new_urls = new_urls.map(&method(:normalize_url))

  # Used for logging nicely
  state_changes = {:removed => [], :added => []}
  @state_mutex.synchronize do
    # Add new connections
    new_urls.each do |url|
      # URI objects don't have real hash equality! So, since this isn't perf sensitive we do a linear scan
      unless @url_info.keys.include?(url)
        state_changes[:added] << url
        add_url(url)
      end
    end

    # Delete connections not in the new list
    @url_info.each do |url,_|
      unless new_urls.include?(url)
        state_changes[:removed] << url
        remove_url(url)
      end
    end
  end

  if state_changes[:removed].size > 0 || state_changes[:added].size > 0
    logger.info? && logger.info("Elasticsearch pool URLs updated", :changes => state_changes)
  end

  # Run an inline healthcheck anytime URLs are updated
  # This guarantees that during startup / post-startup
  # sniffing we don't have idle periods waiting for the
  # periodic sniffer to allow new hosts to come online
  healthcheck!
end
|
385
|
+
|
386
|
+
# Number of URLs currently tracked by the pool.
def size
  @state_mutex.synchronize { @url_info.length }
end

# Register +url+ with fresh metadata unless it is already tracked.
# Callers must hold @state_mutex.
def add_url(url)
  @url_info[url] ||= empty_url_meta
end

# Drop +url+ from the pool. Callers must hold @state_mutex.
def remove_url(url)
  @url_info.delete(url)
end

# Fresh metadata for a newly-registered URL: not checked out, state unknown.
def empty_url_meta
  { :in_use => 0, :state => :unknown }
end
|
404
|
+
|
405
|
+
# Check out the least-used live connection, yield [url, meta] to the block,
# and always check it back in — the ensure clause runs even when the block
# (or the checkout itself) raises, and return_connection tolerates a nil url.
def with_connection
  url, url_meta = get_connection

  # Custom error class used here so that users may retry attempts if they receive this error
  # should they choose to
  raise NoConnectionAvailableError, "No Available connections" unless url
  yield url, url_meta
rescue HostUnreachableError => e
  # Mark the connection as dead here since this is likely not transient
  mark_dead(url, e)
  raise e
rescue BadResponseCodeError => e
  # These aren't discarded from the pool because these are often very transient
  # errors
  raise e
ensure
  return_connection(url)
end

# Transition +url+ to :dead, recording the triggering error and a timestamp
# so the resurrectionist can later attempt recovery.
def mark_dead(url, error)
  @state_mutex.synchronize do
    meta = @url_info[url]
    # In case a sniff happened removing the metadata just before there's nothing to mark
    # This is an extreme edge case, but it can happen!
    return unless meta
    logger.warn("Marking url as dead. Last error: [#{error.class}] #{error.message}",
                :url => url, :error_message => error.message, :error_class => error.class.name)
    meta[:state] = :dead
    meta[:last_error] = error
    meta[:last_errored_at] = Time.now
  end
end
|
437
|
+
|
438
|
+
# Metadata hash for +url+, or nil when the URL is not tracked by the pool.
def url_meta(url)
  @state_mutex.synchronize { @url_info[url] }
end
|
443
|
+
|
444
|
+
# Atomically pick a connection for checkout and bump its :in_use counter.
# @return [Array(url, meta), nil] nil when every known URL is dead.
def get_connection
  @state_mutex.synchronize do
    # The goal here is to pick a random connection from the least-in-use connections
    # We want some randomness so that we don't hit the same node over and over, but
    # we also want more 'fair' behavior in the event of high concurrency
    eligible_set = nil
    lowest_value_seen = nil
    @url_info.each do |url,meta|
      meta_in_use = meta[:in_use]
      next if meta[:state] == :dead

      if lowest_value_seen.nil? || meta_in_use < lowest_value_seen
        # New minimum: restart the candidate set with just this entry.
        lowest_value_seen = meta_in_use
        eligible_set = [[url, meta]]
      elsif lowest_value_seen == meta_in_use
        eligible_set << [url, meta]
      end
    end

    return nil if eligible_set.nil?

    pick, pick_meta = eligible_set.sample
    pick_meta[:in_use] += 1

    [pick, pick_meta]
  end
end
|
471
|
+
|
472
|
+
# Decrement the in-use counter for +url+. A URL that has since been removed
# from the pool (e.g. by a concurrent sniff) is silently ignored.
def return_connection(url)
  @state_mutex.synchronize do
    meta = @url_info[url]
    meta[:in_use] -= 1 unless meta.nil?
  end
end
|
478
|
+
|
479
|
+
# Fetch the root document from +url+ and return its 'version' hash.
# @return [Hash, nil] nil on a non-2xx response; {} when the payload lacks
#   a version key.
def get_es_version(url)
  response = perform_request_to_url(url, :get, ROOT_URI_PATH)
  return nil unless (200..299).cover?(response.code)

  parsed = LogStash::Json.load(response.body)
  parsed.fetch('version', {})
end
|
487
|
+
|
488
|
+
# Most recent Elasticsearch version string observed by a healthcheck
# (thread-safe read via the atomic reference).
def last_es_version
  @last_es_version.get
end

# Highest major version seen across all connected nodes.
def maximum_seen_major_version
  @state_mutex.synchronize { @maximum_seen_major_version }
end

# True when the connected cluster reported the 'serverless' build flavour.
def serverless?
  @build_flavour.get == BUILD_FLAVOUR_SERVERLESS
end
|
499
|
+
|
500
|
+
private
|
501
|
+
|
502
|
+
# @private executing within @state_mutex
# Record the version seen on +url+, initializing or raising the tracked
# maximum major version (with a warning on an unexpected increase).
def set_last_es_version(version, url)
  @last_es_version.set(version)

  major = major_version(version)
  if @maximum_seen_major_version.nil?
    @logger.info("Elasticsearch version determined (#{version})", es_version: major)
    set_maximum_seen_major_version(major)
  elsif major > @maximum_seen_major_version
    warn_on_higher_major_version(major, url)
    @maximum_seen_major_version = major
  end
end

# @private executing within @state_mutex
def set_maximum_seen_major_version(major)
  if major >= 6
    @logger.warn("Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type", es_version: major)
  end
  @maximum_seen_major_version = major
end

# @private warn that a node reported a higher major than previously seen,
# which usually indicates an in-flight cluster upgrade.
def warn_on_higher_major_version(major, url)
  @logger.warn("Detected a node with a higher major version than previously observed, " +
    "this could be the result of an Elasticsearch cluster upgrade",
    previous_major: @maximum_seen_major_version, new_major: major, node_url: url.sanitized.to_s)
end

# Thread-safe store of the build flavour reported by the cluster.
def set_build_flavour(flavour)
  @build_flavour.set(flavour)
end
|
532
|
+
|
533
|
+
end
|
534
|
+
end; end; end; end;
|