logstash-output-opensearch 1.0.0-java

Files changed (61)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/ADMINS.md +29 -0
  5. data/CODE_OF_CONDUCT.md +25 -0
  6. data/CONTRIBUTING.md +99 -0
  7. data/DEVELOPER_GUIDE.md +208 -0
  8. data/Gemfile +20 -0
  9. data/LICENSE +202 -0
  10. data/MAINTAINERS.md +71 -0
  11. data/NOTICE +2 -0
  12. data/README.md +37 -0
  13. data/RELEASING.md +36 -0
  14. data/SECURITY.md +3 -0
  15. data/lib/logstash/outputs/opensearch.rb +449 -0
  16. data/lib/logstash/outputs/opensearch/distribution_checker.rb +44 -0
  17. data/lib/logstash/outputs/opensearch/http_client.rb +465 -0
  18. data/lib/logstash/outputs/opensearch/http_client/manticore_adapter.rb +140 -0
  19. data/lib/logstash/outputs/opensearch/http_client/pool.rb +467 -0
  20. data/lib/logstash/outputs/opensearch/http_client_builder.rb +182 -0
  21. data/lib/logstash/outputs/opensearch/template_manager.rb +60 -0
  22. data/lib/logstash/outputs/opensearch/templates/ecs-disabled/1x.json +44 -0
  23. data/lib/logstash/outputs/opensearch/templates/ecs-disabled/7x.json +44 -0
  24. data/lib/logstash/plugin_mixins/opensearch/api_configs.rb +168 -0
  25. data/lib/logstash/plugin_mixins/opensearch/common.rb +294 -0
  26. data/lib/logstash/plugin_mixins/opensearch/noop_distribution_checker.rb +18 -0
  27. data/logstash-output-opensearch.gemspec +40 -0
  28. data/spec/fixtures/_nodes/nodes.json +74 -0
  29. data/spec/fixtures/htpasswd +2 -0
  30. data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
  31. data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
  32. data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
  33. data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
  34. data/spec/integration/outputs/compressed_indexing_spec.rb +76 -0
  35. data/spec/integration/outputs/create_spec.rb +76 -0
  36. data/spec/integration/outputs/delete_spec.rb +72 -0
  37. data/spec/integration/outputs/index_spec.rb +164 -0
  38. data/spec/integration/outputs/index_version_spec.rb +110 -0
  39. data/spec/integration/outputs/ingest_pipeline_spec.rb +82 -0
  40. data/spec/integration/outputs/metrics_spec.rb +75 -0
  41. data/spec/integration/outputs/no_opensearch_on_startup_spec.rb +67 -0
  42. data/spec/integration/outputs/painless_update_spec.rb +147 -0
  43. data/spec/integration/outputs/parent_spec.rb +103 -0
  44. data/spec/integration/outputs/retry_spec.rb +182 -0
  45. data/spec/integration/outputs/routing_spec.rb +70 -0
  46. data/spec/integration/outputs/sniffer_spec.rb +70 -0
  47. data/spec/integration/outputs/templates_spec.rb +105 -0
  48. data/spec/integration/outputs/update_spec.rb +123 -0
  49. data/spec/opensearch_spec_helper.rb +141 -0
  50. data/spec/spec_helper.rb +19 -0
  51. data/spec/unit/http_client_builder_spec.rb +194 -0
  52. data/spec/unit/outputs/error_whitelist_spec.rb +62 -0
  53. data/spec/unit/outputs/opensearch/http_client/manticore_adapter_spec.rb +159 -0
  54. data/spec/unit/outputs/opensearch/http_client/pool_spec.rb +306 -0
  55. data/spec/unit/outputs/opensearch/http_client_spec.rb +292 -0
  56. data/spec/unit/outputs/opensearch/template_manager_spec.rb +36 -0
  57. data/spec/unit/outputs/opensearch_proxy_spec.rb +112 -0
  58. data/spec/unit/outputs/opensearch_spec.rb +800 -0
  59. data/spec/unit/outputs/opensearch_ssl_spec.rb +179 -0
  60. metadata +289 -0
  61. metadata.gz.sig +0 -0
data/lib/logstash/outputs/opensearch/http_client/manticore_adapter.rb
@@ -0,0 +1,140 @@
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
+
+require 'manticore'
+require 'cgi'
+
+module LogStash; module Outputs; class OpenSearch; class HttpClient;
+  DEFAULT_HEADERS = { "Content-Type" => "application/json" }
+
+  class ManticoreAdapter
+    attr_reader :manticore, :logger
+
+    def initialize(logger, options={})
+      @logger = logger
+      options = options.clone || {}
+      options[:ssl] = options[:ssl] || {}
+
+      # We manage our own retries directly, so let's disable them here
+      options[:automatic_retries] = 0
+      # We definitely don't need cookies
+      options[:cookies] = false
+
+      @client_params = {:headers => DEFAULT_HEADERS.merge(options[:headers] || {})}
+
+      if options[:proxy]
+        options[:proxy] = manticore_proxy_hash(options[:proxy])
+      end
+
+      @manticore = ::Manticore::Client.new(options)
+    end
+
+    # Transform the proxy option to a hash. Manticore's support for non-hash
+    # proxy options is broken. This was fixed in https://github.com/cheald/manticore/commit/34a00cee57a56148629ed0a47c329181e7319af5
+    # but that fix is not yet released.
+    def manticore_proxy_hash(proxy_uri)
+      [:scheme, :port, :user, :password, :path].reduce(:host => proxy_uri.host) do |acc, opt|
+        value = proxy_uri.send(opt)
+        acc[opt] = value unless value.nil? || (value.is_a?(String) && value.empty?)
+        acc
+      end
+    end
+
+    def client
+      @manticore
+    end
+
+    # Performs the request by invoking {Transport::Base#perform_request} with a block.
+    #
+    # @return [Response]
+    # @see Transport::Base#perform_request
+    #
+    def perform_request(url, method, path, params={}, body=nil)
+      # Perform a 2-level-deep merge on the params, so that if both the passed params and the client
+      # params store a hash under the same key, those hashes are merged as well, instead of choosing just one
+      params = (params || {}).merge(@client_params) { |key, oldval, newval|
+        (oldval.is_a?(Hash) && newval.is_a?(Hash)) ? oldval.merge(newval) : newval
+      }
+      params[:body] = body if body
+
+      if url.user
+        params[:auth] = {
+          :user => CGI.unescape(url.user),
+          # We have to unescape the password here since manticore won't do it
+          # for us unless it's part of the URL
+          :password => CGI.unescape(url.password),
+          :eager => true
+        }
+      end
+
+      request_uri = format_url(url, path)
+      request_uri_as_string = remove_double_escaping(request_uri.to_s)
+      resp = @manticore.send(method.downcase, request_uri_as_string, params)
+
+      # Manticore returns lazy responses by default.
+      # We want to block for our usage; this will wait for the response
+      # to finish.
+      resp.call
+
+      # 404s are excluded because they are valid codes in the case of
+      # template installation. We might need a better story around this later,
+      # but for our current purposes this is correct.
+      if resp.code < 200 || (resp.code > 299 && resp.code != 404)
+        raise ::LogStash::Outputs::OpenSearch::HttpClient::Pool::BadResponseCodeError.new(resp.code, request_uri, body, resp.body)
+      end
+
+      resp
+    end
+
+    # URLs returned from this method should be checked for double escaping.
+    def format_url(url, path_and_query=nil)
+      request_uri = url.clone
+
+      # We excise auth info from the URL in case manticore itself tries to stick
+      # sensitive data in a thrown exception or log data
+      request_uri.user = nil
+      request_uri.password = nil
+
+      return request_uri.to_s if path_and_query.nil?
+
+      parsed_path_and_query = java.net.URI.new(path_and_query)
+
+      query = request_uri.query
+      parsed_query = parsed_path_and_query.query
+
+      new_query_parts = [request_uri.query, parsed_path_and_query.query].select do |part|
+        part && !part.empty? # Skip nil and ""
+      end
+
+      request_uri.query = new_query_parts.join("&") unless new_query_parts.empty?
+
+      # Use `raw_path`, as `path` will unescape any escaped '/' in the path
+      request_uri.path = "#{request_uri.path}/#{parsed_path_and_query.raw_path}".gsub(/\/{2,}/, "/")
+      request_uri
+    end
+
+    # Later versions of SafeURI will also escape the '%' sign in an already escaped URI.
+    # (If the path variable is used, it constructs a new java.net.URI object using the multi-arg constructor,
+    # which will escape any '%' characters in the path, as opposed to the single-arg constructor, which requires
+    # illegal characters to be already escaped and throws otherwise.)
+    # The URI needs to have been escaped beforehand, as SafeURI does not play nice with an escaped '/' in the
+    # middle of a URI (as required by date math), treating it as a path separator.
+    def remove_double_escaping(url)
+      url.gsub(/%25([0-9A-F]{2})/i, '%\1')
+    end
+
+    def close
+      @manticore.close
+    end
+
+    def host_unreachable_exceptions
+      [::Manticore::Timeout, ::Manticore::SocketException, ::Manticore::ClientProtocolException, ::Manticore::ResolutionFailure, ::Manticore::SocketTimeout]
+    end
+  end
+end; end; end; end
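
Two pieces of the adapter above are easy to sanity-check outside of Logstash: the 2-level-deep params merge and the double-escaping cleanup. A minimal standalone sketch in plain Ruby (no LogStash or Manticore runtime assumed; the header values are made up):

# Illustration only -- not part of the gem.

# 1. Two-level-deep merge: when both hashes store a hash under the same key
#    (here :headers), the nested hashes are merged instead of one replacing
#    the other, so per-request headers combine with the client defaults.
client_params  = { :headers => { "Content-Type" => "application/json" } }
request_params = { :headers => { "X-Trace-Id" => "abc123" }, :timeout => 5 }
merged = request_params.merge(client_params) { |key, oldval, newval|
  (oldval.is_a?(Hash) && newval.is_a?(Hash)) ? oldval.merge(newval) : newval
}
p merged
# => {:headers=>{"X-Trace-Id"=>"abc123", "Content-Type"=>"application/json"}, :timeout=>5}

# 2. Double-escaping cleanup: a '%' that was itself escaped ("%2F" became
#    "%252F") is folded back into a single escape, so a date-math index like
#    <logs-{now/d}> keeps its escaped '/' intact.
p "/%253Clogs-%257Bnow%252Fd%257D%253E/_search".gsub(/%25([0-9A-F]{2})/i, '%\1')
# => "/%3Clogs-%7Bnow%2Fd%7D%3E/_search"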
data/lib/logstash/outputs/opensearch/http_client/pool.rb
@@ -0,0 +1,467 @@
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
+
+require "concurrent/atomic/atomic_reference"
+require "logstash/plugin_mixins/opensearch/noop_distribution_checker"
+
+module LogStash; module Outputs; class OpenSearch; class HttpClient;
+  class Pool
+    class NoConnectionAvailableError < Error; end
+    class BadResponseCodeError < Error
+      attr_reader :url, :response_code, :request_body, :response_body
+
+      def initialize(response_code, url, request_body, response_body)
+        @response_code = response_code
+        @url = url
+        @request_body = request_body
+        @response_body = response_body
+      end
+
+      def message
+        "Got response code '#{response_code}' contacting OpenSearch at URL '#{@url}'"
+      end
+    end
+    class HostUnreachableError < Error
+      attr_reader :original_error, :url
+
+      def initialize(original_error, url)
+        @original_error = original_error
+        @url = url
+      end
+
+      def message
+        "OpenSearch Unreachable: [#{@url}][#{original_error.class}] #{original_error.message}"
+      end
+    end
+
+    attr_reader :logger, :adapter, :sniffing, :sniffer_delay, :resurrect_delay, :healthcheck_path, :sniffing_path, :bulk_path
+    attr_reader :distribution_checker
+
+    ROOT_URI_PATH = '/'.freeze
+
+    DEFAULT_OPTIONS = {
+      :healthcheck_path => ROOT_URI_PATH,
+      :sniffing_path => "/_nodes/http",
+      :bulk_path => "/_bulk",
+      :scheme => 'http',
+      :resurrect_delay => 5,
+      :sniffing => false,
+      :sniffer_delay => 10,
+    }.freeze
+
+    def initialize(logger, adapter, initial_urls=[], options={})
+      @logger = logger
+      @adapter = adapter
+      @metric = options[:metric]
+      @initial_urls = initial_urls
+
+      raise ArgumentError, "No URL Normalizer specified!" unless options[:url_normalizer]
+      @url_normalizer = options[:url_normalizer]
+      DEFAULT_OPTIONS.merge(options).tap do |merged|
+        @bulk_path = merged[:bulk_path]
+        @sniffing_path = merged[:sniffing_path]
+        @healthcheck_path = merged[:healthcheck_path]
+        @resurrect_delay = merged[:resurrect_delay]
+        @sniffing = merged[:sniffing]
+        @sniffer_delay = merged[:sniffer_delay]
+      end
+
+      # Used for all concurrent operations in this class
+      @state_mutex = Mutex.new
+
+      # Holds metadata about all URLs
+      @url_info = {}
+      @stopping = false
+
+      @last_version = Concurrent::AtomicReference.new
+      @distribution_checker = options[:distribution_checker] || LogStash::PluginMixins::OpenSearch::NoopDistributionChecker::INSTANCE
+    end
+
+    def start
+      update_initial_urls
+      start_resurrectionist
+      start_sniffer if @sniffing
+    end
+
+    def update_initial_urls
+      update_urls(@initial_urls)
+    end
+
+    def close
+      @state_mutex.synchronize { @stopping = true }
+
+      logger.debug "Stopping sniffer"
+      stop_sniffer
+
+      logger.debug "Stopping resurrectionist"
+      stop_resurrectionist
+
+      logger.debug "Waiting for in-use manticore connections"
+      wait_for_in_use_connections
+
+      logger.debug("Closing adapter #{@adapter}")
+      @adapter.close
+    end
+
+    def wait_for_in_use_connections
+      until in_use_connections.empty?
+        logger.info "Blocked on shutdown due to in-use connections #{@state_mutex.synchronize { @url_info }}"
+        sleep 1
+      end
+    end
+
+    def in_use_connections
+      @state_mutex.synchronize { @url_info.values.select {|v| v[:in_use] > 0 } }
+    end
+
+    def alive_urls_count
+      @state_mutex.synchronize { @url_info.values.select {|v| v[:state] == :alive }.count }
+    end
+
+    def url_info
+      @state_mutex.synchronize { @url_info }
+    end
+
+    def urls
+      url_info.keys
+    end
+
+    def until_stopped(task_name, delay)
+      last_done = Time.now
+      until @state_mutex.synchronize { @stopping }
+        begin
+          now = Time.now
+          if (now - last_done) >= delay
+            last_done = now
+            yield
+          end
+          sleep 1
+        rescue => e
+          logger.warn(
+            "Error while performing #{task_name}",
+            :error_message => e.message,
+            :class => e.class.name,
+            :backtrace => e.backtrace
+          )
+        end
+      end
+    end
+
+    def start_sniffer
+      @sniffer = Thread.new do
+        until_stopped("sniffing", sniffer_delay) do
+          begin
+            sniff!
+          rescue NoConnectionAvailableError => e
+            @state_mutex.synchronize { # Synchronize around @url_info
+              logger.warn("OpenSearch output attempted to sniff for new connections but cannot. No living connections are detected. Pool contains the following current URLs", :url_info => @url_info) }
+          end
+        end
+      end
+    end
+
+    # Sniffs the cluster, then updates the internal URLs
+    def sniff!
+      update_urls(check_sniff)
+    end
+
+    SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
+    # Sniffs and returns the results. Does not update internal URLs!
+    def check_sniff
+      _, url_meta, resp = perform_request(:get, @sniffing_path)
+      @metric.increment(:sniff_requests)
+      parsed = LogStash::Json.load(resp.body)
+      nodes = parsed['nodes']
+      if !nodes || nodes.empty?
+        @logger.warn("Sniff returned no nodes! Will not update hosts.")
+        return nil
+      else
+        sniff(nodes)
+      end
+    end
+
+    def major_version(version_string)
+      version_string.split('.').first.to_i
+    end
+
+    def sniff(nodes)
+      nodes.map do |id, info|
+        # Skip master-only nodes
+        next if info["roles"] && info["roles"] == ["master"]
+        address_str_to_uri(info["http"]["publish_address"]) if info["http"]
+      end.compact
+    end
+
+    def address_str_to_uri(addr_str)
+      matches = addr_str.match(SNIFF_RE_URL)
+      if matches
+        host = matches[1].empty? ? matches[2] : matches[1]
+        ::LogStash::Util::SafeURI.new("#{host}:#{matches[3]}")
+      end
+    end
+
+    def stop_sniffer
+      @sniffer.join if @sniffer
+    end
+
+    def sniffer_alive?
+      @sniffer ? @sniffer.alive? : nil
+    end
+
+    def start_resurrectionist
+      @resurrectionist = Thread.new do
+        until_stopped("resurrection", @resurrect_delay) do
+          healthcheck!
+        end
+      end
+    end
+
+    def health_check_request(url)
+      logger.debug("Running health check to see if an OpenSearch connection is working", url: url.sanitized.to_s, path: @healthcheck_path)
+      perform_request_to_url(url, :head, @healthcheck_path)
+    end
+
+    def healthcheck!
+      # Try to keep locking granularity low such that we don't affect IO...
+      @state_mutex.synchronize { @url_info.select {|url, meta| meta[:state] != :alive } }.each do |url, meta|
+        begin
+          health_check_request(url)
+          # If no exception was raised, it must have succeeded!
+          logger.warn("Restored connection to OpenSearch instance", url: url.sanitized.to_s)
+          # We reconnected to this node; check its version
+          version = get_version(url)
+          @state_mutex.synchronize do
+            meta[:version] = version
+            set_last_version(version, url)
+            alive = @distribution_checker.is_supported?(self, url, @maximum_seen_major_version)
+            meta[:state] = alive ? :alive : :dead
+          end
+        rescue HostUnreachableError, BadResponseCodeError => e
+          logger.warn("Attempted to resurrect connection to dead OpenSearch instance, but got an error", url: url.sanitized.to_s, exception: e.class, message: e.message)
+        end
+      end
+    end
+
+    def stop_resurrectionist
+      @resurrectionist.join if @resurrectionist
+    end
+
+    def resurrectionist_alive?
+      @resurrectionist ? @resurrectionist.alive? : nil
+    end
+
+    def perform_request(method, path, params={}, body=nil)
+      with_connection do |url, url_meta|
+        resp = perform_request_to_url(url, method, path, params, body)
+        [url, url_meta, resp]
+      end
+    end
+
+    [:get, :put, :post, :delete, :patch, :head].each do |method|
+      define_method(method) do |path, params={}, body=nil|
+        _, _, response = perform_request(method, path, params, body)
+        response
+      end
+    end
+
+    def perform_request_to_url(url, method, path, params={}, body=nil)
+      res = @adapter.perform_request(url, method, path, params, body)
+    rescue *@adapter.host_unreachable_exceptions => e
+      raise HostUnreachableError.new(e, url), "Could not reach host #{e.class}: #{e.message}"
+    end
+
+    def normalize_url(uri)
+      u = @url_normalizer.call(uri)
+      if !u.is_a?(::LogStash::Util::SafeURI)
+        raise "URL Normalizer returned a '#{u.class}' rather than a SafeURI! This shouldn't happen!"
+      end
+      u
+    end
+
+    def update_urls(new_urls)
+      return if new_urls.nil?
+
+      # Normalize URLs
+      new_urls = new_urls.map(&method(:normalize_url))
+
+      # Used for logging nicely
+      state_changes = {:removed => [], :added => []}
+      @state_mutex.synchronize do
+        # Add new connections
+        new_urls.each do |url|
+          # URI objects don't have real hash equality! So, since this isn't perf-sensitive, we do a linear scan
+          unless @url_info.keys.include?(url)
+            state_changes[:added] << url
+            add_url(url)
+          end
+        end
+
+        # Delete connections not in the new list
+        @url_info.each do |url, _|
+          unless new_urls.include?(url)
+            state_changes[:removed] << url
+            remove_url(url)
+          end
+        end
+      end
+
+      if state_changes[:removed].size > 0 || state_changes[:added].size > 0
+        logger.info? && logger.info("OpenSearch pool URLs updated", :changes => state_changes)
+      end
+
+      # Run an inline health check anytime URLs are updated.
+      # This guarantees that during startup / post-startup
+      # sniffing we don't have idle periods waiting for the
+      # periodic sniffer to allow new hosts to come online.
+      healthcheck!
+    end
+
+    def size
+      @state_mutex.synchronize { @url_info.size }
+    end
+
+    def add_url(url)
+      @url_info[url] ||= empty_url_meta
+    end
+
+    def remove_url(url)
+      @url_info.delete(url)
+    end
+
+    def empty_url_meta
+      {
+        :in_use => 0,
+        :state => :unknown
+      }
+    end
+
+    def with_connection
+      url, url_meta = get_connection
+
+      # A custom error class is used here so that users may retry attempts that
+      # receive this error, should they choose to
+      raise NoConnectionAvailableError, "No Available connections" unless url
+      yield url, url_meta
+    rescue HostUnreachableError => e
+      # Mark the connection as dead here, since this is likely not transient
+      mark_dead(url, e)
+      raise e
+    rescue BadResponseCodeError => e
+      # These aren't discarded from the pool because they are often very transient
+      # errors
+      raise e
+    ensure
+      return_connection(url)
+    end
+
+    def mark_dead(url, error)
+      @state_mutex.synchronize do
+        meta = @url_info[url]
+        # If a sniff removed this URL's metadata just beforehand, there's nothing to mark.
+        # This is an extreme edge case, but it can happen!
+        return unless meta
+        logger.warn("Marking url as dead. Last error: [#{error.class}] #{error.message}",
+                    :url => url, :error_message => error.message, :error_class => error.class.name)
+        meta[:state] = :dead
+        meta[:last_error] = error
+        meta[:last_errored_at] = Time.now
+      end
+    end
+
+    def url_meta(url)
+      @state_mutex.synchronize do
+        @url_info[url]
+      end
+    end
+
+    def get_connection
+      @state_mutex.synchronize do
+        # The goal here is to pick a random connection from the least-in-use connections.
+        # We want some randomness so that we don't hit the same node over and over, but
+        # we also want more 'fair' behavior in the event of high concurrency.
+        eligible_set = nil
+        lowest_value_seen = nil
+        @url_info.each do |url, meta|
+          meta_in_use = meta[:in_use]
+          next if meta[:state] == :dead
+
+          if lowest_value_seen.nil? || meta_in_use < lowest_value_seen
+            lowest_value_seen = meta_in_use
+            eligible_set = [[url, meta]]
+          elsif lowest_value_seen == meta_in_use
+            eligible_set << [url, meta]
+          end
+        end
+
+        return nil if eligible_set.nil?
+
+        pick, pick_meta = eligible_set.sample
+        pick_meta[:in_use] += 1
+
+        [pick, pick_meta]
+      end
+    end
+
+    def return_connection(url)
+      @state_mutex.synchronize do
+        info = @url_info[url]
+        info[:in_use] -= 1 if info # Guard against the condition where the connection has already been deleted
+      end
+    end
+
+    def get_version_map(url)
+      request = perform_request_to_url(url, :get, ROOT_URI_PATH)
+      LogStash::Json.load(request.body)['version']
+    end
+
+    def get_version(url)
+      get_version_map(url)['number'] # e.g. "7.10.0"
+    end
+
+    def get_distribution(url)
+      version_map = get_version_map(url)
+      version_map.has_key?('distribution') ? version_map['distribution'] : version_map['build_flavor'] # e.g. "opensearch" or "oss"
+    end
+
+    def last_version
+      @last_version.get
+    end
+
+    def maximum_seen_major_version
+      @state_mutex.synchronize { @maximum_seen_major_version }
+    end
+
+    private
+
+    # @private executing within @state_mutex
+    def set_last_version(version, url)
+      @last_version.set(version)
+
+      major = major_version(version)
+      if @maximum_seen_major_version.nil?
+        @logger.info("Cluster version determined (#{version})", version: major)
+        set_maximum_seen_major_version(major)
+      elsif major > @maximum_seen_major_version
+        warn_on_higher_major_version(major, url)
+        @maximum_seen_major_version = major
+      end
+    end
+
+    def set_maximum_seen_major_version(major)
+      @maximum_seen_major_version = major
+    end
+
+    def warn_on_higher_major_version(major, url)
+      @logger.warn("Detected a node with a higher major version than previously observed; " +
+                   "this could be the result of an OpenSearch cluster upgrade",
+                   previous_major: @maximum_seen_major_version, new_major: major, node_url: url.sanitized.to_s)
+    end
+
+  end
+end; end; end; end
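
The selection logic in get_connection above is self-contained enough to trace by hand: among non-dead URLs it gathers the set with the lowest :in_use count and samples one at random, reserving it until return_connection decrements the counter. A minimal standalone sketch in plain Ruby (the URLs and counts are hypothetical):

# Illustration only -- not part of the gem.
url_info = {
  "http://node1:9200" => { :in_use => 2, :state => :alive },
  "http://node2:9200" => { :in_use => 0, :state => :alive },
  "http://node3:9200" => { :in_use => 0, :state => :alive },
  "http://node4:9200" => { :in_use => 0, :state => :dead  },
}

eligible_set = nil
lowest_value_seen = nil
url_info.each do |url, meta|
  next if meta[:state] == :dead         # dead nodes are never eligible
  if lowest_value_seen.nil? || meta[:in_use] < lowest_value_seen
    lowest_value_seen = meta[:in_use]
    eligible_set = [[url, meta]]        # new lowest count: restart the candidate set
  elsif lowest_value_seen == meta[:in_use]
    eligible_set << [url, meta]         # tie: add to the candidate set
  end
end

pick, pick_meta = eligible_set.sample   # node2 or node3; never node1 (busy) or node4 (dead)
pick_meta[:in_use] += 1                 # reserved until the caller returns the connection
puts pick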