logstash-output-elasticsearch-test 10.3.0-x86_64-linux

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +397 -0
  3. data/CONTRIBUTORS +33 -0
  4. data/Gemfile +15 -0
  5. data/LICENSE +13 -0
  6. data/NOTICE.TXT +5 -0
  7. data/README.md +106 -0
  8. data/docs/index.asciidoc +899 -0
  9. data/lib/logstash/outputs/elasticsearch/common.rb +441 -0
  10. data/lib/logstash/outputs/elasticsearch/common_configs.rb +167 -0
  11. data/lib/logstash/outputs/elasticsearch/default-ilm-policy.json +14 -0
  12. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es2x.json +95 -0
  13. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es5x.json +46 -0
  14. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es6x.json +45 -0
  15. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es7x.json +44 -0
  16. data/lib/logstash/outputs/elasticsearch/elasticsearch-template-es8x.json +44 -0
  17. data/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb +131 -0
  18. data/lib/logstash/outputs/elasticsearch/http_client/pool.rb +495 -0
  19. data/lib/logstash/outputs/elasticsearch/http_client.rb +432 -0
  20. data/lib/logstash/outputs/elasticsearch/http_client_builder.rb +159 -0
  21. data/lib/logstash/outputs/elasticsearch/ilm.rb +113 -0
  22. data/lib/logstash/outputs/elasticsearch/template_manager.rb +61 -0
  23. data/lib/logstash/outputs/elasticsearch.rb +263 -0
  24. data/logstash-output-elasticsearch.gemspec +33 -0
  25. data/spec/es_spec_helper.rb +189 -0
  26. data/spec/fixtures/_nodes/2x_1x.json +27 -0
  27. data/spec/fixtures/_nodes/5x_6x.json +81 -0
  28. data/spec/fixtures/_nodes/7x.json +92 -0
  29. data/spec/fixtures/htpasswd +2 -0
  30. data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
  31. data/spec/fixtures/scripts/groovy/scripted_update.groovy +2 -0
  32. data/spec/fixtures/scripts/groovy/scripted_update_nested.groovy +2 -0
  33. data/spec/fixtures/scripts/groovy/scripted_upsert.groovy +2 -0
  34. data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
  35. data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
  36. data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
  37. data/spec/fixtures/template-with-policy-es6x.json +48 -0
  38. data/spec/fixtures/template-with-policy-es7x.json +45 -0
  39. data/spec/fixtures/test_certs/ca/ca.crt +32 -0
  40. data/spec/fixtures/test_certs/ca/ca.key +51 -0
  41. data/spec/fixtures/test_certs/test.crt +36 -0
  42. data/spec/fixtures/test_certs/test.key +51 -0
  43. data/spec/integration/outputs/compressed_indexing_spec.rb +69 -0
  44. data/spec/integration/outputs/create_spec.rb +67 -0
  45. data/spec/integration/outputs/delete_spec.rb +65 -0
  46. data/spec/integration/outputs/groovy_update_spec.rb +150 -0
  47. data/spec/integration/outputs/ilm_spec.rb +531 -0
  48. data/spec/integration/outputs/index_spec.rb +178 -0
  49. data/spec/integration/outputs/index_version_spec.rb +102 -0
  50. data/spec/integration/outputs/ingest_pipeline_spec.rb +74 -0
  51. data/spec/integration/outputs/metrics_spec.rb +70 -0
  52. data/spec/integration/outputs/no_es_on_startup_spec.rb +58 -0
  53. data/spec/integration/outputs/painless_update_spec.rb +189 -0
  54. data/spec/integration/outputs/parent_spec.rb +102 -0
  55. data/spec/integration/outputs/retry_spec.rb +169 -0
  56. data/spec/integration/outputs/routing_spec.rb +61 -0
  57. data/spec/integration/outputs/sniffer_spec.rb +133 -0
  58. data/spec/integration/outputs/templates_5x_spec.rb +98 -0
  59. data/spec/integration/outputs/templates_spec.rb +98 -0
  60. data/spec/integration/outputs/update_spec.rb +116 -0
  61. data/spec/support/elasticsearch/api/actions/delete_ilm_policy.rb +19 -0
  62. data/spec/support/elasticsearch/api/actions/get_alias.rb +18 -0
  63. data/spec/support/elasticsearch/api/actions/get_ilm_policy.rb +18 -0
  64. data/spec/support/elasticsearch/api/actions/put_alias.rb +24 -0
  65. data/spec/support/elasticsearch/api/actions/put_ilm_policy.rb +25 -0
  66. data/spec/unit/http_client_builder_spec.rb +185 -0
  67. data/spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb +149 -0
  68. data/spec/unit/outputs/elasticsearch/http_client/pool_spec.rb +274 -0
  69. data/spec/unit/outputs/elasticsearch/http_client_spec.rb +250 -0
  70. data/spec/unit/outputs/elasticsearch/template_manager_spec.rb +25 -0
  71. data/spec/unit/outputs/elasticsearch_proxy_spec.rb +72 -0
  72. data/spec/unit/outputs/elasticsearch_spec.rb +675 -0
  73. data/spec/unit/outputs/elasticsearch_ssl_spec.rb +82 -0
  74. data/spec/unit/outputs/error_whitelist_spec.rb +54 -0
  75. metadata +300 -0
data/lib/logstash/outputs/elasticsearch/http_client.rb
@@ -0,0 +1,432 @@
+ require "logstash/outputs/elasticsearch"
+ require "cabin"
+ require "base64"
+ require 'logstash/outputs/elasticsearch/http_client/pool'
+ require 'logstash/outputs/elasticsearch/http_client/manticore_adapter'
+ require 'cgi'
+ require 'zlib'
+ require 'stringio'
+
+ module LogStash; module Outputs; class ElasticSearch;
+   # This is a constant instead of a config option because
+   # there really isn't a good reason to configure it.
+   #
+   # The criteria used are:
+   # 1. We need a number that's less than 100MiB because ES
+   #    won't accept bulks larger than that.
+   # 2. It must be large enough to amortize the connection constant
+   #    across multiple requests.
+   # 3. It must be small enough that even if multiple threads hit this size
+   #    we won't use a lot of heap.
+   #
+   # We wound up agreeing that a number greater than 10 MiB and less than 100MiB
+   # made sense. We picked one on the lowish side to not use too much heap.
+   TARGET_BULK_BYTES = 20 * 1024 * 1024 # 20MiB
+
+   class HttpClient
+     attr_reader :client, :options, :logger, :pool, :action_count, :recv_count
+     # This is here in case we use DEFAULT_OPTIONS in the future
+     # DEFAULT_OPTIONS = {
+     #   :setting => value
+     # }
+
+     #
+     # The `options` argument is a hash where the following symbol keys have meaning:
+     #
+     # * `:hosts` - array of String. Set a list of hosts to use for communication.
+     # * `:port` - number. Set the port to use to communicate with Elasticsearch.
+     # * `:user` - String. The user to use for authentication.
+     # * `:password` - String. The password to use for authentication.
+     # * `:timeout` - Float. A duration value, in seconds, after which a socket
+     #   operation or request will be aborted if not yet successful.
+     # * `:client_settings` - a hash; see below for keys.
+     #
+     # The `client_settings` key is a hash that can contain other settings:
+     #
+     # * `:ssl` - Boolean. Enable or disable SSL/TLS.
+     # * `:proxy` - String. The HTTP proxy to use.
+     # * `:path` - String. The leading path for prefixing Elasticsearch
+     #   requests. This is sometimes used if you are proxying Elasticsearch access
+     #   through a special http path, such as using mod_rewrite.
+     # * `:headers` - Hash. Pairs of headers and their values.
+     def initialize(options={})
+       @logger = options[:logger]
+       @metric = options[:metric]
+       @bulk_request_metrics = @metric.namespace(:bulk_requests)
+       @bulk_response_metrics = @bulk_request_metrics.namespace(:responses)
+
+       # Again, in case we use DEFAULT_OPTIONS in the future, uncomment this.
+       # @options = DEFAULT_OPTIONS.merge(options)
+       @options = options
+
+       @url_template = build_url_template
+
+       @pool = build_pool(@options)
+       # Mutex to prevent requests and sniffing from accessing the
+       # connection pool at the same time
+       @bulk_path = @options[:bulk_path]
+     end
+
+     def build_url_template
+       {
+         :scheme => self.scheme,
+         :user => self.user,
+         :password => self.password,
+         :host => "URLTEMPLATE",
+         :port => self.port,
+         :path => self.path
+       }
+     end
+
+     def template_install(name, template, force=false)
+       if template_exists?(name) && !force
+         @logger.debug("Found existing Elasticsearch template. Skipping template management", :name => name)
+         return
+       end
+       template_put(name, template)
+     end
+
+     def maximum_seen_major_version
+       @pool.maximum_seen_major_version
+     end
+
+     def bulk(actions)
+       @action_count ||= 0
+       @action_count += actions.size
+
+       return if actions.empty?
+
+       bulk_actions = actions.collect do |action, args, source|
+         args, source = update_action_builder(args, source) if action == 'update'
+
+         if source && action != 'delete'
+           next [ { action => args }, source ]
+         else
+           next { action => args }
+         end
+       end
+
+       body_stream = StringIO.new
+       if http_compression
+         body_stream.set_encoding "BINARY"
+         stream_writer = Zlib::GzipWriter.new(body_stream, Zlib::DEFAULT_COMPRESSION, Zlib::DEFAULT_STRATEGY)
+       else
+         stream_writer = body_stream
+       end
+       bulk_responses = []
+       bulk_actions.each do |action|
+         as_json = action.is_a?(Array) ?
+           action.map {|line| LogStash::Json.dump(line)}.join("\n") :
+           LogStash::Json.dump(action)
+         as_json << "\n"
+         if (body_stream.size + as_json.bytesize) > TARGET_BULK_BYTES
+           bulk_responses << bulk_send(body_stream) unless body_stream.size == 0
+         end
+         stream_writer.write(as_json)
+       end
+       stream_writer.close if http_compression
+       bulk_responses << bulk_send(body_stream) if body_stream.size > 0
+       body_stream.close if !http_compression
+       join_bulk_responses(bulk_responses)
+     end
+
+     def join_bulk_responses(bulk_responses)
+       {
+         "errors" => bulk_responses.any? {|r| r["errors"] == true},
+         "items" => bulk_responses.reduce([]) {|m,r| m.concat(r.fetch("items", []))}
+       }
+     end
+
+     def bulk_send(body_stream)
+       params = http_compression ? {:headers => {"Content-Encoding" => "gzip"}} : {}
+       # Discard the URL
+       response = @pool.post(@bulk_path, params, body_stream.string)
+       if !body_stream.closed?
+         body_stream.truncate(0)
+         body_stream.seek(0)
+       end
+
+       @bulk_response_metrics.increment(response.code.to_s)
+
+       if response.code != 200
+         url = ::LogStash::Util::SafeURI.new(response.final_url)
+         raise ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError.new(
+           response.code, url, body_stream.to_s, response.body
+         )
+       end
+
+       LogStash::Json.load(response.body)
+     end
+
+     def get(path)
+       response = @pool.get(path, nil)
+       LogStash::Json.load(response.body)
+     end
+
+     def post(path, params = {}, body_string)
+       response = @pool.post(path, params, body_string)
+       LogStash::Json.load(response.body)
+     end
+
+     def close
+       @pool.close
+     end
+
+     def calculate_property(uris, property, default, sniff_check)
+       values = uris.map(&property).uniq
+
+       if sniff_check && values.size > 1
+         raise LogStash::ConfigurationError, "Cannot have multiple values for #{property} in hosts when sniffing is enabled!"
+       end
+
+       uri_value = values.first
+
+       default = nil if default.is_a?(String) && default.empty? # Blanks are as good as nil
+       uri_value = nil if uri_value.is_a?(String) && uri_value.empty?
+
+       if default && uri_value && (default != uri_value)
+         raise LogStash::ConfigurationError, "Explicit value for '#{property}' was declared, but it is different in one of the URLs given! Please make sure your URLs are inline with explicit values. The URLs have the property set to '#{uri_value}', but it was also set to '#{default}' explicitly"
+       end
+
+       uri_value || default
+     end
+
+     def sniffing
+       @options[:sniffing]
+     end
+
+     def user
+       calculate_property(uris, :user, @options[:user], sniffing)
+     end
+
+     def password
+       calculate_property(uris, :password, @options[:password], sniffing)
+     end
+
+     def path
+       calculated = calculate_property(uris, :path, client_settings[:path], sniffing)
+       calculated = "/#{calculated}" if calculated && !calculated.start_with?("/")
+       calculated
+     end
+
+     def scheme
+       explicit_scheme = if ssl_options && ssl_options.has_key?(:enabled)
+         ssl_options[:enabled] ? 'https' : 'http'
+       else
+         nil
+       end
+
+       calculated_scheme = calculate_property(uris, :scheme, explicit_scheme, sniffing)
+
+       if calculated_scheme && calculated_scheme !~ /https?/
+         raise LogStash::ConfigurationError, "Bad scheme '#{calculated_scheme}' found should be one of http/https"
+       end
+
+       if calculated_scheme && explicit_scheme && calculated_scheme != explicit_scheme
+         raise LogStash::ConfigurationError, "SSL option was explicitly set to #{ssl_options[:enabled]} but a URL was also declared with a scheme of '#{explicit_scheme}'. Please reconcile this"
+       end
+
+       calculated_scheme # May be nil if explicit_scheme is nil!
+     end
+
+     def port
+       # We don't set the 'default' here because the default is what the user
+       # indicated, so we use an || outside of calculate_property. This lets people
+       # enter things like foo:123, bar and wind up with foo:123, bar:9200
+       calculate_property(uris, :port, nil, sniffing) || 9200
+     end
+
+     def uris
+       @options[:hosts]
+     end
+
+     def client_settings
+       @options[:client_settings] || {}
+     end
+
+     def ssl_options
+       client_settings.fetch(:ssl, {})
+     end
+
+     def http_compression
+       client_settings.fetch(:http_compression, false)
+     end
+
+     def build_adapter(options)
+       timeout = options[:timeout] || 0
+
+       adapter_options = {
+         :socket_timeout => timeout,
+         :request_timeout => timeout,
+       }
+
+       adapter_options[:proxy] = client_settings[:proxy] if client_settings[:proxy]
+
+       adapter_options[:check_connection_timeout] = client_settings[:check_connection_timeout] if client_settings[:check_connection_timeout]
+
+       # Having this explicitly set to nil is an error
+       if client_settings[:pool_max]
+         adapter_options[:pool_max] = client_settings[:pool_max]
+       end
+
+       # Having this explicitly set to nil is an error
+       if client_settings[:pool_max_per_route]
+         adapter_options[:pool_max_per_route] = client_settings[:pool_max_per_route]
+       end
+
+       adapter_options[:ssl] = ssl_options if self.scheme == 'https'
+
+       adapter_options[:headers] = client_settings[:headers] if client_settings[:headers]
+
+       adapter_class = ::LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter
+       adapter = adapter_class.new(@logger, adapter_options)
+     end
+
+     def build_pool(options)
+       adapter = build_adapter(options)
+
+       pool_options = {
+         :sniffing => sniffing,
+         :sniffer_delay => options[:sniffer_delay],
+         :sniffing_path => options[:sniffing_path],
+         :healthcheck_path => options[:healthcheck_path],
+         :resurrect_delay => options[:resurrect_delay],
+         :url_normalizer => self.method(:host_to_url),
+         :metric => options[:metric]
+       }
+       pool_options[:scheme] = self.scheme if self.scheme
+
+       pool_class = ::LogStash::Outputs::ElasticSearch::HttpClient::Pool
+       full_urls = @options[:hosts].map {|h| host_to_url(h) }
+       pool = pool_class.new(@logger, adapter, full_urls, pool_options)
+       pool.start
+       pool
+     end
+
+     def host_to_url(h)
+       # Never override the calculated scheme
+       raw_scheme = @url_template[:scheme] || 'http'
+
+       raw_user = h.user || @url_template[:user]
+       raw_password = h.password || @url_template[:password]
+       postfixed_userinfo = raw_user && raw_password ? "#{raw_user}:#{raw_password}@" : nil
+
+       raw_host = h.host # Always replace this!
+       raw_port = h.port || @url_template[:port]
+
+       raw_path = !h.path.nil? && !h.path.empty? && h.path != "/" ? h.path : @url_template[:path]
+       prefixed_raw_path = raw_path && !raw_path.empty? ? raw_path : "/"
+
+       parameters = client_settings[:parameters]
+       raw_query = if parameters && !parameters.empty?
+         combined = h.query ?
+           Hash[URI::decode_www_form(h.query)].merge(parameters) :
+           parameters
+         query_str = combined.flat_map {|k,v|
+           values = Array(v)
+           values.map {|av| "#{k}=#{av}"}
+         }.join("&")
+         query_str
+       else
+         h.query
+       end
+       prefixed_raw_query = raw_query && !raw_query.empty? ? "?#{raw_query}" : nil
+
+       raw_url = "#{raw_scheme}://#{postfixed_userinfo}#{raw_host}:#{raw_port}#{prefixed_raw_path}#{prefixed_raw_query}"
+
+       ::LogStash::Util::SafeURI.new(raw_url)
+     end
+
+     def exists?(path, use_get=false)
+       response = use_get ? @pool.get(path) : @pool.head(path)
+       response.code >= 200 && response.code <= 299
+     end
+
+     def template_exists?(name)
+       exists?("/_template/#{name}")
+     end
+
+     def template_put(name, template)
+       path = "_template/#{name}"
+       logger.info("Installing elasticsearch template to #{path}")
+       @pool.put(path, nil, LogStash::Json.dump(template))
+     end
+
+     # ILM methods
+
+     # check whether rollover alias already exists
+     def rollover_alias_exists?(name)
+       exists?(name)
+     end
+
+     # Create a new rollover alias
+     def rollover_alias_put(alias_name, alias_definition)
+       logger.info("Creating rollover alias #{alias_name}")
+       begin
+         @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
+       # If the rollover alias already exists, ignore the error that comes back from Elasticsearch
+       rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+         if e.response_code == 400
+           logger.info("Rollover Alias #{alias_name} already exists. Skipping")
+           return
+         end
+         raise e
+       end
+     end
+
+     def get_xpack_info
+       get("/_xpack")
+     end
+
+     def get_ilm_endpoint
+       @pool.get("/_ilm/policy")
+     end
+
+     def ilm_policy_exists?(name)
+       exists?("/_ilm/policy/#{name}", true)
+     end
+
+     def ilm_policy_put(name, policy)
+       path = "_ilm/policy/#{name}"
+       logger.info("Installing ILM policy #{policy} to #{path}")
+       @pool.put(path, nil, LogStash::Json.dump(policy))
+     end
+
+     # Build a bulk item for an Elasticsearch update action
+     def update_action_builder(args, source)
+       if args[:_script]
+         # Use the event as a hash from your script with variable name defined
+         # by script_var_name (default: "event")
+         # Ex: event["@timestamp"]
+         source_orig = source
+         source = { 'script' => {'params' => { @options[:script_var_name] => source_orig }} }
+         if @options[:scripted_upsert]
+           source['scripted_upsert'] = true
+           source['upsert'] = {}
+         elsif @options[:doc_as_upsert]
+           source['upsert'] = source_orig
+         else
+           source['upsert'] = args.delete(:_upsert) if args[:_upsert]
+         end
+         case @options[:script_type]
+         when 'indexed'
+           source['script']['id'] = args.delete(:_script)
+         when 'file'
+           source['script']['file'] = args.delete(:_script)
+         when 'inline'
+           source['script']['inline'] = args.delete(:_script)
+         end
+         source['script']['lang'] = @options[:script_lang] if @options[:script_lang] != ''
+       else
+         source = { 'doc' => source }
+         if @options[:doc_as_upsert]
+           source['doc_as_upsert'] = true
+         else
+           source['upsert'] = args.delete(:_upsert) if args[:_upsert]
+         end
+       end
+       [args, source]
+     end
+   end
+ end end end
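
For illustration only (not part of the packaged source above): the comment block on HttpClient#initialize documents the options hash, and bulk takes an array of [action, params, source] tuples that it serializes to NDJSON and flushes in batches of roughly TARGET_BULK_BYTES. A minimal sketch of wiring this up by hand, assuming a logger and a Logstash metric namespace are available from the runtime and using a placeholder host and index:

    # Illustrative sketch only; `logger` and `metric` are assumed to come from the
    # Logstash runtime, and the host and index names are placeholders.
    require "logstash/outputs/elasticsearch/http_client"

    client = LogStash::Outputs::ElasticSearch::HttpClient.new(
      :logger => logger,
      :metric => metric,
      :hosts  => [::LogStash::Util::SafeURI.new("http://127.0.0.1:9200")],
      :bulk_path => "/_bulk",
      :client_settings => { :ssl => { :enabled => false }, :http_compression => false }
    )

    # Each action is an [action, params, source] tuple; `bulk` serializes them and
    # flushes whenever the pending batch would exceed TARGET_BULK_BYTES.
    client.bulk([
      ["index", { :_index => "logs-example" }, { "message" => "hello" }]
    ])
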
data/lib/logstash/outputs/elasticsearch/http_client_builder.rb
@@ -0,0 +1,159 @@
+ require 'cgi'
+
+ module LogStash; module Outputs; class ElasticSearch;
+   module HttpClientBuilder
+     def self.build(logger, hosts, params)
+       client_settings = {
+         :pool_max => params["pool_max"],
+         :pool_max_per_route => params["pool_max_per_route"],
+         :check_connection_timeout => params["validate_after_inactivity"],
+         :http_compression => params["http_compression"],
+         :headers => params["custom_headers"]
+       }
+
+       client_settings[:proxy] = params["proxy"] if params["proxy"]
+
+       common_options = {
+         :client_settings => client_settings,
+         :metric => params["metric"],
+         :resurrect_delay => params["resurrect_delay"]
+       }
+
+       if params["sniffing"]
+         common_options[:sniffing] = true
+         common_options[:sniffer_delay] = params["sniffing_delay"]
+       end
+
+       common_options[:timeout] = params["timeout"] if params["timeout"]
+
+       if params["path"]
+         client_settings[:path] = dedup_slashes("/#{params["path"]}/")
+       end
+
+       common_options[:bulk_path] = if params["bulk_path"]
+         dedup_slashes("/#{params["bulk_path"]}")
+       else
+         dedup_slashes("/#{params["path"]}/_bulk")
+       end
+
+       common_options[:sniffing_path] = if params["sniffing_path"]
+         dedup_slashes("/#{params["sniffing_path"]}")
+       else
+         dedup_slashes("/#{params["path"]}/_nodes/http")
+       end
+
+       common_options[:healthcheck_path] = if params["healthcheck_path"]
+         dedup_slashes("/#{params["healthcheck_path"]}")
+       else
+         dedup_slashes("/#{params["path"]}")
+       end
+
+       if params["parameters"]
+         client_settings[:parameters] = params["parameters"]
+       end
+
+       logger.debug? && logger.debug("Normalizing http path", :path => params["path"], :normalized => client_settings[:path])
+
+       client_settings.merge! setup_ssl(logger, params)
+       common_options.merge! setup_basic_auth(logger, params)
+
+       external_version_types = ["external", "external_gt", "external_gte"]
+       # External Version validation
+       raise(
+         LogStash::ConfigurationError,
+         "External versioning requires the presence of a version number."
+       ) if external_version_types.include?(params.fetch('version_type', '')) and params.fetch("version", nil) == nil
+
+       # Create API setup
+       raise(
+         LogStash::ConfigurationError,
+         "External versioning is not supported by the create action."
+       ) if params['action'] == 'create' and external_version_types.include?(params.fetch('version_type', ''))
+
+       # Update API setup
+       raise( LogStash::ConfigurationError,
+         "doc_as_upsert and scripted_upsert are mutually exclusive."
+       ) if params["doc_as_upsert"] and params["scripted_upsert"]
+
+       raise(
+         LogStash::ConfigurationError,
+         "Specifying action => 'update' needs a document_id."
+       ) if params['action'] == 'update' and params.fetch('document_id', '') == ''
+
+       raise(
+         LogStash::ConfigurationError,
+         "External versioning is not supported by the update action. See https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html."
+       ) if params['action'] == 'update' and external_version_types.include?(params.fetch('version_type', ''))
+
+       # Update API setup
+       update_options = {
+         :doc_as_upsert => params["doc_as_upsert"],
+         :script_var_name => params["script_var_name"],
+         :script_type => params["script_type"],
+         :script_lang => params["script_lang"],
+         :scripted_upsert => params["scripted_upsert"]
+       }
+       common_options.merge! update_options if params["action"] == 'update'
+
+       create_http_client(common_options.merge(:hosts => hosts, :logger => logger))
+     end
+
+     def self.create_http_client(options)
+       LogStash::Outputs::ElasticSearch::HttpClient.new(options)
+     end
+
+     def self.setup_ssl(logger, params)
+       params["ssl"] = true if params["hosts"].any? {|h| h.scheme == "https" }
+       return {} if params["ssl"].nil?
+
+       return {:ssl => {:enabled => false}} if params["ssl"] == false
+
+       cacert, truststore, truststore_password, keystore, keystore_password =
+         params.values_at('cacert', 'truststore', 'truststore_password', 'keystore', 'keystore_password')
+
+       if cacert && truststore
+         raise(LogStash::ConfigurationError, "Use either \"cacert\" or \"truststore\" when configuring the CA certificate") if truststore
+       end
+
+       ssl_options = {:enabled => true}
+
+       if cacert
+         ssl_options[:ca_file] = cacert
+       elsif truststore
+         ssl_options[:truststore_password] = truststore_password.value if truststore_password
+       end
+
+       ssl_options[:truststore] = truststore if truststore
+       if keystore
+         ssl_options[:keystore] = keystore
+         ssl_options[:keystore_password] = keystore_password.value if keystore_password
+       end
+       if !params["ssl_certificate_verification"]
+         logger.warn [
+           "** WARNING ** Detected UNSAFE options in elasticsearch output configuration!",
+           "** WARNING ** You have enabled encryption but DISABLED certificate verification.",
+           "** WARNING ** To make sure your data is secure change :ssl_certificate_verification to true"
+         ].join("\n")
+         ssl_options[:verify] = false
+       end
+       { ssl: ssl_options }
+     end
+
+     def self.setup_basic_auth(logger, params)
+       user, password = params["user"], params["password"]
+
+       return {} unless user && password && password.value
+
+       {
+         :user => CGI.escape(user),
+         :password => CGI.escape(password.value)
+       }
+     end
+
+     private
+     def self.dedup_slashes(url)
+       url.gsub(/\/+/, "/")
+     end
+   end
+ end; end; end
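
For illustration only (not part of the packaged source above): HttpClientBuilder.build is the seam between the plugin's string-keyed params (the config options documented in docs/index.asciidoc) and the symbol-keyed options HttpClient expects. A rough sketch, assuming logger and metric come from the plugin runtime and using placeholder values:

    # Illustrative sketch only; the string keys mirror plugin config options referenced
    # in the builder above, and the values are placeholders.
    hosts  = [::LogStash::Util::SafeURI.new("https://es-node-1:9200")]
    params = {
      "hosts"                        => hosts,
      "metric"                       => metric,
      "pool_max"                     => 1000,
      "pool_max_per_route"           => 100,
      "validate_after_inactivity"    => 10_000,
      "http_compression"             => false,
      "custom_headers"               => {},
      "sniffing"                     => false,
      "resurrect_delay"              => 5,
      "path"                         => "es",   # normalized to "/es/", "/es/_bulk", etc.
      "ssl_certificate_verification" => true
    }
    client = LogStash::Outputs::ElasticSearch::HttpClientBuilder.build(logger, hosts, params)
    # Because the host scheme is https, setup_ssl contributes { :ssl => { :enabled => true } }
    # and certificate verification stays on, so no UNSAFE warning is logged.
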
data/lib/logstash/outputs/elasticsearch/ilm.rb
@@ -0,0 +1,113 @@
+ module LogStash; module Outputs; class ElasticSearch
+   module Ilm
+
+     ILM_POLICY_PATH = "default-ilm-policy.json"
+
+     def setup_ilm
+       return unless ilm_in_use?
+       logger.warn("Overwriting supplied index #{@index} with rollover alias #{@ilm_rollover_alias}") unless default_index?(@index)
+       @index = @ilm_rollover_alias
+       maybe_create_rollover_alias
+       maybe_create_ilm_policy
+     end
+
+     def default_rollover_alias?(rollover_alias)
+       rollover_alias == LogStash::Outputs::ElasticSearch::DEFAULT_ROLLOVER_ALIAS
+     end
+
+     def ilm_alias_set?
+       default_index?(@index) || !default_rollover_alias?(@ilm_rollover_alias)
+     end
+
+     def ilm_in_use?
+       return @ilm_actually_enabled if defined?(@ilm_actually_enabled)
+       @ilm_actually_enabled =
+         begin
+           if @ilm_enabled == 'auto'
+             if ilm_on_by_default?
+               ilm_ready, error = ilm_ready?
+               if !ilm_ready
+                 @logger.info("Index Lifecycle Management is set to 'auto', but will be disabled - #{error}")
+                 false
+               else
+                 ilm_alias_set?
+               end
+             else
+               @logger.info("Index Lifecycle Management is set to 'auto', but will be disabled - Your Elasticsearch cluster is before 7.0.0, which is the minimum version required to automatically run Index Lifecycle Management")
+               false
+             end
+           elsif @ilm_enabled.to_s == 'true'
+             ilm_ready, error = ilm_ready?
+             raise LogStash::ConfigurationError, "Index Lifecycle Management is set to enabled in Logstash, but cannot be used - #{error}" unless ilm_ready
+             ilm_alias_set?
+           else
+             false
+           end
+         end
+     end
+
+     def ilm_on_by_default?
+       maximum_seen_major_version >= 7
+     end
+
+     def ilm_ready?
+       # Check the Elasticsearch instance for ILM readiness - this means that the version has to be a non-OSS release, with the
+       # ILM feature available and enabled.
+       begin
+         xpack = client.get_xpack_info
+         features = xpack.nil? || xpack.empty? ? nil : xpack["features"]
+         ilm = features.nil? ? nil : features["ilm"]
+         return false, "Index Lifecycle management is not installed on your Elasticsearch cluster" if features.nil? || ilm.nil?
+         return false, "Index Lifecycle management is not available in your Elasticsearch cluster" unless ilm['available']
+         return false, "Index Lifecycle management is not enabled in your Elasticsearch cluster" unless ilm['enabled']
+         return true, nil
+       rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
+         # Check the xpack endpoint: if there is no xpack endpoint, this version of Elasticsearch is not compatible
+         if e.response_code == 404
+           return false, "Index Lifecycle management is not installed on your Elasticsearch cluster"
+         elsif e.response_code == 400
+           return false, "Index Lifecycle management is not installed on your Elasticsearch cluster"
+         else
+           raise e
+         end
+       end
+     end
75
+
76
+ private
77
+
78
+ def ilm_policy_default?
79
+ ilm_policy == LogStash::Outputs::ElasticSearch::DEFAULT_POLICY
80
+ end
81
+
82
+ def maybe_create_ilm_policy
83
+ if ilm_policy_default?
84
+ client.ilm_policy_put(ilm_policy, policy_payload) unless client.ilm_policy_exists?(ilm_policy)
85
+ else
86
+ raise LogStash::ConfigurationError, "The specified ILM policy #{ilm_policy} does not exist on your Elasticsearch instance" unless client.ilm_policy_exists?(ilm_policy)
87
+ end
88
+ end
89
+
90
+ def maybe_create_rollover_alias
91
+ client.rollover_alias_put(rollover_alias_target, rollover_alias_payload) unless client.rollover_alias_exists?(ilm_rollover_alias)
92
+ end
93
+
94
+ def rollover_alias_target
95
+ "<#{ilm_rollover_alias}-#{ilm_pattern}>"
96
+ end
97
+
98
+ def rollover_alias_payload
99
+ {
100
+ 'aliases' => {
101
+ ilm_rollover_alias =>{
102
+ 'is_write_index' => true
103
+ }
104
+ }
105
+ }
106
+ end
107
+
108
+ def policy_payload
109
+ policy_path = ::File.expand_path(ILM_POLICY_PATH, ::File.dirname(__FILE__))
110
+ LogStash::Json.load(::IO.read(policy_path))
111
+ end
112
+ end
113
+ end end end
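
For illustration only (not part of the packaged source above): a small sketch of what the helpers in Ilm evaluate to, assuming the plugin defaults of ilm_rollover_alias "logstash", ilm_pattern "{now/d}-000001", and ilm_policy "logstash-policy":

    require "cgi"

    # Assumed defaults, for illustration.
    rollover_alias = "logstash"
    ilm_pattern    = "{now/d}-000001"

    alias_target  = "<#{rollover_alias}-#{ilm_pattern}>"   # => "<logstash-{now/d}-000001>"
    alias_payload = { "aliases" => { rollover_alias => { "is_write_index" => true } } }

    # maybe_create_rollover_alias PUTs alias_payload to the CGI-escaped target
    # (rollover_alias_put escapes it), unless the alias already exists:
    CGI.escape(alias_target)   # => "%3Clogstash-%7Bnow%2Fd%7D-000001%3E"

    # maybe_create_ilm_policy PUTs the bundled default-ilm-policy.json to
    # /_ilm/policy/logstash-policy unless a policy with that name already exists.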