fluent-plugin-elasticsearch-dext 5.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. checksums.yaml +7 -0
  2. data/.coveralls.yml +2 -0
  3. data/.editorconfig +9 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
  5. data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
  6. data/.github/workflows/issue-auto-closer.yml +12 -0
  7. data/.github/workflows/linux.yml +26 -0
  8. data/.github/workflows/macos.yml +26 -0
  9. data/.github/workflows/windows.yml +26 -0
  10. data/.gitignore +18 -0
  11. data/.travis.yml +40 -0
  12. data/CONTRIBUTING.md +24 -0
  13. data/Gemfile +11 -0
  14. data/History.md +553 -0
  15. data/ISSUE_TEMPLATE.md +30 -0
  16. data/LICENSE.txt +201 -0
  17. data/PULL_REQUEST_TEMPLATE.md +10 -0
  18. data/README.ElasticsearchGenID.md +116 -0
  19. data/README.ElasticsearchInput.md +293 -0
  20. data/README.Troubleshooting.md +601 -0
  21. data/README.md +1467 -0
  22. data/Rakefile +11 -0
  23. data/appveyor.yml +20 -0
  24. data/fluent-plugin-elasticsearch.gemspec +35 -0
  25. data/gemfiles/Gemfile.elasticsearch.v6 +12 -0
  26. data/lib/fluent/log-ext.rb +38 -0
  27. data/lib/fluent/plugin/default-ilm-policy.json +14 -0
  28. data/lib/fluent/plugin/elasticsearch_constants.rb +13 -0
  29. data/lib/fluent/plugin/elasticsearch_error.rb +5 -0
  30. data/lib/fluent/plugin/elasticsearch_error_handler.rb +129 -0
  31. data/lib/fluent/plugin/elasticsearch_fallback_selector.rb +9 -0
  32. data/lib/fluent/plugin/elasticsearch_index_lifecycle_management.rb +67 -0
  33. data/lib/fluent/plugin/elasticsearch_index_template.rb +211 -0
  34. data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb +10 -0
  35. data/lib/fluent/plugin/elasticsearch_tls.rb +70 -0
  36. data/lib/fluent/plugin/filter_elasticsearch_genid.rb +77 -0
  37. data/lib/fluent/plugin/in_elasticsearch.rb +325 -0
  38. data/lib/fluent/plugin/oj_serializer.rb +22 -0
  39. data/lib/fluent/plugin/out_elasticsearch.rb +1108 -0
  40. data/lib/fluent/plugin/out_elasticsearch_data_stream.rb +218 -0
  41. data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +282 -0
  42. data/test/helper.rb +24 -0
  43. data/test/plugin/test_alias_template.json +9 -0
  44. data/test/plugin/test_elasticsearch_error_handler.rb +646 -0
  45. data/test/plugin/test_elasticsearch_fallback_selector.rb +74 -0
  46. data/test/plugin/test_elasticsearch_index_lifecycle_management.rb +66 -0
  47. data/test/plugin/test_elasticsearch_tls.rb +145 -0
  48. data/test/plugin/test_filter_elasticsearch_genid.rb +215 -0
  49. data/test/plugin/test_in_elasticsearch.rb +459 -0
  50. data/test/plugin/test_index_alias_template.json +11 -0
  51. data/test/plugin/test_index_template.json +25 -0
  52. data/test/plugin/test_oj_serializer.rb +19 -0
  53. data/test/plugin/test_out_elasticsearch.rb +5688 -0
  54. data/test/plugin/test_out_elasticsearch_data_stream.rb +337 -0
  55. data/test/plugin/test_out_elasticsearch_dynamic.rb +1134 -0
  56. data/test/plugin/test_template.json +23 -0
  57. data/test/test_log-ext.rb +35 -0
  58. metadata +236 -0
@@ -0,0 +1,22 @@
1
require 'oj'

module Fluent::Plugin
  module Serializer

    # JSON (de)serializer backed by the Oj gem. Implements the
    # elasticsearch transport serializer interface so it can be plugged
    # in as a faster replacement for the default JSON serializer.
    class Oj
      include Elasticsearch::Transport::Transport::Serializer::Base

      # De-serialize a JSON string into Ruby objects.
      def load(string, options = {})
        ::Oj.load(string, options)
      end

      # Serialize a Ruby object into a JSON string.
      def dump(object, options = {})
        ::Oj.dump(object, options)
      end
    end
  end
end
@@ -0,0 +1,1108 @@
1
+ # encoding: UTF-8
2
+ require 'date'
3
+ require 'excon'
4
+ require 'elasticsearch'
5
+ begin
6
+ require 'elasticsearch/xpack'
7
+ rescue LoadError
8
+ end
9
+ require 'json'
10
+ require 'uri'
11
+ require 'base64'
12
+ begin
13
+ require 'strptime'
14
+ rescue LoadError
15
+ end
16
+
17
+ require 'fluent/plugin/output'
18
+ require 'fluent/event'
19
+ require 'fluent/error'
20
+ require 'fluent/time'
21
+ require 'fluent/unique_id'
22
+ require 'fluent/log-ext'
23
+ require 'zlib'
24
+ require_relative 'elasticsearch_constants'
25
+ require_relative 'elasticsearch_error'
26
+ require_relative 'elasticsearch_error_handler'
27
+ require_relative 'elasticsearch_index_template'
28
+ require_relative 'elasticsearch_index_lifecycle_management'
29
+ require_relative 'elasticsearch_tls'
30
+ require_relative 'elasticsearch_fallback_selector'
31
+ begin
32
+ require_relative 'oj_serializer'
33
+ rescue LoadError
34
+ end
35
+
36
+ module Fluent::Plugin
37
+ class ElasticsearchOutput < Output
38
+ class RecoverableRequestFailure < StandardError; end
39
+ class UnrecoverableRequestFailure < Fluent::UnrecoverableError; end
40
+ class RetryStreamEmitFailure < StandardError; end
41
+
42
+ # MissingIdFieldError is raised for records that do not
43
+ # include the field for the unique record identifier
44
+ class MissingIdFieldError < StandardError; end
45
+
46
+ # RetryStreamError provides a stream to be
47
+ # put back in the pipeline for cases where a bulk request
48
+ # failed (e.g some records succeed while others failed)
49
class RetryStreamError < StandardError
  # The event stream to put back into the pipeline for another delivery attempt.
  attr_reader :retry_stream

  def initialize(retry_stream)
    @retry_stream = retry_stream
  end
end
55
+
56
+ RequestInfo = Struct.new(:host, :index, :ilm_index, :ilm_alias)
57
+
58
+ attr_reader :alias_indexes
59
+ attr_reader :template_names
60
+ attr_reader :ssl_version_options
61
+ attr_reader :compressable_connection
62
+ attr_reader :api_key_header
63
+
64
+ helpers :event_emitter, :compat_parameters, :record_accessor, :timer
65
+
66
+ Fluent::Plugin.register_output('elasticsearch', self)
67
+
68
+ DEFAULT_BUFFER_TYPE = "memory"
69
+ DEFAULT_ELASTICSEARCH_VERSION = 5 # For compatibility.
70
+ DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
71
+ DEFAULT_TYPE_NAME = "fluentd".freeze
72
+ DEFAULT_RELOAD_AFTER = -1
73
+ TARGET_BULK_BYTES = 20 * 1024 * 1024
74
+ DEFAULT_POLICY_ID = "logstash-policy"
75
+
76
+ config_param :host, :string, :default => 'localhost'
77
+ config_param :port, :integer, :default => 9200
78
+ config_param :user, :string, :default => nil
79
+ config_param :password, :string, :default => nil, :secret => true
80
+ config_param :cloud_id, :string, :default => nil
81
+ config_param :cloud_auth, :string, :default => nil
82
+ config_param :path, :string, :default => nil
83
+ config_param :scheme, :enum, :list => [:https, :http], :default => :http
84
+ config_param :hosts, :string, :default => nil
85
+ config_param :target_index_key, :string, :default => nil
86
+ config_param :target_type_key, :string, :default => nil,
87
+ :deprecated => <<EOC
88
+ Elasticsearch 7.x or above will ignore this config. Please use fixed type_name instead.
89
+ EOC
90
+ config_param :time_key_format, :string, :default => nil
91
+ config_param :time_precision, :integer, :default => 9
92
+ config_param :include_timestamp, :bool, :default => false
93
+ config_param :logstash_format, :bool, :default => false
94
+ config_param :logstash_prefix, :string, :default => "logstash"
95
+ config_param :logstash_prefix_separator, :string, :default => '-'
96
+ config_param :logstash_dateformat, :string, :default => "%Y.%m.%d"
97
+ config_param :utc_index, :bool, :default => true
98
+ config_param :type_name, :string, :default => DEFAULT_TYPE_NAME
99
+ config_param :suppress_type_name, :bool, :default => false
100
+ config_param :index_name, :string, :default => "fluentd"
101
+ config_param :id_key, :string, :default => nil
102
+ config_param :write_operation, :string, :default => "index"
103
+ config_param :parent_key, :string, :default => nil
104
+ config_param :routing_key, :string, :default => nil
105
+ config_param :request_timeout, :time, :default => 5
106
+ config_param :reload_connections, :bool, :default => true
107
+ config_param :reload_on_failure, :bool, :default => false
108
+ config_param :retry_tag, :string, :default=>nil
109
+ config_param :resurrect_after, :time, :default => 60
110
+ config_param :time_key, :string, :default => nil
111
+ config_param :time_key_exclude_timestamp, :bool, :default => false
112
+ config_param :ssl_verify , :bool, :default => true
113
+ config_param :client_key, :string, :default => nil
114
+ config_param :client_cert, :string, :default => nil
115
+ config_param :client_key_pass, :string, :default => nil, :secret => true
116
+ config_param :ca_file, :string, :default => nil
117
+ config_param :remove_keys, :string, :default => nil
118
+ config_param :remove_keys_on_update, :string, :default => ""
119
+ config_param :remove_keys_on_update_key, :string, :default => nil
120
+ config_param :flatten_hashes, :bool, :default => false
121
+ config_param :flatten_hashes_separator, :string, :default => "_"
122
+ config_param :template_name, :string, :default => nil
123
+ config_param :template_file, :string, :default => nil
124
+ config_param :template_overwrite, :bool, :default => false
125
+ config_param :customize_template, :hash, :default => nil
126
+ config_param :rollover_index, :string, :default => false
127
+ config_param :index_date_pattern, :string, :default => "now/d"
128
+ config_param :index_separator, :string, :default => "-"
129
+ config_param :deflector_alias, :string, :default => nil
130
+ config_param :index_prefix, :string, :default => "logstash",
131
+ obsoleted: "This parameter shouldn't be used in 4.0.0 or later. Specify ILM target index with using `index_name' w/o `logstash_format' or 'logstash_prefix' w/ `logstash_format' instead."
132
+ config_param :application_name, :string, :default => "default"
133
+ config_param :templates, :hash, :default => nil
134
+ config_param :max_retry_putting_template, :integer, :default => 10
135
+ config_param :fail_on_putting_template_retry_exceed, :bool, :default => true
136
+ config_param :fail_on_detecting_es_version_retry_exceed, :bool, :default => true
137
+ config_param :max_retry_get_es_version, :integer, :default => 15
138
+ config_param :include_tag_key, :bool, :default => false
139
+ config_param :tag_key, :string, :default => 'tag'
140
+ config_param :time_parse_error_tag, :string, :default => 'Fluent::ElasticsearchOutput::TimeParser.error'
141
+ config_param :reconnect_on_error, :bool, :default => false
142
+ config_param :pipeline, :string, :default => nil
143
+ config_param :with_transporter_log, :bool, :default => false
144
+ config_param :emit_error_for_missing_id, :bool, :default => false
145
+ config_param :sniffer_class_name, :string, :default => nil
146
+ config_param :selector_class_name, :string, :default => nil
147
+ config_param :reload_after, :integer, :default => DEFAULT_RELOAD_AFTER
148
+ config_param :content_type, :enum, list: [:"application/json", :"application/x-ndjson"], :default => :"application/json",
149
+ :deprecated => <<EOC
150
+ elasticsearch gem v6.0.2 starts to use correct Content-Type. Please upgrade elasticserach gem and stop to use this option.
151
+ see: https://github.com/elastic/elasticsearch-ruby/pull/514
152
+ EOC
153
+ config_param :include_index_in_url, :bool, :default => false
154
+ config_param :http_backend, :enum, list: [:excon, :typhoeus], :default => :excon
155
+ config_param :http_backend_excon_nonblock, :bool, :default => true
156
+ config_param :validate_client_version, :bool, :default => false
157
+ config_param :prefer_oj_serializer, :bool, :default => false
158
+ config_param :unrecoverable_error_types, :array, :default => ["out_of_memory_error", "es_rejected_execution_exception"]
159
+ config_param :verify_es_version_at_startup, :bool, :default => true
160
+ config_param :default_elasticsearch_version, :integer, :default => DEFAULT_ELASTICSEARCH_VERSION
161
+ config_param :log_es_400_reason, :bool, :default => false
162
+ config_param :custom_headers, :hash, :default => {}
163
+ config_param :api_key, :string, :default => nil, :secret => true
164
+ config_param :suppress_doc_wrap, :bool, :default => false
165
+ config_param :ignore_exceptions, :array, :default => [], value_type: :string, :desc => "Ignorable exception list"
166
+ config_param :exception_backup, :bool, :default => true, :desc => "Chunk backup flag when ignore exception occured"
167
+ config_param :bulk_message_request_threshold, :size, :default => TARGET_BULK_BYTES
168
+ config_param :compression_level, :enum, list: [:no_compression, :best_speed, :best_compression, :default_compression], :default => :no_compression
169
+ config_param :enable_ilm, :bool, :default => false
170
+ config_param :ilm_policy_id, :string, :default => DEFAULT_POLICY_ID
171
+ config_param :ilm_policy, :hash, :default => {}
172
+ config_param :ilm_policies, :hash, :default => {}
173
+ config_param :ilm_policy_overwrite, :bool, :default => false
174
+ config_param :truncate_caches_interval, :time, :default => nil
175
+ config_param :use_legacy_template, :bool, :default => true
176
+ config_param :catch_transport_exception_on_retry, :bool, :default => true
177
+
178
+ config_section :metadata, param_name: :metainfo, multi: false do
179
+ config_param :include_chunk_id, :bool, :default => false
180
+ config_param :chunk_id_key, :string, :default => "chunk_id".freeze
181
+ end
182
+
183
+ config_section :buffer do
184
+ config_set_default :@type, DEFAULT_BUFFER_TYPE
185
+ config_set_default :chunk_keys, ['tag']
186
+ config_set_default :timekey_use_utc, true
187
+ end
188
+
189
+ include Fluent::ElasticsearchIndexTemplate
190
+ include Fluent::Plugin::ElasticsearchConstants
191
+ include Fluent::Plugin::ElasticsearchIndexLifecycleManagement
192
+ include Fluent::Plugin::ElasticsearchTLS
193
+
194
+ # No plugin-specific state to set up here; defer entirely to the superclass.
+ def initialize
195
+ super
196
+ end
197
+
198
+ def configure(conf)
199
+ compat_parameters_convert(conf, :buffer)
200
+
201
+ super
202
+ if placeholder_substitution_needed_for_template?
203
+ # nop.
204
+ elsif not @buffer_config.chunk_keys.include? "tag" and
205
+ not @buffer_config.chunk_keys.include? "_index"
206
+ raise Fluent::ConfigError, "'tag' or '_index' in chunk_keys is required."
207
+ end
208
+ @time_parser = create_time_parser
209
+ @backend_options = backend_options
210
+ @ssl_version_options = set_tls_minmax_version_config(@ssl_version, @ssl_max_version, @ssl_min_version)
211
+
212
+ if @remove_keys
213
+ @remove_keys = @remove_keys.split(/\s*,\s*/)
214
+ end
215
+
216
+ if @target_index_key && @target_index_key.is_a?(String)
217
+ @target_index_key = @target_index_key.split '.'
218
+ end
219
+
220
+ if @target_type_key && @target_type_key.is_a?(String)
221
+ @target_type_key = @target_type_key.split '.'
222
+ end
223
+
224
+ if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
225
+ @remove_keys_on_update = @remove_keys_on_update.split ','
226
+ end
227
+
228
+ @api_key_header = setup_api_key
229
+
230
+ raise Fluent::ConfigError, "'max_retry_putting_template' must be greater than or equal to zero." if @max_retry_putting_template < 0
231
+ raise Fluent::ConfigError, "'max_retry_get_es_version' must be greater than or equal to zero." if @max_retry_get_es_version < 0
232
+
233
+ # Dump log when using host placeholders and template features at same time.
234
+ valid_host_placeholder = placeholder?(:host_placeholder, @host)
235
+ if valid_host_placeholder && (@template_name && @template_file || @templates)
236
+ if @verify_es_version_at_startup
237
+ raise Fluent::ConfigError, "host placeholder, template installation, and verify Elasticsearch version at startup are exclusive feature at same time. Please specify verify_es_version_at_startup as `false` when host placeholder and template installation are enabled."
238
+ end
239
+ log.info "host placeholder and template installation makes your Elasticsearch cluster a bit slow down(beta)."
240
+ end
241
+
242
+ raise Fluent::ConfigError, "You can't specify ilm_policy and ilm_policies at the same time" unless @ilm_policy.empty? or @ilm_policies.empty?
243
+
244
+ unless @ilm_policy.empty?
245
+ @ilm_policies = { @ilm_policy_id => @ilm_policy }
246
+ end
247
+ @alias_indexes = []
248
+ @template_names = []
249
+ if !dry_run?
250
+ if @template_name && @template_file
251
+ if @enable_ilm
252
+ raise Fluent::ConfigError, "deflector_alias is prohibited to use with enable_ilm at same time." if @deflector_alias
253
+ end
254
+ if @ilm_policy.empty? && @ilm_policy_overwrite
255
+ raise Fluent::ConfigError, "ilm_policy_overwrite requires a non empty ilm_policy."
256
+ end
257
+ if @logstash_format || placeholder_substitution_needed_for_template?
258
+ class << self
259
+ alias_method :template_installation, :template_installation_actual
260
+ end
261
+ else
262
+ template_installation_actual(@deflector_alias ? @deflector_alias : @index_name, @template_name, @customize_template, @application_name, @index_name, @ilm_policy_id)
263
+ end
264
+ verify_ilm_working if @enable_ilm
265
+ end
266
+ if @templates
267
+ retry_operate(@max_retry_putting_template,
268
+ @fail_on_putting_template_retry_exceed,
269
+ @catch_transport_exception_on_retry) do
270
+ templates_hash_install(@templates, @template_overwrite)
271
+ end
272
+ end
273
+ end
274
+
275
+ @truncate_mutex = Mutex.new
276
+ if @truncate_caches_interval
277
+ timer_execute(:out_elasticsearch_truncate_caches, @truncate_caches_interval) do
278
+ log.info('Clean up the indices and template names cache')
279
+
280
+ @truncate_mutex.synchronize {
281
+ @alias_indexes.clear
282
+ @template_names.clear
283
+ }
284
+ end
285
+ end
286
+
287
+ @serializer_class = nil
288
+ begin
289
+ require 'oj'
290
+ @dump_proc = Oj.method(:dump)
291
+ if @prefer_oj_serializer
292
+ @serializer_class = Fluent::Plugin::Serializer::Oj
293
+ Elasticsearch::API.settings[:serializer] = Fluent::Plugin::Serializer::Oj
294
+ end
295
+ rescue LoadError
296
+ @dump_proc = Yajl.method(:dump)
297
+ end
298
+
299
+ raise Fluent::ConfigError, "`cloud_auth` must be present if `cloud_id` is present" if @cloud_id && @cloud_auth.nil?
300
+ raise Fluent::ConfigError, "`password` must be present if `user` is present" if @user && @password.nil?
301
+
302
+ if @cloud_auth
303
+ @user = @cloud_auth.split(':', -1)[0]
304
+ @password = @cloud_auth.split(':', -1)[1]
305
+ end
306
+
307
+ if @user && m = @user.match(/%{(?<user>.*)}/)
308
+ @user = URI.encode_www_form_component(m["user"])
309
+ end
310
+ if @password && m = @password.match(/%{(?<password>.*)}/)
311
+ @password = URI.encode_www_form_component(m["password"])
312
+ end
313
+
314
+ @transport_logger = nil
315
+ if @with_transporter_log
316
+ @transport_logger = log
317
+ log_level = conf['@log_level'] || conf['log_level']
318
+ log.warn "Consider to specify log_level with @log_level." unless log_level
319
+ end
320
+ # Specify @sniffer_class before calling #client.
321
+ # #detect_es_major_version uses #client.
322
+ @sniffer_class = nil
323
+ begin
324
+ @sniffer_class = Object.const_get(@sniffer_class_name) if @sniffer_class_name
325
+ rescue Exception => ex
326
+ raise Fluent::ConfigError, "Could not load sniffer class #{@sniffer_class_name}: #{ex}"
327
+ end
328
+
329
+ @selector_class = nil
330
+ begin
331
+ @selector_class = Object.const_get(@selector_class_name) if @selector_class_name
332
+ rescue Exception => ex
333
+ raise Fluent::ConfigError, "Could not load selector class #{@selector_class_name}: #{ex}"
334
+ end
335
+
336
+ @last_seen_major_version = if major_version = handle_last_seen_es_major_version
337
+ major_version
338
+ else
339
+ @default_elasticsearch_version
340
+ end
341
+ if @suppress_type_name && @last_seen_major_version >= 7
342
+ @type_name = nil
343
+ else
344
+ if @last_seen_major_version == 6 && @type_name != DEFAULT_TYPE_NAME_ES_7x
345
+ log.info "Detected ES 6.x: ES 7.x will only accept `_doc` in type_name."
346
+ end
347
+ if @last_seen_major_version == 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
348
+ log.warn "Detected ES 7.x: `_doc` will be used as the document `_type`."
349
+ @type_name = '_doc'.freeze
350
+ end
351
+ if @last_seen_major_version >= 8 && @type_name != DEFAULT_TYPE_NAME_ES_7x
352
+ log.debug "Detected ES 8.x or above: This parameter has no effect."
353
+ @type_name = nil
354
+ end
355
+ end
356
+
357
+ if @validate_client_version && !dry_run?
358
+ if @last_seen_major_version != client_library_version.to_i
359
+ raise Fluent::ConfigError, <<-EOC
360
+ Detected ES #{@last_seen_major_version} but you use ES client #{client_library_version}.
361
+ Please consider to use #{@last_seen_major_version}.x series ES client.
362
+ EOC
363
+ end
364
+ end
365
+
366
+ if @last_seen_major_version >= 6
367
+ case @ssl_version
368
+ when :SSLv23, :TLSv1, :TLSv1_1
369
+ if @scheme == :https
370
+ log.warn "Detected ES 6.x or above and enabled insecure security:
371
+ You might have to specify `ssl_version TLSv1_2` in configuration."
372
+ end
373
+ end
374
+ end
375
+
376
+ if @ssl_version && @scheme == :https
377
+ if !@http_backend_excon_nonblock
378
+ log.warn "TLS handshake will be stucked with block connection.
379
+ Consider to set `http_backend_excon_nonblock` as true"
380
+ end
381
+ end
382
+
383
+ # Consider missing the prefix of "$." in nested key specifiers.
384
+ @id_key = convert_compat_id_key(@id_key) if @id_key
385
+ @parent_key = convert_compat_id_key(@parent_key) if @parent_key
386
+ @routing_key = convert_compat_id_key(@routing_key) if @routing_key
387
+
388
+ @routing_key_name = configure_routing_key_name
389
+ @meta_config_map = create_meta_config_map
390
+ @current_config = nil
391
+ @compressable_connection = false
392
+
393
+ @ignore_exception_classes = @ignore_exceptions.map do |exception|
394
+ unless Object.const_defined?(exception)
395
+ log.warn "Cannot find class #{exception}. Will ignore it."
396
+
397
+ nil
398
+ else
399
+ Object.const_get(exception)
400
+ end
401
+ end.compact
402
+
403
+ if @bulk_message_request_threshold < 0
404
+ class << self
405
+ alias_method :split_request?, :split_request_size_uncheck?
406
+ end
407
+ else
408
+ class << self
409
+ alias_method :split_request?, :split_request_size_check?
410
+ end
411
+ end
412
+
413
+ if Gem::Version.create(::Elasticsearch::Transport::VERSION) < Gem::Version.create("7.2.0")
414
+ if compression
415
+ raise Fluent::ConfigError, <<-EOC
416
+ Cannot use compression with elasticsearch-transport plugin version < 7.2.0
417
+ Your elasticsearch-transport plugin version version is #{Elasticsearch::Transport::VERSION}.
418
+ Please consider to upgrade ES client.
419
+ EOC
420
+ end
421
+ end
422
+ end
423
+
424
# Build the Authorization header for Elasticsearch API-key authentication.
#
# Returns an empty Hash when `api_key` is not configured; otherwise a
# one-entry Hash carrying the Base64-encoded key, suitable for merging
# into the request headers.
def setup_api_key
  return {} unless @api_key

  { "Authorization" => "ApiKey #{Base64.strict_encode64(@api_key)}" }
end
429
+
430
# Whether fluentd is running in configuration-check (dry-run) mode.
# Newer fluentd exposes this as `dry_run_mode`, older versions as
# `supervisor_mode`; returns nil when neither accessor exists.
def dry_run?
  engine = Fluent::Engine
  if engine.respond_to?(:dry_run_mode)
    engine.dry_run_mode
  elsif engine.respond_to?(:supervisor_mode)
    engine.supervisor_mode
  end
end
437
+
438
# Returns true when +param+ contains at least one valid built-in
# placeholder for configuration key +name+.
# Every validator is exercised (no short-circuit) so each invalid
# candidate gets logged at debug level, matching the original behavior.
def placeholder?(name, param)
  results = placeholder_validators(name, param).map do |validator|
    begin
      validator.validate!
      true
    rescue Fluent::ConfigError => e
      log.debug("'#{name} #{param}' is tested built-in placeholder(s) but there is no valid placeholder(s). error: #{e}")
      false
    end
  end
  results.include?(true)
end
451
+
452
# True unless compression was explicitly disabled via
# `compression_level no_compression`.
def compression
  @compression_level != :no_compression
end
455
+
456
# Map the configured `compression_level` onto the matching Zlib
# constant; anything unrecognised falls back to no compression.
def compression_strategy
  strategies = {
    default_compression: Zlib::DEFAULT_COMPRESSION,
    best_compression: Zlib::BEST_COMPRESSION,
    best_speed: Zlib::BEST_SPEED
  }
  strategies.fetch(@compression_level, Zlib::NO_COMPRESSION)
end
468
+
469
# Per-backend client-certificate/TLS options handed to the HTTP adapter.
# Typhoeus is loaded lazily; a missing backend gem surfaces as a
# Fluent::ConfigError rather than a bare LoadError.
def backend_options
  if @http_backend == :excon
    { client_key: @client_key, client_cert: @client_cert,
      client_key_pass: @client_key_pass, nonblock: @http_backend_excon_nonblock }
  elsif @http_backend == :typhoeus
    require 'typhoeus'
    { sslkey: @client_key, sslcert: @client_cert, keypasswd: @client_key_pass }
  end
rescue LoadError => ex
  log.error_backtrace(ex.backtrace)
  raise Fluent::ConfigError, "You must install #{@http_backend} gem. Exception: #{ex}"
end
481
+
482
# Probe the cluster for its major version at startup (with retries);
# returns nil when version verification is disabled or when running in
# dry-run mode.
def handle_last_seen_es_major_version
  return nil unless @verify_es_version_at_startup && !dry_run?

  retry_operate(@max_retry_get_es_version,
                @fail_on_detecting_es_version_retry_exceed,
                @catch_transport_exception_on_retry) do
    detect_es_major_version
  end
end
493
+
494
# Determine the cluster's major version from the (memoized) info API
# response, falling back to `default_elasticsearch_version` when the
# response carries no usable version number or cannot be dug into.
def detect_es_major_version
  @_es_info ||= client.info
  version =
    begin
      @_es_info.dig("version", "number") || @default_elasticsearch_version
    rescue NoMethodError => e
      log.warn "#{@_es_info} can not dig version information. Assuming Elasticsearch #{@default_elasticsearch_version}", error: e
      @default_elasticsearch_version
    end
  version.to_i
end
506
+
507
# Version string of the installed `elasticsearch` client gem, used by
# the client/server version-mismatch check.
def client_library_version
  Elasticsearch::VERSION
end
510
+
511
# Bulk-metadata routing field name: ES 7+ renamed `_routing` to `routing`.
def configure_routing_key_name
  @last_seen_major_version >= 7 ? 'routing' : '_routing'
end
518
+
519
# Normalize legacy dotted key paths ("a.b") into record_accessor syntax
# ("$.a.b"). Bracket-style paths ("$[...]") and keys already prefixed
# with "$." pass through unchanged, as do keys without dots.
def convert_compat_id_key(key)
  needs_prefix = key.include?('.') &&
                 !key.start_with?('$[') &&
                 !key.start_with?('$.')
  needs_prefix ? "$.#{key}" : key
end
525
+
526
# Build [record_accessor, metadata-field] pairs for the optional _id,
# _parent and routing fields; only configured keys contribute an entry.
def create_meta_config_map
  pairs = []
  pairs << [record_accessor_create(@id_key), '_id'] if @id_key
  pairs << [record_accessor_create(@parent_key), '_parent'] if @parent_key
  pairs << [record_accessor_create(@routing_key), @routing_key_name] if @routing_key
  pairs
end
533
+
534
# Build a Proc that parses a record's time value into a DateTime we can
# later `strftime` (Fluent::Parser::TimeParser returns [sec, nsec],
# which is not directly formattable, hence this hand-rolled parser).
#
# With a `time_key_format` the fast Strptime gem is tried first; when it
# is unavailable or rejects the format, we fall back to
# DateTime.strptime. Without a format, values go through DateTime.parse.
# Numeric values are first rendered to a string in the active format.
def create_time_parser
  return proc { |value|
    value = convert_numeric_time_into_string(value) if value.is_a?(Numeric)
    DateTime.parse(value)
  } unless @time_key_format

  begin
    strptime = Strptime.new(@time_key_format)
    proc { |value|
      value = convert_numeric_time_into_string(value, @time_key_format) if value.is_a?(Numeric)
      strptime.exec(value).to_datetime
    }
  rescue
    # Strptime missing (it is optional) or it rejected the format.
    proc { |value|
      value = convert_numeric_time_into_string(value, @time_key_format) if value.is_a?(Numeric)
      DateTime.strptime(value, @time_key_format)
    }
  end
end
563
+
564
# Render a numeric (epoch) time value as a string in +time_key_format+
# so the string-based time parsers can consume it.
def convert_numeric_time_into_string(numeric_time, time_key_format = "%Y-%m-%d %H:%M:%S.%N%z")
  parsed = Fluent::NumericTimeParser.new(:float).parse(numeric_time)
  Time.at(parsed.to_r).strftime(time_key_format)
end
568
+
569
# Parse a record's time field via the configured parser, falling back to
# the event's own timestamp (and emitting an error event on the
# time-parse-error tag) when the value cannot be parsed.
def parse_time(value, event_time, tag)
  @time_parser.call(value)
rescue => e
  router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
  Time.at(event_time).to_datetime
end
575
+
576
# Client for an Elastic Cloud deployment, addressed by cloud_id rather
# than an explicit host list.
def cloud_client
  Elasticsearch::Client.new(cloud_id: @cloud_id, user: @user, password: @password)
end
583
+
584
# Return (building and memoizing on demand) the Elasticsearch client for
# +host+. The cached client is discarded and rebuilt whenever the target
# host set or the compression setting differs from the cached connection.
def client(host = nil, compress_connection = false)
  return cloud_client if @cloud_id

  # Check whether we already have a client connection for the given host.
  connection_options = get_connection_options(host)

  @_es = nil unless is_existing_connection(connection_options[:hosts])
  @_es = nil unless @compressable_connection == compress_connection

  @_es ||= begin
    @compressable_connection = compress_connection
    @current_config = connection_options[:hosts].clone
    adapter_conf = lambda { |f| f.adapter @http_backend, @backend_options }
    # reload_after > default switches reload_connections from a boolean
    # into a request-count threshold understood by the transport.
    local_reload_connections = @reload_connections
    if local_reload_connections && @reload_after > DEFAULT_RELOAD_AFTER
      local_reload_connections = @reload_after
    end

    # Advertise gzip only when this connection is allowed to compress.
    gzip_headers = compress_connection ? { 'Content-Encoding' => 'gzip' } : {}
    headers = { 'Content-Type' => @content_type.to_s }
              .merge(@custom_headers)
              .merge(@api_key_header)
              .merge(gzip_headers)
    ssl_options = { verify: @ssl_verify, ca_file: @ca_file }.merge(@ssl_version_options)

    transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
      options: {
        reload_connections: local_reload_connections,
        reload_on_failure: @reload_on_failure,
        resurrect_after: @resurrect_after,
        logger: @transport_logger,
        transport_options: {
          headers: headers,
          request: { timeout: @request_timeout },
          ssl: ssl_options,
        },
        http: {
          user: @user,
          password: @password,
          scheme: @scheme
        },
        sniffer_class: @sniffer_class,
        serializer_class: @serializer_class,
        selector_class: @selector_class,
        compression: compress_connection,
      }), &adapter_conf)
    Elasticsearch::Client.new transport: transport
  end
end
637
+
638
# Percent-encode `%{user}:%{password}` userinfo embedded in a host URL
# so the string can safely be handed to URI(); strings without that
# pattern are returned untouched.
def get_escaped_userinfo(host_str)
  m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
  return host_str unless m

  encoded_user = URI.encode_www_form_component(m["user"])
  encoded_pass = URI.encode_www_form_component(m["password"])
  "#{m['scheme']}#{encoded_user}:#{encoded_pass}#{m['path']}"
end
649
+
650
# Build the hosts list handed to the Elasticsearch transport.
#
# Accepts either the legacy "host:port,host:port" form or full URLs
# (e.g. https://john:pass@logs.example.com/elastic). Falls back to the
# single configured host/port when neither +con_host+ nor `hosts` is
# set. The globally configured user, password and path are merged into
# every host entry that lacks its own.
def get_connection_options(con_host = nil)
  raw = con_host || @hosts
  hosts = if raw
            raw.split(',').map { |host_str| parse_host_entry(host_str) }.compact
          else
            [{ host: @host, port: @port, scheme: @scheme.to_s }]
          end
  hosts.each do |host|
    host.merge!(user: @user, password: @password) if !host[:user] && @user
    host.merge!(path: @path) if !host[:path] && @path
  end

  { hosts: hosts }
end

# Parse one comma-separated host entry into a host Hash.
def parse_host_entry(host_str)
  if host_str.match(%r{^[^:]+(\:\d+)?$})
    # Legacy "host" / "host:port" form; missing port defaults to @port.
    {
      host: host_str.split(':')[0],
      port: (host_str.split(':')[1] || @port).to_i,
      scheme: @scheme.to_s
    }
  else
    # URL form; userinfo placeholders are escaped before URI parsing.
    uri = URI(get_escaped_userinfo(host_str))
    %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
      hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
      hash
    end
  end
end
681
+
682
# Human-readable description of the resolved host list for log
# messages, with any password masked.
def connection_options_description(con_host = nil)
  get_connection_options(con_host)[:hosts].map { |host_info|
    masked = host_info.dup
    masked[:password] = 'obfuscated' if masked.has_key?(:password)
    masked.inspect
  }.join(', ')
end
689
+
690
# append_record_to_messages adds one record (header line + body line,
# newline-delimited) to the bulk payload +msgs+ to be submitted to
# Elasticsearch.
#
# For 'update'/'upsert'/'create' operations the record is skipped unless
# its metadata carries an '_id'; 'index' always appends; unknown
# operations append nothing.
#
# Returns true if the record was appended to the bulk message and false
# otherwise.
def append_record_to_messages(op, meta, header, record, msgs)
  case op
  when UPDATE_OP, UPSERT_OP
    return false unless meta.has_key?(ID_FIELD)
    header[UPDATE_OP] = meta
    msgs << @dump_proc.call(header) << BODY_DELIMITER
    msgs << @dump_proc.call(update_body(record, op)) << BODY_DELIMITER
    true
  when CREATE_OP
    return false unless meta.has_key?(ID_FIELD)
    header[CREATE_OP] = meta
    msgs << @dump_proc.call(header) << BODY_DELIMITER
    msgs << @dump_proc.call(record) << BODY_DELIMITER
    true
  when INDEX_OP
    header[INDEX_OP] = meta
    msgs << @dump_proc.call(header) << BODY_DELIMITER
    msgs << @dump_proc.call(record) << BODY_DELIMITER
    true
  else
    false
  end
end
721
+
722
# Builds the request body for an update/upsert bulk operation.
# The record is first stripped of the configured removal keys; unless
# @suppress_doc_wrap is set the result is wrapped in a {"doc" => ...}
# envelope. For upserts, the full record is attached either as
# doc_as_upsert (when nothing was stripped) or under the upsert key.
def update_body(record, op)
  update = remove_keys(record)
  return update if @suppress_doc_wrap
  body = { "doc".freeze => update }
  if op == UPSERT_OP
    if update == record
      # Nothing was stripped: let ES use the doc itself for insert-or-update.
      body["doc_as_upsert".freeze] = true
    else
      body[UPSERT_OP] = record
    end
  end
  body
end
737
+
738
# Strips the configured removal keys from +record+ before an update.
# The key list comes either from the record itself (under
# @remove_keys_on_update_key) or from the static @remove_keys_on_update
# setting. The lookup key is always deleted from the original record;
# the remaining deletions happen on a copy so the caller's record keeps
# its other fields.
def remove_keys(record)
  keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
  record.delete(@remove_keys_on_update_key)
  return record unless keys.any?
  stripped = record.dup
  keys.each { |key| stripped.delete(key) }
  stripped
end
746
+
747
# Recursively flattens nested hashes into a single-level hash whose keys
# are the joined key paths (segments joined with @flatten_hashes_separator).
# Arrays are kept untouched as leaf values.
def flatten_record(record, prefix=[])
  case record
  when Hash
    record.each_with_object({}) do |(key, value), flat|
      flat.merge!(flatten_record(value, prefix + [key.to_s]))
    end
  when Array
    # Don't mess with arrays, leave them unprocessed
    { prefix.join(@flatten_hashes_separator) => record }
  else
    { prefix.join(@flatten_hashes_separator) => record }
  end
end
761
+
762
# Resolves every placeholder-capable configuration string against the
# chunk's metadata. Settings that are not configured stay nil (except the
# always-present logstash_prefix/dateformat/index_name). Returns the values
# as a fixed-order tuple consumed by process_message and send_bulk.
def expand_placeholders(chunk)
  # Helper: expand only when the setting is configured, else nil.
  expand = ->(value) { value ? extract_placeholders(value, chunk) : nil }

  logstash_prefix     = extract_placeholders(@logstash_prefix, chunk)
  logstash_dateformat = extract_placeholders(@logstash_dateformat, chunk)
  index_name          = extract_placeholders(@index_name, chunk)
  type_name           = expand.call(@type_name)
  template_name       = expand.call(@template_name)
  customize_template  = if @customize_template
                          @customize_template.each_with_object({}) do |(key, value), hash|
                            hash[key] = extract_placeholders(value, chunk)
                          end
                        end
  deflector_alias     = expand.call(@deflector_alias)
  application_name    = expand.call(@application_name)
  pipeline            = expand.call(@pipeline)
  ilm_policy_id       = expand.call(@ilm_policy_id)

  return logstash_prefix, logstash_dateformat, index_name, type_name, template_name,
         customize_template, deflector_alias, application_name, pipeline, ilm_policy_id
end
803
+
804
# This output is safe to run under multiple Fluentd workers: each worker
# builds its own client connections and holds no exclusive shared resource.
def multi_workers_ready?
  true
end
807
+
808
# Writes the buffer chunk id into the record under the configured metainfo
# key when chunk-id injection is enabled; otherwise hands the record back
# untouched. Always returns the (possibly mutated) record.
def inject_chunk_id_to_record_if_needed(record, chunk_id)
  record[@metainfo.chunk_id_key] = chunk_id if @metainfo&.include_chunk_id
  record
end
816
+
817
# Fluentd Output entry point: converts one buffer chunk into one or more
# Elasticsearch bulk requests, grouped per RequestInfo (host/index/alias),
# and submits them via send_bulk.
#
# Per-record errors are routed to the error stream instead of failing the
# whole chunk; transport errors are raised from send_bulk for Fluentd retry.
def write(chunk)
  # Per-RequestInfo accumulators: record count and serialized NDJSON payload.
  bulk_message_count = Hash.new { |h,k| h[k] = 0 }
  bulk_message = Hash.new { |h,k| h[k] = '' }
  header = {}
  meta = {}

  tag = chunk.metadata.tag
  chunk_id = dump_unique_id_hex(chunk.unique_id)
  # Resolve all placeholder-capable settings once per chunk.
  extracted_values = expand_placeholders(chunk)
  host = if @hosts
           extract_placeholders(@hosts, chunk)
         else
           extract_placeholders(@host, chunk)
         end

  chunk.msgpack_each do |time, record|
    # Non-hash events cannot be indexed as documents; skip silently.
    next unless record.is_a? Hash

    record = inject_chunk_id_to_record_if_needed(record, chunk_id)

    begin
      # meta/header are reused across iterations; process_message clears meta.
      meta, header, record = process_message(tag, meta, header, time, record, extracted_values)
      info = if @include_index_in_url
               # Index goes into the URL, so it is deleted from the bulk meta.
               RequestInfo.new(host, meta.delete("_index".freeze), meta["_index".freeze], meta.delete("_alias".freeze))
             else
               RequestInfo.new(host, nil, meta["_index".freeze], meta.delete("_alias".freeze))
             end

      # split_request? is a configure-time-selected predicate; when the
      # accumulated payload is large enough, flush everything buffered so far.
      if split_request?(bulk_message, info)
        # NOTE(review): the block parameter |info| shadows the outer `info`
        # deliberately iterating ALL pending request groups; the trailing
        # `next` is a no-op.
        bulk_message.each do |info, msgs|
          send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
          msgs.clear
          # Clear bulk_message_count for this info.
          bulk_message_count[info] = 0;
          next
        end
      end

      if append_record_to_messages(@write_operation, meta, header, record, bulk_message[info])
        bulk_message_count[info] += 1;
      else
        # Record lacked '_id' for a create/update/upsert operation.
        if @emit_error_for_missing_id
          raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
        else
          log.on_debug { log.debug("Dropping record because its missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
        end
      end
    rescue => e
      # Per-record failure: hand the event to Fluentd's error stream.
      router.emit_error_event(tag, time, record, e)
    end
  end

  # Flush whatever is left for every request group.
  bulk_message.each do |info, msgs|
    send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
    msgs.clear
  end
end
874
+
875
# Decides whether the buffered bulk payload should be flushed before
# appending the next record.
# NOTE(review): intentionally a no-op here (returns nil, i.e. never split);
# presumably replaced at configure time by split_request_size_check? or
# split_request_size_uncheck? depending on @bulk_message_request_threshold —
# confirm against the configure section (outside this view).
def split_request?(bulk_message, info)
  # For safety.
end
878
+
879
# True when the accumulated bulk payload for +info+ has grown beyond the
# configured request-size threshold and should be flushed early.
def split_request_size_check?(bulk_message, info)
  @bulk_message_request_threshold < bulk_message[info].size
end
882
+
883
# Counterpart of split_request_size_check? used when no request-size
# threshold applies: never asks for an early flush.
def split_request_size_uncheck?(bulk_message, info)
  false
end
886
+
887
# Transforms one event into (meta, header, record) ready for the bulk
# payload: resolves the target index/alias and document type, stamps the
# @timestamp field, applies flattening/tagging, and fills the bulk meta.
#
# Mutates +record+ in place (timestamp, tag key, key removal) and clears
# and refills the shared +meta+ hash; callers must not retain references.
def process_message(tag, meta, header, time, record, extracted_values)
  logstash_prefix, logstash_dateformat, index_name, type_name, _template_name, _customize_template, _deflector_alias, application_name, pipeline, _ilm_policy_id = extracted_values

  if @flatten_hashes
    record = flatten_record(record)
  end

  # Determine the event time: an existing @timestamp wins, then @time_key,
  # otherwise the Fluentd event time.
  dt = nil
  if @logstash_format || @include_timestamp
    if record.has_key?(TIMESTAMP_FIELD)
      rts = record[TIMESTAMP_FIELD]
      dt = parse_time(rts, time, tag)
    elsif record.has_key?(@time_key)
      rts = record[@time_key]
      dt = parse_time(rts, time, tag)
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
    else
      dt = Time.at(time).to_datetime
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
    end
  end

  # Target index resolution priority: record field (@target_index_key),
  # then logstash-style date index, then the static index_name.
  target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
  if target_index_parent && target_index_parent[target_index_child_key]
    # The index field is consumed (deleted) from the record.
    target_index_alias = target_index = target_index_parent.delete(target_index_child_key)
  elsif @logstash_format
    dt = dt.new_offset(0) if @utc_index
    target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(logstash_dateformat)}"
    target_index_alias = "#{logstash_prefix}#{@logstash_prefix_separator}#{application_name}#{@logstash_prefix_separator}#{dt.strftime(logstash_dateformat)}"
  else
    target_index_alias = target_index = index_name
  end

  # Change target_index to lower-case since Elasticsearch doesn't
  # allow upper-case characters in index names.
  target_index = target_index.downcase
  target_index_alias = target_index_alias.downcase
  if @include_tag_key
    record[@tag_key] = tag
  end

  # Document _type resolution depends on the ES major version detected at
  # connect time; types are deprecated in 7.x and removed in 8.x.
  target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
  if target_type_parent && target_type_parent[target_type_child_key]
    target_type = target_type_parent.delete(target_type_child_key)
    if @last_seen_major_version == 6
      # NOTE(review): the record-supplied type is immediately overridden
      # here for ES >= 6 — the delete above still strips the field.
      log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
      target_type = type_name
    elsif @last_seen_major_version == 7
      log.warn "Detected ES 7.x: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    elsif @last_seen_major_version >=8
      log.debug "Detected ES 8.x or above: document type will not be used."
      target_type = nil
    end
  else
    if @suppress_type_name && @last_seen_major_version >= 7
      target_type = nil
    elsif @last_seen_major_version == 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
      log.warn "Detected ES 7.x: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    elsif @last_seen_major_version >= 8
      log.debug "Detected ES 8.x or above: document type will not be used."
      target_type = nil
    else
      target_type = type_name
    end
  end

  # Rebuild the shared meta hash for this record's bulk action line.
  meta.clear
  meta["_index".freeze] = target_index
  meta["_type".freeze] = target_type unless @last_seen_major_version >= 8
  meta["_alias".freeze] = target_index_alias

  if @pipeline
    meta["pipeline".freeze] = pipeline
  end

  # Copy configured record fields (e.g. id/parent keys) into the bulk meta.
  @meta_config_map.each do |record_accessor, meta_key|
    if raw_value = record_accessor.call(record)
      meta[meta_key] = raw_value
    end
  end

  if @remove_keys
    @remove_keys.each { |key| record.delete(key) }
  end

  return [meta, header, record]
end
976
+
977
# returns [parent, child_key] of child described by path array in record's tree
# returns [nil, child_key] if path doesnt exist in record
def get_parent_of(record, path)
  *parent_path, child_key = path
  parent_object = parent_path.reduce(record) do |node, key|
    node.is_a?(Hash) ? node[key] : nil
  end
  [parent_object, child_key]
end
983
+
984
# gzip compress data
#
# Compresses +string+ and returns the raw gzip bytes.
#
# Fixes over the previous version:
# * `StringIO.new("w")` passed "w" as the *initial buffer content*, not a
#   mode; it only worked because the gzip header overwrote it. Use an
#   empty StringIO instead.
# * The throwaway local `strategy = compression_strategy` was misleading:
#   GzipWriter.new's second positional argument is the compression *level*.
#   compression_strategy (defined elsewhere in this class — presumably it
#   returns a Zlib level constant; confirm) is passed through unchanged.
def gzip(string)
  buffer = StringIO.new
  gz = Zlib::GzipWriter.new(buffer, compression_strategy)
  gz.write(string)
  gz.close
  buffer.string
end
992
+
993
# True when any template-related setting contains a Fluentd placeholder,
# in which case template installation must be re-evaluated per chunk
# instead of once at startup.
def placeholder_substitution_needed_for_template?
  # Lazily evaluated checks: any?(&:call) stops at the first truthy result,
  # mirroring the short-circuiting of a chained ||.
  checks = [
    -> { placeholder?(:host, @host.to_s) },
    -> { placeholder?(:index_name, @index_name.to_s) },
    -> { placeholder?(:template_name, @template_name.to_s) },
    -> { @customize_template&.values&.any? { |value| placeholder?(:customize_template, value.to_s) } },
    -> { placeholder?(:logstash_prefix, @logstash_prefix.to_s) },
    -> { placeholder?(:logstash_dateformat, @logstash_dateformat.to_s) },
    -> { placeholder?(:deflector_alias, @deflector_alias.to_s) },
    -> { placeholder?(:application_name, @application_name.to_s) },
    -> { placeholder?(:ilm_policy_id, @ilm_policy_id.to_s) },
  ]
  need_substitution = checks.any?(&:call)
  log.debug("Need substitution: #{need_substitution}")
  need_substitution
end
1006
+
1007
# No-op placeholder for template installation.
# NOTE(review): presumably overridden at configure time (outside this view)
# to delegate to template_installation_actual — confirm.
#
# Fix: the parameter order previously declared (…, ilm_policy_id,
# target_index, host) while template_installation_actual and the call sites
# in send_bulk use (…, target_index, ilm_policy_id, host); the stub's
# signature silently swapped the two for any direct caller. Reordered for
# consistency.
def template_installation(deflector_alias, template_name, customize_template, application_name, target_index, ilm_policy_id, host)
  # for safety.
end
1010
+
1011
# Installs the index template (and, when configured, the rollover alias /
# ILM policy) for the given target, retrying with the configured retry
# policy. Results are cached in @alias_indexes / @template_names so the
# work is done at most once per alias+template per process.
def template_installation_actual(deflector_alias, template_name, customize_template, application_name, target_index, ilm_policy_id, host=nil)
  if template_name && @template_file
    # Cache hit: skip when not in logstash mode and both the alias (if any)
    # and template were already installed by this process.
    if !@logstash_format && (deflector_alias.nil? || (@alias_indexes.include? deflector_alias)) && (@template_names.include? template_name)
      if deflector_alias
        log.debug("Index alias #{deflector_alias} and template #{template_name} already exist (cached)")
      else
        log.debug("Template #{template_name} already exists (cached)")
      end
    else
      # Install template (custom or plain) and rollover alias under the
      # configured retry policy; transport errors may be retried.
      retry_operate(@max_retry_putting_template,
                    @fail_on_putting_template_retry_exceed,
                    @catch_transport_exception_on_retry) do
        if customize_template
          template_custom_install(template_name, @template_file, @template_overwrite, customize_template, @enable_ilm, deflector_alias, ilm_policy_id, host, target_index, @index_separator)
        else
          template_install(template_name, @template_file, @template_overwrite, @enable_ilm, deflector_alias, ilm_policy_id, host, target_index, @index_separator)
        end
        # Fall back to an empty ILM policy body when the id is unknown.
        ilm_policy = @ilm_policies[ilm_policy_id] || {}
        create_rollover_alias(target_index, @rollover_index, deflector_alias, application_name, @index_date_pattern, @index_separator, @enable_ilm, ilm_policy_id, ilm_policy, @ilm_policy_overwrite, host)
      end
      # Record success so subsequent chunks skip re-installation.
      @alias_indexes << deflector_alias unless deflector_alias.nil?
      @template_names << template_name
    end
  end
end
1036
+
1037
# send_bulk given a specific bulk request, the original tag,
# chunk, and bulk_message_count
#
# Ensures the template/alias exists for the request's target, optionally
# gzips the payload, submits it via the ES client, and translates failures:
#  * partial bulk errors  -> ElasticsearchErrorHandler (may raise RetryStreamError)
#  * RetryStreamError     -> re-emit the retry stream (or fail when buffer full)
#  * other exceptions     -> RecoverableRequestFailure unless configured as ignorable
def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info)
  _logstash_prefix, _logstash_dateformat, index_name, _type_name, template_name, customize_template, deflector_alias, application_name, _pipeline, ilm_policy_id = extracted_values
  if deflector_alias
    template_installation(deflector_alias, template_name, customize_template, application_name, index_name, ilm_policy_id, info.host)
  else
    # Without an explicit deflector alias, the ILM-derived index/alias from
    # RequestInfo drives template installation.
    template_installation(info.ilm_index, template_name, customize_template, application_name, @logstash_format ? info.ilm_alias : index_name, ilm_policy_id, info.host)
  end

  begin

    log.on_trace { log.trace "bulk request: #{data}" }

    # Compress the NDJSON payload only when compression is enabled.
    prepared_data = if compression
                      gzip(data)
                    else
                      data
                    end

    response = client(info.host, compression).bulk body: prepared_data, index: info.index
    log.on_trace { log.trace "bulk response: #{response}" }

    # Bulk API returns 200 even when individual items failed; delegate
    # per-item error handling (and possible retry stream) to the handler.
    if response['errors']
      error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
      error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
    end
  rescue RetryStreamError => e
    log.trace "router.emit_stream for retry stream doing..."
    emit_tag = @retry_tag ? @retry_tag : tag
    # check capacity of buffer space
    if retry_stream_retryable?
      router.emit_stream(emit_tag, e.retry_stream)
    else
      raise RetryStreamEmitFailure, "buffer is full."
    end
    log.trace "router.emit_stream for retry stream done."
  rescue => e
    ignore = @ignore_exception_classes.any? { |clazz| e.class <= clazz }

    log.warn "Exception ignored in tag #{tag}: #{e.class.name} #{e.message}" if ignore

    # Drop the cached client/cluster info so the next attempt reconnects.
    @_es = nil if @reconnect_on_error
    @_es_info = nil if @reconnect_on_error

    # Ignored exception + exception backup enabled: raise unrecoverable so
    # the chunk is routed to secondary/backup instead of retried.
    raise UnrecoverableRequestFailure if ignore && @exception_backup

    # FIXME: identify unrecoverable errors and raise UnrecoverableRequestFailure instead
    raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{connection_options_description(info.host)}): #{e.message}" unless ignore
  end
end
1088
+
1089
# Whether the output buffer still has capacity to accept the events of a
# retry stream; when false, re-emitting would be dropped on the floor, so
# send_bulk raises RetryStreamEmitFailure instead.
def retry_stream_retryable?
  @buffer.storable?
end
1092
+
1093
# check if the host provided match the current connection
#
# Compares the requested host list against the configuration the current
# client (@_es) was built with; returns true only when every host AND port
# pair matches, so the cached client can be reused.
#
# Fix: the previous condition
#   `!host[i][:host].eql? @current_config[i][:host] || host[i][:port] != ...`
# parsed the entire `||` expression as the single argument to eql?, so a
# port-only change never invalidated the connection. Host and port are now
# compared explicitly.
def is_existing_connection(host)
  return false if @_es.nil?
  return false if @current_config.nil?
  return false if host.length != @current_config.length

  host.each_with_index do |entry, i|
    current = @current_config[i]
    return false if entry[:host] != current[:host] || entry[:port] != current[:port]
  end

  true
end
1107
+ end
1108
+ end