fluent-plugin-elasticsearch2 3.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/Rakefile ADDED
@@ -0,0 +1,11 @@
+ require 'bundler/gem_tasks'
+ require 'rake/testtask'
+
+ Rake::TestTask.new(:test) do |test|
+   test.libs << 'test'
+   test.pattern = 'test/**/test_*.rb'
+   test.verbose = true
+   test.warning = false
+ end
+
+ task :default => :test
data/appveyor.yml ADDED
@@ -0,0 +1,30 @@
+ version: '{build}'
+ install:
+   - SET PATH=C:\Ruby%ruby_version%\bin;%PATH%
+   - "%devkit%\\devkitvars.bat"
+   - IF EXIST "%devkit%\\bin\\ridk.cmd" ridk.cmd enable
+   - ruby --version
+   - gem --version
+   - bundle install
+ build: off
+ test_script:
+   - bundle exec rake test
+
+ # https://www.appveyor.com/docs/installed-software/#ruby
+ environment:
+   matrix:
+     - ruby_version: "25-x64"
+       devkit: C:\Ruby23-x64\DevKit
+     - ruby_version: "25"
+       devkit: C:\Ruby23\DevKit
+     - ruby_version: "24-x64"
+       devkit: C:\Ruby23-x64\DevKit
+     - ruby_version: "24"
+       devkit: C:\Ruby23\DevKit
+     - ruby_version: "23-x64"
+       devkit: C:\Ruby23-x64\DevKit
+     - ruby_version: "22-x64"
+       devkit: C:\Ruby23-x64\DevKit
+ matrix:
+   allow_failures:
+     - ruby_version: "21"
data/fluent-plugin-elasticsearch2.gemspec ADDED
@@ -0,0 +1,31 @@
+ # -*- encoding: utf-8 -*-
+ $:.push File.expand_path('../lib', __FILE__)
+
+ Gem::Specification.new do |s|
+   s.name = 'fluent-plugin-elasticsearch2'
+   s.version = '3.5.5'
+   s.authors = ['diogo', 'pitr']
+   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
+   s.description = %q{Elasticsearch output plugin for Fluent event collector}
+   s.summary = s.description
+   s.homepage = 'https://github.com/uken/fluent-plugin-elasticsearch2'
+   s.license = 'Apache-2.0'
+
+   s.files = `git ls-files`.split($/)
+   s.executables = s.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+   s.require_paths = ['lib']
+
+   s.required_ruby_version = Gem::Requirement.new(">= 2.0".freeze)
+
+   s.add_runtime_dependency 'fluentd', '>= 0.14.22'
+   s.add_runtime_dependency 'excon', '>= 0'
+   s.add_runtime_dependency 'elasticsearch'
+
+
+   s.add_development_dependency 'rake', '>= 0'
+   s.add_development_dependency 'webmock', '~> 3'
+   s.add_development_dependency 'test-unit', '~> 3.1.0'
+   s.add_development_dependency 'minitest', '~> 5.8'
+   s.add_development_dependency 'flexmock', '~> 2.0'
+ end
data/lib/fluent/log-ext.rb ADDED
@@ -0,0 +1,38 @@
+ require 'fluent/log'
+ # For elasticsearch-ruby v7.0.0 or later,
+ # a logger for Elasticsearch::Loggable requires the following methods:
+ #
+ # * debug?
+ # * info?
+ # * warn?
+ # * error?
+ # * fatal?
+
+ module Fluent
+   class Log
+     # Elasticsearch::Loggable does not require the trace? method.
+     # def trace?
+     #   @level <= LEVEL_TRACE
+     # end
+
+     def debug?
+       @level <= LEVEL_DEBUG
+     end
+
+     def info?
+       @level <= LEVEL_INFO
+     end
+
+     def warn?
+       @level <= LEVEL_WARN
+     end
+
+     def error?
+       @level <= LEVEL_ERROR
+     end
+
+     def fatal?
+       @level <= LEVEL_FATAL
+     end
+   end
+ end
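elasticsearch-ruby only duck-types this contract: it asks the logger whether each severity is enabled. A minimal stand-in (illustrative sketch, not part of the gem) shows the level ordering at work:

    # A stand-in logger demonstrating the severity-predicate contract.
    class StandInLogger
      LEVELS = [:trace, :debug, :info, :warn, :error, :fatal]

      def initialize(level = :info)
        @level = LEVELS.index(level)
      end

      def debug?; @level <= LEVELS.index(:debug); end
      def info?;  @level <= LEVELS.index(:info);  end
      def warn?;  @level <= LEVELS.index(:warn);  end
      def error?; @level <= LEVELS.index(:error); end
      def fatal?; @level <= LEVELS.index(:fatal); end
    end

    StandInLogger.new(:warn).debug? # => false
    StandInLogger.new(:warn).error? # => true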
data/lib/fluent/plugin/elasticsearch_constants.rb ADDED
@@ -0,0 +1,13 @@
+ module Fluent
+   module Plugin
+     module ElasticsearchConstants
+       BODY_DELIMITER = "\n".freeze
+       UPDATE_OP = "update".freeze
+       UPSERT_OP = "upsert".freeze
+       CREATE_OP = "create".freeze
+       INDEX_OP = "index".freeze
+       ID_FIELD = "_id".freeze
+       TIMESTAMP_FIELD = "@timestamp".freeze
+     end
+   end
+ end
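These constants spell out the Bulk API wire format: one JSON action line, then (for most operations) one JSON source line, each terminated by BODY_DELIMITER. A sketch of a single `index` entry (illustrative only; the values are made up):

    require 'json'

    # Action/metadata line followed by the document source, newline-delimited.
    header = { "index" => { "_index" => "fluentd", "_type" => "_doc", "_id" => "abc123" } }
    record = { "@timestamp" => "2019-01-01T00:00:00Z", "message" => "hello" }

    bulk  = JSON.dump(header) + "\n"
    bulk += JSON.dump(record) + "\n"
    # => two NDJSON lines, ready to append to a bulk request body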
data/lib/fluent/plugin/elasticsearch_error.rb ADDED
@@ -0,0 +1,5 @@
+ require 'fluent/error'
+
+ class Fluent::Plugin::ElasticsearchError
+   class RetryableOperationExhaustedFailure < Fluent::UnrecoverableError; end
+ end
data/lib/fluent/plugin/elasticsearch_error_handler.rb ADDED
@@ -0,0 +1,127 @@
+ require 'fluent/event'
+ require 'fluent/error'
+ require_relative 'elasticsearch_constants'
+
+ class Fluent::Plugin::ElasticsearchErrorHandler
+   include Fluent::Plugin::ElasticsearchConstants
+
+   attr_accessor :bulk_message_count
+   class ElasticsearchVersionMismatch < Fluent::UnrecoverableError; end
+   class ElasticsearchSubmitMismatch < Fluent::UnrecoverableError; end
+   class ElasticsearchRequestAbortError < Fluent::UnrecoverableError; end
+   class ElasticsearchError < StandardError; end
+
+   def initialize(plugin)
+     @plugin = plugin
+   end
+
+   def unrecoverable_error_types
+     @plugin.unrecoverable_error_types
+   end
+
+   def unrecoverable_error?(type)
+     unrecoverable_error_types.include?(type)
+   end
+
+   def log_es_400_reason(&block)
+     if @plugin.log_es_400_reason
+       block.call
+     else
+       @plugin.log.on_debug(&block)
+     end
+   end
+
+   def handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+     items = response['items']
+     if items.nil? || !items.is_a?(Array)
+       raise ElasticsearchVersionMismatch, "The response format was unrecognized: #{response}"
+     end
+     if bulk_message_count != items.length
+       raise ElasticsearchSubmitMismatch, "The number of records submitted (#{bulk_message_count}) does not match the number returned (#{items.length}). Unable to process bulk response."
+     end
+     retry_stream = Fluent::MultiEventStream.new
+     stats = Hash.new(0)
+     meta = {}
+     header = {}
+     chunk.msgpack_each do |time, rawrecord|
+       bulk_message = ''
+       next unless rawrecord.is_a? Hash
+       begin
+         # we need a deep copy for process_message to alter
+         processrecord = Marshal.load(Marshal.dump(rawrecord))
+         meta, header, record = @plugin.process_message(tag, meta, header, time, processrecord, extracted_values)
+         next unless @plugin.append_record_to_messages(@plugin.write_operation, meta, header, record, bulk_message)
+       rescue => e
+         stats[:bad_chunk_record] += 1
+         next
+       end
+       item = items.shift
+       if item.is_a?(Hash) && item.has_key?(@plugin.write_operation)
+         write_operation = @plugin.write_operation
+       elsif INDEX_OP == @plugin.write_operation && item.is_a?(Hash) && item.has_key?(CREATE_OP)
+         write_operation = CREATE_OP
+       elsif item.nil?
+         stats[:errors_nil_resp] += 1
+         next
+       else
+         # When we don't have an expected ops field, something changed in the API
+         # expected return values (ES 2.x)
+         stats[:errors_bad_resp] += 1
+         next
+       end
+       if item[write_operation].has_key?('status')
+         status = item[write_operation]['status']
+       else
+         # When we don't have a status field, something changed in the API
+         # expected return values (ES 2.x)
+         stats[:errors_bad_resp] += 1
+         next
+       end
+       case
+       when [200, 201].include?(status)
+         stats[:successes] += 1
+       when CREATE_OP == write_operation && 409 == status
+         stats[:duplicates] += 1
+       when 400 == status
+         stats[:bad_argument] += 1
+         reason = ""
+         log_es_400_reason do
+           if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
+             reason = " [error type]: #{item[write_operation]['error']['type']}"
+           end
+           if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('reason')
+             reason += " [reason]: '#{item[write_operation]['error']['reason']}'"
+           end
+         end
+         @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("400 - Rejected by Elasticsearch#{reason}"))
+       else
+         if item[write_operation]['error'].is_a?(String)
+           reason = item[write_operation]['error']
+           stats[:errors_block_resp] += 1
+           @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("#{status} - #{reason}"))
+           next
+         elsif item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
+           type = item[write_operation]['error']['type']
+           stats[type] += 1
+           retry_stream.add(time, rawrecord)
+           if unrecoverable_error?(type)
+             raise ElasticsearchRequestAbortError, "Rejected Elasticsearch due to #{type}"
+           end
+         else
+           # When we don't have a type field, something changed in the API
+           # expected return values (ES 2.x)
+           stats[:errors_bad_resp] += 1
+           @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("#{status} - No error type provided in the response"))
+           next
+         end
+         stats[type] += 1
+       end
+     end
+     @plugin.log.on_debug do
+       msg = ["Indexed (op = #{@plugin.write_operation})"]
+       stats.each_pair { |key, value| msg << "#{value} #{key}" }
+       @plugin.log.debug msg.join(', ')
+     end
+     raise Fluent::Plugin::ElasticsearchOutput::RetryStreamError.new(retry_stream) unless retry_stream.empty?
+   end
+ end
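The `Marshal.load(Marshal.dump(...))` idiom above is how the handler deep-copies a record before process_message mutates it; Hash#dup alone would still share nested values. A quick illustration (not from the gem):

    original = { "user" => { "name" => "alice" } }

    shallow = original.dup
    deep    = Marshal.load(Marshal.dump(original))

    shallow["user"]["name"] = "bob"
    original["user"]["name"] # => "bob"  (the shared nested hash was mutated)
    deep["user"]["name"]     # => "alice" (independent copy)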
data/lib/fluent/plugin/elasticsearch_index_template.rb ADDED
@@ -0,0 +1,112 @@
+ require 'fluent/error'
+ require_relative './elasticsearch_error'
+
+ module Fluent::ElasticsearchIndexTemplate
+   def get_template(template_file)
+     if !File.exist?(template_file)
+       raise "If you specify a template_name you must specify a valid template file (checked '#{template_file}')!"
+     end
+     file_contents = IO.read(template_file).gsub(/\n/, '')
+     JSON.parse(file_contents)
+   end
+
+   def get_custom_template(template_file, customize_template)
+     if !File.exist?(template_file)
+       raise "If you specify a template_name you must specify a valid template file (checked '#{template_file}')!"
+     end
+     file_contents = IO.read(template_file).gsub(/\n/, '')
+     customize_template.each do |key, value|
+       file_contents = file_contents.gsub(key, value.downcase)
+     end
+     JSON.parse(file_contents)
+   end
+
+   def template_exists?(name)
+     client.indices.get_template(:name => name)
+     return true
+   rescue Elasticsearch::Transport::Transport::Errors::NotFound
+     return false
+   end
+
+   def retry_operate(max_retries, fail_on_retry_exceed = true)
+     return unless block_given?
+     retries = 0
+     begin
+       yield
+     rescue *client.transport.host_unreachable_exceptions, Timeout::Error => e
+       @_es = nil
+       @_es_info = nil
+       if retries < max_retries
+         retries += 1
+         wait_seconds = 2**retries
+         sleep wait_seconds
+         log.warn "Could not communicate with Elasticsearch, resetting connection and trying again. #{e.message}"
+         log.warn "Remaining retries: #{max_retries - retries}. Retrying after #{wait_seconds} second(s)."
+         retry
+       end
+       message = "Could not communicate with Elasticsearch after #{retries} retries. #{e.message}"
+       log.warn message
+       raise Fluent::Plugin::ElasticsearchError::RetryableOperationExhaustedFailure,
+             message if fail_on_retry_exceed
+     end
+   end
+
+   def template_put(name, template)
+     client.indices.put_template(:name => name, :body => template)
+   end
+
+   def indexcreation(index_name)
+     client.indices.create(:index => index_name)
+   rescue Elasticsearch::Transport::Transport::Error => e
+     log.error("Error while creating index #{index_name}: #{e.inspect}")
+   end
+
+   def template_install(name, template_file, overwrite)
+     if overwrite
+       template_put(name, get_template(template_file))
+       log.info("Template '#{name}' overwritten with #{template_file}.")
+       return
+     end
+     if !template_exists?(name)
+       template_put(name, get_template(template_file))
+       log.info("Template configured, but no template installed. Installed '#{name}' from #{template_file}.")
+     else
+       log.info("Template configured and already installed.")
+     end
+   end
+
+   def template_custom_install(template_name, template_file, overwrite, customize_template, index_prefix, rollover_index, deflector_alias_name, app_name, index_date_pattern)
+     template_custom_name = template_name.downcase
+     if overwrite
+       template_put(template_custom_name, get_custom_template(template_file, customize_template))
+       log.info("Template '#{template_custom_name}' overwritten with #{template_file}.")
+     else
+       if !template_exists?(template_custom_name)
+         template_put(template_custom_name, get_custom_template(template_file, customize_template))
+         log.info("Template configured, but no template installed. Installed '#{template_custom_name}' from #{template_file}.")
+       else
+         log.info("Template configured and already installed.")
+       end
+     end
+
+     if rollover_index
+       if !client.indices.exists_alias(:name => deflector_alias_name)
+         index_name_temp = '<' + index_prefix.downcase + '-' + app_name.downcase + '-{' + index_date_pattern + '}-000001>'
+         indexcreation(index_name_temp)
+         client.indices.put_alias(:index => index_name_temp, :name => deflector_alias_name)
+         log.info("The alias '#{deflector_alias_name}' has been created for the index '#{index_name_temp}'.")
+       else
+         log.info("The alias '#{deflector_alias_name}' is already present.")
+       end
+     else
+       log.info("No index or alias creation was performed because rollover_index is set to '#{rollover_index}'.")
+     end
+   end
+
+   def templates_hash_install(templates, overwrite)
+     templates.each do |key, value|
+       template_install(key, value, overwrite)
+     end
+   end
+
+ end
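Note the backoff schedule in retry_operate: attempt n sleeps 2**n seconds before retrying, so the waits grow geometrically. For example:

    # Sleep lengths for max_retries = 3, per the 2**retries formula above.
    (1..3).map { |retries| 2**retries } # => [2, 4, 8] seconds, then the failure is raised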
data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb ADDED
@@ -0,0 +1,10 @@
+ require 'elasticsearch'
+
+ class Fluent::Plugin::ElasticsearchSimpleSniffer < Elasticsearch::Transport::Transport::Sniffer
+
+   def hosts
+     @transport.logger.debug "In Fluent::Plugin::ElasticsearchSimpleSniffer hosts #{@transport.hosts}" if @transport.logger
+     @transport.hosts
+   end
+
+ end
data/lib/fluent/plugin/filter_elasticsearch_genid.rb ADDED
@@ -0,0 +1,25 @@
+ require 'securerandom'
+ require 'base64'
+ require 'fluent/plugin/filter'
+
+ module Fluent::Plugin
+   class ElasticsearchGenidFilter < Filter
+     Fluent::Plugin.register_filter('elasticsearch_genid', self)
+
+     config_param :hash_id_key, :string, :default => '_hash'
+
+     def initialize
+       super
+     end
+
+     def configure(conf)
+       super
+     end
+
+     def filter(tag, time, record)
+       record[@hash_id_key] = Base64.strict_encode64(SecureRandom.uuid)
+       record
+     end
+
+   end
+ end
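The filter just stamps each record with a random, Base64-encoded UUID under hash_id_key (default '_hash'). What it does to a record, sketched outside the plugin:

    require 'securerandom'
    require 'base64'

    record = { "message" => "hello" }
    record["_hash"] = Base64.strict_encode64(SecureRandom.uuid)
    record # => {"message"=>"hello", "_hash"=>"..."} (value differs per event)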
data/lib/fluent/plugin/oj_serializer.rb ADDED
@@ -0,0 +1,22 @@
+ require 'oj'
+
+ module Fluent::Plugin
+   module Serializer
+
+     class Oj
+       include Elasticsearch::Transport::Transport::Serializer::Base
+
+       # De-serialize a Hash from JSON string
+       #
+       def load(string, options = {})
+         ::Oj.load(string, options)
+       end
+
+       # Serialize a Hash to JSON string
+       #
+       def dump(object, options = {})
+         ::Oj.dump(object, options)
+       end
+     end
+   end
+ end
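The serializer swaps elasticsearch-ruby's default JSON codec for Oj. The round trip it performs looks roughly like this (a sketch assuming the oj gem is installed):

    require 'oj'

    json = ::Oj.dump({ "took" => 3, "errors" => false })
    ::Oj.load(json) # => {"took"=>3, "errors"=>false}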
data/lib/fluent/plugin/out_elasticsearch2.rb ADDED
@@ -0,0 +1,782 @@
+ # encoding: UTF-8
+ require 'date'
+ require 'excon'
+ require 'elasticsearch'
+ require 'json'
+ require 'uri'
+ begin
+   require 'strptime'
+ rescue LoadError
+ end
+
+ require 'fluent/plugin/output'
+ require 'fluent/event'
+ require 'fluent/error'
+ require 'fluent/time'
+ require 'fluent/log-ext'
+ require_relative 'elasticsearch_constants'
+ require_relative 'elasticsearch_error'
+ require_relative 'elasticsearch_error_handler'
+ require_relative 'elasticsearch_index_template'
+ begin
+   require_relative 'oj_serializer'
+ rescue LoadError
+ end
+
+ module Fluent::Plugin
+   class ElasticsearchOutput < Output
+     class RecoverableRequestFailure < StandardError; end
+     class UnrecoverableRequestFailure < Fluent::UnrecoverableError; end
+
+     # MissingIdFieldError is raised for records that do not
+     # include the field for the unique record identifier
+     class MissingIdFieldError < StandardError; end
+
+     # RetryStreamError provides a stream to be
+     # put back in the pipeline for cases where a bulk request
+     # failed (e.g. some records succeeded while others failed)
+     class RetryStreamError < StandardError
+       attr_reader :retry_stream
+       def initialize(retry_stream)
+         @retry_stream = retry_stream
+       end
+     end
+
+     RequestInfo = Struct.new(:host, :index)
+
+     helpers :event_emitter, :compat_parameters, :record_accessor
+
+     Fluent::Plugin.register_output('elasticsearch2', self)
+
+     DEFAULT_BUFFER_TYPE = "memory"
+     DEFAULT_ELASTICSEARCH_VERSION = 5 # For compatibility.
+     DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
+     DEFAULT_TYPE_NAME = "fluentd".freeze
+     DEFAULT_RELOAD_AFTER = -1
+     TARGET_BULK_BYTES = 20 * 1024 * 1024
+
+     config_param :host, :string, :default => 'localhost'
+     config_param :port, :integer, :default => 9200
+     config_param :user, :string, :default => nil
+     config_param :password, :string, :default => nil, :secret => true,
+                  :deprecated => <<EOC
+ This is a pass.
+ EOC
+     config_param :path, :string, :default => nil
+     config_param :scheme, :enum, :list => [:https, :http], :default => :http
+     config_param :hosts, :string, :default => nil
+     config_param :target_index_key, :string, :default => nil
+     config_param :target_type_key, :string, :default => nil,
+                  :deprecated => <<EOC
+ Elasticsearch 7.x or above will ignore this config. Please use fixed type_name instead.
+ EOC
+     config_param :time_key_format, :string, :default => nil
+     config_param :time_precision, :integer, :default => 9
+     config_param :include_timestamp, :bool, :default => false
+     config_param :logstash_format, :bool, :default => false
+     config_param :logstash_prefix, :string, :default => "logstash"
+     config_param :logstash_prefix_separator, :string, :default => '-'
+     config_param :logstash_dateformat, :string, :default => "%Y.%m.%d"
+     config_param :utc_index, :bool, :default => true
+     config_param :type_name, :string, :default => DEFAULT_TYPE_NAME
+     config_param :index_name, :string, :default => "fluentd"
+     config_param :id_key, :string, :default => nil
+     config_param :write_operation, :string, :default => "index"
+     config_param :parent_key, :string, :default => nil
+     config_param :routing_key, :string, :default => nil
+     config_param :request_timeout, :time, :default => 5
+     config_param :reload_connections, :bool, :default => true
+     config_param :reload_on_failure, :bool, :default => false
+     config_param :retry_tag, :string, :default => nil
+     config_param :resurrect_after, :time, :default => 60
+     config_param :time_key, :string, :default => nil
+     config_param :time_key_exclude_timestamp, :bool, :default => false
+     config_param :ssl_verify, :bool, :default => true
+     config_param :client_key, :string, :default => nil
+     config_param :client_cert, :string, :default => nil
+     config_param :client_key_pass, :string, :default => nil, :secret => true
+     config_param :ca_file, :string, :default => nil
+     config_param :ssl_version, :enum, list: [:SSLv23, :TLSv1, :TLSv1_1, :TLSv1_2], :default => :TLSv1
+     config_param :remove_keys, :string, :default => nil
+     config_param :remove_keys_on_update, :string, :default => ""
+     config_param :remove_keys_on_update_key, :string, :default => nil
+     config_param :flatten_hashes, :bool, :default => false
+     config_param :flatten_hashes_separator, :string, :default => "_"
+     config_param :template_name, :string, :default => nil
+     config_param :template_file, :string, :default => nil
+     config_param :template_overwrite, :bool, :default => false
+     config_param :customize_template, :hash, :default => nil
+     config_param :rollover_index, :string, :default => false
+     config_param :index_date_pattern, :string, :default => "now/d"
+     config_param :deflector_alias, :string, :default => nil
+     config_param :index_prefix, :string, :default => "logstash"
+     config_param :application_name, :string, :default => "default"
+     config_param :templates, :hash, :default => nil
+     config_param :max_retry_putting_template, :integer, :default => 10
+     config_param :fail_on_putting_template_retry_exceed, :bool, :default => true
+     config_param :max_retry_get_es_version, :integer, :default => 15
+     config_param :include_tag_key, :bool, :default => false
+     config_param :tag_key, :string, :default => 'tag'
+     config_param :time_parse_error_tag, :string, :default => 'Fluent::ElasticsearchOutput::TimeParser.error'
+     config_param :reconnect_on_error, :bool, :default => false
+     config_param :pipeline, :string, :default => nil
+     config_param :with_transporter_log, :bool, :default => false
+     config_param :emit_error_for_missing_id, :bool, :default => false
+     config_param :sniffer_class_name, :string, :default => nil
+     config_param :reload_after, :integer, :default => DEFAULT_RELOAD_AFTER
+     config_param :content_type, :enum, list: [:"application/json", :"application/x-ndjson"], :default => :"application/json",
+                  :deprecated => <<EOC
+ elasticsearch gem v6.0.2 starts to use the correct Content-Type. Please upgrade the elasticsearch gem and stop using this option.
+ see: https://github.com/elastic/elasticsearch-ruby/pull/514
+ EOC
+     config_param :include_index_in_url, :bool, :default => false
+     config_param :http_backend, :enum, list: [:excon, :typhoeus], :default => :excon
+     config_param :validate_client_version, :bool, :default => false
+     config_param :prefer_oj_serializer, :bool, :default => false
+     config_param :unrecoverable_error_types, :array, :default => ["out_of_memory_error", "es_rejected_execution_exception"]
+     config_param :verify_es_version_at_startup, :bool, :default => true
+     config_param :default_elasticsearch_version, :integer, :default => DEFAULT_ELASTICSEARCH_VERSION
+     config_param :log_es_400_reason, :bool, :default => false
+     config_param :custom_headers, :hash, :default => {}
+     config_param :suppress_doc_wrap, :bool, :default => false
+     config_param :ignore_exceptions, :array, :default => [], value_type: :string, :desc => "Ignorable exception list"
+     config_param :exception_backup, :bool, :default => true, :desc => "Chunk backup flag when an ignored exception occurred"
+     config_param :bulk_message_request_threshold, :size, :default => TARGET_BULK_BYTES
+
+     config_section :buffer do
+       config_set_default :@type, DEFAULT_BUFFER_TYPE
+       config_set_default :chunk_keys, ['tag']
+       config_set_default :timekey_use_utc, true
+     end
+
+     include Fluent::ElasticsearchIndexTemplate
+     include Fluent::Plugin::ElasticsearchConstants
+
+     def initialize
+       super
+     end
+
+     def configure(conf)
+       compat_parameters_convert(conf, :buffer)
+
+       super
+       raise Fluent::ConfigError, "'tag' in chunk_keys is required." if not @chunk_key_tag
+
+       @time_parser = create_time_parser
+       @backend_options = backend_options
+
+       if @remove_keys
+         @remove_keys = @remove_keys.split(/\s*,\s*/)
+       end
+
+       if @target_index_key && @target_index_key.is_a?(String)
+         @target_index_key = @target_index_key.split '.'
+       end
+
+       if @target_type_key && @target_type_key.is_a?(String)
+         @target_type_key = @target_type_key.split '.'
+       end
+
+       if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
+         @remove_keys_on_update = @remove_keys_on_update.split ','
+       end
+
+       raise Fluent::ConfigError, "'max_retry_putting_template' must be greater than or equal to zero." if @max_retry_putting_template < 0
+       raise Fluent::ConfigError, "'max_retry_get_es_version' must be greater than or equal to zero." if @max_retry_get_es_version < 0
+
+       # Raise an error when using host placeholders and template features at the same time.
+       valid_host_placeholder = placeholder?(:host_placeholder, @host)
+       if valid_host_placeholder && (@template_name && @template_file || @templates)
+         raise Fluent::ConfigError, "host placeholder and template installation are exclusive features."
+       end
+
+       if !Fluent::Engine.dry_run_mode
+         if @template_name && @template_file
+           retry_operate(@max_retry_putting_template, @fail_on_putting_template_retry_exceed) do
+             if @customize_template
+               if @rollover_index
+                 raise Fluent::ConfigError, "'deflector_alias' must be provided if 'rollover_index' is set to true." if not @deflector_alias
+               end
+               template_custom_install(@template_name, @template_file, @template_overwrite, @customize_template, @index_prefix, @rollover_index, @deflector_alias, @application_name, @index_date_pattern)
+             else
+               template_install(@template_name, @template_file, @template_overwrite)
+             end
+           end
+         elsif @templates
+           retry_operate(@max_retry_putting_template, @fail_on_putting_template_retry_exceed) do
+             templates_hash_install(@templates, @template_overwrite)
+           end
+         end
+       end
+
+       @serializer_class = nil
+       begin
+         require 'oj'
+         @dump_proc = Oj.method(:dump)
+         if @prefer_oj_serializer
+           @serializer_class = Fluent::Plugin::Serializer::Oj
+           Elasticsearch::API.settings[:serializer] = Fluent::Plugin::Serializer::Oj
+         end
+       rescue LoadError
+         @dump_proc = Yajl.method(:dump)
+       end
+
+       raise Fluent::ConfigError, "`password` must be present if `user` is present" if @user && @password.nil?
+
+       if @user && m = @user.match(/%{(?<user>.*)}/)
+         @user = URI.encode_www_form_component(m["user"])
+       end
+       if @password && m = @password.match(/%{(?<password>.*)}/)
+         @password = URI.encode_www_form_component(m["password"])
+       end
+
+       @transport_logger = nil
+       if @with_transporter_log
+         @transport_logger = log
+         log_level = conf['@log_level'] || conf['log_level']
+         log.warn "Consider specifying log_level with @log_level." unless log_level
+       end
+       # Specify @sniffer_class before calling #client.
+       # #detect_es_major_version uses #client.
+       @sniffer_class = nil
+       begin
+         @sniffer_class = Object.const_get(@sniffer_class_name) if @sniffer_class_name
+       rescue Exception => ex
+         raise Fluent::ConfigError, "Could not load sniffer class #{@sniffer_class_name}: #{ex}"
+       end
+
+       @last_seen_major_version =
+         if @verify_es_version_at_startup && !Fluent::Engine.dry_run_mode
+           retry_operate(@max_retry_get_es_version) do
+             detect_es_major_version
+           end
+         else
+           @default_elasticsearch_version
+         end
+       if @last_seen_major_version == 6 && @type_name != DEFAULT_TYPE_NAME_ES_7x
+         log.info "Detected ES 6.x: ES 7.x will only accept `_doc` in type_name."
+       end
+       if @last_seen_major_version >= 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
+         log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
+         @type_name = '_doc'.freeze
+       end
+
+       if @validate_client_version && !Fluent::Engine.dry_run_mode
+         if @last_seen_major_version != client_library_version.to_i
+           raise Fluent::ConfigError, <<-EOC
+             Detected ES #{@last_seen_major_version} but you use ES client #{client_library_version}.
+             Please consider using the #{@last_seen_major_version}.x series ES client.
+           EOC
+         end
+       end
+
+       if @last_seen_major_version >= 6
+         case @ssl_version
+         when :SSLv23, :TLSv1, :TLSv1_1
+           if @scheme == :https
+             log.warn "Detected ES 6.x or above with an insecure TLS version enabled:
+                       You might have to specify `ssl_version TLSv1_2` in configuration."
+           end
+         end
+       end
+
+       if @buffer_config.flush_thread_count < 2
+         log.warn "To prevent an event traffic jam, you should specify a 'flush_thread_count' of 2 or more."
+       end
+
+       # Tolerate a missing "$." prefix in nested key specifiers.
+       @id_key = convert_compat_id_key(@id_key) if @id_key
+       @parent_key = convert_compat_id_key(@parent_key) if @parent_key
+       @routing_key = convert_compat_id_key(@routing_key) if @routing_key
+
+       @routing_key_name = configure_routing_key_name
+       @meta_config_map = create_meta_config_map
+       @current_config = nil
+
+       @ignore_exception_classes = @ignore_exceptions.map do |exception|
+         unless Object.const_defined?(exception)
+           log.warn "Cannot find class #{exception}. Will ignore it."
+
+           nil
+         else
+           Object.const_get(exception)
+         end
+       end.compact
+
+       if @bulk_message_request_threshold < 0
+         class << self
+           alias_method :split_request?, :split_request_size_uncheck?
+         end
+       else
+         class << self
+           alias_method :split_request?, :split_request_size_check?
+         end
+       end
+     end
+
+     def placeholder?(name, param)
+       begin
+         placeholder_validate!(name, param)
+         true
+       rescue Fluent::ConfigError
+         false
+       end
+     end
+
+     def backend_options
+       case @http_backend
+       when :excon
+         { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass }
+       when :typhoeus
+         require 'typhoeus'
+         { sslkey: @client_key, sslcert: @client_cert, keypasswd: @client_key_pass }
+       end
+     rescue LoadError => ex
+       log.error_backtrace(ex.backtrace)
+       raise Fluent::ConfigError, "You must install the #{@http_backend} gem. Exception: #{ex}"
+     end
+
+     def detect_es_major_version
+       @_es_info ||= client.info
+       @_es_info["version"]["number"].to_i
+     end
+
+     def client_library_version
+       Elasticsearch::VERSION
+     end
+
+     def configure_routing_key_name
+       if @last_seen_major_version >= 7
+         'routing'
+       else
+         '_routing'
+       end
+     end
+
+     def convert_compat_id_key(key)
+       if key.include?('.') && !key.start_with?('$[')
+         key = "$.#{key}" unless key.start_with?('$.')
+       end
+       key
+     end
+
+     def create_meta_config_map
+       result = []
+       result << [record_accessor_create(@id_key), '_id'] if @id_key
+       result << [record_accessor_create(@parent_key), '_parent'] if @parent_key
+       result << [record_accessor_create(@routing_key), @routing_key_name] if @routing_key
+       result
+     end
+
+     # once fluent v0.14 is released we might be able to use
+     # Fluent::Parser::TimeParser, but it doesn't quite do what we want - it gives
+     # [sec,nsec] whereas we want something we can call `strftime` on...
+     def create_time_parser
+       if @time_key_format
+         begin
+           # Strptime doesn't support all formats, but for those it does it's
+           # blazingly fast.
+           strptime = Strptime.new(@time_key_format)
+           Proc.new { |value|
+             value = convert_numeric_time_into_string(value, @time_key_format) if value.is_a?(Numeric)
+             strptime.exec(value).to_datetime
+           }
+         rescue
+           # Can happen if Strptime doesn't recognize the format; or
+           # if strptime couldn't be required (because it's not installed -- it's
+           # ruby 2 only)
+           Proc.new { |value|
+             value = convert_numeric_time_into_string(value, @time_key_format) if value.is_a?(Numeric)
+             DateTime.strptime(value, @time_key_format)
+           }
+         end
+       else
+         Proc.new { |value|
+           value = convert_numeric_time_into_string(value) if value.is_a?(Numeric)
+           DateTime.parse(value)
+         }
+       end
+     end
+
+     def convert_numeric_time_into_string(numeric_time, time_key_format = "%Y-%m-%d %H:%M:%S.%N%z")
+       numeric_time_parser = Fluent::NumericTimeParser.new(:float)
+       Time.at(numeric_time_parser.parse(numeric_time).to_r).strftime(time_key_format)
+     end
+
+     def parse_time(value, event_time, tag)
+       @time_parser.call(value)
+     rescue => e
+       router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
+       return Time.at(event_time).to_datetime
+     end
+
+     def client(host = nil)
+       # check here to see if we already have a client connection for the given host
+       connection_options = get_connection_options(host)
+
+       @_es = nil unless is_existing_connection(connection_options[:hosts])
+
+       @_es ||= begin
+         @current_config = connection_options[:hosts].clone
+         adapter_conf = lambda { |f| f.adapter @http_backend, @backend_options }
+         local_reload_connections = @reload_connections
+         if local_reload_connections && @reload_after > DEFAULT_RELOAD_AFTER
+           local_reload_connections = @reload_after
+         end
+         headers = { 'Content-Type' => @content_type.to_s }.merge(@custom_headers)
+         transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
+           options: {
+             reload_connections: local_reload_connections,
+             reload_on_failure: @reload_on_failure,
+             resurrect_after: @resurrect_after,
+             logger: @transport_logger,
+             transport_options: {
+               headers: headers,
+               request: { timeout: @request_timeout },
+               ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
+             },
+             http: {
+               user: @user,
+               password: @password
+             },
+             sniffer_class: @sniffer_class,
+             serializer_class: @serializer_class,
+           }), &adapter_conf)
+         Elasticsearch::Client.new transport: transport
+       end
+     end
+
+     def get_escaped_userinfo(host_str)
+       if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
+         m["scheme"] +
+           URI.encode_www_form_component(m["user"]) +
+           ':' +
+           URI.encode_www_form_component(m["password"]) +
+           m["path"]
+       else
+         host_str
+       end
+     end
+
+     def get_connection_options(con_host = nil)
+
+       hosts = if con_host || @hosts
+                 (con_host || @hosts).split(',').map do |host_str|
+                   # Support legacy hosts format host:port,host:port,host:port...
+                   if host_str.match(%r{^[^:]+(\:\d+)?$})
+                     {
+                       host: host_str.split(':')[0],
+                       port: (host_str.split(':')[1] || @port).to_i,
+                       scheme: @scheme.to_s
+                     }
+                   else
+                     # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
+                     uri = URI(get_escaped_userinfo(host_str))
+                     %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
+                       hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
+                       hash
+                     end
+                   end
+                 end.compact
+               else
+                 [{ host: @host, port: @port, scheme: @scheme.to_s }]
+               end.each do |host|
+         host.merge!(user: @user, password: @password) if !host[:user] && @user
+         host.merge!(path: @path) if !host[:path] && @path
+       end
+
+       {
+         hosts: hosts
+       }
+     end
+
+     def connection_options_description(con_host = nil)
+       get_connection_options(con_host)[:hosts].map do |host_info|
+         attributes = host_info.dup
+         attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
+         attributes.inspect
+       end.join(', ')
+     end
+
+     # append_record_to_messages adds a record to the bulk message
+     # payload to be submitted to Elasticsearch. Records that do
+     # not include an '_id' field are skipped when 'write_operation'
+     # is configured for 'create' or 'update'.
+     #
+     # returns 'true' if the record was appended to the bulk message
+     # and 'false' otherwise
+     def append_record_to_messages(op, meta, header, record, msgs)
+       case op
+       when UPDATE_OP, UPSERT_OP
+         if meta.has_key?(ID_FIELD)
+           header[UPDATE_OP] = meta
+           msgs << @dump_proc.call(header) << BODY_DELIMITER
+           msgs << @dump_proc.call(update_body(record, op)) << BODY_DELIMITER
+           return true
+         end
+       when CREATE_OP
+         if meta.has_key?(ID_FIELD)
+           header[CREATE_OP] = meta
+           msgs << @dump_proc.call(header) << BODY_DELIMITER
+           msgs << @dump_proc.call(record) << BODY_DELIMITER
+           return true
+         end
+       when INDEX_OP
+         header[INDEX_OP] = meta
+         msgs << @dump_proc.call(header) << BODY_DELIMITER
+         msgs << @dump_proc.call(record) << BODY_DELIMITER
+         return true
+       end
+       return false
+     end
+
+     def update_body(record, op)
+       update = remove_keys(record)
+       if @suppress_doc_wrap
+         return update
+       end
+       body = { "doc".freeze => update }
+       if op == UPSERT_OP
+         if update == record
+           body["doc_as_upsert".freeze] = true
+         else
+           body[UPSERT_OP] = record
+         end
+       end
+       body
+     end
+
+     def remove_keys(record)
+       keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
+       record.delete(@remove_keys_on_update_key)
+       return record unless keys.any?
+       record = record.dup
+       keys.each { |key| record.delete(key) }
+       record
+     end
+
+     def flatten_record(record, prefix = [])
+       ret = {}
+       if record.is_a? Hash
+         record.each { |key, value|
+           ret.merge! flatten_record(value, prefix + [key.to_s])
+         }
+       elsif record.is_a? Array
+         # Don't mess with arrays, leave them unprocessed
+         ret.merge!({ prefix.join(@flatten_hashes_separator) => record })
+       else
+         return { prefix.join(@flatten_hashes_separator) => record }
+       end
+       ret
+     end
+
+     def expand_placeholders(chunk)
+       logstash_prefix = extract_placeholders(@logstash_prefix, chunk)
+       index_name = extract_placeholders(@index_name, chunk)
+       type_name = extract_placeholders(@type_name, chunk)
+       return logstash_prefix, index_name, type_name
+     end
+
+     def multi_workers_ready?
+       true
+     end
+
+     def write(chunk)
+       bulk_message_count = Hash.new { |h, k| h[k] = 0 }
+       bulk_message = Hash.new { |h, k| h[k] = '' }
+       header = {}
+       meta = {}
+
+       tag = chunk.metadata.tag
+       extracted_values = expand_placeholders(chunk)
+       host = if @hosts
+                extract_placeholders(@hosts, chunk)
+              else
+                extract_placeholders(@host, chunk)
+              end
+
+       chunk.msgpack_each do |time, record|
+         next unless record.is_a? Hash
+         begin
+           meta, header, record = process_message(tag, meta, header, time, record, extracted_values)
+           info = if @include_index_in_url
+                    RequestInfo.new(host, meta.delete("_index".freeze))
+                  else
+                    RequestInfo.new(host, nil)
+                  end
+
+           if split_request?(bulk_message, info)
+             bulk_message.each do |info, msgs|
+               send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
+               msgs.clear
+               # Clear bulk_message_count for this info.
+               bulk_message_count[info] = 0
+               next
+             end
+           end
+
+           if append_record_to_messages(@write_operation, meta, header, record, bulk_message[info])
+             bulk_message_count[info] += 1
+           else
+             if @emit_error_for_missing_id
+               raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
+             else
+               log.on_debug { log.debug("Dropping record because it's missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
+             end
+           end
+         rescue => e
+           router.emit_error_event(tag, time, record, e)
+         end
+       end
+
+       bulk_message.each do |info, msgs|
+         send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
+         msgs.clear
+       end
+     end
+
+     def split_request?(bulk_message, info)
+       # For safety. Re-aliased in #configure to one of the two methods below.
+     end
+
+     def split_request_size_check?(bulk_message, info)
+       bulk_message[info].size > @bulk_message_request_threshold
+     end
+
+     def split_request_size_uncheck?(bulk_message, info)
+       false
+     end
+
+     def process_message(tag, meta, header, time, record, extracted_values)
+       logstash_prefix, index_name, type_name = extracted_values
+
+       if @flatten_hashes
+         record = flatten_record(record)
+       end
+
+       dt = nil
+       if @logstash_format || @include_timestamp
+         if record.has_key?(TIMESTAMP_FIELD)
+           rts = record[TIMESTAMP_FIELD]
+           dt = parse_time(rts, time, tag)
+         elsif record.has_key?(@time_key)
+           rts = record[@time_key]
+           dt = parse_time(rts, time, tag)
+           record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
+         else
+           dt = Time.at(time).to_datetime
+           record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
+         end
+       end
+
+       target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
+       if target_index_parent && target_index_parent[target_index_child_key]
+         target_index = target_index_parent.delete(target_index_child_key)
+       elsif @logstash_format
+         dt = dt.new_offset(0) if @utc_index
+         target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
+       else
+         target_index = index_name
+       end
+
+       # Change target_index to lower-case since Elasticsearch doesn't
+       # allow upper-case characters in index names.
+       target_index = target_index.downcase
+       if @include_tag_key
+         record[@tag_key] = tag
+       end
+
+       target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
+       if target_type_parent && target_type_parent[target_type_child_key]
+         target_type = target_type_parent.delete(target_type_child_key)
+         if @last_seen_major_version == 6
+           log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
+           target_type = type_name
+         elsif @last_seen_major_version >= 7
+           log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
+           target_type = '_doc'.freeze
+         end
+       else
+         if @last_seen_major_version >= 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
+           log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
+           target_type = '_doc'.freeze
+         else
+           target_type = type_name
+         end
+       end
+
+       meta.clear
+       meta["_index".freeze] = target_index
+       meta["_type".freeze] = target_type
+
+       if @pipeline
+         meta["pipeline".freeze] = @pipeline
+       end
+
+       @meta_config_map.each do |record_accessor, meta_key|
+         if raw_value = record_accessor.call(record)
+           meta[meta_key] = raw_value
+         end
+       end
+
+       if @remove_keys
+         @remove_keys.each { |key| record.delete(key) }
+       end
+
+       return [meta, header, record]
+     end
+
+     # returns [parent, child_key] of the child described by the path array in the record's tree
+     # returns [nil, child_key] if the path doesn't exist in the record
+     def get_parent_of(record, path)
+       parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
+       [parent_object, path[-1]]
+     end
+
+     # send_bulk given a specific bulk request, the original tag,
+     # chunk, and bulk_message_count
+     def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info)
+       begin
+
+         log.on_trace { log.trace "bulk request: #{data}" }
+         response = client(info.host).bulk body: data, index: info.index
+         log.on_trace { log.trace "bulk response: #{response}" }
+
+         if response['errors']
+           error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
+           error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+         end
+       rescue RetryStreamError => e
+         emit_tag = @retry_tag ? @retry_tag : tag
+         router.emit_stream(emit_tag, e.retry_stream)
+       rescue => e
+         ignore = @ignore_exception_classes.any? { |clazz| e.class <= clazz }
+
+         log.warn "Exception ignored in tag #{tag}: #{e.class.name} #{e.message}" if ignore
+
+         @_es = nil if @reconnect_on_error
+         @_es_info = nil if @reconnect_on_error
+
+         raise UnrecoverableRequestFailure if ignore && @exception_backup
+
+         # FIXME: identify unrecoverable errors and raise UnrecoverableRequestFailure instead
+         raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{connection_options_description(info.host)}): #{e.message}" unless ignore
+       end
+     end
+
+     def is_existing_connection(host)
+       # check if the hosts provided match the current connection
+       return false if @_es.nil?
+       return false if @current_config.nil?
+       return false if host.length != @current_config.length
+
+       for i in 0...host.length
+         if !host[i][:host].eql?(@current_config[i][:host]) || host[i][:port] != @current_config[i][:port]
+           return false
+         end
+       end
+
+       return true
+     end
+   end
+ end
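For reference, flatten_record (used when flatten_hashes is enabled) folds nested hashes into single-level keys joined by flatten_hashes_separator, leaving arrays untouched. A standalone re-implementation of the same idea (illustrative sketch with the default "_" separator):

    # Recursively flatten nested hashes; arrays and scalars become leaf values.
    def flatten(record, prefix = [])
      return { prefix.join("_") => record } unless record.is_a?(Hash)
      record.reduce({}) do |ret, (key, value)|
        ret.merge(flatten(value, prefix + [key.to_s]))
      end
    end

    flatten({ "user" => { "name" => "alice", "ids" => [1, 2] } })
    # => {"user_name"=>"alice", "user_ids"=>[1, 2]}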