fluent-plugin-elasticsearch 3.5.4 → 3.5.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.coveralls.yml +2 -2
- data/.editorconfig +9 -9
- data/.gitignore +18 -18
- data/.travis.yml +18 -18
- data/CONTRIBUTING.md +24 -0
- data/Gemfile +9 -9
- data/History.md +402 -390
- data/ISSUE_TEMPLATE.md +30 -27
- data/LICENSE.txt +201 -201
- data/PULL_REQUEST_TEMPLATE.md +10 -10
- data/README.md +1539 -1515
- data/Rakefile +11 -11
- data/appveyor.yml +30 -30
- data/fluent-plugin-elasticsearch.gemspec +31 -31
- data/lib/fluent/log-ext.rb +38 -38
- data/lib/fluent/plugin/elasticsearch_constants.rb +13 -13
- data/lib/fluent/plugin/elasticsearch_error.rb +5 -5
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +127 -122
- data/lib/fluent/plugin/elasticsearch_index_template.rb +112 -112
- data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb +10 -10
- data/lib/fluent/plugin/filter_elasticsearch_genid.rb +25 -25
- data/lib/fluent/plugin/oj_serializer.rb +22 -22
- data/lib/fluent/plugin/out_elasticsearch.rb +778 -777
- data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +262 -262
- data/test/helper.rb +24 -24
- data/test/plugin/test_alias_template.json +8 -8
- data/test/plugin/test_elasticsearch_error_handler.rb +525 -503
- data/test/plugin/test_filter_elasticsearch_genid.rb +44 -44
- data/test/plugin/test_out_elasticsearch.rb +2744 -2720
- data/test/plugin/test_out_elasticsearch_dynamic.rb +1001 -1001
- data/test/plugin/test_template.json +23 -23
- data/test/test_log-ext.rb +35 -35
- metadata +4 -4
@@ -1,112 +1,112 @@
|
|
1
|
-
require 'fluent/error'
|
2
|
-
require_relative './elasticsearch_error'
|
3
|
-
|
4
|
-
module Fluent::ElasticsearchIndexTemplate
|
5
|
-
def get_template(template_file)
|
6
|
-
if !File.exists?(template_file)
|
7
|
-
raise "If you specify a template_name you must specify a valid template file (checked '#{template_file}')!"
|
8
|
-
end
|
9
|
-
file_contents = IO.read(template_file).gsub(/\n/,'')
|
10
|
-
JSON.parse(file_contents)
|
11
|
-
end
|
12
|
-
|
13
|
-
def get_custom_template(template_file, customize_template)
|
14
|
-
if !File.exists?(template_file)
|
15
|
-
raise "If you specify a template_name you must specify a valid template file (checked '#{template_file}')!"
|
16
|
-
end
|
17
|
-
file_contents = IO.read(template_file).gsub(/\n/,'')
|
18
|
-
customize_template.each do |key, value|
|
19
|
-
file_contents = file_contents.gsub(key,value.downcase)
|
20
|
-
end
|
21
|
-
JSON.parse(file_contents)
|
22
|
-
end
|
23
|
-
|
24
|
-
def template_exists?(name)
|
25
|
-
client.indices.get_template(:name => name)
|
26
|
-
return true
|
27
|
-
rescue Elasticsearch::Transport::Transport::Errors::NotFound
|
28
|
-
return false
|
29
|
-
end
|
30
|
-
|
31
|
-
def retry_operate(max_retries, fail_on_retry_exceed = true)
|
32
|
-
return unless block_given?
|
33
|
-
retries = 0
|
34
|
-
begin
|
35
|
-
yield
|
36
|
-
rescue *client.transport.host_unreachable_exceptions, Timeout::Error => e
|
37
|
-
@_es = nil
|
38
|
-
@_es_info = nil
|
39
|
-
if retries < max_retries
|
40
|
-
retries += 1
|
41
|
-
wait_seconds = 2**retries
|
42
|
-
sleep wait_seconds
|
43
|
-
log.warn "Could not communicate to Elasticsearch, resetting connection and trying again. #{e.message}"
|
44
|
-
log.warn "Remaining retry: #{max_retries - retries}. Retry to communicate after #{wait_seconds} second(s)."
|
45
|
-
retry
|
46
|
-
end
|
47
|
-
message = "Could not communicate to Elasticsearch after #{retries} retries. #{e.message}"
|
48
|
-
log.warn message
|
49
|
-
raise Fluent::Plugin::ElasticsearchError::RetryableOperationExhaustedFailure,
|
50
|
-
message if fail_on_retry_exceed
|
51
|
-
end
|
52
|
-
end
|
53
|
-
|
54
|
-
def template_put(name, template)
|
55
|
-
client.indices.put_template(:name => name, :body => template)
|
56
|
-
end
|
57
|
-
|
58
|
-
def indexcreation(index_name)
|
59
|
-
client.indices.create(:index => index_name)
|
60
|
-
rescue Elasticsearch::Transport::Transport::Error => e
|
61
|
-
log.error("Error while index creation - #{index_name}: #{e.inspect}")
|
62
|
-
end
|
63
|
-
|
64
|
-
def template_install(name, template_file, overwrite)
|
65
|
-
if overwrite
|
66
|
-
template_put(name, get_template(template_file))
|
67
|
-
log.info("Template '#{name}' overwritten with #{template_file}.")
|
68
|
-
return
|
69
|
-
end
|
70
|
-
if !template_exists?(name)
|
71
|
-
template_put(name, get_template(template_file))
|
72
|
-
log.info("Template configured, but no template installed. Installed '#{name}' from #{template_file}.")
|
73
|
-
else
|
74
|
-
log.info("Template configured and already installed.")
|
75
|
-
end
|
76
|
-
end
|
77
|
-
|
78
|
-
def template_custom_install(template_name, template_file, overwrite, customize_template, index_prefix, rollover_index, deflector_alias_name, app_name, index_date_pattern)
|
79
|
-
template_custom_name=template_name.downcase
|
80
|
-
if overwrite
|
81
|
-
template_put(template_custom_name, get_custom_template(template_file, customize_template))
|
82
|
-
log.info("Template '#{template_custom_name}' overwritten with #{template_file}.")
|
83
|
-
else
|
84
|
-
if !template_exists?(template_custom_name)
|
85
|
-
template_put(template_custom_name, get_custom_template(template_file, customize_template))
|
86
|
-
log.info("Template configured, but no template installed. Installed '#{template_custom_name}' from #{template_file}.")
|
87
|
-
else
|
88
|
-
log.info("Template configured and already installed.")
|
89
|
-
end
|
90
|
-
end
|
91
|
-
|
92
|
-
if rollover_index
|
93
|
-
if !client.indices.exists_alias(:name => deflector_alias_name)
|
94
|
-
index_name_temp='<'+index_prefix.downcase+'-'+app_name.downcase+'-{'+index_date_pattern+'}-000001>'
|
95
|
-
indexcreation(index_name_temp)
|
96
|
-
client.indices.put_alias(:index => index_name_temp, :name => deflector_alias_name)
|
97
|
-
log.info("The alias '#{deflector_alias_name}' is created for the index '#{index_name_temp}'")
|
98
|
-
else
|
99
|
-
log.info("The alias '#{deflector_alias_name}' is already present")
|
100
|
-
end
|
101
|
-
else
|
102
|
-
log.info("No index and alias creation action performed because rollover_index is set to '#{rollover_index}'")
|
103
|
-
end
|
104
|
-
end
|
105
|
-
|
106
|
-
def templates_hash_install(templates, overwrite)
|
107
|
-
templates.each do |key, value|
|
108
|
-
template_install(key, value, overwrite)
|
109
|
-
end
|
110
|
-
end
|
111
|
-
|
112
|
-
end
|
1
|
+
require 'fluent/error'
|
2
|
+
require_relative './elasticsearch_error'
|
3
|
+
|
4
|
+
module Fluent::ElasticsearchIndexTemplate
|
5
|
+
def get_template(template_file)
|
6
|
+
if !File.exists?(template_file)
|
7
|
+
raise "If you specify a template_name you must specify a valid template file (checked '#{template_file}')!"
|
8
|
+
end
|
9
|
+
file_contents = IO.read(template_file).gsub(/\n/,'')
|
10
|
+
JSON.parse(file_contents)
|
11
|
+
end
|
12
|
+
|
13
|
+
def get_custom_template(template_file, customize_template)
|
14
|
+
if !File.exists?(template_file)
|
15
|
+
raise "If you specify a template_name you must specify a valid template file (checked '#{template_file}')!"
|
16
|
+
end
|
17
|
+
file_contents = IO.read(template_file).gsub(/\n/,'')
|
18
|
+
customize_template.each do |key, value|
|
19
|
+
file_contents = file_contents.gsub(key,value.downcase)
|
20
|
+
end
|
21
|
+
JSON.parse(file_contents)
|
22
|
+
end
|
23
|
+
|
24
|
+
def template_exists?(name)
|
25
|
+
client.indices.get_template(:name => name)
|
26
|
+
return true
|
27
|
+
rescue Elasticsearch::Transport::Transport::Errors::NotFound
|
28
|
+
return false
|
29
|
+
end
|
30
|
+
|
31
|
+
def retry_operate(max_retries, fail_on_retry_exceed = true)
|
32
|
+
return unless block_given?
|
33
|
+
retries = 0
|
34
|
+
begin
|
35
|
+
yield
|
36
|
+
rescue *client.transport.host_unreachable_exceptions, Timeout::Error => e
|
37
|
+
@_es = nil
|
38
|
+
@_es_info = nil
|
39
|
+
if retries < max_retries
|
40
|
+
retries += 1
|
41
|
+
wait_seconds = 2**retries
|
42
|
+
sleep wait_seconds
|
43
|
+
log.warn "Could not communicate to Elasticsearch, resetting connection and trying again. #{e.message}"
|
44
|
+
log.warn "Remaining retry: #{max_retries - retries}. Retry to communicate after #{wait_seconds} second(s)."
|
45
|
+
retry
|
46
|
+
end
|
47
|
+
message = "Could not communicate to Elasticsearch after #{retries} retries. #{e.message}"
|
48
|
+
log.warn message
|
49
|
+
raise Fluent::Plugin::ElasticsearchError::RetryableOperationExhaustedFailure,
|
50
|
+
message if fail_on_retry_exceed
|
51
|
+
end
|
52
|
+
end
|
53
|
+
|
54
|
+
def template_put(name, template)
|
55
|
+
client.indices.put_template(:name => name, :body => template)
|
56
|
+
end
|
57
|
+
|
58
|
+
def indexcreation(index_name)
|
59
|
+
client.indices.create(:index => index_name)
|
60
|
+
rescue Elasticsearch::Transport::Transport::Error => e
|
61
|
+
log.error("Error while index creation - #{index_name}: #{e.inspect}")
|
62
|
+
end
|
63
|
+
|
64
|
+
def template_install(name, template_file, overwrite)
|
65
|
+
if overwrite
|
66
|
+
template_put(name, get_template(template_file))
|
67
|
+
log.info("Template '#{name}' overwritten with #{template_file}.")
|
68
|
+
return
|
69
|
+
end
|
70
|
+
if !template_exists?(name)
|
71
|
+
template_put(name, get_template(template_file))
|
72
|
+
log.info("Template configured, but no template installed. Installed '#{name}' from #{template_file}.")
|
73
|
+
else
|
74
|
+
log.info("Template configured and already installed.")
|
75
|
+
end
|
76
|
+
end
|
77
|
+
|
78
|
+
def template_custom_install(template_name, template_file, overwrite, customize_template, index_prefix, rollover_index, deflector_alias_name, app_name, index_date_pattern)
|
79
|
+
template_custom_name=template_name.downcase
|
80
|
+
if overwrite
|
81
|
+
template_put(template_custom_name, get_custom_template(template_file, customize_template))
|
82
|
+
log.info("Template '#{template_custom_name}' overwritten with #{template_file}.")
|
83
|
+
else
|
84
|
+
if !template_exists?(template_custom_name)
|
85
|
+
template_put(template_custom_name, get_custom_template(template_file, customize_template))
|
86
|
+
log.info("Template configured, but no template installed. Installed '#{template_custom_name}' from #{template_file}.")
|
87
|
+
else
|
88
|
+
log.info("Template configured and already installed.")
|
89
|
+
end
|
90
|
+
end
|
91
|
+
|
92
|
+
if rollover_index
|
93
|
+
if !client.indices.exists_alias(:name => deflector_alias_name)
|
94
|
+
index_name_temp='<'+index_prefix.downcase+'-'+app_name.downcase+'-{'+index_date_pattern+'}-000001>'
|
95
|
+
indexcreation(index_name_temp)
|
96
|
+
client.indices.put_alias(:index => index_name_temp, :name => deflector_alias_name)
|
97
|
+
log.info("The alias '#{deflector_alias_name}' is created for the index '#{index_name_temp}'")
|
98
|
+
else
|
99
|
+
log.info("The alias '#{deflector_alias_name}' is already present")
|
100
|
+
end
|
101
|
+
else
|
102
|
+
log.info("No index and alias creation action performed because rollover_index is set to '#{rollover_index}'")
|
103
|
+
end
|
104
|
+
end
|
105
|
+
|
106
|
+
def templates_hash_install(templates, overwrite)
|
107
|
+
templates.each do |key, value|
|
108
|
+
template_install(key, value, overwrite)
|
109
|
+
end
|
110
|
+
end
|
111
|
+
|
112
|
+
end
|
@@ -1,10 +1,10 @@
|
|
1
|
-
require 'elasticsearch'
|
2
|
-
|
3
|
-
class Fluent::Plugin::ElasticsearchSimpleSniffer < Elasticsearch::Transport::Transport::Sniffer
|
4
|
-
|
5
|
-
def hosts
|
6
|
-
@transport.logger.debug "In Fluent::Plugin::ElasticsearchSimpleSniffer hosts #{@transport.hosts}" if @transport.logger
|
7
|
-
@transport.hosts
|
8
|
-
end
|
9
|
-
|
10
|
-
end
|
1
|
+
require 'elasticsearch'
|
2
|
+
|
3
|
+
class Fluent::Plugin::ElasticsearchSimpleSniffer < Elasticsearch::Transport::Transport::Sniffer
|
4
|
+
|
5
|
+
def hosts
|
6
|
+
@transport.logger.debug "In Fluent::Plugin::ElasticsearchSimpleSniffer hosts #{@transport.hosts}" if @transport.logger
|
7
|
+
@transport.hosts
|
8
|
+
end
|
9
|
+
|
10
|
+
end
|
@@ -1,25 +1,25 @@
|
|
1
|
-
require 'securerandom'
|
2
|
-
require 'base64'
|
3
|
-
require 'fluent/plugin/filter'
|
4
|
-
|
5
|
-
module Fluent::Plugin
|
6
|
-
class ElasticsearchGenidFilter < Filter
|
7
|
-
Fluent::Plugin.register_filter('elasticsearch_genid', self)
|
8
|
-
|
9
|
-
config_param :hash_id_key, :string, :default => '_hash'
|
10
|
-
|
11
|
-
def initialize
|
12
|
-
super
|
13
|
-
end
|
14
|
-
|
15
|
-
def configure(conf)
|
16
|
-
super
|
17
|
-
end
|
18
|
-
|
19
|
-
def filter(tag, time, record)
|
20
|
-
record[@hash_id_key] = Base64.strict_encode64(SecureRandom.uuid)
|
21
|
-
record
|
22
|
-
end
|
23
|
-
|
24
|
-
end
|
25
|
-
end
|
1
|
+
require 'securerandom'
|
2
|
+
require 'base64'
|
3
|
+
require 'fluent/plugin/filter'
|
4
|
+
|
5
|
+
module Fluent::Plugin
|
6
|
+
class ElasticsearchGenidFilter < Filter
|
7
|
+
Fluent::Plugin.register_filter('elasticsearch_genid', self)
|
8
|
+
|
9
|
+
config_param :hash_id_key, :string, :default => '_hash'
|
10
|
+
|
11
|
+
def initialize
|
12
|
+
super
|
13
|
+
end
|
14
|
+
|
15
|
+
def configure(conf)
|
16
|
+
super
|
17
|
+
end
|
18
|
+
|
19
|
+
def filter(tag, time, record)
|
20
|
+
record[@hash_id_key] = Base64.strict_encode64(SecureRandom.uuid)
|
21
|
+
record
|
22
|
+
end
|
23
|
+
|
24
|
+
end
|
25
|
+
end
|
@@ -1,22 +1,22 @@
|
|
1
|
-
require 'oj'
|
2
|
-
|
3
|
-
module Fluent::Plugin
|
4
|
-
module Serializer
|
5
|
-
|
6
|
-
class Oj
|
7
|
-
include Elasticsearch::Transport::Transport::Serializer::Base
|
8
|
-
|
9
|
-
# De-serialize a Hash from JSON string
|
10
|
-
#
|
11
|
-
def load(string, options={})
|
12
|
-
::Oj.load(string, options)
|
13
|
-
end
|
14
|
-
|
15
|
-
# Serialize a Hash to JSON string
|
16
|
-
#
|
17
|
-
def dump(object, options={})
|
18
|
-
::Oj.dump(object, options)
|
19
|
-
end
|
20
|
-
end
|
21
|
-
end
|
22
|
-
end
|
1
|
+
require 'oj'
|
2
|
+
|
3
|
+
module Fluent::Plugin
|
4
|
+
module Serializer
|
5
|
+
|
6
|
+
class Oj
|
7
|
+
include Elasticsearch::Transport::Transport::Serializer::Base
|
8
|
+
|
9
|
+
# De-serialize a Hash from JSON string
|
10
|
+
#
|
11
|
+
def load(string, options={})
|
12
|
+
::Oj.load(string, options)
|
13
|
+
end
|
14
|
+
|
15
|
+
# Serialize a Hash to JSON string
|
16
|
+
#
|
17
|
+
def dump(object, options={})
|
18
|
+
::Oj.dump(object, options)
|
19
|
+
end
|
20
|
+
end
|
21
|
+
end
|
22
|
+
end
|
@@ -1,777 +1,778 @@
|
|
1
|
-
# encoding: UTF-8
|
2
|
-
require 'date'
|
3
|
-
require 'excon'
|
4
|
-
require 'elasticsearch'
|
5
|
-
require 'json'
|
6
|
-
require 'uri'
|
7
|
-
begin
|
8
|
-
require 'strptime'
|
9
|
-
rescue LoadError
|
10
|
-
end
|
11
|
-
|
12
|
-
require 'fluent/plugin/output'
|
13
|
-
require 'fluent/event'
|
14
|
-
require 'fluent/error'
|
15
|
-
require 'fluent/time'
|
16
|
-
require 'fluent/log-ext'
|
17
|
-
require_relative 'elasticsearch_constants'
|
18
|
-
require_relative 'elasticsearch_error'
|
19
|
-
require_relative 'elasticsearch_error_handler'
|
20
|
-
require_relative 'elasticsearch_index_template'
|
21
|
-
begin
|
22
|
-
require_relative 'oj_serializer'
|
23
|
-
rescue LoadError
|
24
|
-
end
|
25
|
-
|
26
|
-
module Fluent::Plugin
|
27
|
-
class ElasticsearchOutput < Output
|
28
|
-
class RecoverableRequestFailure < StandardError; end
|
29
|
-
class UnrecoverableRequestFailure < Fluent::UnrecoverableError; end
|
30
|
-
|
31
|
-
# MissingIdFieldError is raised for records that do not
|
32
|
-
# include the field for the unique record identifier
|
33
|
-
class MissingIdFieldError < StandardError; end
|
34
|
-
|
35
|
-
# RetryStreamError privides a stream to be
|
36
|
-
# put back in the pipeline for cases where a bulk request
|
37
|
-
# failed (e.g some records succeed while others failed)
|
38
|
-
class RetryStreamError < StandardError
|
39
|
-
attr_reader :retry_stream
|
40
|
-
def initialize(retry_stream)
|
41
|
-
@retry_stream = retry_stream
|
42
|
-
end
|
43
|
-
end
|
44
|
-
|
45
|
-
RequestInfo = Struct.new(:host, :index)
|
46
|
-
|
47
|
-
helpers :event_emitter, :compat_parameters, :record_accessor
|
48
|
-
|
49
|
-
Fluent::Plugin.register_output('elasticsearch', self)
|
50
|
-
|
51
|
-
DEFAULT_BUFFER_TYPE = "memory"
|
52
|
-
DEFAULT_ELASTICSEARCH_VERSION = 5 # For compatibility.
|
53
|
-
DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
|
54
|
-
DEFAULT_TYPE_NAME = "fluentd".freeze
|
55
|
-
DEFAULT_RELOAD_AFTER = -1
|
56
|
-
TARGET_BULK_BYTES = 20 * 1024 * 1024
|
57
|
-
|
58
|
-
config_param :host, :string, :default => 'localhost'
|
59
|
-
config_param :port, :integer, :default => 9200
|
60
|
-
config_param :user, :string, :default => nil
|
61
|
-
config_param :password, :string, :default => nil, :secret => true
|
62
|
-
config_param :path, :string, :default => nil
|
63
|
-
config_param :scheme, :enum, :list => [:https, :http], :default => :http
|
64
|
-
config_param :hosts, :string, :default => nil
|
65
|
-
config_param :target_index_key, :string, :default => nil
|
66
|
-
config_param :target_type_key, :string, :default => nil,
|
67
|
-
:deprecated => <<EOC
|
68
|
-
Elasticsearch 7.x or above will ignore this config. Please use fixed type_name instead.
|
69
|
-
EOC
|
70
|
-
config_param :time_key_format, :string, :default => nil
|
71
|
-
config_param :time_precision, :integer, :default => 9
|
72
|
-
config_param :include_timestamp, :bool, :default => false
|
73
|
-
config_param :logstash_format, :bool, :default => false
|
74
|
-
config_param :logstash_prefix, :string, :default => "logstash"
|
75
|
-
config_param :logstash_prefix_separator, :string, :default => '-'
|
76
|
-
config_param :logstash_dateformat, :string, :default => "%Y.%m.%d"
|
77
|
-
config_param :utc_index, :bool, :default => true
|
78
|
-
config_param :type_name, :string, :default => DEFAULT_TYPE_NAME
|
79
|
-
config_param :index_name, :string, :default => "fluentd"
|
80
|
-
config_param :id_key, :string, :default => nil
|
81
|
-
config_param :write_operation, :string, :default => "index"
|
82
|
-
config_param :parent_key, :string, :default => nil
|
83
|
-
config_param :routing_key, :string, :default => nil
|
84
|
-
config_param :request_timeout, :time, :default => 5
|
85
|
-
config_param :reload_connections, :bool, :default => true
|
86
|
-
config_param :reload_on_failure, :bool, :default => false
|
87
|
-
config_param :retry_tag, :string, :default=>nil
|
88
|
-
config_param :resurrect_after, :time, :default => 60
|
89
|
-
config_param :time_key, :string, :default => nil
|
90
|
-
config_param :time_key_exclude_timestamp, :bool, :default => false
|
91
|
-
config_param :ssl_verify , :bool, :default => true
|
92
|
-
config_param :client_key, :string, :default => nil
|
93
|
-
config_param :client_cert, :string, :default => nil
|
94
|
-
config_param :client_key_pass, :string, :default => nil
|
95
|
-
config_param :ca_file, :string, :default => nil
|
96
|
-
config_param :ssl_version, :enum, list: [:SSLv23, :TLSv1, :TLSv1_1, :TLSv1_2], :default => :TLSv1
|
97
|
-
config_param :remove_keys, :string, :default => nil
|
98
|
-
config_param :remove_keys_on_update, :string, :default => ""
|
99
|
-
config_param :remove_keys_on_update_key, :string, :default => nil
|
100
|
-
config_param :flatten_hashes, :bool, :default => false
|
101
|
-
config_param :flatten_hashes_separator, :string, :default => "_"
|
102
|
-
config_param :template_name, :string, :default => nil
|
103
|
-
config_param :template_file, :string, :default => nil
|
104
|
-
config_param :template_overwrite, :bool, :default => false
|
105
|
-
config_param :customize_template, :hash, :default => nil
|
106
|
-
config_param :rollover_index, :string, :default => false
|
107
|
-
config_param :index_date_pattern, :string, :default => "now/d"
|
108
|
-
config_param :deflector_alias, :string, :default => nil
|
109
|
-
config_param :index_prefix, :string, :default => "logstash"
|
110
|
-
config_param :application_name, :string, :default => "default"
|
111
|
-
config_param :templates, :hash, :default => nil
|
112
|
-
config_param :max_retry_putting_template, :integer, :default => 10
|
113
|
-
config_param :fail_on_putting_template_retry_exceed, :bool, :default => true
|
114
|
-
config_param :max_retry_get_es_version, :integer, :default => 15
|
115
|
-
config_param :include_tag_key, :bool, :default => false
|
116
|
-
config_param :tag_key, :string, :default => 'tag'
|
117
|
-
config_param :time_parse_error_tag, :string, :default => 'Fluent::ElasticsearchOutput::TimeParser.error'
|
118
|
-
config_param :reconnect_on_error, :bool, :default => false
|
119
|
-
config_param :pipeline, :string, :default => nil
|
120
|
-
config_param :with_transporter_log, :bool, :default => false
|
121
|
-
config_param :emit_error_for_missing_id, :bool, :default => false
|
122
|
-
config_param :sniffer_class_name, :string, :default => nil
|
123
|
-
config_param :reload_after, :integer, :default => DEFAULT_RELOAD_AFTER
|
124
|
-
config_param :content_type, :enum, list: [:"application/json", :"application/x-ndjson"], :default => :"application/json",
|
125
|
-
:deprecated => <<EOC
|
126
|
-
elasticsearch gem v6.0.2 starts to use correct Content-Type. Please upgrade elasticserach gem and stop to use this option.
|
127
|
-
see: https://github.com/elastic/elasticsearch-ruby/pull/514
|
128
|
-
EOC
|
129
|
-
config_param :include_index_in_url, :bool, :default => false
|
130
|
-
config_param :http_backend, :enum, list: [:excon, :typhoeus], :default => :excon
|
131
|
-
config_param :validate_client_version, :bool, :default => false
|
132
|
-
config_param :prefer_oj_serializer, :bool, :default => false
|
133
|
-
config_param :unrecoverable_error_types, :array, :default => ["out_of_memory_error", "es_rejected_execution_exception"]
|
134
|
-
config_param :verify_es_version_at_startup, :bool, :default => true
|
135
|
-
config_param :default_elasticsearch_version, :integer, :default => DEFAULT_ELASTICSEARCH_VERSION
|
136
|
-
config_param :log_es_400_reason, :bool, :default => false
|
137
|
-
config_param :custom_headers, :hash, :default => {}
|
138
|
-
config_param :suppress_doc_wrap, :bool, :default => false
|
139
|
-
config_param :ignore_exceptions, :array, :default => [], value_type: :string, :desc => "Ignorable exception list"
|
140
|
-
config_param :exception_backup, :bool, :default => true, :desc => "Chunk backup flag when ignore exception occured"
|
141
|
-
config_param :bulk_message_request_threshold, :size, :default => TARGET_BULK_BYTES
|
142
|
-
|
143
|
-
config_section :buffer do
|
144
|
-
config_set_default :@type, DEFAULT_BUFFER_TYPE
|
145
|
-
config_set_default :chunk_keys, ['tag']
|
146
|
-
config_set_default :timekey_use_utc, true
|
147
|
-
end
|
148
|
-
|
149
|
-
include Fluent::ElasticsearchIndexTemplate
|
150
|
-
include Fluent::Plugin::ElasticsearchConstants
|
151
|
-
|
152
|
-
def initialize
|
153
|
-
super
|
154
|
-
end
|
155
|
-
|
156
|
-
def configure(conf)
|
157
|
-
compat_parameters_convert(conf, :buffer)
|
158
|
-
|
159
|
-
super
|
160
|
-
raise Fluent::ConfigError, "'tag' in chunk_keys is required." if not @chunk_key_tag
|
161
|
-
|
162
|
-
@time_parser = create_time_parser
|
163
|
-
@backend_options = backend_options
|
164
|
-
|
165
|
-
if @remove_keys
|
166
|
-
@remove_keys = @remove_keys.split(/\s*,\s*/)
|
167
|
-
end
|
168
|
-
|
169
|
-
if @target_index_key && @target_index_key.is_a?(String)
|
170
|
-
@target_index_key = @target_index_key.split '.'
|
171
|
-
end
|
172
|
-
|
173
|
-
if @target_type_key && @target_type_key.is_a?(String)
|
174
|
-
@target_type_key = @target_type_key.split '.'
|
175
|
-
end
|
176
|
-
|
177
|
-
if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
|
178
|
-
@remove_keys_on_update = @remove_keys_on_update.split ','
|
179
|
-
end
|
180
|
-
|
181
|
-
raise Fluent::ConfigError, "'max_retry_putting_template' must be
|
182
|
-
|
183
|
-
|
184
|
-
|
185
|
-
|
186
|
-
|
187
|
-
|
188
|
-
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
|
194
|
-
|
195
|
-
|
196
|
-
|
197
|
-
|
198
|
-
|
199
|
-
|
200
|
-
|
201
|
-
|
202
|
-
|
203
|
-
|
204
|
-
|
205
|
-
|
206
|
-
|
207
|
-
|
208
|
-
|
209
|
-
|
210
|
-
|
211
|
-
|
212
|
-
|
213
|
-
|
214
|
-
|
215
|
-
|
216
|
-
|
217
|
-
|
218
|
-
|
219
|
-
|
220
|
-
|
221
|
-
|
222
|
-
|
223
|
-
|
224
|
-
|
225
|
-
|
226
|
-
|
227
|
-
|
228
|
-
|
229
|
-
|
230
|
-
|
231
|
-
|
232
|
-
|
233
|
-
|
234
|
-
#
|
235
|
-
|
236
|
-
|
237
|
-
|
238
|
-
|
239
|
-
|
240
|
-
|
241
|
-
|
242
|
-
|
243
|
-
|
244
|
-
|
245
|
-
|
246
|
-
|
247
|
-
|
248
|
-
|
249
|
-
|
250
|
-
|
251
|
-
|
252
|
-
|
253
|
-
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
258
|
-
|
259
|
-
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
|
264
|
-
|
265
|
-
|
266
|
-
|
267
|
-
|
268
|
-
|
269
|
-
|
270
|
-
|
271
|
-
|
272
|
-
|
273
|
-
|
274
|
-
|
275
|
-
|
276
|
-
|
277
|
-
|
278
|
-
|
279
|
-
|
280
|
-
|
281
|
-
|
282
|
-
|
283
|
-
@
|
284
|
-
@
|
285
|
-
|
286
|
-
|
287
|
-
@
|
288
|
-
@
|
289
|
-
|
290
|
-
|
291
|
-
|
292
|
-
|
293
|
-
|
294
|
-
|
295
|
-
|
296
|
-
|
297
|
-
|
298
|
-
|
299
|
-
|
300
|
-
|
301
|
-
|
302
|
-
|
303
|
-
|
304
|
-
|
305
|
-
|
306
|
-
|
307
|
-
|
308
|
-
|
309
|
-
|
310
|
-
|
311
|
-
|
312
|
-
|
313
|
-
|
314
|
-
|
315
|
-
|
316
|
-
|
317
|
-
|
318
|
-
|
319
|
-
|
320
|
-
|
321
|
-
|
322
|
-
|
323
|
-
|
324
|
-
|
325
|
-
|
326
|
-
|
327
|
-
|
328
|
-
|
329
|
-
|
330
|
-
|
331
|
-
|
332
|
-
|
333
|
-
|
334
|
-
|
335
|
-
@_es_info
|
336
|
-
|
337
|
-
|
338
|
-
|
339
|
-
|
340
|
-
|
341
|
-
|
342
|
-
|
343
|
-
|
344
|
-
|
345
|
-
|
346
|
-
|
347
|
-
|
348
|
-
|
349
|
-
|
350
|
-
|
351
|
-
|
352
|
-
|
353
|
-
|
354
|
-
|
355
|
-
|
356
|
-
|
357
|
-
|
358
|
-
|
359
|
-
result
|
360
|
-
result << [record_accessor_create(@
|
361
|
-
result << [record_accessor_create(@
|
362
|
-
result
|
363
|
-
|
364
|
-
|
365
|
-
|
366
|
-
#
|
367
|
-
#
|
368
|
-
|
369
|
-
|
370
|
-
|
371
|
-
|
372
|
-
#
|
373
|
-
|
374
|
-
|
375
|
-
|
376
|
-
|
377
|
-
|
378
|
-
|
379
|
-
|
380
|
-
# if
|
381
|
-
#
|
382
|
-
|
383
|
-
|
384
|
-
|
385
|
-
|
386
|
-
|
387
|
-
|
388
|
-
|
389
|
-
|
390
|
-
|
391
|
-
|
392
|
-
|
393
|
-
|
394
|
-
|
395
|
-
|
396
|
-
|
397
|
-
|
398
|
-
|
399
|
-
|
400
|
-
|
401
|
-
|
402
|
-
|
403
|
-
|
404
|
-
|
405
|
-
|
406
|
-
|
407
|
-
|
408
|
-
|
409
|
-
|
410
|
-
|
411
|
-
|
412
|
-
|
413
|
-
|
414
|
-
|
415
|
-
|
416
|
-
|
417
|
-
|
418
|
-
|
419
|
-
|
420
|
-
|
421
|
-
|
422
|
-
|
423
|
-
|
424
|
-
|
425
|
-
|
426
|
-
|
427
|
-
|
428
|
-
|
429
|
-
|
430
|
-
|
431
|
-
|
432
|
-
|
433
|
-
|
434
|
-
|
435
|
-
|
436
|
-
|
437
|
-
|
438
|
-
|
439
|
-
|
440
|
-
|
441
|
-
|
442
|
-
|
443
|
-
|
444
|
-
|
445
|
-
|
446
|
-
|
447
|
-
|
448
|
-
|
449
|
-
m["
|
450
|
-
|
451
|
-
|
452
|
-
|
453
|
-
|
454
|
-
|
455
|
-
|
456
|
-
|
457
|
-
|
458
|
-
|
459
|
-
|
460
|
-
|
461
|
-
|
462
|
-
|
463
|
-
|
464
|
-
|
465
|
-
|
466
|
-
|
467
|
-
|
468
|
-
|
469
|
-
|
470
|
-
|
471
|
-
|
472
|
-
hash
|
473
|
-
|
474
|
-
|
475
|
-
|
476
|
-
|
477
|
-
|
478
|
-
|
479
|
-
|
480
|
-
host.merge!(
|
481
|
-
|
482
|
-
|
483
|
-
|
484
|
-
|
485
|
-
|
486
|
-
|
487
|
-
|
488
|
-
|
489
|
-
|
490
|
-
|
491
|
-
attributes
|
492
|
-
attributes.
|
493
|
-
|
494
|
-
|
495
|
-
|
496
|
-
|
497
|
-
#
|
498
|
-
#
|
499
|
-
#
|
500
|
-
#
|
501
|
-
#
|
502
|
-
#
|
503
|
-
|
504
|
-
|
505
|
-
|
506
|
-
|
507
|
-
|
508
|
-
|
509
|
-
msgs << @dump_proc.call(
|
510
|
-
|
511
|
-
|
512
|
-
|
513
|
-
|
514
|
-
|
515
|
-
|
516
|
-
msgs << @dump_proc.call(
|
517
|
-
|
518
|
-
|
519
|
-
|
520
|
-
|
521
|
-
|
522
|
-
msgs << @dump_proc.call(
|
523
|
-
|
524
|
-
|
525
|
-
|
526
|
-
|
527
|
-
|
528
|
-
|
529
|
-
|
530
|
-
|
531
|
-
|
532
|
-
|
533
|
-
|
534
|
-
|
535
|
-
|
536
|
-
|
537
|
-
|
538
|
-
|
539
|
-
|
540
|
-
|
541
|
-
|
542
|
-
|
543
|
-
|
544
|
-
|
545
|
-
|
546
|
-
record
|
547
|
-
|
548
|
-
record
|
549
|
-
|
550
|
-
record
|
551
|
-
|
552
|
-
|
553
|
-
|
554
|
-
|
555
|
-
|
556
|
-
|
557
|
-
|
558
|
-
|
559
|
-
|
560
|
-
|
561
|
-
|
562
|
-
|
563
|
-
|
564
|
-
|
565
|
-
|
566
|
-
|
567
|
-
|
568
|
-
|
569
|
-
|
570
|
-
|
571
|
-
|
572
|
-
|
573
|
-
|
574
|
-
|
575
|
-
|
576
|
-
|
577
|
-
|
578
|
-
|
579
|
-
|
580
|
-
|
581
|
-
|
582
|
-
|
583
|
-
|
584
|
-
|
585
|
-
|
586
|
-
|
587
|
-
|
588
|
-
|
589
|
-
|
590
|
-
|
591
|
-
|
592
|
-
|
593
|
-
|
594
|
-
|
595
|
-
|
596
|
-
|
597
|
-
|
598
|
-
|
599
|
-
|
600
|
-
|
601
|
-
|
602
|
-
|
603
|
-
|
604
|
-
|
605
|
-
|
606
|
-
msgs.
|
607
|
-
|
608
|
-
bulk_message_count
|
609
|
-
|
610
|
-
|
611
|
-
|
612
|
-
|
613
|
-
|
614
|
-
|
615
|
-
|
616
|
-
|
617
|
-
|
618
|
-
|
619
|
-
|
620
|
-
|
621
|
-
|
622
|
-
|
623
|
-
|
624
|
-
|
625
|
-
|
626
|
-
|
627
|
-
|
628
|
-
|
629
|
-
msgs.
|
630
|
-
|
631
|
-
|
632
|
-
|
633
|
-
|
634
|
-
|
635
|
-
|
636
|
-
|
637
|
-
|
638
|
-
|
639
|
-
|
640
|
-
|
641
|
-
|
642
|
-
|
643
|
-
|
644
|
-
|
645
|
-
|
646
|
-
|
647
|
-
|
648
|
-
|
649
|
-
|
650
|
-
|
651
|
-
|
652
|
-
|
653
|
-
|
654
|
-
|
655
|
-
|
656
|
-
|
657
|
-
|
658
|
-
|
659
|
-
|
660
|
-
|
661
|
-
|
662
|
-
|
663
|
-
|
664
|
-
|
665
|
-
|
666
|
-
|
667
|
-
|
668
|
-
|
669
|
-
|
670
|
-
|
671
|
-
|
672
|
-
|
673
|
-
|
674
|
-
|
675
|
-
|
676
|
-
|
677
|
-
|
678
|
-
#
|
679
|
-
|
680
|
-
|
681
|
-
|
682
|
-
|
683
|
-
|
684
|
-
|
685
|
-
|
686
|
-
|
687
|
-
|
688
|
-
|
689
|
-
|
690
|
-
|
691
|
-
|
692
|
-
|
693
|
-
|
694
|
-
|
695
|
-
|
696
|
-
|
697
|
-
|
698
|
-
|
699
|
-
|
700
|
-
|
701
|
-
|
702
|
-
|
703
|
-
|
704
|
-
meta
|
705
|
-
meta["
|
706
|
-
|
707
|
-
|
708
|
-
|
709
|
-
|
710
|
-
|
711
|
-
|
712
|
-
|
713
|
-
|
714
|
-
|
715
|
-
|
716
|
-
|
717
|
-
|
718
|
-
|
719
|
-
|
720
|
-
|
721
|
-
|
722
|
-
|
723
|
-
|
724
|
-
|
725
|
-
# returns [
|
726
|
-
|
727
|
-
|
728
|
-
|
729
|
-
|
730
|
-
|
731
|
-
|
732
|
-
#
|
733
|
-
|
734
|
-
|
735
|
-
|
736
|
-
|
737
|
-
|
738
|
-
|
739
|
-
|
740
|
-
|
741
|
-
|
742
|
-
error.
|
743
|
-
|
744
|
-
|
745
|
-
|
746
|
-
|
747
|
-
|
748
|
-
|
749
|
-
|
750
|
-
|
751
|
-
|
752
|
-
|
753
|
-
@
|
754
|
-
|
755
|
-
|
756
|
-
|
757
|
-
|
758
|
-
|
759
|
-
|
760
|
-
|
761
|
-
|
762
|
-
|
763
|
-
|
764
|
-
|
765
|
-
return false if @
|
766
|
-
return false if
|
767
|
-
|
768
|
-
|
769
|
-
|
770
|
-
|
771
|
-
|
772
|
-
|
773
|
-
|
774
|
-
|
775
|
-
|
776
|
-
|
777
|
-
end
|
1
|
+
# encoding: UTF-8
|
2
|
+
require 'date'
|
3
|
+
require 'excon'
|
4
|
+
require 'elasticsearch'
|
5
|
+
require 'json'
|
6
|
+
require 'uri'
|
7
|
+
begin
|
8
|
+
require 'strptime'
|
9
|
+
rescue LoadError
|
10
|
+
end
|
11
|
+
|
12
|
+
require 'fluent/plugin/output'
|
13
|
+
require 'fluent/event'
|
14
|
+
require 'fluent/error'
|
15
|
+
require 'fluent/time'
|
16
|
+
require 'fluent/log-ext'
|
17
|
+
require_relative 'elasticsearch_constants'
|
18
|
+
require_relative 'elasticsearch_error'
|
19
|
+
require_relative 'elasticsearch_error_handler'
|
20
|
+
require_relative 'elasticsearch_index_template'
|
21
|
+
begin
|
22
|
+
require_relative 'oj_serializer'
|
23
|
+
rescue LoadError
|
24
|
+
end
|
25
|
+
|
26
|
+
module Fluent::Plugin
|
27
|
+
class ElasticsearchOutput < Output
|
28
|
+
class RecoverableRequestFailure < StandardError; end
|
29
|
+
class UnrecoverableRequestFailure < Fluent::UnrecoverableError; end
|
30
|
+
|
31
|
+
# MissingIdFieldError is raised for records that do not
|
32
|
+
# include the field for the unique record identifier
|
33
|
+
class MissingIdFieldError < StandardError; end
|
34
|
+
|
35
|
+
# RetryStreamError privides a stream to be
|
36
|
+
# put back in the pipeline for cases where a bulk request
|
37
|
+
# failed (e.g some records succeed while others failed)
|
38
|
+
class RetryStreamError < StandardError
|
39
|
+
attr_reader :retry_stream
|
40
|
+
def initialize(retry_stream)
|
41
|
+
@retry_stream = retry_stream
|
42
|
+
end
|
43
|
+
end
|
44
|
+
|
45
|
+
RequestInfo = Struct.new(:host, :index)
|
46
|
+
|
47
|
+
helpers :event_emitter, :compat_parameters, :record_accessor
|
48
|
+
|
49
|
+
Fluent::Plugin.register_output('elasticsearch', self)
|
50
|
+
|
51
|
+
DEFAULT_BUFFER_TYPE = "memory"
|
52
|
+
DEFAULT_ELASTICSEARCH_VERSION = 5 # For compatibility.
|
53
|
+
DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
|
54
|
+
DEFAULT_TYPE_NAME = "fluentd".freeze
|
55
|
+
DEFAULT_RELOAD_AFTER = -1
|
56
|
+
TARGET_BULK_BYTES = 20 * 1024 * 1024
|
57
|
+
|
58
|
+
config_param :host, :string, :default => 'localhost'
|
59
|
+
config_param :port, :integer, :default => 9200
|
60
|
+
config_param :user, :string, :default => nil
|
61
|
+
config_param :password, :string, :default => nil, :secret => true
|
62
|
+
config_param :path, :string, :default => nil
|
63
|
+
config_param :scheme, :enum, :list => [:https, :http], :default => :http
|
64
|
+
config_param :hosts, :string, :default => nil
|
65
|
+
config_param :target_index_key, :string, :default => nil
|
66
|
+
config_param :target_type_key, :string, :default => nil,
|
67
|
+
:deprecated => <<EOC
|
68
|
+
Elasticsearch 7.x or above will ignore this config. Please use fixed type_name instead.
|
69
|
+
EOC
|
70
|
+
config_param :time_key_format, :string, :default => nil
|
71
|
+
config_param :time_precision, :integer, :default => 9
|
72
|
+
config_param :include_timestamp, :bool, :default => false
|
73
|
+
config_param :logstash_format, :bool, :default => false
|
74
|
+
config_param :logstash_prefix, :string, :default => "logstash"
|
75
|
+
config_param :logstash_prefix_separator, :string, :default => '-'
|
76
|
+
config_param :logstash_dateformat, :string, :default => "%Y.%m.%d"
|
77
|
+
config_param :utc_index, :bool, :default => true
|
78
|
+
config_param :type_name, :string, :default => DEFAULT_TYPE_NAME
|
79
|
+
config_param :index_name, :string, :default => "fluentd"
|
80
|
+
config_param :id_key, :string, :default => nil
|
81
|
+
config_param :write_operation, :string, :default => "index"
|
82
|
+
config_param :parent_key, :string, :default => nil
|
83
|
+
config_param :routing_key, :string, :default => nil
|
84
|
+
config_param :request_timeout, :time, :default => 5
|
85
|
+
config_param :reload_connections, :bool, :default => true
|
86
|
+
config_param :reload_on_failure, :bool, :default => false
|
87
|
+
config_param :retry_tag, :string, :default=>nil
|
88
|
+
config_param :resurrect_after, :time, :default => 60
|
89
|
+
config_param :time_key, :string, :default => nil
|
90
|
+
config_param :time_key_exclude_timestamp, :bool, :default => false
|
91
|
+
config_param :ssl_verify , :bool, :default => true
|
92
|
+
config_param :client_key, :string, :default => nil
|
93
|
+
config_param :client_cert, :string, :default => nil
|
94
|
+
config_param :client_key_pass, :string, :default => nil
|
95
|
+
config_param :ca_file, :string, :default => nil
|
96
|
+
config_param :ssl_version, :enum, list: [:SSLv23, :TLSv1, :TLSv1_1, :TLSv1_2], :default => :TLSv1
|
97
|
+
config_param :remove_keys, :string, :default => nil
|
98
|
+
config_param :remove_keys_on_update, :string, :default => ""
|
99
|
+
config_param :remove_keys_on_update_key, :string, :default => nil
|
100
|
+
config_param :flatten_hashes, :bool, :default => false
|
101
|
+
config_param :flatten_hashes_separator, :string, :default => "_"
|
102
|
+
config_param :template_name, :string, :default => nil
|
103
|
+
config_param :template_file, :string, :default => nil
|
104
|
+
config_param :template_overwrite, :bool, :default => false
|
105
|
+
config_param :customize_template, :hash, :default => nil
|
106
|
+
config_param :rollover_index, :string, :default => false
|
107
|
+
config_param :index_date_pattern, :string, :default => "now/d"
|
108
|
+
config_param :deflector_alias, :string, :default => nil
|
109
|
+
config_param :index_prefix, :string, :default => "logstash"
|
110
|
+
config_param :application_name, :string, :default => "default"
|
111
|
+
config_param :templates, :hash, :default => nil
|
112
|
+
config_param :max_retry_putting_template, :integer, :default => 10
|
113
|
+
config_param :fail_on_putting_template_retry_exceed, :bool, :default => true
|
114
|
+
config_param :max_retry_get_es_version, :integer, :default => 15
|
115
|
+
config_param :include_tag_key, :bool, :default => false
|
116
|
+
config_param :tag_key, :string, :default => 'tag'
|
117
|
+
config_param :time_parse_error_tag, :string, :default => 'Fluent::ElasticsearchOutput::TimeParser.error'
|
118
|
+
config_param :reconnect_on_error, :bool, :default => false
|
119
|
+
config_param :pipeline, :string, :default => nil
|
120
|
+
config_param :with_transporter_log, :bool, :default => false
|
121
|
+
config_param :emit_error_for_missing_id, :bool, :default => false
|
122
|
+
config_param :sniffer_class_name, :string, :default => nil
|
123
|
+
config_param :reload_after, :integer, :default => DEFAULT_RELOAD_AFTER
|
124
|
+
config_param :content_type, :enum, list: [:"application/json", :"application/x-ndjson"], :default => :"application/json",
|
125
|
+
:deprecated => <<EOC
|
126
|
+
elasticsearch gem v6.0.2 starts to use correct Content-Type. Please upgrade elasticserach gem and stop to use this option.
|
127
|
+
see: https://github.com/elastic/elasticsearch-ruby/pull/514
|
128
|
+
EOC
|
129
|
+
config_param :include_index_in_url, :bool, :default => false
|
130
|
+
config_param :http_backend, :enum, list: [:excon, :typhoeus], :default => :excon
|
131
|
+
config_param :validate_client_version, :bool, :default => false
|
132
|
+
config_param :prefer_oj_serializer, :bool, :default => false
|
133
|
+
config_param :unrecoverable_error_types, :array, :default => ["out_of_memory_error", "es_rejected_execution_exception"]
|
134
|
+
config_param :verify_es_version_at_startup, :bool, :default => true
|
135
|
+
config_param :default_elasticsearch_version, :integer, :default => DEFAULT_ELASTICSEARCH_VERSION
|
136
|
+
config_param :log_es_400_reason, :bool, :default => false
|
137
|
+
config_param :custom_headers, :hash, :default => {}
|
138
|
+
config_param :suppress_doc_wrap, :bool, :default => false
|
139
|
+
config_param :ignore_exceptions, :array, :default => [], value_type: :string, :desc => "Ignorable exception list"
|
140
|
+
config_param :exception_backup, :bool, :default => true, :desc => "Chunk backup flag when ignore exception occured"
|
141
|
+
config_param :bulk_message_request_threshold, :size, :default => TARGET_BULK_BYTES
|
142
|
+
|
143
|
+
config_section :buffer do
|
144
|
+
config_set_default :@type, DEFAULT_BUFFER_TYPE
|
145
|
+
config_set_default :chunk_keys, ['tag']
|
146
|
+
config_set_default :timekey_use_utc, true
|
147
|
+
end
|
148
|
+
|
149
|
+
include Fluent::ElasticsearchIndexTemplate
|
150
|
+
include Fluent::Plugin::ElasticsearchConstants
|
151
|
+
|
152
|
+
# No plugin-specific state is initialized here; Output#initialize does the work.
def initialize
  super
end
|
155
|
+
|
156
|
+
# Validates configuration, installs index templates (unless dry-run),
# detects the ES major version, and precomputes parsers/accessors used
# on the hot path. Raises Fluent::ConfigError on invalid settings.
def configure(conf)
  compat_parameters_convert(conf, :buffer)

  super
  raise Fluent::ConfigError, "'tag' in chunk_keys is required." unless @chunk_key_tag

  @time_parser = create_time_parser
  @backend_options = backend_options

  # Normalize comma/dot separated string params into arrays.
  @remove_keys = @remove_keys.split(/\s*,\s*/) if @remove_keys
  @target_index_key = @target_index_key.split '.' if @target_index_key && @target_index_key.is_a?(String)
  @target_type_key = @target_type_key.split '.' if @target_type_key && @target_type_key.is_a?(String)
  @remove_keys_on_update = @remove_keys_on_update.split ',' if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)

  raise Fluent::ConfigError, "'max_retry_putting_template' must be greater than or equal to zero." if @max_retry_putting_template < 0
  raise Fluent::ConfigError, "'max_retry_get_es_version' must be greater than or equal to zero." if @max_retry_get_es_version < 0

  # Raise error when using host placeholders and template features at same time.
  valid_host_placeholder = placeholder?(:host_placeholder, @host)
  if valid_host_placeholder && (@template_name && @template_file || @templates)
    raise Fluent::ConfigError, "host placeholder and template installation are exclusive features."
  end

  unless Fluent::Engine.dry_run_mode
    if @template_name && @template_file
      retry_operate(@max_retry_putting_template, @fail_on_putting_template_retry_exceed) do
        if @customize_template
          if @rollover_index
            raise Fluent::ConfigError, "'deflector_alias' must be provided if 'rollover_index' is set true ." unless @deflector_alias
          end
          template_custom_install(@template_name, @template_file, @template_overwrite, @customize_template, @index_prefix, @rollover_index, @deflector_alias, @application_name, @index_date_pattern)
        else
          template_install(@template_name, @template_file, @template_overwrite)
        end
      end
    elsif @templates
      retry_operate(@max_retry_putting_template, @fail_on_putting_template_retry_exceed) do
        templates_hash_install(@templates, @template_overwrite)
      end
    end
  end

  # Prefer Oj for JSON dumping when available; fall back to Yajl.
  @serializer_class = nil
  begin
    require 'oj'
    @dump_proc = Oj.method(:dump)
    if @prefer_oj_serializer
      @serializer_class = Fluent::Plugin::Serializer::Oj
      Elasticsearch::API.settings[:serializer] = Fluent::Plugin::Serializer::Oj
    end
  rescue LoadError
    @dump_proc = Yajl.method(:dump)
  end

  # Credentials given as %{...} are treated as raw values needing escaping.
  if @user && m = @user.match(/%{(?<user>.*)}/)
    @user = URI.encode_www_form_component(m["user"])
  end
  if @password && m = @password.match(/%{(?<password>.*)}/)
    @password = URI.encode_www_form_component(m["password"])
  end

  @transport_logger = nil
  if @with_transporter_log
    @transport_logger = log
    log_level = conf['@log_level'] || conf['log_level']
    log.warn "Consider to specify log_level with @log_level." unless log_level
  end
  # Specify @sniffer_class before calling #client.
  # #detect_es_major_version uses #client.
  @sniffer_class = nil
  begin
    @sniffer_class = Object.const_get(@sniffer_class_name) if @sniffer_class_name
  rescue Exception => ex
    raise Fluent::ConfigError, "Could not load sniffer class #{@sniffer_class_name}: #{ex}"
  end

  @last_seen_major_version =
    if @verify_es_version_at_startup && !Fluent::Engine.dry_run_mode
      retry_operate(@max_retry_get_es_version) do
        detect_es_major_version
      end
    else
      @default_elasticsearch_version
    end
  if @last_seen_major_version == 6 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.info "Detected ES 6.x: ES 7.x will only accept `_doc` in type_name."
  end
  if @last_seen_major_version >= 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
    @type_name = '_doc'.freeze
  end

  if @validate_client_version && !Fluent::Engine.dry_run_mode
    if @last_seen_major_version != client_library_version.to_i
      raise Fluent::ConfigError, <<-EOC
Detected ES #{@last_seen_major_version} but you use ES client #{client_library_version}.
Please consider to use #{@last_seen_major_version}.x series ES client.
EOC
    end
  end

  if @last_seen_major_version >= 6
    case @ssl_version
    when :SSLv23, :TLSv1, :TLSv1_1
      if @scheme == :https
        log.warn "Detected ES 6.x or above and enabled insecure security:
 You might have to specify `ssl_version TLSv1_2` in configuration."
      end
    end
  end

  if @buffer_config.flush_thread_count < 2
    log.warn "To prevent events traffic jam, you should specify 2 or more 'flush_thread_count'."
  end

  # Consider missing the prefix of "$." in nested key specifiers.
  @id_key = convert_compat_id_key(@id_key) if @id_key
  @parent_key = convert_compat_id_key(@parent_key) if @parent_key
  @routing_key = convert_compat_id_key(@routing_key) if @routing_key

  @routing_key_name = configure_routing_key_name
  @meta_config_map = create_meta_config_map
  @current_config = nil

  # Resolve configured exception names; unknown names are warned and dropped.
  @ignore_exception_classes = @ignore_exceptions.map do |exception|
    unless Object.const_defined?(exception)
      log.warn "Cannot find class #{exception}. Will ignore it."

      nil
    else
      Object.const_get(exception)
    end
  end.compact

  # Bind #split_request? to the size check only when a threshold is set.
  if @bulk_message_request_threshold < 0
    class << self
      alias_method :split_request?, :split_request_size_uncheck?
    end
  else
    class << self
      alias_method :split_request?, :split_request_size_check?
    end
  end
end
|
311
|
+
|
312
|
+
# Returns true when +param+'s placeholders validate for +name+,
# false when placeholder validation raises Fluent::ConfigError.
def placeholder?(name, param)
  placeholder_validate!(name, param)
  true
rescue Fluent::ConfigError
  false
end
|
320
|
+
|
321
|
+
# Maps the configured HTTP backend to its client-TLS option hash.
# Typhoeus is required lazily so it remains an optional dependency;
# a missing gem is surfaced as a ConfigError.
def backend_options
  case @http_backend
  when :excon
    { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass }
  when :typhoeus
    require 'typhoeus'
    { sslkey: @client_key, sslcert: @client_cert, keypasswd: @client_key_pass }
  end
rescue LoadError => ex
  log.error_backtrace(ex.backtrace)
  raise Fluent::ConfigError, "You must install #{@http_backend} gem. Exception: #{ex}"
end
|
333
|
+
|
334
|
+
# Fetches cluster info (memoized in @_es_info) and returns the
# Elasticsearch major version as an Integer.
def detect_es_major_version
  info = (@_es_info ||= client.info)
  info["version"]["number"].to_i
end
|
338
|
+
|
339
|
+
# Version string of the installed elasticsearch-ruby gem.
def client_library_version
  Elasticsearch::VERSION
end
|
342
|
+
|
343
|
+
# ES 7 renamed the bulk metadata field "_routing" to "routing".
def configure_routing_key_name
  @last_seen_major_version >= 7 ? 'routing' : '_routing'
end
|
350
|
+
|
351
|
+
# Prepends "$." to dotted record-accessor keys written without it
# (e.g. "a.b" -> "$.a.b"); bracket syntax ("$[...") and already-prefixed
# keys pass through unchanged.
def convert_compat_id_key(key)
  needs_prefix = key.include?('.') &&
                 !key.start_with?('$[') &&
                 !key.start_with?('$.')
  needs_prefix ? "$.#{key}" : key
end
|
357
|
+
|
358
|
+
# Builds [record_accessor, bulk-meta-field] pairs for the configured
# id/parent/routing keys; unset keys are skipped.
def create_meta_config_map
  [
    [@id_key, '_id'],
    [@parent_key, '_parent'],
    [@routing_key, @routing_key_name]
  ].each_with_object([]) do |(key, meta_field), mappings|
    mappings << [record_accessor_create(key), meta_field] if key
  end
end
|
365
|
+
|
366
|
+
# once fluent v0.14 is released we might be able to use
|
367
|
+
# Fluent::Parser::TimeParser, but it doesn't quite do what we want - if gives
|
368
|
+
# [sec,nsec] where as we want something we can call `strftime` on...
|
369
|
+
# once fluent v0.14 is released we might be able to use
# Fluent::Parser::TimeParser, but it doesn't quite do what we want - if gives
# [sec,nsec] where as we want something we can call `strftime` on...
#
# Returns a callable taking a time value (String or Numeric) and
# returning a DateTime.
def create_time_parser
  if @time_key_format
    begin
      # Strptime doesn't support all formats, but for those it does it's
      # blazingly fast.
      strptime = Strptime.new(@time_key_format)
      ->(value) {
        value = convert_numeric_time_into_string(value, @time_key_format) if value.is_a?(Numeric)
        strptime.exec(value).to_datetime
      }
    rescue
      # Can happen if Strptime doesn't recognize the format; or
      # if strptime couldn't be required (because it's not installed -- it's
      # ruby 2 only)
      ->(value) {
        value = convert_numeric_time_into_string(value, @time_key_format) if value.is_a?(Numeric)
        DateTime.strptime(value, @time_key_format)
      }
    end
  else
    ->(value) {
      value = convert_numeric_time_into_string(value) if value.is_a?(Numeric)
      DateTime.parse(value)
    }
  end
end
|
395
|
+
|
396
|
+
# Renders a numeric (epoch) time as a string so the string-based time
# parsers can consume it; format defaults to full precision with zone.
def convert_numeric_time_into_string(numeric_time, time_key_format = "%Y-%m-%d %H:%M:%S.%N%z")
  parser = Fluent::NumericTimeParser.new(:float)
  Time.at(parser.parse(numeric_time).to_r).strftime(time_key_format)
end
|
400
|
+
|
401
|
+
# Parses +value+ with the configured time parser. On failure the record
# is routed to @time_parse_error_tag and the event's own timestamp is
# used as a fallback.
def parse_time(value, event_time, tag)
  @time_parser.call(value)
rescue => e
  router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
  Time.at(event_time).to_datetime
end
|
407
|
+
|
408
|
+
# Returns an Elasticsearch client bound to +host+ (or the configured
# hosts). The client is cached in @_es and rebuilt only when the
# resolved host list differs from the one it was created with.
def client(host = nil)
  # check here to see if we already have a client connection for the given host
  connection_options = get_connection_options(host)

  @_es = nil unless is_existing_connection(connection_options[:hosts])

  @_es ||= begin
    @current_config = connection_options[:hosts].clone
    adapter_conf = lambda {|f| f.adapter @http_backend, @backend_options }
    # reload_connections accepts either a boolean or a request count.
    local_reload_connections = @reload_connections
    if local_reload_connections && @reload_after > DEFAULT_RELOAD_AFTER
      local_reload_connections = @reload_after
    end
    headers = { 'Content-Type' => @content_type.to_s }.merge(@custom_headers)
    transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
      options: {
        reload_connections: local_reload_connections,
        reload_on_failure: @reload_on_failure,
        resurrect_after: @resurrect_after,
        logger: @transport_logger,
        transport_options: {
          headers: headers,
          request: { timeout: @request_timeout },
          ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
        },
        http: {
          user: @user,
          password: @password
        },
        sniffer_class: @sniffer_class,
        serializer_class: @serializer_class,
      }), &adapter_conf)
    Elasticsearch::Client.new transport: transport
  end
end
|
443
|
+
|
444
|
+
# URL-escapes userinfo written as %{user}:%{password} inside a host URL,
# e.g. "http://%{j+hn}:%{p@ss}@host" -> "http://j%2Bhn:p%40ss@host".
# Strings without that pattern are returned unchanged.
def get_escaped_userinfo(host_str)
  m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
  return host_str unless m

  user = URI.encode_www_form_component(m["user"])
  password = URI.encode_www_form_component(m["password"])
  "#{m['scheme']}#{user}:#{password}#{m['path']}"
end
|
455
|
+
|
456
|
+
# Resolves the effective host list. Accepts the legacy "host:port,..."
# format and full URLs; falls back to the single host/port/scheme
# config. Configured user/password/path are merged into hosts that do
# not carry their own.
def get_connection_options(con_host=nil)
  raise "`password` must be present if `user` is present" if @user && !@password

  hosts = if con_host || @hosts
            (con_host || @hosts).split(',').map do |host_str|
              # Support legacy hosts format host:port,host:port,host:port...
              if host_str.match(%r{^[^:]+(\:\d+)?$})
                {
                  host: host_str.split(':')[0],
                  port: (host_str.split(':')[1] || @port).to_i,
                  scheme: @scheme.to_s
                }
              else
                # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
                uri = URI(get_escaped_userinfo(host_str))
                %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
                  hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
                  hash
                end
              end
            end.compact
          else
            [{host: @host, port: @port, scheme: @scheme.to_s}]
          end.each do |host|
            host.merge!(user: @user, password: @password) if !host[:user] && @user
            host.merge!(path: @path) if !host[:path] && @path
          end

  {
    hosts: hosts
  }
end
|
488
|
+
|
489
|
+
# Human-readable description of the resolved hosts for log/error
# messages, with any password replaced by 'obfuscated'.
def connection_options_description(con_host=nil)
  get_connection_options(con_host)[:hosts].map do |host_info|
    masked = host_info.dup
    masked[:password] = 'obfuscated' if masked.has_key?(:password)
    masked.inspect
  end.join(', ')
end
|
496
|
+
|
497
|
+
# append_record_to_messages adds a record to the bulk message
|
498
|
+
# payload to be submitted to Elasticsearch. Records that do
|
499
|
+
# not include '_id' field are skipped when 'write_operation'
|
500
|
+
# is configured for 'create' or 'update'
|
501
|
+
#
|
502
|
+
# returns 'true' if record was appended to the bulk message
|
503
|
+
# and 'false' otherwise
|
504
|
+
# append_record_to_messages adds a record to the bulk message
# payload to be submitted to Elasticsearch. Records that do
# not include '_id' field are skipped when 'write_operation'
# is configured for 'create' or 'update'
#
# returns 'true' if record was appended to the bulk message
# and 'false' otherwise
def append_record_to_messages(op, meta, header, record, msgs)
  case op
  when UPDATE_OP, UPSERT_OP
    if meta.has_key?(ID_FIELD)
      header[UPDATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(update_body(record, op)) << BODY_DELIMITER
      return true
    end
  when CREATE_OP
    if meta.has_key?(ID_FIELD)
      header[CREATE_OP] = meta
      msgs << @dump_proc.call(header) << BODY_DELIMITER
      msgs << @dump_proc.call(record) << BODY_DELIMITER
      return true
    end
  when INDEX_OP
    # Index operations never require an explicit _id.
    header[INDEX_OP] = meta
    msgs << @dump_proc.call(header) << BODY_DELIMITER
    msgs << @dump_proc.call(record) << BODY_DELIMITER
    return true
  end
  false
end
|
528
|
+
|
529
|
+
# Builds the body of an update/upsert bulk action from +record+.
# With suppress_doc_wrap the stripped record is sent as-is; otherwise it
# is wrapped in {"doc" => ...} and, for upserts, either flagged as
# doc_as_upsert or given the full record under "upsert".
def update_body(record, op)
  update = remove_keys(record)
  return update if @suppress_doc_wrap

  body = {"doc".freeze => update}
  if op == UPSERT_OP
    if update == record
      body["doc_as_upsert".freeze] = true
    else
      body[UPSERT_OP] = record
    end
  end
  body
end
|
544
|
+
|
545
|
+
# Strips update-excluded keys from +record+. The key list comes from the
# record itself (under @remove_keys_on_update_key) or from the
# remove_keys_on_update config; when keys apply, a pruned copy is
# returned and the original record is left intact.
def remove_keys(record)
  keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
  record.delete(@remove_keys_on_update_key)
  return record unless keys.any?

  pruned = record.dup
  keys.each { |key| pruned.delete(key) }
  pruned
end
|
553
|
+
|
554
|
+
# Recursively flattens nested hashes into a single-level hash whose keys
# are the joined path segments (using @flatten_hashes_separator).
# Arrays are kept intact as leaf values.
def flatten_record(record, prefix=[])
  case record
  when Hash
    record.each_with_object({}) do |(key, value), flat|
      flat.merge!(flatten_record(value, prefix + [key.to_s]))
    end
  when Array
    # Don't mess with arrays, leave them unprocessed
    { prefix.join(@flatten_hashes_separator) => record }
  else
    { prefix.join(@flatten_hashes_separator) => record }
  end
end
|
568
|
+
|
569
|
+
# Resolves chunk placeholders for the three index-naming settings.
# Returns [logstash_prefix, index_name, type_name].
def expand_placeholders(chunk)
  [
    extract_placeholders(@logstash_prefix, chunk),
    extract_placeholders(@index_name, chunk),
    extract_placeholders(@type_name, chunk)
  ]
end
|
575
|
+
|
576
|
+
# This output works safely under Fluentd's multi-worker mode.
def multi_workers_ready?
  true
end
|
579
|
+
|
580
|
+
# Converts a buffered chunk into per-destination bulk payloads and sends
# them. Payloads are flushed early when #split_request? says the
# accumulated size crossed the threshold; per-record failures are routed
# to the error stream instead of failing the whole chunk.
def write(chunk)
  bulk_message_count = Hash.new { |h,k| h[k] = 0 }
  bulk_message = Hash.new { |h,k| h[k] = '' }
  header = {}
  meta = {}

  tag = chunk.metadata.tag
  extracted_values = expand_placeholders(chunk)
  host = if @hosts
           extract_placeholders(@hosts, chunk)
         else
           extract_placeholders(@host, chunk)
         end

  chunk.msgpack_each do |time, record|
    next unless record.is_a? Hash
    begin
      meta, header, record = process_message(tag, meta, header, time, record, extracted_values)
      info = if @include_index_in_url
               RequestInfo.new(host, meta.delete("_index".freeze))
             else
               RequestInfo.new(host, nil)
             end

      if split_request?(bulk_message, info)
        bulk_message.each do |info, msgs|
          send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
          msgs.clear
          # Clear bulk_message_count for this info.
          bulk_message_count[info] = 0;
          next
        end
      end

      if append_record_to_messages(@write_operation, meta, header, record, bulk_message[info])
        bulk_message_count[info] += 1;
      else
        if @emit_error_for_missing_id
          raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
        else
          log.on_debug { log.debug("Dropping record because its missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
        end
      end
    rescue => e
      router.emit_error_event(tag, time, record, e)
    end
  end

  bulk_message.each do |info, msgs|
    send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
    msgs.clear
  end
end
|
633
|
+
|
634
|
+
# Placeholder implementation; #configure replaces this via alias_method
# with either #split_request_size_check? or #split_request_size_uncheck?.
def split_request?(bulk_message, info)
  # For safety.
end
|
637
|
+
|
638
|
+
# True when the accumulated payload for this destination exceeds the
# configured request-size threshold.
def split_request_size_check?(bulk_message, info)
  bulk_message[info].size > @bulk_message_request_threshold
end
|
641
|
+
|
642
|
+
# Size-based splitting disabled (negative threshold): never split.
def split_request_size_uncheck?(bulk_message, info)
  false
end
|
645
|
+
|
646
|
+
# Builds the bulk metadata (+meta+) and final +record+ for one event:
# optional hash flattening, @timestamp injection, target index/type
# resolution (record override, logstash naming, or static config) and
# id/parent/routing extraction. Returns [meta, header, record].
def process_message(tag, meta, header, time, record, extracted_values)
  logstash_prefix, index_name, type_name = extracted_values

  if @flatten_hashes
    record = flatten_record(record)
  end

  dt = nil
  if @logstash_format || @include_timestamp
    if record.has_key?(TIMESTAMP_FIELD)
      rts = record[TIMESTAMP_FIELD]
      dt = parse_time(rts, time, tag)
    elsif record.has_key?(@time_key)
      rts = record[@time_key]
      dt = parse_time(rts, time, tag)
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
    else
      dt = Time.at(time).to_datetime
      record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
    end
  end

  target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
  if target_index_parent && target_index_parent[target_index_child_key]
    target_index = target_index_parent.delete(target_index_child_key)
  elsif @logstash_format
    dt = dt.new_offset(0) if @utc_index
    target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
  else
    target_index = index_name
  end

  # Change target_index to lower-case since Elasticsearch doesn't
  # allow upper-case characters in index names.
  target_index = target_index.downcase
  if @include_tag_key
    record[@tag_key] = tag
  end

  target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
  if target_type_parent && target_type_parent[target_type_child_key]
    target_type = target_type_parent.delete(target_type_child_key)
    if @last_seen_major_version == 6
      log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
      target_type = type_name
    elsif @last_seen_major_version >= 7
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    end
  else
    if @last_seen_major_version >= 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
      log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
      target_type = '_doc'.freeze
    else
      target_type = type_name
    end
  end

  meta.clear
  meta["_index".freeze] = target_index
  meta["_type".freeze] = target_type

  if @pipeline
    meta["pipeline".freeze] = @pipeline
  end

  @meta_config_map.each do |record_accessor, meta_key|
    if raw_value = record_accessor.call(record)
      meta[meta_key] = raw_value
    end
  end

  if @remove_keys
    @remove_keys.each { |key| record.delete(key) }
  end

  return [meta, header, record]
end
|
724
|
+
|
725
|
+
# returns [parent, child_key] of child described by path array in record's tree
|
726
|
+
# returns [nil, child_key] if path doesnt exist in record
|
727
|
+
# returns [parent, child_key] of child described by path array in record's tree
# returns [nil, child_key] if path doesn't exist in record
def get_parent_of(record, path)
  parent = path[0..-2].reduce(record) do |node, segment|
    node.is_a?(Hash) ? node[segment] : nil
  end
  [parent, path[-1]]
end
|
731
|
+
|
732
|
+
# send_bulk given a specific bulk request, the original tag,
|
733
|
+
# chunk, and bulk_message_count
|
734
|
+
# send_bulk given a specific bulk request, the original tag,
# chunk, and bulk_message_count
#
# Partial failures are re-emitted as a retry stream; other exceptions
# are either ignored (per ignore_exceptions) or converted into
# Recoverable/UnrecoverableRequestFailure for Fluentd's retry machinery.
def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info)
  log.on_trace { log.trace "bulk request: #{data}" }
  response = client(info.host).bulk body: data, index: info.index
  log.on_trace { log.trace "bulk response: #{response}" }

  if response['errors']
    error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
    error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
  end
rescue RetryStreamError => e
  emit_tag = @retry_tag ? @retry_tag : tag
  router.emit_stream(emit_tag, e.retry_stream)
rescue => e
  ignore = @ignore_exception_classes.any? { |clazz| e.class <= clazz }

  log.warn "Exception ignored in tag #{tag}: #{e.class.name} #{e.message}" if ignore

  # Drop the cached client/info so the next attempt reconnects.
  @_es = nil if @reconnect_on_error
  @_es_info = nil if @reconnect_on_error

  raise UnrecoverableRequestFailure if ignore && @exception_backup

  # FIXME: identify unrecoverable errors and raise UnrecoverableRequestFailure instead
  raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{connection_options_description(info.host)}): #{e.message}" unless ignore
end
|
762
|
+
|
763
|
+
# Returns true when +host+ (an array of host hashes) matches the
# connection configuration the cached ES client was built with, so the
# client can be reused; false forces #client to rebuild it.
def is_existing_connection(host)
  # check if the host provided match the current connection
  return false if @_es.nil?
  return false if @current_config.nil?
  return false if host.length != @current_config.length

  host.each_with_index do |entry, i|
    # BUGFIX: the original wrote `!entry[:host].eql? a || b`, which Ruby
    # parses as `!entry[:host].eql?(a || b)` — port mismatches were
    # silently ignored. Parentheses restore the intended comparison.
    if !entry[:host].eql?(@current_config[i][:host]) || entry[:port] != @current_config[i][:port]
      return false
    end
  end

  true
end
|
777
|
+
end
|
778
|
+
end
|