fluent-plugin-elasticsearch 3.5.4 → 3.5.5
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/.coveralls.yml +2 -2
- data/.editorconfig +9 -9
- data/.gitignore +18 -18
- data/.travis.yml +18 -18
- data/CONTRIBUTING.md +24 -0
- data/Gemfile +9 -9
- data/History.md +402 -390
- data/ISSUE_TEMPLATE.md +30 -27
- data/LICENSE.txt +201 -201
- data/PULL_REQUEST_TEMPLATE.md +10 -10
- data/README.md +1539 -1515
- data/Rakefile +11 -11
- data/appveyor.yml +30 -30
- data/fluent-plugin-elasticsearch.gemspec +31 -31
- data/lib/fluent/log-ext.rb +38 -38
- data/lib/fluent/plugin/elasticsearch_constants.rb +13 -13
- data/lib/fluent/plugin/elasticsearch_error.rb +5 -5
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +127 -122
- data/lib/fluent/plugin/elasticsearch_index_template.rb +112 -112
- data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb +10 -10
- data/lib/fluent/plugin/filter_elasticsearch_genid.rb +25 -25
- data/lib/fluent/plugin/oj_serializer.rb +22 -22
- data/lib/fluent/plugin/out_elasticsearch.rb +778 -777
- data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +262 -262
- data/test/helper.rb +24 -24
- data/test/plugin/test_alias_template.json +8 -8
- data/test/plugin/test_elasticsearch_error_handler.rb +525 -503
- data/test/plugin/test_filter_elasticsearch_genid.rb +44 -44
- data/test/plugin/test_out_elasticsearch.rb +2744 -2720
- data/test/plugin/test_out_elasticsearch_dynamic.rb +1001 -1001
- data/test/plugin/test_template.json +23 -23
- data/test/test_log-ext.rb +35 -35
- metadata +4 -4
@@ -1,262 +1,262 @@
|
|
1
|
-
# encoding: UTF-8
|
2
|
-
require_relative 'out_elasticsearch'
|
3
|
-
|
4
|
-
module Fluent::Plugin
|
5
|
-
class ElasticsearchOutputDynamic < ElasticsearchOutput
|
6
|
-
|
7
|
-
Fluent::Plugin.register_output('elasticsearch_dynamic', self)
|
8
|
-
|
9
|
-
helpers :event_emitter
|
10
|
-
|
11
|
-
config_param :delimiter, :string, :default => "."
|
12
|
-
|
13
|
-
DYNAMIC_PARAM_NAMES = %W[hosts host port include_timestamp logstash_format logstash_prefix logstash_dateformat time_key utc_index index_name tag_key type_name id_key parent_key routing_key write_operation]
|
14
|
-
DYNAMIC_PARAM_SYMBOLS = DYNAMIC_PARAM_NAMES.map { |n| "@#{n}".to_sym }
|
15
|
-
|
16
|
-
RequestInfo = Struct.new(:host, :index)
|
17
|
-
|
18
|
-
attr_reader :dynamic_config
|
19
|
-
|
20
|
-
def configure(conf)
|
21
|
-
super
|
22
|
-
|
23
|
-
# evaluate all configurations here
|
24
|
-
@dynamic_config = {}
|
25
|
-
DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
|
26
|
-
value = expand_param(self.instance_variable_get(var), nil, nil, nil)
|
27
|
-
key = DYNAMIC_PARAM_NAMES[i]
|
28
|
-
@dynamic_config[key] = value.to_s
|
29
|
-
}
|
30
|
-
# end eval all configs
|
31
|
-
end
|
32
|
-
|
33
|
-
def create_meta_config_map
|
34
|
-
{'id_key' => '_id', 'parent_key' => '_parent', 'routing_key' => @routing_key_name}
|
35
|
-
end
|
36
|
-
|
37
|
-
|
38
|
-
def client(host = nil)
|
39
|
-
# check here to see if we already have a client connection for the given host
|
40
|
-
connection_options = get_connection_options(host)
|
41
|
-
|
42
|
-
@_es = nil unless is_existing_connection(connection_options[:hosts])
|
43
|
-
|
44
|
-
@_es ||= begin
|
45
|
-
@current_config = connection_options[:hosts].clone
|
46
|
-
adapter_conf = lambda {|f| f.adapter @http_backend, @backend_options }
|
47
|
-
transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
|
48
|
-
options: {
|
49
|
-
reload_connections: @reload_connections,
|
50
|
-
reload_on_failure: @reload_on_failure,
|
51
|
-
resurrect_after: @resurrect_after,
|
52
|
-
logger: @transport_logger,
|
53
|
-
transport_options: {
|
54
|
-
headers: { 'Content-Type' => @content_type.to_s },
|
55
|
-
request: { timeout: @request_timeout },
|
56
|
-
ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
|
57
|
-
},
|
58
|
-
http: {
|
59
|
-
user: @user,
|
60
|
-
password: @password
|
61
|
-
}
|
62
|
-
}), &adapter_conf)
|
63
|
-
Elasticsearch::Client.new transport: transport
|
64
|
-
end
|
65
|
-
end
|
66
|
-
|
67
|
-
def get_connection_options(con_host)
|
68
|
-
raise "`password` must be present if `user` is present" if @user && !@password
|
69
|
-
|
70
|
-
hosts = if con_host || @hosts
|
71
|
-
(con_host || @hosts).split(',').map do |host_str|
|
72
|
-
# Support legacy hosts format host:port,host:port,host:port...
|
73
|
-
if host_str.match(%r{^[^:]+(\:\d+)?$})
|
74
|
-
{
|
75
|
-
host: host_str.split(':')[0],
|
76
|
-
port: (host_str.split(':')[1] || @port).to_i,
|
77
|
-
scheme: @scheme.to_s
|
78
|
-
}
|
79
|
-
else
|
80
|
-
# New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
|
81
|
-
uri = URI(get_escaped_userinfo(host_str))
|
82
|
-
%w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
|
83
|
-
hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
|
84
|
-
hash
|
85
|
-
end
|
86
|
-
end
|
87
|
-
end.compact
|
88
|
-
else
|
89
|
-
[{host: @host, port: @port.to_i, scheme: @scheme.to_s}]
|
90
|
-
end.each do |host|
|
91
|
-
host.merge!(user: @user, password: @password) if !host[:user] && @user
|
92
|
-
host.merge!(path: @path) if !host[:path] && @path
|
93
|
-
end
|
94
|
-
|
95
|
-
{
|
96
|
-
hosts: hosts
|
97
|
-
}
|
98
|
-
end
|
99
|
-
|
100
|
-
def connection_options_description(host)
|
101
|
-
get_connection_options(host)[:hosts].map do |host_info|
|
102
|
-
attributes = host_info.dup
|
103
|
-
attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
|
104
|
-
attributes.inspect
|
105
|
-
end.join(', ')
|
106
|
-
end
|
107
|
-
|
108
|
-
def multi_workers_ready?
|
109
|
-
true
|
110
|
-
end
|
111
|
-
|
112
|
-
def write(chunk)
|
113
|
-
bulk_message = Hash.new { |h,k| h[k] = '' }
|
114
|
-
dynamic_conf = @dynamic_config.clone
|
115
|
-
|
116
|
-
headers = {
|
117
|
-
UPDATE_OP => {},
|
118
|
-
UPSERT_OP => {},
|
119
|
-
CREATE_OP => {},
|
120
|
-
INDEX_OP => {}
|
121
|
-
}
|
122
|
-
|
123
|
-
tag = chunk.metadata.tag
|
124
|
-
|
125
|
-
chunk.msgpack_each do |time, record|
|
126
|
-
next unless record.is_a? Hash
|
127
|
-
|
128
|
-
begin
|
129
|
-
# evaluate all configurations here
|
130
|
-
DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
|
131
|
-
k = DYNAMIC_PARAM_NAMES[i]
|
132
|
-
v = self.instance_variable_get(var)
|
133
|
-
# check here to determine if we should evaluate
|
134
|
-
if dynamic_conf[k] != v
|
135
|
-
value = expand_param(v, tag, time, record)
|
136
|
-
dynamic_conf[k] = value
|
137
|
-
end
|
138
|
-
}
|
139
|
-
# end eval all configs
|
140
|
-
rescue => e
|
141
|
-
# handle dynamic parameters misconfigurations
|
142
|
-
router.emit_error_event(tag, time, record, e)
|
143
|
-
next
|
144
|
-
end
|
145
|
-
|
146
|
-
if eval_or_val(dynamic_conf['logstash_format']) || eval_or_val(dynamic_conf['include_timestamp'])
|
147
|
-
if record.has_key?("@timestamp")
|
148
|
-
time = Time.parse record["@timestamp"]
|
149
|
-
elsif record.has_key?(dynamic_conf['time_key'])
|
150
|
-
time = Time.parse record[dynamic_conf['time_key']]
|
151
|
-
record['@timestamp'] = record[dynamic_conf['time_key']] unless time_key_exclude_timestamp
|
152
|
-
else
|
153
|
-
record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
|
154
|
-
end
|
155
|
-
end
|
156
|
-
|
157
|
-
if eval_or_val(dynamic_conf['logstash_format'])
|
158
|
-
if eval_or_val(dynamic_conf['utc_index'])
|
159
|
-
target_index = "#{dynamic_conf['logstash_prefix']}#{@logstash_prefix_separator}#{Time.at(time).getutc.strftime("#{dynamic_conf['logstash_dateformat']}")}"
|
160
|
-
else
|
161
|
-
target_index = "#{dynamic_conf['logstash_prefix']}#{@logstash_prefix_separator}#{Time.at(time).strftime("#{dynamic_conf['logstash_dateformat']}")}"
|
162
|
-
end
|
163
|
-
else
|
164
|
-
target_index = dynamic_conf['index_name']
|
165
|
-
end
|
166
|
-
|
167
|
-
# Change target_index to lower-case since Elasticsearch doesn't
|
168
|
-
# allow upper-case characters in index names.
|
169
|
-
target_index = target_index.downcase
|
170
|
-
|
171
|
-
if @include_tag_key
|
172
|
-
record.merge!(dynamic_conf['tag_key'] => tag)
|
173
|
-
end
|
174
|
-
|
175
|
-
if dynamic_conf['hosts']
|
176
|
-
host = dynamic_conf['hosts']
|
177
|
-
else
|
178
|
-
host = "#{dynamic_conf['host']}:#{dynamic_conf['port']}"
|
179
|
-
end
|
180
|
-
|
181
|
-
if @include_index_in_url
|
182
|
-
key = RequestInfo.new(host, target_index)
|
183
|
-
meta = {"_type" => dynamic_conf['type_name']}
|
184
|
-
else
|
185
|
-
key = RequestInfo.new(host, nil)
|
186
|
-
meta = {"_index" => target_index, "_type" => dynamic_conf['type_name']}
|
187
|
-
end
|
188
|
-
|
189
|
-
@meta_config_map.each_pair do |config_name, meta_key|
|
190
|
-
if dynamic_conf[config_name] && accessor = record_accessor_create(dynamic_conf[config_name])
|
191
|
-
if raw_value = accessor.call(record)
|
192
|
-
meta[meta_key] = raw_value
|
193
|
-
end
|
194
|
-
end
|
195
|
-
end
|
196
|
-
|
197
|
-
if @remove_keys
|
198
|
-
@remove_keys.each { |key| record.delete(key) }
|
199
|
-
end
|
200
|
-
|
201
|
-
write_op = dynamic_conf["write_operation"]
|
202
|
-
append_record_to_messages(write_op, meta, headers[write_op], record, bulk_message[key])
|
203
|
-
end
|
204
|
-
|
205
|
-
bulk_message.each do |info, msgs|
|
206
|
-
send_bulk(msgs, info.host, info.index) unless msgs.empty?
|
207
|
-
msgs.clear
|
208
|
-
end
|
209
|
-
end
|
210
|
-
|
211
|
-
def send_bulk(data, host, index)
|
212
|
-
begin
|
213
|
-
response = client(host).bulk body: data, index: index
|
214
|
-
if response['errors']
|
215
|
-
log.error "Could not push log to Elasticsearch: #{response}"
|
216
|
-
end
|
217
|
-
rescue => e
|
218
|
-
@_es = nil if @reconnect_on_error
|
219
|
-
# FIXME: identify unrecoverable errors and raise UnrecoverableRequestFailure instead
|
220
|
-
raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{connection_options_description(host)}): #{e.message}"
|
221
|
-
end
|
222
|
-
end
|
223
|
-
|
224
|
-
def eval_or_val(var)
|
225
|
-
return var unless var.is_a?(String)
|
226
|
-
eval(var)
|
227
|
-
end
|
228
|
-
|
229
|
-
def expand_param(param, tag, time, record)
|
230
|
-
# check for '${ ... }'
|
231
|
-
# yes => `eval`
|
232
|
-
# no => return param
|
233
|
-
return param if (param =~ /\${.+}/).nil?
|
234
|
-
|
235
|
-
# check for 'tag_parts[]'
|
236
|
-
# separated by a delimiter (default '.')
|
237
|
-
tag_parts = tag.split(@delimiter) unless (param =~ /tag_parts\[.+\]/).nil? || tag.nil?
|
238
|
-
|
239
|
-
# pull out section between ${} then eval
|
240
|
-
inner = param.clone
|
241
|
-
while inner.match(/\${.+}/)
|
242
|
-
to_eval = inner.match(/\${(.+?)}/){$1}
|
243
|
-
|
244
|
-
if !(to_eval =~ /record\[.+\]/).nil? && record.nil?
|
245
|
-
return to_eval
|
246
|
-
elsif !(to_eval =~/tag_parts\[.+\]/).nil? && tag_parts.nil?
|
247
|
-
return to_eval
|
248
|
-
elsif !(to_eval =~/time/).nil? && time.nil?
|
249
|
-
return to_eval
|
250
|
-
else
|
251
|
-
inner.sub!(/\${.+?}/, eval( to_eval ))
|
252
|
-
end
|
253
|
-
end
|
254
|
-
inner
|
255
|
-
end
|
256
|
-
|
257
|
-
def is_valid_expand_param_type(param)
|
258
|
-
return false if [:@buffer_type].include?(param)
|
259
|
-
return self.instance_variable_get(param).is_a?(String)
|
260
|
-
end
|
261
|
-
end
|
262
|
-
end
|
1
|
+
# encoding: UTF-8
require_relative 'out_elasticsearch'

module Fluent::Plugin
  # Variant of the Elasticsearch output whose parameters may contain
  # ${...} placeholders that are re-evaluated per record (see expand_param).
  class ElasticsearchOutputDynamic < ElasticsearchOutput

    Fluent::Plugin.register_output('elasticsearch_dynamic', self)

    helpers :event_emitter

    # Delimiter used to split the event tag into tag_parts[] for
    # placeholder expansion.
    config_param :delimiter, :string, :default => "."

    # Configuration parameters that may carry ${...} placeholders and are
    # therefore re-expanded for every record.
    DYNAMIC_PARAM_NAMES = %W[hosts host port include_timestamp logstash_format logstash_prefix logstash_dateformat time_key utc_index index_name tag_key type_name id_key parent_key routing_key write_operation]
    # The matching instance-variable symbols (:@hosts, :@host, ...).
    DYNAMIC_PARAM_SYMBOLS = DYNAMIC_PARAM_NAMES.map { |n| "@#{n}".to_sym }

    # Bulk payloads are grouped per (host, index) destination.
    RequestInfo = Struct.new(:host, :index)

    attr_reader :dynamic_config
|
19
|
+
|
20
|
+
# Expands every dynamic parameter once without any event context and
# caches the stringified result, so write() can cheaply detect which
# parameters actually need per-record re-expansion.
def configure(conf)
  super

  @dynamic_config = DYNAMIC_PARAM_NAMES.each_with_object({}) do |name, cache|
    raw = instance_variable_get("@#{name}")
    cache[name] = expand_param(raw, nil, nil, nil).to_s
  end
end
|
32
|
+
|
33
|
+
# Maps dynamic configuration keys to the bulk-request metadata fields
# they populate.
def create_meta_config_map
  {
    'id_key'      => '_id',
    'parent_key'  => '_parent',
    'routing_key' => @routing_key_name
  }
end
|
36
|
+
|
37
|
+
|
38
|
+
# Returns an Elasticsearch client bound to +host+ (or the configured
# hosts when +host+ is nil).
#
# The client is memoized in @_es and is discarded whenever the resolved
# host list differs from the connection currently in use
# (is_existing_connection is defined in the parent plugin).
def client(host = nil)
  # check here to see if we already have a client connection for the given host
  connection_options = get_connection_options(host)

  @_es = nil unless is_existing_connection(connection_options[:hosts])

  @_es ||= begin
    # Remember which hosts this client was built for, so a later call
    # with a different host set forces a rebuild.
    @current_config = connection_options[:hosts].clone
    adapter_conf = lambda {|f| f.adapter @http_backend, @backend_options }
    transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
      options: {
        reload_connections: @reload_connections,
        reload_on_failure: @reload_on_failure,
        resurrect_after: @resurrect_after,
        logger: @transport_logger,
        transport_options: {
          headers: { 'Content-Type' => @content_type.to_s },
          request: { timeout: @request_timeout },
          ssl: { verify: @ssl_verify, ca_file: @ca_file, version: @ssl_version }
        },
        http: {
          user: @user,
          password: @password
        }
      }), &adapter_conf)
    Elasticsearch::Client.new transport: transport
  end
end
|
66
|
+
|
67
|
+
# Resolves the host list to connect to and returns it as {hosts: [...]}.
#
# +con_host+ (a per-record expanded hosts string) takes precedence over
# the plugin-level @hosts; when neither is set, the single @host/@port
# pair is used. Two formats are accepted per comma-separated entry:
# legacy "host:port" pairs and full URLs such as
# https://user:pass@example.com/path.
#
# Raises when `user` is configured without `password`.
def get_connection_options(con_host)
  raise "`password` must be present if `user` is present" if @user && !@password

  hosts = if con_host || @hosts
    (con_host || @hosts).split(',').map do |host_str|
      # Support legacy hosts format host:port,host:port,host:port...
      if host_str.match(%r{^[^:]+(\:\d+)?$})
        {
          host: host_str.split(':')[0],
          port: (host_str.split(':')[1] || @port).to_i,
          scheme: @scheme.to_s
        }
      else
        # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
        uri = URI(get_escaped_userinfo(host_str))
        # Copy user/password/path from the URL only when present and non-empty.
        %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
          hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
          hash
        end
      end
    end.compact
  else
    [{host: @host, port: @port.to_i, scheme: @scheme.to_s}]
  end.each do |host|
    # Fall back to plugin-level credentials/path when the entry did not carry them.
    host.merge!(user: @user, password: @password) if !host[:user] && @user
    host.merge!(path: @path) if !host[:path] && @path
  end

  {
    hosts: hosts
  }
end
|
99
|
+
|
100
|
+
# Human-readable summary of the resolved hosts for error messages,
# with any password replaced by 'obfuscated'.
def connection_options_description(host)
  descriptions = get_connection_options(host)[:hosts].map do |host_info|
    safe_info = host_info.dup
    safe_info[:password] = 'obfuscated' if safe_info.key?(:password)
    safe_info.inspect
  end
  descriptions.join(', ')
end
|
107
|
+
|
108
|
+
# This output is safe to run under Fluentd's multi-worker mode.
def multi_workers_ready?
  true
end
|
111
|
+
|
112
|
+
# Flushes a buffer chunk to Elasticsearch.
#
# For each record: re-expands any dynamic parameter whose value changed
# against the record's tag/time/record context, computes the target
# index and request metadata, then appends the record to a per
# (host, index) bulk payload. All payloads are sent via send_bulk at
# the end of the chunk.
def write(chunk)
  # Bulk bodies keyed by RequestInfo(host, index); default is an empty string.
  bulk_message = Hash.new { |h,k| h[k] = '' }
  dynamic_conf = @dynamic_config.clone

  headers = {
    UPDATE_OP => {},
    UPSERT_OP => {},
    CREATE_OP => {},
    INDEX_OP => {}
  }

  tag = chunk.metadata.tag

  chunk.msgpack_each do |time, record|
    next unless record.is_a? Hash

    begin
      # evaluate all configurations here
      DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
        k = DYNAMIC_PARAM_NAMES[i]
        v = self.instance_variable_get(var)
        # check here to determine if we should evaluate
        # (only re-expand when the raw value differs from the cached expansion)
        if dynamic_conf[k] != v
          value = expand_param(v, tag, time, record)
          dynamic_conf[k] = value
        end
      }
      # end eval all configs
    rescue => e
      # handle dynamic parameters misconfigurations
      router.emit_error_event(tag, time, record, e)
      next
    end

    if eval_or_val(dynamic_conf['logstash_format']) || eval_or_val(dynamic_conf['include_timestamp'])
      # Derive the event time: prefer an existing @timestamp, then the
      # configured time_key, otherwise stamp the record with chunk time.
      if record.has_key?("@timestamp")
        time = Time.parse record["@timestamp"]
      elsif record.has_key?(dynamic_conf['time_key'])
        time = Time.parse record[dynamic_conf['time_key']]
        record['@timestamp'] = record[dynamic_conf['time_key']] unless time_key_exclude_timestamp
      else
        record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
      end
    end

    if eval_or_val(dynamic_conf['logstash_format'])
      if eval_or_val(dynamic_conf['utc_index'])
        target_index = "#{dynamic_conf['logstash_prefix']}#{@logstash_prefix_separator}#{Time.at(time).getutc.strftime("#{dynamic_conf['logstash_dateformat']}")}"
      else
        target_index = "#{dynamic_conf['logstash_prefix']}#{@logstash_prefix_separator}#{Time.at(time).strftime("#{dynamic_conf['logstash_dateformat']}")}"
      end
    else
      target_index = dynamic_conf['index_name']
    end

    # Change target_index to lower-case since Elasticsearch doesn't
    # allow upper-case characters in index names.
    target_index = target_index.downcase

    if @include_tag_key
      record.merge!(dynamic_conf['tag_key'] => tag)
    end

    if dynamic_conf['hosts']
      host = dynamic_conf['hosts']
    else
      host = "#{dynamic_conf['host']}:#{dynamic_conf['port']}"
    end

    # When the index goes in the URL, it is carried by RequestInfo
    # instead of the per-action metadata.
    if @include_index_in_url
      key = RequestInfo.new(host, target_index)
      meta = {"_type" => dynamic_conf['type_name']}
    else
      key = RequestInfo.new(host, nil)
      meta = {"_index" => target_index, "_type" => dynamic_conf['type_name']}
    end

    # Pull _id/_parent/_routing values out of the record when configured.
    @meta_config_map.each_pair do |config_name, meta_key|
      if dynamic_conf[config_name] && accessor = record_accessor_create(dynamic_conf[config_name])
        if raw_value = accessor.call(record)
          meta[meta_key] = raw_value
        end
      end
    end

    if @remove_keys
      @remove_keys.each { |key| record.delete(key) }
    end

    write_op = dynamic_conf["write_operation"]
    append_record_to_messages(write_op, meta, headers[write_op], record, bulk_message[key])
  end

  bulk_message.each do |info, msgs|
    send_bulk(msgs, info.host, info.index) unless msgs.empty?
    msgs.clear
  end
end
|
210
|
+
|
211
|
+
# Sends a bulk payload to the cluster serving +host+.
#
# A response with 'errors' set is logged (individual item failures are
# not retried here); transport-level failures raise
# RecoverableRequestFailure so Fluentd retries the whole chunk.
#
# Note: the original wrapped the body in a redundant `begin...end`;
# a method definition accepts `rescue` directly, behavior is identical.
def send_bulk(data, host, index)
  response = client(host).bulk body: data, index: index
  if response['errors']
    log.error "Could not push log to Elasticsearch: #{response}"
  end
rescue => e
  # Drop the cached client so the next attempt reconnects from scratch.
  @_es = nil if @reconnect_on_error
  # FIXME: identify unrecoverable errors and raise UnrecoverableRequestFailure instead
  raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{connection_options_description(host)}): #{e.message}"
end
|
223
|
+
|
224
|
+
# Treats a String as Ruby source and evaluates it; any other value is
# returned unchanged.
#
# NOTE(review): eval on configuration-derived strings is a code-injection
# hazard inherent to this plugin's dynamic-config design — confirm inputs
# are trusted configuration only.
def eval_or_val(var)
  var.is_a?(String) ? eval(var) : var
end
|
228
|
+
|
229
|
+
# Expands ${...} placeholders in +param+ by eval'ing each placeholder's
# contents with +tag+, +time+, +record+ (and derived tag_parts) in scope.
#
# If a placeholder references record/tag_parts/time but the matching
# context argument is nil (as when called from configure with no event),
# the raw placeholder body is returned unexpanded instead.
#
# NOTE(review): placeholder bodies are executed with eval, so dynamic
# parameters can run arbitrary Ruby supplied via configuration.
def expand_param(param, tag, time, record)
  # check for '${ ... }'
  # yes => `eval`
  # no => return param
  return param if (param =~ /\${.+}/).nil?

  # check for 'tag_parts[]'
  # separated by a delimiter (default '.')
  tag_parts = tag.split(@delimiter) unless (param =~ /tag_parts\[.+\]/).nil? || tag.nil?

  # pull out section between ${} then eval
  inner = param.clone
  while inner.match(/\${.+}/)
    # Extract the body of the leftmost (non-greedy) placeholder.
    to_eval = inner.match(/\${(.+?)}/){$1}

    if !(to_eval =~ /record\[.+\]/).nil? && record.nil?
      return to_eval
    elsif !(to_eval =~/tag_parts\[.+\]/).nil? && tag_parts.nil?
      return to_eval
    elsif !(to_eval =~/time/).nil? && time.nil?
      return to_eval
    else
      # Replace the placeholder in-place with its evaluated value.
      inner.sub!(/\${.+?}/, eval( to_eval ))
    end
  end
  inner
end
|
256
|
+
|
257
|
+
# Returns true when the instance variable named by +param+ (a symbol
# such as :@index_name) holds a String and is therefore eligible for
# placeholder expansion.
def is_valid_expand_param_type(param)
  # @buffer_type is never eligible for dynamic expansion.
  return false if param == :@buffer_type
  instance_variable_get(param).is_a?(String)
end
|
261
|
+
end
|
262
|
+
end
|