fluent-plugin-elasticsearch 1.5.0 → 1.6.0
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/.travis.yml +1 -3
- data/History.md +6 -0
- data/ISSUE_TEMPLATE.md +11 -0
- data/PULL_REQUEST_TEMPLATE.md +1 -1
- data/README.md +28 -3
- data/Rakefile +1 -0
- data/fluent-plugin-elasticsearch.gemspec +4 -2
- data/lib/fluent/plugin/out_elasticsearch.rb +56 -10
- data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +12 -6
- data/test/plugin/test_out_elasticsearch.rb +163 -22
- data/test/plugin/test_out_elasticsearch_dynamic.rb +12 -12
- metadata +8 -7
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5313f204bb2cc56d58acf4587337bb9be25cabe5
+  data.tar.gz: bdf2ace0733655e50771305036256ec2a2486944
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 30f15c3bf195a5455c5d84f374934721a8550d25636a8e10d1410d6c6792e5f590f69b34129efcc1a9547bf0fefb68f703f82a61765fea4c603096a6d20a2568
+  data.tar.gz: 55ee1ed8366cd4c6624be2c216fa28428d42cbbecfcf6b8c1d87444761e91a55c83176376b91213b11f5e9edd6dd673d2845bb16ef03604688071a733a8617c4
data/.gitignore
CHANGED
data/.travis.yml
CHANGED
data/History.md
CHANGED
@@ -2,6 +2,12 @@
 
 ### Future
 
+### 1.6.0
+- add support for dot separated `target_index_key` and `target_type_key` (#175)
+- add `remove_keys_on_update` and `remove_keys_on_update_key` (#189)
+- fix support for fluentd v0.14 (#191)
+- remove support for elasticsearch v2 for now (#177)
+
 ### 1.5.0
 - add `routing_key` (#158)
 - add `time_key_exclude_timestamp` to exclude `@timestamp` (#161)
data/ISSUE_TEMPLATE.md
ADDED
data/PULL_REQUEST_TEMPLATE.md
CHANGED
data/README.md
CHANGED
@@ -35,6 +35,8 @@ Note: For Amazon Elasticsearch Service please consider using [fluent-plugin-aws-
 + [parent_key](#parent_key)
 + [routing_key](#routing_key)
 + [remove_keys](#remove_keys)
++ [remove_keys_on_update](#remove_keys_on_update)
++ [remove_keys_on_update_key](#remove_keys_on_update_key)
 + [write_operation](#write_operation)
 + [Client/host certificate options](#clienthost-certificate-options)
 + [Proxy Support](#proxy-support)
@@ -187,7 +189,7 @@ By default, the records inserted into index `logstash-YYMMDD` with UTC (Coordina
 
 ### target_index_key
 
-Tell this plugin to find the index name to write to in the record under this key in preference to other mechanisms.
+Tell this plugin to find the index name to write to in the record under this key in preference to other mechanisms. Key can be specified as path to nested record using dot ('.') as a separator.
 
 If it is present in the record (and the value is non falsey) the value will be used as the index name to write to and then removed from the record before output; if it is not found then it will use logstash_format or index_name settings as configured.
 
@@ -220,7 +222,7 @@ and this record will be written to the specified index (`logstash-2014.12.19`) r
 
 ### target_type_key
 
-Similar to `target_index_key` config, find the type name to write to in the record under this key. If key not found in record - fallback to `type_name` (default "fluentd").
+Similar to `target_index_key` config, find the type name to write to in the record under this key (or nested record). If key not found in record - fallback to `type_name` (default "fluentd").
 
 ### request_timeout
 
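As a minimal sketch of the nested-key lookup the two sections above now describe (the record contents and the `kubernetes.labels.log_type` path are hypothetical), the dot-separated key is split and walked down the record, and the matched value is removed before output:

```ruby
# Illustrative only: resolving a dot-separated target_type_key against a
# nested record. The record below is hypothetical.
record = {
  "message"    => "some log line",
  "kubernetes" => { "labels" => { "log_type" => "access" } }
}

path   = "kubernetes.labels.log_type".split('.')  # ["kubernetes", "labels", "log_type"]
parent = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
target_type = parent && parent.delete(path[-1])   # value is removed from the record

puts target_type           # "access"
puts record["kubernetes"]  # {"labels"=>{}}
```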
@@ -324,7 +326,30 @@ Similar to `parent_key` config, will add `_routing` into elasticsearch command i
 ```
 parent_key a_parent
 routing_key a_routing
-remove_keys a_parent, a_routing # a_parent and a_routing
+remove_keys a_parent, a_routing # a_parent and a_routing fields won't be sent to elasticsearch
+```
+
+### remove_keys_on_update
+
+Remove keys on update will not update the configured keys in elasticsearch when a record is being updated.
+This setting only has any effect if the write operation is update or upsert.
+
+If the write setting is upsert then these keys are only removed if the record is being
+updated, if the record does not exist (by id) then all of the keys are indexed.
+
+```
+remove_keys_on_update foo,bar
+```
+
+### remove_keys_on_update_key
+
+This setting allows `remove_keys_on_update` to be configured with a key in each record, in much the same way as `target_index_key` works.
+The configured key is removed before indexing in elasticsearch. If both `remove_keys_on_update` and `remove_keys_on_update_key` is
+present in the record then the keys in record are used, if the `remove_keys_on_update_key` is not present then the value of
+`remove_keys_on_update` is used as a fallback.
+
+```
+remove_keys_on_update_key keys_to_skip
 ```
 
 ### write_operation
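As a minimal sketch of what `remove_keys_on_update` does to an update/upsert request body (the record contents and key names below are hypothetical), the full record is kept for the `upsert` part while the configured keys are dropped from the `doc` part:

```ruby
# Illustrative only; mirrors the update_body/remove_keys behaviour added in
# this release. Record and configured keys are hypothetical.
record = { "request_id" => "42", "message" => "hello", "parent_id" => "p-1" }
remove_keys_on_update = ["parent_id"]

doc = record.dup
remove_keys_on_update.each { |k| doc.delete(k) }

bulk_body = { "doc" => doc, "upsert" => record }
# => {"doc"=>{"request_id"=>"42", "message"=>"hello"},
#     "upsert"=>{"request_id"=>"42", "message"=>"hello", "parent_id"=>"p-1"}}
# With plain `write_operation update`, only the trimmed "doc" part is sent.
```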
data/Rakefile
CHANGED
data/fluent-plugin-elasticsearch.gemspec
CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
 Gem::Specification.new do |s|
   s.name = 'fluent-plugin-elasticsearch'
-  s.version = '1.
+  s.version = '1.6.0'
   s.authors = ['diogo', 'pitr']
   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
   s.description = %q{ElasticSearch output plugin for Fluent event collector}
@@ -16,9 +16,11 @@ Gem::Specification.new do |s|
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
   s.require_paths = ['lib']
 
+  s.required_ruby_version = Gem::Requirement.new(">= 2.0".freeze)
+
   s.add_runtime_dependency 'fluentd', '>= 0.10.43'
   s.add_runtime_dependency 'excon', '>= 0'
-  s.add_runtime_dependency 'elasticsearch', '
+  s.add_runtime_dependency 'elasticsearch', '< 1.1'
 
 
   s.add_development_dependency 'rake', '>= 0'
data/lib/fluent/plugin/out_elasticsearch.rb
CHANGED
@@ -45,6 +45,8 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
   config_param :client_key_pass, :string, :default => nil
   config_param :ca_file, :string, :default => nil
   config_param :remove_keys, :string, :default => nil
+  config_param :remove_keys_on_update, :string, :default => ""
+  config_param :remove_keys_on_update_key, :string, :default => nil
   config_param :flatten_hashes, :bool, :default => false
   config_param :flatten_hashes_separator, :string, :default => "_"
 
@@ -63,6 +65,18 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
     if @remove_keys
       @remove_keys = @remove_keys.split(/\s*,\s*/)
     end
+
+    if @target_index_key && @target_index_key.is_a?(String)
+      @target_index_key = @target_index_key.split '.'
+    end
+
+    if @target_type_key && @target_type_key.is_a?(String)
+      @target_type_key = @target_type_key.split '.'
+    end
+
+    if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
+      @remove_keys_on_update = @remove_keys_on_update.split ','
+    end
   end
 
   def start
@@ -183,19 +197,41 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
     when "update", "upsert"
       if meta.has_key?("_id")
         msgs << { "update" => meta }
-        msgs <<
+        msgs << update_body(record, op)
       end
     when "create"
       if meta.has_key?("_id")
         msgs << { "create" => meta }
         msgs << record
-      end
+      end
     when "index"
       msgs << { "index" => meta }
       msgs << record
     end
   end
 
+  def update_body(record, op)
+    update = remove_keys(record)
+    body = { "doc" => update }
+    if op == "upsert"
+      if update == record
+        body["doc_as_upsert"] = true
+      else
+        body["upsert"] = record
+      end
+    end
+    body
+  end
+
+  def remove_keys(record)
+    keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
+    record.delete(@remove_keys_on_update_key)
+    return record unless keys.any?
+    record = record.dup
+    keys.each { |key| record.delete(key) }
+    record
+  end
+
   def flatten_record(record, prefix=[])
     ret = {}
     if record.is_a? Hash
@@ -220,8 +256,9 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
       end
 
       next unless record.is_a? Hash
-
-
+      target_index_parent, target_index_child_key = get_parent_of(record, @target_index_key)
+      if target_index_parent && target_index_parent[target_index_child_key]
+        target_index = target_index_parent.delete(target_index_child_key)
       elsif @logstash_format
         if record.has_key?("@timestamp")
           dt = record["@timestamp"]
@@ -238,21 +275,21 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
       else
         target_index = @index_name
       end
-
+
       # Change target_index to lower-case since Elasticsearch doesn't
       # allow upper-case characters in index names.
       target_index = target_index.downcase
-
       if @include_tag_key
         record.merge!(@tag_key => tag)
       end
-
-
-
+
+      target_type_parent, target_type_child_key = get_parent_of(record, @target_type_key)
+      if target_type_parent && target_type_parent[target_type_child_key]
+        target_type = target_type_parent.delete(target_type_child_key)
       else
         target_type = @type_name
       end
-
+
       meta = {"_index" => target_index, "_type" => target_type}
 
       @meta_config_map ||= { 'id_key' => '_id', 'parent_key' => '_parent', 'routing_key' => '_routing' }
@@ -272,6 +309,15 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
     bulk_message.clear
   end
 
+  # returns [parent, child_key] of child described by path array in record's tree
+  # returns [nil, child_key] if path doesnt exist in record
+  def get_parent_of(record, path)
+    return [nil, nil] unless path
+
+    parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
+    [parent_object, path[-1]]
+  end
+
   def send(data)
     retries = 0
     begin
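A short sketch of how the new helper behaves for a present path, a missing path, and a nil path (the record below is hypothetical; the helper body is repeated so the snippet runs on its own):

```ruby
# Sketch of the helper's behaviour; assumes the path is already split on '.',
# as configure now does.
def get_parent_of(record, path)
  return [nil, nil] unless path
  parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
  [parent_object, path[-1]]
end

record = { "kubernetes" => { "labels" => { "log_type" => "apache" } } }

get_parent_of(record, ["kubernetes", "labels", "log_type"])  # => [{"log_type"=>"apache"}, "log_type"]
get_parent_of(record, ["kubernetes", "missing", "log_type"]) # => [nil, "log_type"]  (path not present)
get_parent_of(record, nil)                                   # => [nil, nil]         (no key configured)
```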
data/lib/fluent/plugin/out_elasticsearch_dynamic.rb
CHANGED
@@ -21,7 +21,8 @@ class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
     super
 
     # evaluate all configurations here
-    @dynamic_params
+    @dynamic_params ||= []
+    @dynamic_params += self.instance_variables.select { |var| is_valid_expand_param_type(var) }
     @dynamic_config = Hash.new
     @dynamic_params.each { |var|
       value = expand_param(self.instance_variable_get(var), nil, nil, nil)
@@ -76,7 +77,7 @@ class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
     if host_str.match(%r{^[^:]+(\:\d+)?$})
       {
         host: host_str.split(':')[0],
-        port: (host_str.split(':')[1] || @dynamic_config['port']).to_i,
+        port: (host_str.split(':')[1] || @dynamic_config['port'] || @port).to_i,
         scheme: @dynamic_config['scheme']
       }
     else
@@ -127,7 +128,7 @@ class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
       }
       # end eval all configs
 
-      if
+      if eval_or_val(dynamic_conf['logstash_format'])
         if record.has_key?("@timestamp")
           time = Time.parse record["@timestamp"]
         elsif record.has_key?(dynamic_conf['time_key'])
@@ -137,7 +138,7 @@ class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
           record.merge!({"@timestamp" => Time.at(time).to_datetime.to_s})
         end
 
-        if
+        if eval_or_val(dynamic_conf['utc_index'])
           target_index = "#{dynamic_conf['logstash_prefix']}-#{Time.at(time).getutc.strftime("#{dynamic_conf['logstash_dateformat']}")}"
         else
           target_index = "#{dynamic_conf['logstash_prefix']}-#{Time.at(time).strftime("#{dynamic_conf['logstash_dateformat']}")}"
@@ -145,11 +146,11 @@ class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
       else
         target_index = dynamic_conf['index_name']
       end
-
+
       # Change target_index to lower-case since Elasticsearch doesn't
       # allow upper-case characters in index names.
       target_index = target_index.downcase
-
+
       if @include_tag_key
         record.merge!(dynamic_conf['tag_key'] => tag)
       end
@@ -198,6 +199,11 @@ class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
     end
   end
 
+  def eval_or_val(var)
+    return var unless var.is_a?(String)
+    eval(var)
+  end
+
   def expand_param(param, tag, time, record)
     # check for '${ ... }'
     # yes => `eval`
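A small sketch of what `eval_or_val` above does: non-String values pass through untouched, while String expressions are eval'd, so both styles of dynamic configuration behave the same (inputs below are illustrative):

```ruby
# Sketch only; the method body is the same as the one added above.
def eval_or_val(var)
  return var unless var.is_a?(String)
  eval(var)
end

eval_or_val(true)      # => true  (value already a boolean)
eval_or_val("1 == 1")  # => true  (string expression is eval'd)
eval_or_val(9200)      # => 9200  (non-strings pass through untouched)
```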
data/test/plugin/test_out_elasticsearch.rb
CHANGED
@@ -156,15 +156,15 @@ class ElasticsearchOutput < Test::Unit::TestCase
     driver.run
     assert_equal('myindex', index_cmds.first['index']['_index'])
   end
-
+
   def test_writes_to_speficied_index_uppercase
     driver.configure("index_name MyIndex\n")
     stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
-
-
+    # Allthough index_name has upper-case characters,
+    # it should be set as lower-case when sent to elasticsearch.
     assert_equal('myindex', index_cmds.first['index']['_index'])
   end
 
@@ -178,28 +178,28 @@ class ElasticsearchOutput < Test::Unit::TestCase
     assert_equal('local-override', index_cmds.first['index']['_index'])
     assert_nil(index_cmds[1]['@target_index'])
   end
-
+
   def test_writes_to_target_index_key_logstash
     driver.configure("target_index_key @target_index\n")
     driver.configure("logstash_format true\n")
     time = Time.parse Date.today.to_s
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record.merge('@target_index' => 'local-override'), time)
+    driver.emit(sample_record.merge('@target_index' => 'local-override'), time.to_i)
     driver.run
     assert_equal('local-override', index_cmds.first['index']['_index'])
   end
-
+
   def test_writes_to_target_index_key_logstash_uppercase
     driver.configure("target_index_key @target_index\n")
     driver.configure("logstash_format true\n")
     time = Time.parse Date.today.to_s
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record.merge('@target_index' => 'Local-Override'), time)
+    driver.emit(sample_record.merge('@target_index' => 'Local-Override'), time.to_i)
     driver.run
-
-
+    # Allthough @target_index has upper-case characters,
+    # it should be set as lower-case when sent to elasticsearch.
     assert_equal('local-override', index_cmds.first['index']['_index'])
   end
 
@@ -219,7 +219,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
     logstash_index = "logstash-#{time.getutc.strftime("%Y.%m.%d")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
@@ -263,6 +263,33 @@ class ElasticsearchOutput < Test::Unit::TestCase
     assert_equal('mytype', index_cmds.first['index']['_type'])
   end
 
+  def test_writes_to_target_type_key_nested
+    driver.configure("target_type_key kubernetes.labels.log_type\n")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record.merge('kubernetes' => {
+      'labels' => {
+        'log_type' => 'local-override'
+      }
+    }))
+    driver.run
+    assert_equal('local-override', index_cmds.first['index']['_type'])
+    assert_nil(index_cmds[1]['kubernetes']['labels']['log_type'])
+  end
+
+  def test_writes_to_target_type_key_fallack_to_default_nested
+    driver.configure("target_type_key kubernetes.labels.log_type\n")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record.merge('kubernetes' => {
+      'labels' => {
+        'other_labels' => 'test'
+      }
+    }))
+    driver.run
+    assert_equal('fluentd', index_cmds.first['index']['_type'])
+  end
+
   def test_writes_to_speficied_host
     driver.configure("host 192.168.33.50\n")
     stub_elastic_ping("http://192.168.33.50:9200")
@@ -373,7 +400,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
     logstash_index = "logstash-2015.05.31"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, dt.to_time)
+    driver.emit(sample_record, dt.to_time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
@@ -387,7 +414,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
     index = "logstash-#{time.strftime("%Y.%m.%d")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(index, index_cmds.first['index']['_index'])
   end
@@ -399,11 +426,11 @@ class ElasticsearchOutput < Test::Unit::TestCase
     logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
-
+
   def test_writes_to_logstash_index_with_specified_prefix_uppercase
     driver.configure("logstash_format true
                       logstash_prefix MyPrefix")
@@ -411,10 +438,10 @@ class ElasticsearchOutput < Test::Unit::TestCase
     logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
-
-
+    # Allthough logstash_prefix has upper-case characters,
+    # it should be set as lower-case when sent to elasticsearch.
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
 
@@ -425,7 +452,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
     logstash_index = "logstash-#{time.getutc.strftime("%Y.%m")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
@@ -438,7 +465,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
     logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
@@ -724,7 +751,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
   end
 
   def test_update_should_write_update_op_and_doc_as_upsert_is_false
-    driver.configure("write_operation update
+    driver.configure("write_operation update
                       id_key request_id")
     stub_elastic_ping
     stub_elastic
@@ -732,10 +759,23 @@ class ElasticsearchOutput < Test::Unit::TestCase
     driver.run
     assert(index_cmds[0].has_key?("update"))
     assert(!index_cmds[1]["doc_as_upsert"])
+    assert(!index_cmds[1]["upsert"])
+  end
+
+  def test_update_should_remove_keys_from_doc_when_keys_are_skipped
+    driver.configure("write_operation update
+                      id_key request_id
+                      remove_keys_on_update parent_id")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record)
+    driver.run
+    assert(index_cmds[1]["doc"])
+    assert(!index_cmds[1]["doc"]["parent_id"])
   end
 
   def test_upsert_should_write_update_op_and_doc_as_upsert_is_true
-    driver.configure("write_operation upsert
+    driver.configure("write_operation upsert
                       id_key request_id")
     stub_elastic_ping
     stub_elastic
@@ -743,10 +783,111 @@ class ElasticsearchOutput < Test::Unit::TestCase
     driver.run
     assert(index_cmds[0].has_key?("update"))
     assert(index_cmds[1]["doc_as_upsert"])
+    assert(!index_cmds[1]["upsert"])
+  end
+
+  def test_upsert_should_write_update_op_upsert_and_doc_when_keys_are_skipped
+    driver.configure("write_operation upsert
+                      id_key request_id
+                      remove_keys_on_update parent_id")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record)
+    driver.run
+    assert(index_cmds[0].has_key?("update"))
+    assert(!index_cmds[1]["doc_as_upsert"])
+    assert(index_cmds[1]["upsert"])
+    assert(index_cmds[1]["doc"])
+  end
+
+  def test_upsert_should_remove_keys_from_doc_when_keys_are_skipped
+    driver.configure("write_operation upsert
+                      id_key request_id
+                      remove_keys_on_update parent_id")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record)
+    driver.run
+    assert(index_cmds[1]["upsert"] != index_cmds[1]["doc"])
+    assert(!index_cmds[1]["doc"]["parent_id"])
+    assert(index_cmds[1]["upsert"]["parent_id"])
+  end
+
+  def test_upsert_should_remove_multiple_keys_when_keys_are_skipped
+    driver.configure("write_operation upsert
+                      id_key id
+                      remove_keys_on_update foo,baz")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit("id" => 1, "foo" => "bar", "baz" => "quix", "zip" => "zam")
+    driver.run
+    assert(
+      index_cmds[1]["doc"] == {
+        "id" => 1,
+        "zip" => "zam",
+      }
+    )
+    assert(
+      index_cmds[1]["upsert"] == {
+        "id" => 1,
+        "foo" => "bar",
+        "baz" => "quix",
+        "zip" => "zam",
+      }
+    )
+  end
+
+  def test_upsert_should_remove_keys_from_when_the_keys_are_in_the_record
+    driver.configure("write_operation upsert
+                      id_key id
+                      remove_keys_on_update_key keys_to_skip")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit("id" => 1, "foo" => "bar", "baz" => "quix", "keys_to_skip" => ["baz"])
+    driver.run
+    assert(
+      index_cmds[1]["doc"] == {
+        "id" => 1,
+        "foo" => "bar",
+      }
+    )
+    assert(
+      index_cmds[1]["upsert"] == {
+        "id" => 1,
+        "foo" => "bar",
+        "baz" => "quix",
+      }
+    )
+  end
+
+  def test_upsert_should_remove_keys_from_key_on_record_has_higher_presedence_than_config
+    driver.configure("write_operation upsert
+                      id_key id
+                      remove_keys_on_update foo,bar
+                      remove_keys_on_update_key keys_to_skip")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit("id" => 1, "foo" => "bar", "baz" => "quix", "keys_to_skip" => ["baz"])
+    driver.run
+    assert(
+      index_cmds[1]["doc"] == {
+        "id" => 1,
+        # we only expect baz to be stripped here, if the config was more important
+        # foo would be stripped too.
+        "foo" => "bar",
+      }
+    )
+    assert(
+      index_cmds[1]["upsert"] == {
+        "id" => 1,
+        "foo" => "bar",
+        "baz" => "quix",
+      }
+    )
   end
 
   def test_create_should_write_create_op
-    driver.configure("write_operation create
+    driver.configure("write_operation create
                       id_key request_id")
     stub_elastic_ping
     stub_elastic
data/test/plugin/test_out_elasticsearch_dynamic.rb
CHANGED
@@ -56,7 +56,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     instance = driver('test', config).instance
 
     assert_equal 'logs.google.com', instance.host
-    assert_equal
+    assert_equal 777, instance.port
     assert_equal 'https', instance.scheme
     assert_equal '/es/', instance.path
     assert_equal 'john', instance.user
@@ -154,7 +154,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     driver.run
     assert_equal('myindex', index_cmds.first['index']['_index'])
   end
-
+
   def test_writes_to_speficied_index_uppercase
     driver.configure("index_name MyIndex\n")
     stub_elastic_ping
@@ -247,7 +247,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     logstash_index = "logstash-#{time.getutc.strftime("%Y.%m.%d")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
@@ -259,7 +259,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     utc_index = "logstash-#{time.strftime("%Y.%m.%d")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(utc_index, index_cmds.first['index']['_index'])
   end
@@ -271,11 +271,11 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
-
+
   def test_writes_to_logstash_index_with_specified_prefix_uppercase
     driver.configure("logstash_format true
                       logstash_prefix MyPrefix")
@@ -283,7 +283,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
@@ -295,7 +295,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     logstash_index = "logstash-#{time.getutc.strftime("%Y.%m")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
@@ -308,7 +308,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m")}"
     stub_elastic_ping
     stub_elastic
-    driver.emit(sample_record, time)
+    driver.emit(sample_record, time.to_i)
     driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
@@ -546,7 +546,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
   end
 
   def test_update_should_write_update_op_and_doc_as_upsert_is_false
-    driver.configure("write_operation update
+    driver.configure("write_operation update
                       id_key request_id")
     stub_elastic_ping
     stub_elastic
@@ -557,7 +557,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
   end
 
   def test_upsert_should_write_update_op_and_doc_as_upsert_is_true
-    driver.configure("write_operation upsert
+    driver.configure("write_operation upsert
                       id_key request_id")
     stub_elastic_ping
     stub_elastic
@@ -568,7 +568,7 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
   end
 
   def test_create_should_write_create_op
-    driver.configure("write_operation create
+    driver.configure("write_operation create
                       id_key request_id")
     stub_elastic_ping
     stub_elastic
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 1.
+  version: 1.6.0
 platform: ruby
 authors:
 - diogo
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-08-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -43,16 +43,16 @@ dependencies:
   name: elasticsearch
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "<"
       - !ruby/object:Gem::Version
-        version: '
+        version: '1.1'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - "<"
       - !ruby/object:Gem::Version
-        version: '
+        version: '1.1'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -123,6 +123,7 @@ files:
 - ".travis.yml"
 - Gemfile
 - History.md
+- ISSUE_TEMPLATE.md
 - LICENSE.txt
 - PULL_REQUEST_TEMPLATE.md
 - README.md
@@ -145,7 +146,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: '0'
+      version: '2.0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="