fluent-plugin-elasticsearch 2.11.2 → 2.11.3
- checksums.yaml +4 -4
- data/History.md +4 -0
- data/README.md +10 -0
- data/fluent-plugin-elasticsearch.gemspec +1 -1
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +2 -1
- data/lib/fluent/plugin/out_elasticsearch.rb +23 -10
- data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +20 -12
- data/test/plugin/test_elasticsearch_error_handler.rb +5 -1
- data/test/plugin/test_out_elasticsearch.rb +14 -0
- data/test/plugin/test_out_elasticsearch_dynamic.rb +14 -0
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ff924cb0d3a10af63436dc4ccf10256530637857bc3cc64dcef2cb59473a15a9
+  data.tar.gz: 7f6615d23e6ead3b22abe866036f79f93f1acfb77a87a1251b1ccd6511df01e1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1004359a2090e989ebc1906cb198388178d0556859d07cd051fce90f248d2d2e02c247fdb5f29714aa77e29634e5649d4bac11861d01beb3da92574bd75fe9be
+  data.tar.gz: 0067bfcfc7efc89a9d48c8c3d83c1e0ff2a430c413d61a7ff4dace664832d4f1a1443b90c0066113313e315b736183278bbb55f7955424dcc984de047e3ed331
data/History.md
CHANGED
data/README.md
CHANGED
@@ -55,6 +55,7 @@ Current maintainers: @cosmo0920
   + [reconnect_on_error](#reconnect_on_error)
   + [with_transporter_log](#with_transporter_log)
   + [content_type](#content_type)
+  + [include_index_in_url](#include_index_in_url)
   + [Client/host certificate options](#clienthost-certificate-options)
   + [Proxy Support](#proxy-support)
   + [Buffer options](#buffer-options)
@@ -614,6 +615,15 @@ If you will not use template, it recommends to set `content_type application/x-n
 content_type application/x-ndjson
 ```
 
+### include_index_in_url
+
+With this option set to true, Fluentd manifests the index name in the request URL (rather than in the request body).
+You can use this option to enforce an URL-based access control.
+
+```
+include_index_in_url true
+```
+
 ### Client/host certificate options
 
 Need to verify Elasticsearch's certificate? You can use the following parameter to specify a CA instead of using an environment variable.
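To make the new README section concrete: a rough sketch, with hypothetical values not taken from this diff, of where the index name ends up in a bulk request under each setting.

```
# include_index_in_url false (default): every action line in the bulk
# body carries its index, and the request goes to POST /_bulk.
action = { 'index' => { '_index' => 'fluentd', '_type' => 'fluentd' } }

# include_index_in_url true: the index is removed from the action
# metadata and the request goes to POST /fluentd/_bulk instead.
index = action['index'].delete('_index')  # => "fluentd"
action                                    # => {"index"=>{"_type"=>"fluentd"}}
```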
data/fluent-plugin-elasticsearch.gemspec
CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
 Gem::Specification.new do |s|
   s.name = 'fluent-plugin-elasticsearch'
-  s.version = '2.11.2'
+  s.version = '2.11.3'
   s.authors = ['diogo', 'pitr']
   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
   s.description = %q{Elasticsearch output plugin for Fluent event collector}
data/lib/fluent/plugin/elasticsearch_error_handler.rb
CHANGED
@@ -32,7 +32,8 @@ class Fluent::Plugin::ElasticsearchErrorHandler
       begin
         # we need a deep copy for process_message to alter
         processrecord = Marshal.load(Marshal.dump(rawrecord))
-        next unless @plugin.process_message(tag, meta, header, time, processrecord, bulk_message, extracted_values)
+        meta, header, record = @plugin.process_message(tag, meta, header, time, processrecord, extracted_values)
+        next unless @plugin.append_record_to_messages(@plugin.write_operation, meta, header, record, bulk_message)
       rescue => e
         stats[:bad_chunk_record] += 1
         next
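Context for the deep-copy comment above: `dup` and `clone` copy only the outer hash, so `process_message` could still mutate nested values of the original record. A minimal illustration (values hypothetical):

```
# Marshal.dump/Marshal.load deep-copies plain data, so the handler can
# let process_message alter the copy while the original record stays
# intact for error reporting and retries.
original = { 'log' => { 'level' => 'info' } }
copy = Marshal.load(Marshal.dump(original))
copy['log']['level'] = 'error'
original['log']['level']  # => "info"; a shallow dup would return "error"
```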
data/lib/fluent/plugin/out_elasticsearch.rb
CHANGED
@@ -34,6 +34,8 @@ module Fluent::Plugin
       end
     end
 
+    RequestInfo = Struct.new(:host, :index)
+
     helpers :event_emitter, :compat_parameters, :record_accessor
 
     Fluent::Plugin.register_output('elasticsearch', self)
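Side note on the new struct: Struct members define equality and hashing, which is what lets `RequestInfo` act as a Hash key for the per-request buffers introduced in the hunks below. A minimal sketch (buffer contents hypothetical):

```
RequestInfo = Struct.new(:host, :index)

buffers = Hash.new { |h, k| h[k] = '' }
buffers[RequestInfo.new(nil, 'logstash-2018.01.01')] << "{...}\n"
buffers[RequestInfo.new(nil, 'logstash-2018.01.01')] << "{...}\n"
buffers.size  # => 1; equal members hash to the same bucket
```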
@@ -104,6 +106,7 @@ EOC
 elasticsearch gem v6.0.2 starts to use correct Content-Type. Please upgrade elasticserach gem and stop to use this option.
 see: https://github.com/elastic/elasticsearch-ruby/pull/514
 EOC
+    config_param :include_index_in_url, :bool, :default => false
 
     config_section :buffer do
       config_set_default :@type, DEFAULT_BUFFER_TYPE
@@ -420,8 +423,8 @@ EOC
     end
 
     def write(chunk)
-      bulk_message_count = 0
-      bulk_message = ''
+      bulk_message_count = Hash.new { |h,k| h[k] = 0 }
+      bulk_message = Hash.new { |h,k| h[k] = '' }
       header = {}
       meta = {}
 
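Worth noting about this hunk: the block form of `Hash.new` is deliberate. `Hash.new('')` would return one shared mutable string for every key, so appends for one index would bleed into another. A quick sketch of the difference:

```
shared = Hash.new('')                    # one default object for all keys
shared[:a] << 'x'
shared[:b]                               # => "x"; the same string came back

per_key = Hash.new { |h, k| h[k] = '' }  # fresh buffer stored per key
per_key[:a] << 'x'
per_key[:b]                              # => ""; independent buffers
```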
@@ -432,8 +435,15 @@ EOC
       chunk.msgpack_each do |time, record|
         next unless record.is_a? Hash
         begin
-          if process_message(tag, meta, header, time, record, bulk_message, extracted_values)
-            bulk_message_count += 1
+          meta, header, record = process_message(tag, meta, header, time, record, extracted_values)
+          info = if @include_index_in_url
+                   RequestInfo.new(nil, meta.delete("_index".freeze))
+                 else
+                   RequestInfo.new(nil, nil)
+                 end
+
+          if append_record_to_messages(@write_operation, meta, header, record, bulk_message[info])
+            bulk_message_count[info] += 1;
           else
             if @emit_error_for_missing_id
               raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
@@ -446,11 +456,14 @@ EOC
         end
       end
 
-      send_bulk(bulk_message, tag, chunk, bulk_message_count, extracted_values) unless bulk_message.empty?
-      bulk_message.clear
+
+      bulk_message.each do |info, msgs|
+        send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info.index) unless msgs.empty?
+        msgs.clear
+      end
     end
 
-    def process_message(tag, meta, header, time, record, bulk_message, extracted_values)
+    def process_message(tag, meta, header, time, record, extracted_values)
       logstash_prefix, index_name, type_name = extracted_values
 
       if @flatten_hashes
@@ -530,7 +543,7 @@ EOC
         @remove_keys.each { |key| record.delete(key) }
       end
 
-      append_record_to_messages(@write_operation, meta, header, record, bulk_message)
+      return [meta, header, record]
     end
 
     # returns [parent, child_key] of child described by path array in record's tree
@@ -542,12 +555,12 @@ EOC
 
     # send_bulk given a specific bulk request, the original tag,
     # chunk, and bulk_message_count
-    def send_bulk(data, tag, chunk, bulk_message_count, extracted_values)
+    def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, index)
       retries = 0
       begin
 
         log.on_trace { log.trace "bulk request: #{data}" }
-        response = client.bulk body: data
+        response = client.bulk body: data, index: index
         log.on_trace { log.trace "bulk response: #{response}" }
 
         if response['errors']
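The `index:` keyword here is what moves the index into the URL: when the elasticsearch-ruby client's bulk API receives an index, it targets `<index>/_bulk` rather than the bare `/_bulk` endpoint. A self-contained sketch (host and index are placeholders):

```
require 'elasticsearch'

client = Elasticsearch::Client.new(url: 'http://localhost:9200')

# Without :index this would be POST /_bulk and each action line would
# need an "_index"; with :index it becomes POST /fluentd/_bulk.
client.bulk index: 'fluentd',
            body: [
              { index: {} },              # no "_index" needed per action
              { message: 'hello world' }
            ]
```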
data/lib/fluent/plugin/out_elasticsearch_dynamic.rb
CHANGED
@@ -13,6 +13,8 @@ module Fluent::Plugin
     DYNAMIC_PARAM_NAMES = %W[hosts host port include_timestamp logstash_format logstash_prefix logstash_dateformat time_key utc_index index_name tag_key type_name id_key parent_key routing_key write_operation]
     DYNAMIC_PARAM_SYMBOLS = DYNAMIC_PARAM_NAMES.map { |n| "@#{n}".to_sym }
 
+    RequestInfo = Struct.new(:host, :index)
+
     attr_reader :dynamic_config
 
     def configure(conf)
@@ -186,7 +188,19 @@ module Fluent::Plugin
           record.merge!(dynamic_conf['tag_key'] => tag)
         end
 
-        meta = {"_index" => target_index, "_type" => dynamic_conf['type_name']}
+        if dynamic_conf['hosts']
+          host = dynamic_conf['hosts']
+        else
+          host = "#{dynamic_conf['host']}:#{dynamic_conf['port']}"
+        end
+
+        if @include_index_in_url
+          key = RequestInfo.new(host, target_index)
+          meta = {"_type" => dynamic_conf['type_name']}
+        else
+          key = RequestInfo.new(host, nil)
+          meta = {"_index" => target_index, "_type" => dynamic_conf['type_name']}
+        end
 
         @meta_config_map.each_pair do |config_name, meta_key|
           if dynamic_conf[config_name] && accessor = record_accessor_create(dynamic_conf[config_name])
@@ -196,30 +210,24 @@ module Fluent::Plugin
           end
         end
 
-        if dynamic_conf['hosts']
-          host = dynamic_conf['hosts']
-        else
-          host = "#{dynamic_conf['host']}:#{dynamic_conf['port']}"
-        end
-
         if @remove_keys
           @remove_keys.each { |key| record.delete(key) }
         end
 
         write_op = dynamic_conf["write_operation"]
-        append_record_to_messages(write_op, meta, headers[write_op], record, bulk_message[host])
+        append_record_to_messages(write_op, meta, headers[write_op], record, bulk_message[key])
       end
 
-      bulk_message.each do |host, msgs|
-        send_bulk(msgs, host) unless msgs.empty?
+      bulk_message.each do |info, msgs|
+        send_bulk(msgs, info.host, info.index) unless msgs.empty?
         msgs.clear
       end
     end
 
-    def send_bulk(data, host)
+    def send_bulk(data, host, index)
       retries = 0
       begin
-        response = client(host).bulk body: data
+        response = client(host).bulk body: data, index: index
         if response['errors']
           log.error "Could not push log to Elasticsearch: #{response}"
         end
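Unlike the static output, which keys only on index, the dynamic plugin keys each buffer on both host and index, so records bound for different destinations flush as separate bulk requests. A sketch with hypothetical hosts:

```
RequestInfo = Struct.new(:host, :index)

bulk_message = Hash.new { |h, k| h[k] = '' }
bulk_message[RequestInfo.new('es1:9200', 'app-logs')] << "{...}\n"
bulk_message[RequestInfo.new('es2:9200', 'app-logs')] << "{...}\n"

bulk_message.keys.size  # => 2; one send_bulk call per (host, index) pair
```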
data/test/plugin/test_elasticsearch_error_handler.rb
CHANGED
@@ -22,7 +22,11 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
       @error_events << {:tag => tag, :time=>time, :record=>record, :error=>e}
     end
 
-    def process_message(tag, meta, header, time, record, bulk_message, extracted_values)
+    def process_message(tag, meta, header, time, record, extracted_values)
+      return [meta, header, record]
+    end
+
+    def append_record_to_messages(op, meta, header, record, msgs)
       if record.has_key?('raise') && record['raise']
         raise Exception('process_message')
       end
data/test/plugin/test_out_elasticsearch.rb
CHANGED
@@ -2174,4 +2174,18 @@ class ElasticsearchOutput < Test::Unit::TestCase
     assert(index_cmds[0].has_key?("create"))
   end
 
+  def test_include_index_in_url
+    stub_elastic_ping
+    stub_elastic('http://localhost:9200/logstash-2018.01.01/_bulk')
+
+    driver.configure("index_name logstash-2018.01.01
+                      include_index_in_url true")
+    driver.run(default_tag: 'test') do
+      driver.feed(sample_record)
+    end
+
+    assert_equal(index_cmds.length, 2)
+    assert_equal(index_cmds.first['index']['_index'], nil)
+  end
+
 end
data/test/plugin/test_out_elasticsearch_dynamic.rb
CHANGED
@@ -990,4 +990,18 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
     end
     assert(index_cmds[0].has_key?("create"))
   end
+
+  def test_include_index_in_url
+    stub_elastic_ping
+    stub_elastic('http://localhost:9200/logstash-2018.01.01/_bulk')
+
+    driver.configure("index_name logstash-2018.01.01
+                      include_index_in_url true")
+    driver.run(default_tag: 'test') do
+      driver.feed(sample_record)
+    end
+
+    assert_equal(index_cmds.length, 2)
+    assert_equal(index_cmds.first['index']['_index'], nil)
+  end
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 2.11.2
+  version: 2.11.3
 platform: ruby
 authors:
 - diogo
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-07-
+date: 2018-07-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd