fluent-plugin-elasticsearch 4.1.1 → 5.4.3
- checksums.yaml +4 -4
- data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
- data/.github/dependabot.yml +6 -0
- data/.github/workflows/issue-auto-closer.yml +2 -2
- data/.github/workflows/linux.yml +5 -2
- data/.github/workflows/macos.yml +5 -2
- data/.github/workflows/windows.yml +5 -2
- data/Gemfile +1 -2
- data/History.md +146 -0
- data/README.ElasticsearchGenID.md +4 -4
- data/README.ElasticsearchInput.md +1 -1
- data/README.Troubleshooting.md +692 -0
- data/README.md +260 -550
- data/fluent-plugin-elasticsearch.gemspec +4 -1
- data/lib/fluent/plugin/elasticsearch_compat.rb +31 -0
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +19 -4
- data/lib/fluent/plugin/elasticsearch_fallback_selector.rb +2 -2
- data/lib/fluent/plugin/elasticsearch_index_lifecycle_management.rb +18 -4
- data/lib/fluent/plugin/elasticsearch_index_template.rb +65 -21
- data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb +2 -1
- data/lib/fluent/plugin/filter_elasticsearch_genid.rb +1 -1
- data/lib/fluent/plugin/in_elasticsearch.rb +8 -2
- data/lib/fluent/plugin/oj_serializer.rb +2 -1
- data/lib/fluent/plugin/out_elasticsearch.rb +192 -36
- data/lib/fluent/plugin/out_elasticsearch_data_stream.rb +298 -0
- data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +3 -1
- data/test/helper.rb +0 -4
- data/test/plugin/mock_chunk.dat +0 -0
- data/test/plugin/test_elasticsearch_error_handler.rb +130 -23
- data/test/plugin/test_elasticsearch_fallback_selector.rb +17 -8
- data/test/plugin/test_elasticsearch_index_lifecycle_management.rb +57 -18
- data/test/plugin/test_elasticsearch_tls.rb +8 -2
- data/test/plugin/test_filter_elasticsearch_genid.rb +16 -16
- data/test/plugin/test_in_elasticsearch.rb +51 -21
- data/test/plugin/test_index_alias_template.json +11 -0
- data/test/plugin/test_index_template.json +25 -0
- data/test/plugin/test_out_elasticsearch.rb +2118 -704
- data/test/plugin/test_out_elasticsearch_data_stream.rb +1199 -0
- data/test/plugin/test_out_elasticsearch_dynamic.rb +170 -31
- metadata +62 -10
- data/.coveralls.yml +0 -2
- data/.travis.yml +0 -44
- data/appveyor.yml +0 -20
- data/gemfiles/Gemfile.without.ilm +0 -10
data/lib/fluent/plugin/out_elasticsearch.rb
@@ -2,23 +2,24 @@
 require 'date'
 require 'excon'
 require 'elasticsearch'
-begin
-  require 'elasticsearch/xpack'
-rescue LoadError
-end
+require 'set'
 require 'json'
 require 'uri'
+require 'base64'
 begin
   require 'strptime'
 rescue LoadError
 end
+require 'resolv'
 
 require 'fluent/plugin/output'
 require 'fluent/event'
 require 'fluent/error'
 require 'fluent/time'
+require 'fluent/unique_id'
 require 'fluent/log-ext'
 require 'zlib'
+require_relative 'elasticsearch_compat'
 require_relative 'elasticsearch_constants'
 require_relative 'elasticsearch_error'
 require_relative 'elasticsearch_error_handler'
@@ -31,6 +32,8 @@ begin
 rescue LoadError
 end
 
+require 'faraday/excon'
+
 module Fluent::Plugin
   class ElasticsearchOutput < Output
     class RecoverableRequestFailure < StandardError; end
@@ -57,6 +60,7 @@ module Fluent::Plugin
     attr_reader :template_names
     attr_reader :ssl_version_options
     attr_reader :compressable_connection
+    attr_reader :api_key_header
 
     helpers :event_emitter, :compat_parameters, :record_accessor, :timer
 
@@ -67,13 +71,15 @@ module Fluent::Plugin
     DEFAULT_TYPE_NAME_ES_7x = "_doc".freeze
     DEFAULT_TYPE_NAME = "fluentd".freeze
     DEFAULT_RELOAD_AFTER = -1
-
+    DEFAULT_TARGET_BULK_BYTES = -1
     DEFAULT_POLICY_ID = "logstash-policy"
 
     config_param :host, :string, :default => 'localhost'
     config_param :port, :integer, :default => 9200
     config_param :user, :string, :default => nil
     config_param :password, :string, :default => nil, :secret => true
+    config_param :cloud_id, :string, :default => nil
+    config_param :cloud_auth, :string, :default => nil
     config_param :path, :string, :default => nil
     config_param :scheme, :enum, :list => [:https, :http], :default => :http
     config_param :hosts, :string, :default => nil
@@ -155,10 +161,11 @@ EOC
     config_param :default_elasticsearch_version, :integer, :default => DEFAULT_ELASTICSEARCH_VERSION
     config_param :log_es_400_reason, :bool, :default => false
     config_param :custom_headers, :hash, :default => {}
+    config_param :api_key, :string, :default => nil, :secret => true
     config_param :suppress_doc_wrap, :bool, :default => false
     config_param :ignore_exceptions, :array, :default => [], value_type: :string, :desc => "Ignorable exception list"
     config_param :exception_backup, :bool, :default => true, :desc => "Chunk backup flag when ignore exception occured"
-    config_param :bulk_message_request_threshold, :size, :default =>
+    config_param :bulk_message_request_threshold, :size, :default => DEFAULT_TARGET_BULK_BYTES
     config_param :compression_level, :enum, list: [:no_compression, :best_speed, :best_compression, :default_compression], :default => :no_compression
     config_param :enable_ilm, :bool, :default => false
     config_param :ilm_policy_id, :string, :default => DEFAULT_POLICY_ID
@@ -166,6 +173,14 @@ EOC
     config_param :ilm_policies, :hash, :default => {}
     config_param :ilm_policy_overwrite, :bool, :default => false
     config_param :truncate_caches_interval, :time, :default => nil
+    config_param :use_legacy_template, :bool, :default => true
+    config_param :catch_transport_exception_on_retry, :bool, :default => true
+    config_param :target_index_affinity, :bool, :default => false
+
+    config_section :metadata, param_name: :metainfo, multi: false do
+      config_param :include_chunk_id, :bool, :default => false
+      config_param :chunk_id_key, :string, :default => "chunk_id".freeze
+    end
 
     config_section :buffer do
       config_set_default :@type, DEFAULT_BUFFER_TYPE
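
The hunk above introduces the 5.x-only parameters `use_legacy_template`, `catch_transport_exception_on_retry`, `target_index_affinity` and the new `<metadata>` section. A minimal sketch (not taken from the diff; the parameter values are illustrative) of exercising them through Fluentd's output test driver, in the style of `data/test/plugin/test_out_elasticsearch.rb`:

```ruby
# Sketch only: configures the plugin with the new 5.x parameters and reads them
# back. verify_es_version_at_startup is disabled so no Elasticsearch is needed.
require 'fluent/test'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_elasticsearch'

Fluent::Test.setup

conf = %[
  host localhost
  port 9200
  verify_es_version_at_startup false
  target_index_affinity true
  catch_transport_exception_on_retry true
  <metadata>
    include_chunk_id true
    chunk_id_key chunk_id
  </metadata>
]

driver = Fluent::Test::Driver::Output.new(Fluent::Plugin::ElasticsearchOutput).configure(conf)
p driver.instance.target_index_affinity               # => true
p driver.instance.catch_transport_exception_on_retry  # => true
```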
@@ -212,6 +227,8 @@ EOC
         @remove_keys_on_update = @remove_keys_on_update.split ','
       end
 
+      @api_key_header = setup_api_key
+
       raise Fluent::ConfigError, "'max_retry_putting_template' must be greater than or equal to zero." if @max_retry_putting_template < 0
       raise Fluent::ConfigError, "'max_retry_get_es_version' must be greater than or equal to zero." if @max_retry_get_es_version < 0
 
@@ -247,8 +264,11 @@ EOC
           template_installation_actual(@deflector_alias ? @deflector_alias : @index_name, @template_name, @customize_template, @application_name, @index_name, @ilm_policy_id)
         end
         verify_ilm_working if @enable_ilm
-
-
+      end
+      if @templates
+        retry_operate(@max_retry_putting_template,
+                      @fail_on_putting_template_retry_exceed,
+                      @catch_transport_exception_on_retry) do
           templates_hash_install(@templates, @template_overwrite)
         end
       end
@@ -278,8 +298,14 @@ EOC
         @dump_proc = Yajl.method(:dump)
       end
 
+      raise Fluent::ConfigError, "`cloud_auth` must be present if `cloud_id` is present" if @cloud_id && @cloud_auth.nil?
       raise Fluent::ConfigError, "`password` must be present if `user` is present" if @user && @password.nil?
 
+      if @cloud_auth
+        @user = @cloud_auth.split(':', -1)[0]
+        @password = @cloud_auth.split(':', -1)[1]
+      end
+
       if @user && m = @user.match(/%{(?<user>.*)}/)
         @user = URI.encode_www_form_component(m["user"])
       end
@@ -325,7 +351,7 @@ EOC
           @type_name = '_doc'.freeze
         end
         if @last_seen_major_version >= 8 && @type_name != DEFAULT_TYPE_NAME_ES_7x
-          log.
+          log.debug "Detected ES 8.x or above: This parameter has no effect."
           @type_name = nil
         end
       end
@@ -386,17 +412,23 @@ EOC
         end
       end
 
-      if Gem::Version.create(::
+      if Gem::Version.create(::TRANSPORT_CLASS::VERSION) < Gem::Version.create("7.2.0")
         if compression
           raise Fluent::ConfigError, <<-EOC
             Cannot use compression with elasticsearch-transport plugin version < 7.2.0
-            Your elasticsearch-transport plugin version version is #{
+            Your elasticsearch-transport plugin version version is #{TRANSPORT_CLASS::VERSION}.
             Please consider to upgrade ES client.
           EOC
         end
       end
     end
 
+    def setup_api_key
+      return {} unless @api_key
+
+      { "Authorization" => "ApiKey " + Base64.strict_encode64(@api_key) }
+    end
+
     def dry_run?
       if Fluent::Engine.respond_to?(:dry_run_mode)
         Fluent::Engine.dry_run_mode
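
For orientation, a tiny standalone sketch (the key value is a made-up placeholder) of the header that the new `setup_api_key` produces; it is merged into every request's headers further down in the `client` method:

```ruby
require 'base64'

api_key = 'myApiKeyIdAndSecret'   # whatever is set in the new api_key parameter
header  = { "Authorization" => "ApiKey " + Base64.strict_encode64(api_key) }
p header   # => {"Authorization"=>"ApiKey <Base64 of the api_key value>"}
```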
@@ -441,7 +473,7 @@ EOC
       when :excon
         { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass, nonblock: @http_backend_excon_nonblock }
       when :typhoeus
-        require 'typhoeus'
+        require 'faraday/typhoeus'
         { sslkey: @client_key, sslcert: @client_cert, keypasswd: @client_key_pass }
       end
     rescue LoadError => ex
@@ -451,7 +483,9 @@ EOC
 
     def handle_last_seen_es_major_version
       if @verify_es_version_at_startup && !dry_run?
-        retry_operate(@max_retry_get_es_version,
+        retry_operate(@max_retry_get_es_version,
+                      @fail_on_detecting_es_version_retry_exceed,
+                      @catch_transport_exception_on_retry) do
           detect_es_major_version
         end
       else
@@ -460,8 +494,20 @@ EOC
     end
 
     def detect_es_major_version
-
-
+      begin
+        @_es_info ||= client.info
+      rescue ::Elasticsearch::UnsupportedProductError => e
+        raise Fluent::ConfigError, "Using Elasticsearch client #{client_library_version} is not compatible for your Elasticsearch server. Please check your using elasticsearch gem version and Elasticsearch server."
+      end
+      begin
+        unless version = @_es_info.dig("version", "number")
+          version = @default_elasticsearch_version
+        end
+      rescue NoMethodError => e
+        log.warn "#{@_es_info} can not dig version information. Assuming Elasticsearch #{@default_elasticsearch_version}", error: e
+        version = @default_elasticsearch_version
+      end
+      version.to_i
     end
 
     def client_library_version
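
The reworked `detect_es_major_version` above now falls back to `default_elasticsearch_version` when the cluster info payload carries no version number. A tiny illustration (the info hash is fabricated) of that fallback:

```ruby
info = { "name" => "node-1", "version" => {} }   # no "number" key in the response
default_elasticsearch_version = 5

version = info.dig("version", "number") || default_elasticsearch_version
p version.to_i   # => 5
```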
@@ -533,7 +579,17 @@ EOC
       return Time.at(event_time).to_datetime
     end
 
+    def cloud_client
+      Elasticsearch::Client.new(
+        cloud_id: @cloud_id,
+        user: @user,
+        password: @password
+      )
+    end
+
     def client(host = nil, compress_connection = false)
+      return cloud_client if @cloud_id
+
       # check here to see if we already have a client connection for the given host
       connection_options = get_connection_options(host)
 
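
The new `cloud_client` path above hands endpoint resolution to elasticsearch-ruby whenever `cloud_id` is set. A minimal sketch (the `cloud_id`/`cloud_auth` values are placeholders) of what it boils down to once `configure` has split `cloud_auth` into user and password:

```ruby
require 'elasticsearch'

cloud_auth = 'elastic:my-secret-password'   # cloud_auth parameter
user, password = cloud_auth.split(':', -1)

client = Elasticsearch::Client.new(
  cloud_id: 'my-deployment:ZXhhbXBsZS5jbG91ZCQxMjM0JGFiY2Q=',  # cloud_id parameter (placeholder)
  user: user,
  password: password
)
# The client derives the HTTPS endpoint from the Base64 part of cloud_id;
# host/port parameters are bypassed because client() returns cloud_client early.
```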
@@ -554,10 +610,13 @@ EOC
                      else
                        {}
                      end
-      headers = { 'Content-Type' => @content_type.to_s }
+      headers = { 'Content-Type' => @content_type.to_s }
+                  .merge(@custom_headers)
+                  .merge(@api_key_header)
+                  .merge(gzip_headers)
       ssl_options = { verify: @ssl_verify, ca_file: @ca_file}.merge(@ssl_version_options)
 
-      transport =
+      transport = TRANSPORT_CLASS::Transport::HTTP::Faraday.new(connection_options.merge(
         options: {
           reload_connections: local_reload_connections,
           reload_on_failure: @reload_on_failure,
@@ -594,6 +653,14 @@ EOC
       end
     end
 
+    def is_ipv6_host(host_str)
+      begin
+        IPAddr.new(host_str).ipv6?
+      rescue IPAddr::InvalidAddressError
+        return false
+      end
+    end
+
     def get_connection_options(con_host=nil)
 
       hosts = if con_host || @hosts
@@ -605,6 +672,21 @@ EOC
             port: (host_str.split(':')[1] || @port).to_i,
             scheme: @scheme.to_s
           }
+        # Support ipv6 for host/host placeholders
+        elsif is_ipv6_host(host_str)
+          if Resolv::IPv6::Regex.match(host_str)
+            {
+              host: "[#{host_str}]",
+              port: @port.to_i,
+              scheme: @scheme.to_s
+            }
+          else
+            {
+              host: host_str,
+              port: @port.to_i,
+              scheme: @scheme.to_s
+            }
+          end
         else
           # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
           uri = URI(get_escaped_userinfo(host_str))
@@ -615,7 +697,11 @@ EOC
           end
         end.compact
       else
-        [{host: @host, port: @port, scheme: @scheme.to_s}]
+        if Resolv::IPv6::Regex.match(@host)
+          [{host: "[#{@host}]", scheme: @scheme.to_s, port: @port}]
+        else
+          [{host: @host, port: @port, scheme: @scheme.to_s}]
+        end
       end.each do |host|
         host.merge!(user: @user, password: @password) if !host[:user] && @user
         host.merge!(path: @path) if !host[:path] && @path
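
Together, the new `is_ipv6_host` helper and the `Resolv::IPv6::Regex` checks above make literal IPv6 hosts usable in `host`/`hosts`. A small standalone sketch (the hosts are example values) of that detection and the bracketing applied before the URL is built:

```ruby
require 'ipaddr'
require 'resolv'

def ipv6_host?(host_str)
  IPAddr.new(host_str).ipv6?
rescue IPAddr::InvalidAddressError
  false
end

["2404:7a80:d440:3000:192a:a292:bd7f:ca19", "logs.example.com"].each do |h|
  host = ipv6_host?(h) && Resolv::IPv6::Regex.match(h) ? "[#{h}]" : h
  puts "#{h} -> #{host}"
end
# 2404:7a80:d440:3000:192a:a292:bd7f:ca19 -> [2404:7a80:d440:3000:192a:a292:bd7f:ca19]
# logs.example.com -> logs.example.com
```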
@@ -752,13 +838,24 @@ EOC
       true
     end
 
+    def inject_chunk_id_to_record_if_needed(record, chunk_id)
+      if @metainfo&.include_chunk_id
+        record[@metainfo.chunk_id_key] = chunk_id
+        record
+      else
+        record
+      end
+    end
+
     def write(chunk)
       bulk_message_count = Hash.new { |h,k| h[k] = 0 }
       bulk_message = Hash.new { |h,k| h[k] = '' }
       header = {}
       meta = {}
+      unpackedMsgArr = {}
 
       tag = chunk.metadata.tag
+      chunk_id = dump_unique_id_hex(chunk.unique_id)
       extracted_values = expand_placeholders(chunk)
       host = if @hosts
                extract_placeholders(@hosts, chunk)
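
The `inject_chunk_id_to_record_if_needed`/`dump_unique_id_hex` pair added above tags each record with the id of the buffer chunk it came from when the `<metadata>` section enables it. A rough illustration (the record and key are fabricated) outside the plugin:

```ruby
require 'fluent/unique_id'
include Fluent::UniqueId::Mixin

chunk_unique_id = Fluent::UniqueId.generate          # what a buffer chunk carries
record = { "message" => "hello" }
record["chunk_id"] = dump_unique_id_hex(chunk_unique_id)
p record   # => {"message"=>"hello", "chunk_id"=>"<32-char hex id>"}
```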
@@ -766,19 +863,27 @@ EOC
                extract_placeholders(@host, chunk)
              end
 
+      affinity_target_indices = get_affinity_target_indices(chunk)
       chunk.msgpack_each do |time, record|
         next unless record.is_a? Hash
+
+        record = inject_chunk_id_to_record_if_needed(record, chunk_id)
+
         begin
-          meta, header, record = process_message(tag, meta, header, time, record, extracted_values)
+          meta, header, record = process_message(tag, meta, header, time, record, affinity_target_indices, extracted_values)
           info = if @include_index_in_url
                    RequestInfo.new(host, meta.delete("_index".freeze), meta["_index".freeze], meta.delete("_alias".freeze))
                  else
                    RequestInfo.new(host, nil, meta["_index".freeze], meta.delete("_alias".freeze))
                  end
 
+          unpackedMsgArr[info] = [] if unpackedMsgArr[info].nil?
+          unpackedMsgArr[info] << {:time => time, :record => record}
+
           if split_request?(bulk_message, info)
             bulk_message.each do |info, msgs|
-              send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
+              send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info, unpackedMsgArr[info]) unless msgs.empty?
+              unpackedMsgArr[info].clear
               msgs.clear
               # Clear bulk_message_count for this info.
               bulk_message_count[info] = 0;
@@ -801,11 +906,49 @@ EOC
       end
 
       bulk_message.each do |info, msgs|
-        send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
+        send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info, unpackedMsgArr[info]) unless msgs.empty?
+
+        unpackedMsgArr[info].clear
         msgs.clear
       end
     end
 
+    def target_index_affinity_enabled?()
+      @target_index_affinity && @logstash_format && @id_key && (@write_operation == UPDATE_OP || @write_operation == UPSERT_OP)
+    end
+
+    def get_affinity_target_indices(chunk)
+      indices = Hash.new
+      if target_index_affinity_enabled?()
+        id_key_accessor = record_accessor_create(@id_key)
+        ids = Set.new
+        chunk.msgpack_each do |time, record|
+          next unless record.is_a? Hash
+          begin
+            ids << id_key_accessor.call(record)
+          end
+        end
+        log.debug("Find affinity target_indices by quering on ES (write_operation #{@write_operation}) for ids: #{ids.to_a}")
+        options = {
+          :index => "#{logstash_prefix}#{@logstash_prefix_separator}*",
+        }
+        query = {
+          'query' => { 'ids' => { 'values' => ids.to_a } },
+          '_source' => false,
+          'sort' => [
+            {"_index" => {"order" => "desc"}}
+          ]
+        }
+        result = client.search(options.merge(:body => Yajl.dump(query)))
+        # There should be just one hit per _id, but in case there still is multiple, just the oldest index is stored to map
+        result['hits']['hits'].each do |hit|
+          indices[hit["_id"]] = hit["_index"]
+          log.debug("target_index for id: #{hit["_id"]} from es: #{hit["_index"]}")
+        end
+      end
+      indices
+    end
+
     def split_request?(bulk_message, info)
       # For safety.
     end
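
The `get_affinity_target_indices` method added above issues one `ids` search per chunk so that updates/upserts keep landing in the index that already holds a given `_id`. A sketch of the request body it sends (the ids are placeholders); hits come back sorted by `_index` descending, and because later hits overwrite earlier ones in the map, the oldest index wins, as the code comment notes:

```ruby
require 'json'

ids = ["user-1", "user-2"]   # values extracted via id_key from the chunk
query = {
  'query'   => { 'ids' => { 'values' => ids } },
  '_source' => false,
  'sort'    => [{ "_index" => { "order" => "desc" } }]
}
puts JSON.pretty_generate(query)
# A hit like {"_index" => "logstash-2024.05.01", "_id" => "user-1"} then pins
# later update/upsert operations for user-1 to logstash-2024.05.01.
```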
@@ -818,7 +961,7 @@ EOC
       false
     end
 
-    def process_message(tag, meta, header, time, record, extracted_values)
+    def process_message(tag, meta, header, time, record, affinity_target_indices, extracted_values)
       logstash_prefix, logstash_dateformat, index_name, type_name, _template_name, _customize_template, _deflector_alias, application_name, pipeline, _ilm_policy_id = extracted_values
 
       if @flatten_hashes
@@ -859,6 +1002,15 @@ EOC
         record[@tag_key] = tag
       end
 
+      # If affinity target indices map has value for this particular id, use it as target_index
+      if !affinity_target_indices.empty?
+        id_accessor = record_accessor_create(@id_key)
+        id_value = id_accessor.call(record)
+        if affinity_target_indices.key?(id_value)
+          target_index = affinity_target_indices[id_value]
+        end
+      end
+
       target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
       if target_type_parent && target_type_parent[target_type_child_key]
         target_type = target_type_parent.delete(target_type_child_key)
@@ -868,18 +1020,18 @@ EOC
         elsif @last_seen_major_version == 7
           log.warn "Detected ES 7.x: `_doc` will be used as the document `_type`."
           target_type = '_doc'.freeze
-        elsif @last_seen_major_version >=8
-          log.
+        elsif @last_seen_major_version >= 8
+          log.debug "Detected ES 8.x or above: document type will not be used."
           target_type = nil
         end
       else
-        if @suppress_type_name && @last_seen_major_version
+        if @suppress_type_name && @last_seen_major_version == 7
           target_type = nil
         elsif @last_seen_major_version == 7 && @type_name != DEFAULT_TYPE_NAME_ES_7x
           log.warn "Detected ES 7.x: `_doc` will be used as the document `_type`."
           target_type = '_doc'.freeze
         elsif @last_seen_major_version >= 8
-          log.
+          log.debug "Detected ES 8.x or above: document type will not be used."
           target_type = nil
         else
           target_type = type_name
@@ -944,29 +1096,33 @@ EOC
 
     def template_installation_actual(deflector_alias, template_name, customize_template, application_name, target_index, ilm_policy_id, host=nil)
       if template_name && @template_file
-        if !@logstash_format && @alias_indexes.include?
-
-
-
+        if !@logstash_format && (deflector_alias.nil? || (@alias_indexes.include? deflector_alias)) && (@template_names.include? template_name)
+          if deflector_alias
+            log.debug("Index alias #{deflector_alias} and template #{template_name} already exist (cached)")
+          else
+            log.debug("Template #{template_name} already exists (cached)")
+          end
         else
-          retry_operate(@max_retry_putting_template,
+          retry_operate(@max_retry_putting_template,
+                        @fail_on_putting_template_retry_exceed,
+                        @catch_transport_exception_on_retry) do
            if customize_template
-              template_custom_install(template_name, @template_file, @template_overwrite, customize_template, @enable_ilm, deflector_alias, ilm_policy_id, host, target_index)
+              template_custom_install(template_name, @template_file, @template_overwrite, customize_template, @enable_ilm, deflector_alias, ilm_policy_id, host, target_index, @index_separator)
            else
-              template_install(template_name, @template_file, @template_overwrite, @enable_ilm, deflector_alias, ilm_policy_id, host, target_index)
+              template_install(template_name, @template_file, @template_overwrite, @enable_ilm, deflector_alias, ilm_policy_id, host, target_index, @index_separator)
            end
            ilm_policy = @ilm_policies[ilm_policy_id] || {}
            create_rollover_alias(target_index, @rollover_index, deflector_alias, application_name, @index_date_pattern, @index_separator, @enable_ilm, ilm_policy_id, ilm_policy, @ilm_policy_overwrite, host)
          end
          @alias_indexes << deflector_alias unless deflector_alias.nil?
-          @template_names << template_name
+          @template_names << template_name
         end
       end
     end
 
     # send_bulk given a specific bulk request, the original tag,
     # chunk, and bulk_message_count
-    def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info)
+    def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info, unpacked_msg_arr)
       _logstash_prefix, _logstash_dateformat, index_name, _type_name, template_name, customize_template, deflector_alias, application_name, _pipeline, ilm_policy_id = extracted_values
       if deflector_alias
         template_installation(deflector_alias, template_name, customize_template, application_name, index_name, ilm_policy_id, info.host)
@@ -989,7 +1145,7 @@ EOC
 
       if response['errors']
         error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
-        error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+        error.handle_error(response, tag, chunk, bulk_message_count, extracted_values, unpacked_msg_arr)
       end
     rescue RetryStreamError => e
       log.trace "router.emit_stream for retry stream doing..."