fluent-plugin-opensearch 1.0.0

Files changed (52)
  1. checksums.yaml +7 -0
  2. data/.coveralls.yml +1 -0
  3. data/.editorconfig +9 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
  5. data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
  6. data/.github/workflows/coverage.yaml +22 -0
  7. data/.github/workflows/issue-auto-closer.yml +12 -0
  8. data/.github/workflows/linux.yml +26 -0
  9. data/.github/workflows/macos.yml +26 -0
  10. data/.github/workflows/windows.yml +26 -0
  11. data/.gitignore +18 -0
  12. data/CONTRIBUTING.md +24 -0
  13. data/Gemfile +10 -0
  14. data/History.md +6 -0
  15. data/ISSUE_TEMPLATE.md +26 -0
  16. data/LICENSE.txt +201 -0
  17. data/PULL_REQUEST_TEMPLATE.md +9 -0
  18. data/README.OpenSearchGenID.md +116 -0
  19. data/README.OpenSearchInput.md +291 -0
  20. data/README.Troubleshooting.md +482 -0
  21. data/README.md +1556 -0
  22. data/Rakefile +37 -0
  23. data/fluent-plugin-opensearch.gemspec +38 -0
  24. data/gemfiles/Gemfile.elasticsearch.v6 +12 -0
  25. data/lib/fluent/log-ext.rb +64 -0
  26. data/lib/fluent/plugin/filter_opensearch_genid.rb +103 -0
  27. data/lib/fluent/plugin/in_opensearch.rb +351 -0
  28. data/lib/fluent/plugin/oj_serializer.rb +48 -0
  29. data/lib/fluent/plugin/opensearch_constants.rb +39 -0
  30. data/lib/fluent/plugin/opensearch_error.rb +31 -0
  31. data/lib/fluent/plugin/opensearch_error_handler.rb +166 -0
  32. data/lib/fluent/plugin/opensearch_fallback_selector.rb +36 -0
  33. data/lib/fluent/plugin/opensearch_index_template.rb +155 -0
  34. data/lib/fluent/plugin/opensearch_simple_sniffer.rb +36 -0
  35. data/lib/fluent/plugin/opensearch_tls.rb +96 -0
  36. data/lib/fluent/plugin/out_opensearch.rb +1124 -0
  37. data/lib/fluent/plugin/out_opensearch_data_stream.rb +214 -0
  38. data/test/helper.rb +61 -0
  39. data/test/plugin/test_alias_template.json +9 -0
  40. data/test/plugin/test_filter_opensearch_genid.rb +241 -0
  41. data/test/plugin/test_in_opensearch.rb +493 -0
  42. data/test/plugin/test_index_alias_template.json +11 -0
  43. data/test/plugin/test_index_template.json +25 -0
  44. data/test/plugin/test_oj_serializer.rb +45 -0
  45. data/test/plugin/test_opensearch_error_handler.rb +689 -0
  46. data/test/plugin/test_opensearch_fallback_selector.rb +100 -0
  47. data/test/plugin/test_opensearch_tls.rb +171 -0
  48. data/test/plugin/test_out_opensearch.rb +3953 -0
  49. data/test/plugin/test_out_opensearch_data_stream.rb +474 -0
  50. data/test/plugin/test_template.json +23 -0
  51. data/test/test_log-ext.rb +61 -0
  52. metadata +262 -0
data/lib/fluent/plugin/out_opensearch.rb
@@ -0,0 +1,1124 @@
1
+ # SPDX-License-Identifier: Apache-2.0
2
+ #
3
+ # The fluent-plugin-opensearch Contributors require contributions made to
4
+ # this file be licensed under the Apache-2.0 license or a
5
+ # compatible open source license.
6
+ #
7
+ # Modifications Copyright fluent-plugin-opensearch Contributors. See
8
+ # GitHub history for details.
9
+ #
10
+ # Licensed to Uken Inc. under one or more contributor
11
+ # license agreements. See the NOTICE file distributed with
12
+ # this work for additional information regarding copyright
13
+ # ownership. Uken Inc. licenses this file to you under
14
+ # the Apache License, Version 2.0 (the "License"); you may
15
+ # not use this file except in compliance with the License.
16
+ # You may obtain a copy of the License at
17
+ #
18
+ # http://www.apache.org/licenses/LICENSE-2.0
19
+ #
20
+ # Unless required by applicable law or agreed to in writing,
21
+ # software distributed under the License is distributed on an
22
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
23
+ # KIND, either express or implied. See the License for the
24
+ # specific language governing permissions and limitations
25
+ # under the License.
26
+
27
+ require 'date'
28
+ require 'excon'
29
+ require 'opensearch'
30
+ require 'set'
31
+ require 'json'
32
+ require 'uri'
33
+ require 'base64'
34
+ begin
35
+ require 'strptime'
36
+ rescue LoadError
37
+ end
38
+ require 'resolv'
39
+
40
+ require 'fluent/plugin/output'
41
+ require 'fluent/event'
42
+ require 'fluent/error'
43
+ require 'fluent/time'
44
+ require 'fluent/unique_id'
45
+ require 'fluent/log-ext'
46
+ require 'zlib'
47
+ require_relative 'opensearch_constants'
48
+ require_relative 'opensearch_error'
49
+ require_relative 'opensearch_error_handler'
50
+ require_relative 'opensearch_index_template'
51
+ require_relative 'opensearch_tls'
52
+ require_relative 'opensearch_fallback_selector'
53
+ begin
54
+ require_relative 'oj_serializer'
55
+ rescue LoadError
56
+ end
57
+ require 'aws-sdk-core'
58
+ require 'faraday_middleware/aws_sigv4'
59
+
60
+ module Fluent::Plugin
61
+ class OpenSearchOutput < Output
62
+ class RecoverableRequestFailure < StandardError; end
63
+ class UnrecoverableRequestFailure < Fluent::UnrecoverableError; end
64
+ class RetryStreamEmitFailure < StandardError; end
65
+
66
+ # MissingIdFieldError is raised for records that do not
67
+ # include the field for the unique record identifier
68
+ class MissingIdFieldError < StandardError; end
69
+
70
+ # RetryStreamError provides a stream to be
71
+ # put back in the pipeline for cases where a bulk request
72
+ # failed (e.g. some records succeed while others fail)
73
+ class RetryStreamError < StandardError
74
+ attr_reader :retry_stream
75
+ def initialize(retry_stream)
76
+ @retry_stream = retry_stream
77
+ end
78
+ end
79
+
80
+ RequestInfo = Struct.new(:host, :index, :target_index, :alias)
81
+
82
+ attr_reader :template_names
83
+ attr_reader :ssl_version_options
84
+ attr_reader :compressable_connection
85
+
86
+ helpers :event_emitter, :compat_parameters, :record_accessor, :timer
87
+
88
+ Fluent::Plugin.register_output('opensearch', self)
89
+
90
+ DEFAULT_BUFFER_TYPE = "memory"
91
+ DEFAULT_OPENSEARCH_VERSION = 1
92
+ DEFAULT_TYPE_NAME = "_doc".freeze
93
+ DEFAULT_RELOAD_AFTER = -1
94
+ DEFAULT_TARGET_BULK_BYTES = -1
95
+ DEFAULT_POLICY_ID = "logstash-policy"
96
+
97
+ config_param :host, :string, :default => 'localhost'
98
+ config_param :port, :integer, :default => 9200
99
+ config_param :user, :string, :default => nil
100
+ config_param :password, :string, :default => nil, :secret => true
101
+ config_param :path, :string, :default => nil
102
+ config_param :scheme, :enum, :list => [:https, :http], :default => :http
103
+ config_param :hosts, :string, :default => nil
104
+ config_param :target_index_key, :string, :default => nil
105
+ config_param :time_key_format, :string, :default => nil
106
+ config_param :time_precision, :integer, :default => 9
107
+ config_param :include_timestamp, :bool, :default => false
108
+ config_param :logstash_format, :bool, :default => false
109
+ config_param :logstash_prefix, :string, :default => "logstash"
110
+ config_param :logstash_prefix_separator, :string, :default => '-'
111
+ config_param :logstash_dateformat, :string, :default => "%Y.%m.%d"
112
+ config_param :utc_index, :bool, :default => true
113
+ config_param :index_name, :string, :default => "fluentd"
114
+ config_param :id_key, :string, :default => nil
115
+ config_param :write_operation, :string, :default => "index"
116
+ config_param :parent_key, :string, :default => nil
117
+ config_param :routing_key, :string, :default => nil
118
+ config_param :request_timeout, :time, :default => 5
119
+ config_param :reload_connections, :bool, :default => true
120
+ config_param :reload_on_failure, :bool, :default => false
121
+ config_param :retry_tag, :string, :default=>nil
122
+ config_param :resurrect_after, :time, :default => 60
123
+ config_param :time_key, :string, :default => nil
124
+ config_param :time_key_exclude_timestamp, :bool, :default => false
125
+ config_param :ssl_verify , :bool, :default => true
126
+ config_param :client_key, :string, :default => nil
127
+ config_param :client_cert, :string, :default => nil
128
+ config_param :client_key_pass, :string, :default => nil, :secret => true
129
+ config_param :ca_file, :string, :default => nil
130
+ config_param :remove_keys, :string, :default => nil
131
+ config_param :remove_keys_on_update, :string, :default => ""
132
+ config_param :remove_keys_on_update_key, :string, :default => nil
133
+ config_param :flatten_hashes, :bool, :default => false
134
+ config_param :flatten_hashes_separator, :string, :default => "_"
135
+ config_param :template_name, :string, :default => nil
136
+ config_param :template_file, :string, :default => nil
137
+ config_param :template_overwrite, :bool, :default => false
138
+ config_param :customize_template, :hash, :default => nil
139
+ config_param :index_date_pattern, :string, :default => "now/d"
140
+ config_param :index_separator, :string, :default => "-"
141
+ config_param :application_name, :string, :default => "default"
142
+ config_param :templates, :hash, :default => nil
143
+ config_param :max_retry_putting_template, :integer, :default => 10
144
+ config_param :fail_on_putting_template_retry_exceed, :bool, :default => true
145
+ config_param :fail_on_detecting_os_version_retry_exceed, :bool, :default => true
146
+ config_param :max_retry_get_os_version, :integer, :default => 15
147
+ config_param :include_tag_key, :bool, :default => false
148
+ config_param :tag_key, :string, :default => 'tag'
149
+ config_param :time_parse_error_tag, :string, :default => 'opensearch_plugin.output.time.error'
150
+ config_param :reconnect_on_error, :bool, :default => false
151
+ config_param :pipeline, :string, :default => nil
152
+ config_param :with_transporter_log, :bool, :default => false
153
+ config_param :emit_error_for_missing_id, :bool, :default => false
154
+ config_param :sniffer_class_name, :string, :default => nil
155
+ config_param :selector_class_name, :string, :default => nil
156
+ config_param :reload_after, :integer, :default => DEFAULT_RELOAD_AFTER
157
+ config_param :include_index_in_url, :bool, :default => false
158
+ config_param :http_backend, :enum, list: [:excon, :typhoeus], :default => :excon
159
+ config_param :http_backend_excon_nonblock, :bool, :default => true
160
+ config_param :validate_client_version, :bool, :default => false
161
+ config_param :prefer_oj_serializer, :bool, :default => false
162
+ config_param :unrecoverable_error_types, :array, :default => ["out_of_memory_error", "rejected_execution_exception"]
163
+ config_param :verify_os_version_at_startup, :bool, :default => true
164
+ config_param :default_opensearch_version, :integer, :default => DEFAULT_OPENSEARCH_VERSION
165
+ config_param :log_os_400_reason, :bool, :default => false
166
+ config_param :custom_headers, :hash, :default => {}
167
+ config_param :suppress_doc_wrap, :bool, :default => false
168
+ config_param :ignore_exceptions, :array, :default => [], value_type: :string, :desc => "Ignorable exception list"
169
+ config_param :exception_backup, :bool, :default => true, :desc => "Chunk backup flag for when an ignored exception occurred"
170
+ config_param :bulk_message_request_threshold, :size, :default => DEFAULT_TARGET_BULK_BYTES
171
+ config_param :compression_level, :enum, list: [:no_compression, :best_speed, :best_compression, :default_compression], :default => :no_compression
172
+ config_param :truncate_caches_interval, :time, :default => nil
173
+ config_param :use_legacy_template, :bool, :default => true
174
+ config_param :catch_transport_exception_on_retry, :bool, :default => true
175
+ config_param :target_index_affinity, :bool, :default => false
176
+
177
+ config_section :metadata, param_name: :metainfo, multi: false do
178
+ config_param :include_chunk_id, :bool, :default => false
179
+ config_param :chunk_id_key, :string, :default => "chunk_id".freeze
180
+ end
181
+
182
+ config_section :endpoint, multi: false do
183
+ config_param :region, :string
184
+ config_param :url do |c|
185
+ c.chomp("/")
186
+ end
187
+ config_param :access_key_id, :string, :default => ""
188
+ config_param :secret_access_key, :string, :default => "", secret: true
189
+ config_param :assume_role_arn, :string, :default => nil
190
+ config_param :ecs_container_credentials_relative_uri, :string, :default => nil # Set with the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI environment variable value
191
+ config_param :assume_role_session_name, :string, :default => "fluentd"
192
+ config_param :assume_role_web_identity_token_file, :string, :default => nil
193
+ config_param :sts_credentials_region, :string, :default => nil
194
+ end
195
+
196
+ config_section :buffer do
197
+ config_set_default :@type, DEFAULT_BUFFER_TYPE
198
+ config_set_default :chunk_keys, ['tag']
199
+ config_set_default :timekey_use_utc, true
200
+ end
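Taken together, the parameters and sections above map directly onto a Fluentd output block. A minimal configuration sketch (values are illustrative placeholders, not defaults shipped with the plugin):

  <match app.logs.**>
    @type opensearch
    host localhost
    port 9200
    scheme https
    user fluentd
    password changeme
    logstash_format true
    logstash_prefix fluentd
    # For Amazon OpenSearch Service, an <endpoint> section supplies the URL and
    # signing region instead of host/port:
    # <endpoint>
    #   url https://search-example.us-east-1.es.amazonaws.com
    #   region us-east-1
    # </endpoint>
    <buffer tag>
      @type memory
      flush_interval 10s
    </buffer>
  </match>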
201
+
202
+ include Fluent::OpenSearchIndexTemplate
203
+ include Fluent::Plugin::OpenSearchConstants
204
+ include Fluent::Plugin::OpenSearchTLS
205
+
206
+ def initialize
207
+ super
208
+ end
209
+
210
+ ######################################################################################################
211
+ # This AWS credentials creation code is heavily based on fluent-plugin-aws-elasticsearch-service:
212
+ # https://github.com/atomita/fluent-plugin-aws-elasticsearch-service/blob/master/lib/fluent/plugin/out_aws-elasticsearch-service.rb#L73-L134
213
+ ######################################################################################################
214
+ def aws_credentials(conf)
215
+ credentials = nil
216
+ unless conf[:access_key_id].empty? || conf[:secret_access_key].empty?
217
+ credentials = Aws::Credentials.new(conf[:access_key_id], conf[:secret_access_key])
218
+ else
219
+ if conf[:assume_role_arn].nil?
220
+ aws_container_credentials_relative_uri = conf[:ecs_container_credentials_relative_uri] || ENV["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"]
221
+ if aws_container_credentials_relative_uri.nil?
222
+ credentials = Aws::SharedCredentials.new({retries: 2}).credentials
223
+ credentials ||= Aws::InstanceProfileCredentials.new.credentials
224
+ credentials ||= Aws::ECSCredentials.new.credentials
225
+ else
226
+ credentials = Aws::ECSCredentials.new({
227
+ credential_path: aws_container_credentials_relative_uri
228
+ }).credentials
229
+ end
230
+ else
231
+ if conf[:assume_role_web_identity_token_file].nil?
232
+ credentials = Aws::AssumeRoleCredentials.new({
233
+ role_arn: conf[:assume_role_arn],
234
+ role_session_name: conf[:assume_role_session_name],
235
+ region: sts_creds_region(conf)
236
+ }).credentials
237
+ else
238
+ credentials = Aws::AssumeRoleWebIdentityCredentials.new({
239
+ role_arn: conf[:assume_role_arn],
240
+ web_identity_token_file: conf[:assume_role_web_identity_token_file],
241
+ region: sts_creds_region(conf)
242
+ }).credentials
243
+ end
244
+ end
245
+ end
246
+ raise "No valid AWS credentials found." unless credentials.set?
247
+
248
+ credentials
249
+ end
250
+
251
+ def sts_creds_region(conf)
252
+ conf[:sts_credentials_region] || conf[:region]
253
+ end
254
+ ###############################
255
+ # End of the AWS credentials part.
256
+ ###############################
257
+
258
+ def configure(conf)
259
+ compat_parameters_convert(conf, :buffer)
260
+
261
+ super
262
+
263
+ if @endpoint
264
+ # Override the default value of reload_connections to false here because
265
+ # AWS Elasticsearch Service doesn't return the addresses of nodes, and the Elasticsearch client
266
+ # fails to reload connections properly. This ends up with the "temporarily failed to flush the buffer"
267
+ # error repeating forever. See this discussion for details:
268
+ # https://discuss.elastic.co/t/elasitcsearch-ruby-raises-cannot-get-new-connection-from-pool-error/36252
269
+ @reload_connections = false
270
+ end
271
+
272
+ if placeholder_substitution_needed_for_template?
273
+ # nop.
274
+ elsif not @buffer_config.chunk_keys.include? "tag" and
275
+ not @buffer_config.chunk_keys.include? "_index"
276
+ raise Fluent::ConfigError, "'tag' or '_index' in chunk_keys is required."
277
+ end
278
+ @time_parser = create_time_parser
279
+ @backend_options = backend_options
280
+ @ssl_version_options = set_tls_minmax_version_config(@ssl_version, @ssl_max_version, @ssl_min_version)
281
+
282
+ if @remove_keys
283
+ @remove_keys = @remove_keys.split(/\s*,\s*/)
284
+ end
285
+
286
+ if @target_index_key && @target_index_key.is_a?(String)
287
+ @target_index_key = @target_index_key.split '.'
288
+ end
289
+
290
+ if @remove_keys_on_update && @remove_keys_on_update.is_a?(String)
291
+ @remove_keys_on_update = @remove_keys_on_update.split ','
292
+ end
293
+
294
+ raise Fluent::ConfigError, "'max_retry_putting_template' must be greater than or equal to zero." if @max_retry_putting_template < 0
295
+ raise Fluent::ConfigError, "'max_retry_get_os_version' must be greater than or equal to zero." if @max_retry_get_os_version < 0
296
+
297
+ # Emit a log message when host placeholders and template features are used at the same time.
298
+ valid_host_placeholder = placeholder?(:host_placeholder, @host)
299
+ if valid_host_placeholder && (@template_name && @template_file || @templates)
300
+ if @verify_os_version_at_startup
301
+ raise Fluent::ConfigError, "host placeholder, template installation, and verify OpenSearch version at startup are exclusive feature at same time. Please specify verify_os_version_at_startup as `false` when host placeholder and template installation are enabled."
302
+ end
303
+ log.info "host placeholder and template installation makes your OpenSearch cluster a bit slow down(beta)."
304
+ end
305
+
306
+ @template_names = []
307
+ if !dry_run?
308
+ if @template_name && @template_file
309
+ if @logstash_format || placeholder_substitution_needed_for_template?
310
+ class << self
311
+ alias_method :template_installation, :template_installation_actual
312
+ end
313
+ else
314
+ template_installation_actual(@template_name, @customize_template, @application_name, @index_name)
315
+ end
316
+ end
317
+ if @templates
318
+ retry_operate(@max_retry_putting_template,
319
+ @fail_on_putting_template_retry_exceed,
320
+ @catch_transport_exception_on_retry) do
321
+ templates_hash_install(@templates, @template_overwrite)
322
+ end
323
+ end
324
+ end
325
+
326
+ @truncate_mutex = Mutex.new
327
+ if @truncate_caches_interval
328
+ timer_execute(:out_opensearch_truncate_caches, @truncate_caches_interval) do
329
+ log.info('Clean up the indices and template names cache')
330
+
331
+ @truncate_mutex.synchronize {
332
+ @template_names.clear
333
+ }
334
+ end
335
+ end
336
+
337
+ @serializer_class = nil
338
+ begin
339
+ require 'oj'
340
+ @dump_proc = Oj.method(:dump)
341
+ if @prefer_oj_serializer
342
+ @serializer_class = Fluent::Plugin::Serializer::Oj
343
+ OpenSearch::API.settings[:serializer] = Fluent::Plugin::Serializer::Oj
344
+ end
345
+ rescue LoadError
346
+ @dump_proc = Yajl.method(:dump)
347
+ end
348
+
349
+ raise Fluent::ConfigError, "`password` must be present if `user` is present" if @user && @password.nil?
350
+
351
+ if @user && m = @user.match(/%{(?<user>.*)}/)
352
+ @user = URI.encode_www_form_component(m["user"])
353
+ end
354
+ if @password && m = @password.match(/%{(?<password>.*)}/)
355
+ @password = URI.encode_www_form_component(m["password"])
356
+ end
357
+
358
+ @transport_logger = nil
359
+ if @with_transporter_log
360
+ @transport_logger = log
361
+ log_level = conf['@log_level'] || conf['log_level']
362
+ log.warn "Consider to specify log_level with @log_level." unless log_level
363
+ end
364
+ # Specify @sniffer_class before calling #client.
365
+ # #detect_os_major_version uses #client.
366
+ @sniffer_class = nil
367
+ begin
368
+ @sniffer_class = Object.const_get(@sniffer_class_name) if @sniffer_class_name
369
+ rescue Exception => ex
370
+ raise Fluent::ConfigError, "Could not load sniffer class #{@sniffer_class_name}: #{ex}"
371
+ end
372
+
373
+ @selector_class = nil
374
+ begin
375
+ @selector_class = Object.const_get(@selector_class_name) if @selector_class_name
376
+ rescue Exception => ex
377
+ raise Fluent::ConfigError, "Could not load selector class #{@selector_class_name}: #{ex}"
378
+ end
379
+
380
+ @last_seen_major_version = if major_version = handle_last_seen_os_major_version
381
+ major_version
382
+ else
383
+ @default_opensearch_version
384
+ end
385
+
386
+ if @validate_client_version && !dry_run?
387
+ if @last_seen_major_version != client_library_version.to_i
388
+ raise Fluent::ConfigError, <<-EOC
389
+ Detected OpenSearch #{@last_seen_major_version} but you are using OpenSearch client #{client_library_version}.
390
+ Please consider using the #{@last_seen_major_version}.x series OpenSearch client.
391
+ EOC
392
+ end
393
+ end
394
+
395
+ if @last_seen_major_version >= 1
396
+ case @ssl_version
397
+ when :SSLv23, :TLSv1, :TLSv1_1
398
+ if @scheme == :https
399
+ log.warn "Detected OpenSearch 1.x or above and enabled insecure security:
400
+ You might have to specify `ssl_version TLSv1_2` in the configuration."
401
+ end
402
+ end
403
+ end
404
+
405
+ if @ssl_version && @scheme == :https
406
+ if !@http_backend_excon_nonblock
407
+ log.warn "TLS handshake will be stucked with block connection.
408
+ Consider setting `http_backend_excon_nonblock` to true"
409
+ end
410
+ end
411
+
412
+ # Handle nested key specifiers that are missing the "$." prefix.
413
+ @id_key = convert_compat_id_key(@id_key) if @id_key
414
+ @parent_key = convert_compat_id_key(@parent_key) if @parent_key
415
+ @routing_key = convert_compat_id_key(@routing_key) if @routing_key
416
+
417
+ @routing_key_name = configure_routing_key_name
418
+ @meta_config_map = create_meta_config_map
419
+ @current_config = nil
420
+ @compressable_connection = false
421
+
422
+ @ignore_exception_classes = @ignore_exceptions.map do |exception|
423
+ unless Object.const_defined?(exception)
424
+ log.warn "Cannot find class #{exception}. Will ignore it."
425
+
426
+ nil
427
+ else
428
+ Object.const_get(exception)
429
+ end
430
+ end.compact
431
+
432
+ if @bulk_message_request_threshold < 0
433
+ class << self
434
+ alias_method :split_request?, :split_request_size_uncheck?
435
+ end
436
+ else
437
+ class << self
438
+ alias_method :split_request?, :split_request_size_check?
439
+ end
440
+ end
441
+ end
442
+
443
+ def dry_run?
444
+ if Fluent::Engine.respond_to?(:dry_run_mode)
445
+ Fluent::Engine.dry_run_mode
446
+ elsif Fluent::Engine.respond_to?(:supervisor_mode)
447
+ Fluent::Engine.supervisor_mode
448
+ end
449
+ end
450
+
451
+ def placeholder?(name, param)
452
+ placeholder_validities = []
453
+ placeholder_validators(name, param).each do |v|
454
+ begin
455
+ v.validate!
456
+ placeholder_validities << true
457
+ rescue Fluent::ConfigError => e
458
+ log.debug("'#{name} #{param}' is tested built-in placeholder(s) but there is no valid placeholder(s). error: #{e}")
459
+ placeholder_validities << false
460
+ end
461
+ end
462
+ placeholder_validities.include?(true)
463
+ end
464
+
465
+ def compression
466
+ !(@compression_level == :no_compression)
467
+ end
468
+
469
+ def compression_strategy
470
+ case @compression_level
471
+ when :default_compression
472
+ Zlib::DEFAULT_COMPRESSION
473
+ when :best_compression
474
+ Zlib::BEST_COMPRESSION
475
+ when :best_speed
476
+ Zlib::BEST_SPEED
477
+ else
478
+ Zlib::NO_COMPRESSION
479
+ end
480
+ end
481
+
482
+ def backend_options
483
+ case @http_backend
484
+ when :excon
485
+ { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass, nonblock: @http_backend_excon_nonblock }
486
+ when :typhoeus
487
+ require 'typhoeus'
488
+ { sslkey: @client_key, sslcert: @client_cert, keypasswd: @client_key_pass }
489
+ end
490
+ rescue LoadError => ex
491
+ log.error_backtrace(ex.backtrace)
492
+ raise Fluent::ConfigError, "You must install #{@http_backend} gem. Exception: #{ex}"
493
+ end
494
+
495
+ def handle_last_seen_os_major_version
496
+ if @verify_os_version_at_startup && !dry_run?
497
+ retry_operate(@max_retry_get_os_version,
498
+ @fail_on_detecting_os_version_retry_exceed,
499
+ @catch_transport_exception_on_retry) do
500
+ detect_os_major_version
501
+ end
502
+ else
503
+ nil
504
+ end
505
+ end
506
+
507
+ def detect_os_major_version
508
+ @_os_info ||= client.info
509
+ begin
510
+ unless version = @_os_info.dig("version", "number")
511
+ version = @default_opensearch_version
512
+ end
513
+ rescue NoMethodError => e
514
+ log.warn "#{@_os_info} can not dig version information. Assuming OpenSearch #{@default_opensearch_version}", error: e
515
+ version = @default_opensearch_version
516
+ end
517
+ version.to_i
518
+ end
519
+
520
+ def client_library_version
521
+ OpenSearch::VERSION
522
+ end
523
+
524
+ def configure_routing_key_name
525
+ 'routing'.freeze
526
+ end
527
+
528
+ def convert_compat_id_key(key)
529
+ if key.include?('.') && !key.start_with?('$[')
530
+ key = "$.#{key}" unless key.start_with?('$.')
531
+ end
532
+ key
533
+ end
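As a rough sketch of the conversion above (return values inferred from the method body), dotted keys gain the record_accessor "$." prefix unless they already use "$."- or "$["-style notation:

  convert_compat_id_key("user.name")          # => "$.user.name"
  convert_compat_id_key("$.user.name")        # => "$.user.name"
  convert_compat_id_key("$['user']['name']")  # => "$['user']['name']"
  convert_compat_id_key("request_id")         # => "request_id"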
534
+
535
+ def create_meta_config_map
536
+ result = []
537
+ result << [record_accessor_create(@id_key), '_id'] if @id_key
538
+ result << [record_accessor_create(@parent_key), '_parent'] if @parent_key
539
+ result << [record_accessor_create(@routing_key), @routing_key_name] if @routing_key
540
+ result
541
+ end
542
+
543
+ # once fluent v0.14 is released we might be able to use
544
+ # Fluent::Parser::TimeParser, but it doesn't quite do what we want - it gives
545
+ # [sec,nsec] whereas we want something we can call `strftime` on...
546
+ def create_time_parser
547
+ if @time_key_format
548
+ begin
549
+ # Strptime doesn't support all formats, but for those it does support it's
550
+ # blazingly fast.
551
+ strptime = Strptime.new(@time_key_format)
552
+ Proc.new { |value|
553
+ value = convert_numeric_time_into_string(value, @time_key_format) if value.is_a?(Numeric)
554
+ strptime.exec(value).to_datetime
555
+ }
556
+ rescue
557
+ # Can happen if Strptime doesn't recognize the format; or
558
+ # if strptime couldn't be required (because it's not installed -- it's
559
+ # ruby 2 only)
560
+ Proc.new { |value|
561
+ value = convert_numeric_time_into_string(value, @time_key_format) if value.is_a?(Numeric)
562
+ DateTime.strptime(value, @time_key_format)
563
+ }
564
+ end
565
+ else
566
+ Proc.new { |value|
567
+ value = convert_numeric_time_into_string(value) if value.is_a?(Numeric)
568
+ DateTime.parse(value)
569
+ }
570
+ end
571
+ end
572
+
573
+ def convert_numeric_time_into_string(numeric_time, time_key_format = "%Y-%m-%d %H:%M:%S.%N%z")
574
+ numeric_time_parser = Fluent::NumericTimeParser.new(:float)
575
+ Time.at(numeric_time_parser.parse(numeric_time).to_r).strftime(time_key_format)
576
+ end
577
+
578
+ def parse_time(value, event_time, tag)
579
+ @time_parser.call(value)
580
+ rescue => e
581
+ router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
582
+ return Time.at(event_time).to_datetime
583
+ end
584
+
585
+ def client(host = nil, compress_connection = false)
586
+ # check here to see if we already have a client connection for the given host
587
+ connection_options = get_connection_options(host)
588
+
589
+ @_os = nil unless is_existing_connection(connection_options[:hosts])
590
+ @_os = nil unless @compressable_connection == compress_connection
591
+
592
+ @_os ||= begin
593
+ @compressable_connection = compress_connection
594
+ @current_config = connection_options[:hosts].clone
595
+ adapter_conf = if @endpoint
596
+ lambda do |f|
597
+ f.request(
598
+ :aws_sigv4,
599
+ service: 'es',
600
+ region: @endpoint.region,
601
+ credentials: aws_credentials(@endpoint),
602
+ )
603
+
604
+ f.adapter @http_backend, @backend_options
605
+ end
606
+ else
607
+ lambda {|f| f.adapter @http_backend, @backend_options }
608
+ end
609
+
610
+ local_reload_connections = @reload_connections
611
+ if local_reload_connections && @reload_after > DEFAULT_RELOAD_AFTER
612
+ local_reload_connections = @reload_after
613
+ end
614
+
615
+ gzip_headers = if compress_connection
616
+ {'Content-Encoding' => 'gzip'}
617
+ else
618
+ {}
619
+ end
620
+ headers = {}.merge(@custom_headers)
621
+ .merge(gzip_headers)
622
+ ssl_options = { verify: @ssl_verify, ca_file: @ca_file}.merge(@ssl_version_options)
623
+
624
+ transport = OpenSearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
625
+ options: {
626
+ reload_connections: local_reload_connections,
627
+ reload_on_failure: @reload_on_failure,
628
+ resurrect_after: @resurrect_after,
629
+ logger: @transport_logger,
630
+ transport_options: {
631
+ headers: headers,
632
+ request: { timeout: @request_timeout },
633
+ ssl: ssl_options,
634
+ },
635
+ http: {
636
+ user: @user,
637
+ password: @password,
638
+ scheme: @scheme
639
+ },
640
+ sniffer_class: @sniffer_class,
641
+ serializer_class: @serializer_class,
642
+ selector_class: @selector_class,
643
+ compression: compress_connection,
644
+ }), &adapter_conf)
645
+ OpenSearch::Client.new transport: transport
646
+ end
647
+ end
648
+
649
+ def get_escaped_userinfo(host_str)
650
+ if m = host_str.match(/(?<scheme>.*)%{(?<user>.*)}:%{(?<password>.*)}(?<path>@.*)/)
651
+ m["scheme"] +
652
+ URI.encode_www_form_component(m["user"]) +
653
+ ':' +
654
+ URI.encode_www_form_component(m["password"]) +
655
+ m["path"]
656
+ else
657
+ host_str
658
+ end
659
+ end
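For example (a sketch; the output is inferred from URI.encode_www_form_component), credentials wrapped in %{} are percent-encoded so the host URL stays parseable:

  get_escaped_userinfo("https://%{j+hn}:%{s:cret}@logs.example.com")
  # => "https://j%2Bhn:s%3Acret@logs.example.com"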
660
+
661
+ def get_connection_options(con_host=nil)
662
+
663
+ hosts = if @endpoint # For AWS OpenSearch Service
664
+ uri = URI(@endpoint.url)
665
+ host = %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
666
+ hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
667
+ hash
668
+ end
669
+ [host]
670
+ elsif con_host || @hosts
671
+ (con_host || @hosts).split(',').map do |host_str|
672
+ # Support legacy hosts format host:port,host:port,host:port...
673
+ if host_str.match(%r{^[^:]+(\:\d+)?$})
674
+ {
675
+ host: host_str.split(':')[0],
676
+ port: (host_str.split(':')[1] || @port).to_i,
677
+ scheme: @scheme.to_s
678
+ }
679
+ else
680
+ # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
681
+ uri = URI(get_escaped_userinfo(host_str))
682
+ %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
683
+ hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
684
+ hash
685
+ end
686
+ end
687
+ end.compact
688
+ else
689
+ if Resolv::IPv6::Regex.match(@host)
690
+ [{host: "[#{@host}]", scheme: @scheme.to_s, port: @port}]
691
+ else
692
+ [{host: @host, port: @port, scheme: @scheme.to_s}]
693
+ end
694
+ end.each do |host|
695
+ host.merge!(user: @user, password: @password) if !host[:user] && @user
696
+ host.merge!(path: @path) if !host[:path] && @path
697
+ end
698
+
699
+ {
700
+ hosts: hosts
701
+ }
702
+ end
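A sketch of the two hosts formats handled above, assuming the default port (9200), scheme (http), and no global user/password/path:

  get_connection_options("es1:9200,es2")[:hosts]
  # => [{host: "es1", port: 9200, scheme: "http"},
  #     {host: "es2", port: 9200, scheme: "http"}]
  get_connection_options("https://john:pass@logs.example.com/opensearch")[:hosts]
  # => [{host: "logs.example.com", port: 443, scheme: "https",
  #      user: "john", password: "pass", path: "/opensearch"}]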
703
+
704
+ def connection_options_description(con_host=nil)
705
+ get_connection_options(con_host)[:hosts].map do |host_info|
706
+ attributes = host_info.dup
707
+ attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
708
+ attributes.inspect
709
+ end.join(', ')
710
+ end
711
+
712
+ # append_record_to_messages adds a record to the bulk message
713
+ # payload to be submitted to OpenSearch. Records that do
714
+ # not include '_id' field are skipped when 'write_operation'
715
+ # is configured for 'create', 'update', or 'upsert'
716
+ #
717
+ # returns 'true' if record was appended to the bulk message
718
+ # and 'false' otherwise
719
+ def append_record_to_messages(op, meta, header, record, msgs)
720
+ case op
721
+ when UPDATE_OP, UPSERT_OP
722
+ if meta.has_key?(ID_FIELD)
723
+ header[UPDATE_OP] = meta
724
+ msgs << @dump_proc.call(header) << BODY_DELIMITER
725
+ msgs << @dump_proc.call(update_body(record, op)) << BODY_DELIMITER
726
+ return true
727
+ end
728
+ when CREATE_OP
729
+ if meta.has_key?(ID_FIELD)
730
+ header[CREATE_OP] = meta
731
+ msgs << @dump_proc.call(header) << BODY_DELIMITER
732
+ msgs << @dump_proc.call(record) << BODY_DELIMITER
733
+ return true
734
+ end
735
+ when INDEX_OP
736
+ header[INDEX_OP] = meta
737
+ msgs << @dump_proc.call(header) << BODY_DELIMITER
738
+ msgs << @dump_proc.call(record) << BODY_DELIMITER
739
+ return true
740
+ end
741
+ return false
742
+ end
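The bulk payload built here is newline-delimited JSON: one header line per record followed by the document body. A rough sketch for the default "index" operation with logstash_format enabled (field values are placeholders):

  {"index":{"_index":"logstash-2024.01.15","_type":"_doc"}}
  {"@timestamp":"2024-01-15T12:34:56.000000000+00:00","message":"hello"}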
743
+
744
+ def update_body(record, op)
745
+ update = remove_keys(record)
746
+ if @suppress_doc_wrap
747
+ return update
748
+ end
749
+ body = {"doc".freeze => update}
750
+ if op == UPSERT_OP
751
+ if update == record
752
+ body["doc_as_upsert".freeze] = true
753
+ else
754
+ body[UPSERT_OP] = record
755
+ end
756
+ end
757
+ body
758
+ end
759
+
760
+ def remove_keys(record)
761
+ keys = record[@remove_keys_on_update_key] || @remove_keys_on_update || []
762
+ record.delete(@remove_keys_on_update_key)
763
+ return record unless keys.any?
764
+ record = record.dup
765
+ keys.each { |key| record.delete(key) }
766
+ record
767
+ end
768
+
769
+ def flatten_record(record, prefix=[])
770
+ ret = {}
771
+ if record.is_a? Hash
772
+ record.each { |key, value|
773
+ ret.merge! flatten_record(value, prefix + [key.to_s])
774
+ }
775
+ elsif record.is_a? Array
776
+ # Don't mess with arrays, leave them unprocessed
777
+ ret.merge!({prefix.join(@flatten_hashes_separator) => record})
778
+ else
779
+ return {prefix.join(@flatten_hashes_separator) => record}
780
+ end
781
+ ret
782
+ end
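For instance (a sketch, assuming the default flatten_hashes_separator "_"):

  flatten_record({"user" => {"name" => "alice", "roles" => ["admin", "ops"]}})
  # => {"user_name" => "alice", "user_roles" => ["admin", "ops"]}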
783
+
784
+ def expand_placeholders(chunk)
785
+ logstash_prefix = extract_placeholders(@logstash_prefix, chunk)
786
+ logstash_dateformat = extract_placeholders(@logstash_dateformat, chunk)
787
+ index_name = extract_placeholders(@index_name, chunk)
788
+ if @template_name
789
+ template_name = extract_placeholders(@template_name, chunk)
790
+ else
791
+ template_name = nil
792
+ end
793
+ if @customize_template
794
+ customize_template = @customize_template.each_with_object({}) { |(key, value), hash| hash[key] = extract_placeholders(value, chunk) }
795
+ else
796
+ customize_template = nil
797
+ end
798
+ if @application_name
799
+ application_name = extract_placeholders(@application_name, chunk)
800
+ else
801
+ application_name = nil
802
+ end
803
+ if @pipeline
804
+ pipeline = extract_placeholders(@pipeline, chunk)
805
+ else
806
+ pipeline = nil
807
+ end
808
+ return logstash_prefix, logstash_dateformat, index_name, template_name, customize_template, application_name, pipeline
809
+ end
810
+
811
+ def multi_workers_ready?
812
+ true
813
+ end
814
+
815
+ def inject_chunk_id_to_record_if_needed(record, chunk_id)
816
+ if @metainfo&.include_chunk_id
817
+ record[@metainfo.chunk_id_key] = chunk_id
818
+ record
819
+ else
820
+ record
821
+ end
822
+ end
823
+
824
+ def write(chunk)
825
+ bulk_message_count = Hash.new { |h,k| h[k] = 0 }
826
+ bulk_message = Hash.new { |h,k| h[k] = '' }
827
+ header = {}
828
+ meta = {}
829
+
830
+ tag = chunk.metadata.tag
831
+ chunk_id = dump_unique_id_hex(chunk.unique_id)
832
+ extracted_values = expand_placeholders(chunk)
833
+ host = if @hosts
834
+ extract_placeholders(@hosts, chunk)
835
+ else
836
+ extract_placeholders(@host, chunk)
837
+ end
838
+
839
+ affinity_target_indices = get_affinity_target_indices(chunk)
840
+ chunk.msgpack_each do |time, record|
841
+ next unless record.is_a? Hash
842
+
843
+ record = inject_chunk_id_to_record_if_needed(record, chunk_id)
844
+
845
+ begin
846
+ meta, header, record = process_message(tag, meta, header, time, record, affinity_target_indices, extracted_values)
847
+ info = if @include_index_in_url
848
+ RequestInfo.new(host, meta.delete("_index".freeze), meta["_index".freeze], meta.delete("_alias".freeze))
849
+ else
850
+ RequestInfo.new(host, nil, meta["_index".freeze], meta.delete("_alias".freeze))
851
+ end
852
+
853
+ if split_request?(bulk_message, info)
854
+ bulk_message.each do |info, msgs|
855
+ send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
856
+ msgs.clear
857
+ # Clear bulk_message_count for this info.
858
+ bulk_message_count[info] = 0;
859
+ next
860
+ end
861
+ end
862
+
863
+ if append_record_to_messages(@write_operation, meta, header, record, bulk_message[info])
864
+ bulk_message_count[info] += 1;
865
+ else
866
+ if @emit_error_for_missing_id
867
+ raise MissingIdFieldError, "Missing '_id' field. Write operation is #{@write_operation}"
868
+ else
869
+ log.on_debug { log.debug("Dropping record because its missing an '_id' field and write_operation is #{@write_operation}: #{record}") }
870
+ end
871
+ end
872
+ rescue => e
873
+ router.emit_error_event(tag, time, record, e)
874
+ end
875
+ end
876
+
877
+ bulk_message.each do |info, msgs|
878
+ send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
879
+ msgs.clear
880
+ end
881
+ end
882
+
883
+ def target_index_affinity_enabled?()
884
+ @target_index_affinity && @logstash_format && @id_key && (@write_operation == UPDATE_OP || @write_operation == UPSERT_OP)
885
+ end
886
+
887
+ def get_affinity_target_indices(chunk)
888
+ indices = Hash.new
889
+ if target_index_affinity_enabled?()
890
+ id_key_accessor = record_accessor_create(@id_key)
891
+ ids = Set.new
892
+ chunk.msgpack_each do |time, record|
893
+ next unless record.is_a? Hash
894
+ begin
895
+ ids << id_key_accessor.call(record)
896
+ end
897
+ end
898
+ log.debug("Find affinity target_indices by quering on OpenSearch (write_operation #{@write_operation}) for ids: #{ids.to_a}")
899
+ options = {
900
+ :index => "#{logstash_prefix}#{@logstash_prefix_separator}*",
901
+ }
902
+ query = {
903
+ 'query' => { 'ids' => { 'values' => ids.to_a } },
904
+ '_source' => false,
905
+ 'sort' => [
906
+ {"_index" => {"order" => "desc"}}
907
+ ]
908
+ }
909
+ result = client.search(options.merge(:body => Yajl.dump(query)))
910
+ # There should be just one hit per _id, but in case there are still multiple, only the oldest index is stored in the map
911
+ result['hits']['hits'].each do |hit|
912
+ indices[hit["_id"]] = hit["_index"]
913
+ log.debug("target_index for id: #{hit["_id"]} from es: #{hit["_index"]}")
914
+ end
915
+ end
916
+ indices
917
+ end
918
+
919
+ def split_request?(bulk_message, info)
920
+ # For safety.
921
+ end
922
+
923
+ def split_request_size_check?(bulk_message, info)
924
+ bulk_message[info].size > @bulk_message_request_threshold
925
+ end
926
+
927
+ def split_request_size_uncheck?(bulk_message, info)
928
+ false
929
+ end
930
+
931
+ def process_message(tag, meta, header, time, record, affinity_target_indices, extracted_values)
932
+ logstash_prefix, logstash_dateformat, index_name, _template_name, _customize_template, application_name, pipeline = extracted_values
933
+
934
+ if @flatten_hashes
935
+ record = flatten_record(record)
936
+ end
937
+
938
+ dt = nil
939
+ if @logstash_format || @include_timestamp
940
+ if record.has_key?(TIMESTAMP_FIELD)
941
+ rts = record[TIMESTAMP_FIELD]
942
+ dt = parse_time(rts, time, tag)
943
+ elsif record.has_key?(@time_key)
944
+ rts = record[@time_key]
945
+ dt = parse_time(rts, time, tag)
946
+ record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
947
+ else
948
+ dt = Time.at(time).to_datetime
949
+ record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
950
+ end
951
+ end
952
+
953
+ target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
954
+ if target_index_parent && target_index_parent[target_index_child_key]
955
+ target_index_alias = target_index = target_index_parent.delete(target_index_child_key)
956
+ elsif @logstash_format
957
+ dt = dt.new_offset(0) if @utc_index
958
+ target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(logstash_dateformat)}"
959
+ target_index_alias = "#{logstash_prefix}#{@logstash_prefix_separator}#{application_name}#{@logstash_prefix_separator}#{dt.strftime(logstash_dateformat)}"
960
+ else
961
+ target_index_alias = target_index = index_name
962
+ end
963
+
964
+ # Change target_index to lower-case since OpenSearch doesn't
965
+ # allow upper-case characters in index names.
966
+ target_index = target_index.downcase
967
+ target_index_alias = target_index_alias.downcase
968
+ if @include_tag_key
969
+ record[@tag_key] = tag
970
+ end
971
+
972
+ # If the affinity target indices map has a value for this particular id, use it as the target_index
973
+ if !affinity_target_indices.empty?
974
+ id_accessor = record_accessor_create(@id_key)
975
+ id_value = id_accessor.call(record)
976
+ if affinity_target_indices.key?(id_value)
977
+ target_index = affinity_target_indices[id_value]
978
+ end
979
+ end
980
+
981
+ # OpenSearch only supports "_doc".
982
+ target_type = DEFAULT_TYPE_NAME
983
+
984
+ meta.clear
985
+ meta["_index".freeze] = target_index
986
+ meta["_type".freeze] = target_type
987
+ meta["_alias".freeze] = target_index_alias
988
+
989
+ if @pipeline
990
+ meta["pipeline".freeze] = pipeline
991
+ end
992
+
993
+ @meta_config_map.each do |record_accessor, meta_key|
994
+ if raw_value = record_accessor.call(record)
995
+ meta[meta_key] = raw_value
996
+ end
997
+ end
998
+
999
+ if @remove_keys
1000
+ @remove_keys.each { |key| record.delete(key) }
1001
+ end
1002
+
1003
+ return [meta, header, record]
1004
+ end
1005
+
1006
+ # returns [parent, child_key] of the child described by the path array in the record's tree
1007
+ # returns [nil, child_key] if the path doesn't exist in the record
1008
+ def get_parent_of(record, path)
1009
+ parent_object = path[0..-2].reduce(record) { |a, e| a.is_a?(Hash) ? a[e] : nil }
1010
+ [parent_object, path[-1]]
1011
+ end
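A brief sketch of the lookup above, e.g. for a target_index_key of "kubernetes.labels.app" (already split into a path array during configure):

  record = {"kubernetes" => {"labels" => {"app" => "web"}}}
  get_parent_of(record, ["kubernetes", "labels", "app"])   # => [{"app" => "web"}, "app"]
  get_parent_of(record, ["kubernetes", "missing", "app"])  # => [nil, "app"]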
1012
+
1013
+ # gzip compress data
1014
+ def gzip(string)
1015
+ wio = StringIO.new("w")
1016
+ w_gz = Zlib::GzipWriter.new(wio, strategy = compression_strategy)
1017
+ w_gz.write(string)
1018
+ w_gz.close
1019
+ wio.string
1020
+ end
1021
+
1022
+ def placeholder_substitution_needed_for_template?
1023
+ need_substitution = placeholder?(:host, @host.to_s) ||
1024
+ placeholder?(:index_name, @index_name.to_s) ||
1025
+ placeholder?(:template_name, @template_name.to_s) ||
1026
+ @customize_template&.values&.any? { |value| placeholder?(:customize_template, value.to_s) } ||
1027
+ placeholder?(:logstash_prefix, @logstash_prefix.to_s) ||
1028
+ placeholder?(:logstash_dateformat, @logstash_dateformat.to_s) ||
1029
+ placeholder?(:application_name, @application_name.to_s) ||
1030
+ log.debug("Need substitution: #{need_substitution}")
1031
+ need_substitution
1032
+ end
1033
+
1034
+ def template_installation(template_name, customize_template, application_name, target_index, host)
1035
+ # for safety.
1036
+ end
1037
+
1038
+ def template_installation_actual(template_name, customize_template, application_name, target_index, host=nil)
1039
+ if template_name && @template_file
1040
+ if !@logstash_format && @template_names.include?(template_name)
1041
+ log.debug("Template #{template_name} already exists (cached)")
1042
+ else
1043
+ retry_operate(@max_retry_putting_template,
1044
+ @fail_on_putting_template_retry_exceed,
1045
+ @catch_transport_exception_on_retry) do
1046
+ if customize_template
1047
+ template_custom_install(template_name, @template_file, @template_overwrite, customize_template, host, target_index, @index_separator)
1048
+ else
1049
+ template_install(template_name, @template_file, @template_overwrite, host, target_index, @index_separator)
1050
+ end
1051
+ end
1052
+ @template_names << template_name
1053
+ end
1054
+ end
1055
+ end
1056
+
1057
+ # send_bulk sends a given bulk request, along with the original tag,
1058
+ # chunk, and bulk_message_count
1059
+ def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info)
1060
+ _logstash_prefix, _logstash_dateformat, index_name, template_name, customize_template, application_name, _pipeline = extracted_values
1061
+ template_installation(template_name, customize_template, application_name, index_name, info.host)
1062
+
1063
+ begin
1064
+
1065
+ log.on_trace { log.trace "bulk request: #{data}" }
1066
+
1067
+ prepared_data = if compression
1068
+ gzip(data)
1069
+ else
1070
+ data
1071
+ end
1072
+
1073
+ response = client(info.host, compression).bulk body: prepared_data, index: info.index
1074
+ log.on_trace { log.trace "bulk response: #{response}" }
1075
+
1076
+ if response['errors']
1077
+ error = Fluent::Plugin::OpenSearchErrorHandler.new(self)
1078
+ error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
1079
+ end
1080
+ rescue RetryStreamError => e
1081
+ log.trace "router.emit_stream for retry stream doing..."
1082
+ emit_tag = @retry_tag ? @retry_tag : tag
1083
+ # check capacity of buffer space
1084
+ if retry_stream_retryable?
1085
+ router.emit_stream(emit_tag, e.retry_stream)
1086
+ else
1087
+ raise RetryStreamEmitFailure, "buffer is full."
1088
+ end
1089
+ log.trace "router.emit_stream for retry stream done."
1090
+ rescue => e
1091
+ ignore = @ignore_exception_classes.any? { |clazz| e.class <= clazz }
1092
+
1093
+ log.warn "Exception ignored in tag #{tag}: #{e.class.name} #{e.message}" if ignore
1094
+
1095
+ @_os = nil if @reconnect_on_error
1096
+ @_os_info = nil if @reconnect_on_error
1097
+
1098
+ raise UnrecoverableRequestFailure if ignore && @exception_backup
1099
+
1100
+ # FIXME: identify unrecoverable errors and raise UnrecoverableRequestFailure instead
1101
+ raise RecoverableRequestFailure, "could not push logs to OpenSearch cluster (#{connection_options_description(info.host)}): #{e.message}" unless ignore
1102
+ end
1103
+ end
1104
+
1105
+ def retry_stream_retryable?
1106
+ @buffer.storable?
1107
+ end
1108
+
1109
+ def is_existing_connection(host)
1110
+ # check if the host provided matches the current connection
1111
+ return false if @_os.nil?
1112
+ return false if @current_config.nil?
1113
+ return false if host.length != @current_config.length
1114
+
1115
+ for i in 0...host.length
1116
+ if !host[i][:host].eql?(@current_config[i][:host]) || host[i][:port] != @current_config[i][:port]
1117
+ return false
1118
+ end
1119
+ end
1120
+
1121
+ return true
1122
+ end
1123
+ end
1124
+ end