fluent-plugin-elasticsearch 5.2.0 → 5.2.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 4c4d872817c5c5f788b40c89adfddec72dc5765666ab90046a38d5e881ba4f2d
-   data.tar.gz: 8e83854fa3eea144ba97e44ba52885dd5f28ef3801fe293c69cf5158e7839789
+   metadata.gz: 7e2737dc1ced4c50a3db85d71c83351e77a606d581fbd768261ca6613b1700f8
+   data.tar.gz: 49a84ff1ea184c4afd43e69fb6cbb89559926bbd6ac063196bd777281be17d2b
  SHA512:
-   metadata.gz: 65eb51a5cd710eb39c54f822f3748bb6ba202ebc87d9aab563adf4befec9696279219f452cc99a1d7528e4c484e03807f195cba8695ec16cc0b3525793a872be
-   data.tar.gz: 3ce726708a2b1900f27999518a56bbf857c28337ce1e3d82ab0acd8fbdb58d92488ddb7e9ccf5a8482768b2c4da1c261b96835f02237321b1a941c6d3b0eb2dc
+   metadata.gz: 157dbe3ab067ec279f2051a8b6c6ac25538821903b48e96a09687b547270796d7f6972d0707225c99cd36694e8a8668d32dc9ebba3599d24f08c814712c0849a
+   data.tar.gz: 73c611531aa95d5d03d8bd5146ab5f6812c30b3be634f5d1ca139b2ffc6206ccebb8915c4fa1e78d2d53706f6c14b7203342d235bd217a89ac9bf0135e3fec21
.github/dependabot.yml ADDED
@@ -0,0 +1,6 @@
+ version: 2
+ updates:
+ - package-ecosystem: "github-actions"
+   directory: "/"
+   schedule:
+     interval: "weekly"
.github/workflows/linux.yml CHANGED
@@ -2,6 +2,9 @@ name: Testing on Ubuntu
  on:
    - push
    - pull_request
+ permissions:
+   contents: read
+
  jobs:
    build:
      runs-on: ${{ matrix.os }}
@@ -13,7 +16,7 @@ jobs:
        - ubuntu-latest
      name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
      steps:
-       - uses: actions/checkout@v2
+       - uses: actions/checkout@v3
        - uses: ruby/setup-ruby@v1
          with:
            ruby-version: ${{ matrix.ruby }}
.github/workflows/macos.yml CHANGED
@@ -2,6 +2,9 @@ name: Testing on macOS
  on:
    - push
    - pull_request
+ permissions:
+   contents: read
+
  jobs:
    build:
      runs-on: ${{ matrix.os }}
@@ -13,7 +16,7 @@ jobs:
        - macOS-latest
      name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
      steps:
-       - uses: actions/checkout@v2
+       - uses: actions/checkout@v3
        - uses: ruby/setup-ruby@v1
          with:
            ruby-version: ${{ matrix.ruby }}
.github/workflows/windows.yml CHANGED
@@ -2,6 +2,9 @@ name: Testing on Windows
  on:
    - push
    - pull_request
+ permissions:
+   contents: read
+
  jobs:
    build:
      runs-on: ${{ matrix.os }}
@@ -13,7 +16,7 @@ jobs:
        - windows-latest
      name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
      steps:
-       - uses: actions/checkout@v2
+       - uses: actions/checkout@v3
        - uses: ruby/setup-ruby@v1
          with:
            ruby-version: ${{ matrix.ruby }}
data/History.md CHANGED
@@ -2,6 +2,22 @@
 
  ### [Unreleased]
 
+ ### 5.2.3
+ - Bump actions/checkout from 2 to 3 (#978)
+ - chore: Included githubactions in the dependabot config (#977)
+ - chore: Set permissions for GitHub actions (#972)
+ - Remove nested msgpack\_each in handle\_error (#970)
+ - do not overwrite @timestamp in data stream if it already exists in the record (#968)
+
+ ### 5.2.2
+ - Add missing top level class markers (#961)
+ - Ensure use_record_as_seed for same records (#960)
+
+ ### 5.2.1
+ - respect include\_tag\_key and tag\_key setting when using data streams (#936)
+ - Handle unsupported version error (#956)
+ - Display deprecated warning on ES dynamic plugin (#955)
+
  ### 5.2.0
  - Migrate to handle Elasticsearch 8 (#949)
 
fluent-plugin-elasticsearch.gemspec CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
  Gem::Specification.new do |s|
    s.name = 'fluent-plugin-elasticsearch'
-   s.version = '5.2.0'
+   s.version = '5.2.3'
    s.authors = ['diogo', 'pitr', 'Hiroshi Hatake']
    s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com', 'cosmo0920.wp@gmail.com']
    s.description = %q{Elasticsearch output plugin for Fluent event collector}
lib/fluent/plugin/elasticsearch_compat.rb CHANGED
@@ -25,3 +25,6 @@ begin
    ::SELECTOR_CLASS = Elasticsearch::Transport::Transport::Connections::Selector
  rescue LoadError
  end
+ unless defined?(::Elasticsearch::UnsupportedProductError)
+   class ::Elasticsearch::UnsupportedProductError < StandardError; end
+ end
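For context, this shim follows the usual guard pattern of defining a placeholder class only when the installed library does not already provide one, so that rescue clauses naming ::Elasticsearch::UnsupportedProductError stay valid against older elasticsearch gems. A minimal standalone sketch of the pattern (SomeLib and SpecialError are illustrative stand-ins, not names from the plugin):

    # Define a fallback error class only if the library has not already done so;
    # `rescue SomeLib::SpecialError` then works on any library version.
    module SomeLib; end   # stand-in for the third-party namespace

    unless defined?(::SomeLib::SpecialError)
      class ::SomeLib::SpecialError < StandardError; end
    end

    begin
      raise ::SomeLib::SpecialError, "demo"
    rescue ::SomeLib::SpecialError => e
      puts "caught: #{e.message}"   # => caught: demo
    end
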
lib/fluent/plugin/elasticsearch_error_handler.rb CHANGED
@@ -35,7 +35,7 @@ class Fluent::Plugin::ElasticsearchErrorHandler
      end
    end
 
-   def handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+   def handle_error(response, tag, chunk, bulk_message_count, extracted_values, unpacked_msg_arr)
      items = response['items']
      if items.nil? || !items.is_a?(Array)
        raise ElasticsearchVersionMismatch, "The response format was unrecognized: #{response}"
@@ -48,7 +48,11 @@ class Fluent::Plugin::ElasticsearchErrorHandler
      meta = {}
      header = {}
      affinity_target_indices = @plugin.get_affinity_target_indices(chunk)
-     chunk.msgpack_each do |time, rawrecord|
+
+     unpacked_msg_arr.each do |msg|
+       time = msg[:time]
+       rawrecord = msg[:record]
+
        bulk_message = ''
        next unless rawrecord.is_a? Hash
        begin
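Context for the new unpacked_msg_arr parameter (#970): handle_error previously called chunk.msgpack_each itself to replay failed records, re-unpacking a chunk that the caller's own msgpack_each loop could still be iterating. The caller now collects each (time, record) pair as it unpacks and hands that array in, so the chunk is only ever unpacked once. A simplified sketch of the resulting shape (names are illustrative, not the plugin's exact internals):

    # The writer unpacks the chunk once, keeping plain Ruby objects around;
    # on a bulk error the handler replays from that array instead of
    # re-reading the chunk.
    def write_chunk(chunk, error_handler)
      unpacked = []
      chunk.each do |time, record|            # stand-in for chunk.msgpack_each
        unpacked << { time: time, record: record }
        # ... append record to the bulk request body ...
      end
      error_handler.call(unpacked)            # no nested msgpack_each here
    end

    chunk = [[12345, { "message" => "a" }], [12346, { "message" => "b" }]]
    write_chunk(chunk, ->(msgs) { msgs.each { |m| puts m[:record]["message"] } })
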
lib/fluent/plugin/filter_elasticsearch_genid.rb CHANGED
@@ -53,7 +53,7 @@ module Fluent::Plugin
        seed += tag + separator if @include_tag_in_seed
        seed += time.to_s + separator if @include_time_in_seed
        if @use_entire_record
-         record.each {|k,v| seed += "|#{k}|#{v}"}
+         record.keys.sort.each {|k| seed += "|#{k}|#{record[k]}"}
        else
          seed += record_keys.map {|k| record[k]}.join(separator)
        end
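Why sorting fixes #960: Ruby hashes iterate in insertion order, so two records with identical key/value pairs inserted in different orders previously produced different seeds, and therefore different generated IDs. Sorting the keys makes the seed order-independent. A self-contained check of the new seed construction (this is also why the expected digests in test_filter_elasticsearch_genid.rb below were regenerated):

    require 'digest'

    def seed_for(record)
      seed = +""
      record.keys.sort.each { |k| seed += "|#{k}|#{record[k]}" }
      seed
    end

    a = { "message" => "hi", "host" => "web1" }
    b = { "host" => "web1", "message" => "hi" }   # same pairs, different order

    puts Digest::MD5.base64digest(seed_for(a)) ==
         Digest::MD5.base64digest(seed_for(b))    # => true
    # The old `record.each` iteration would have printed false here.
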
lib/fluent/plugin/out_elasticsearch.rb CHANGED
@@ -492,7 +492,11 @@ EOC
    end
 
    def detect_es_major_version
-     @_es_info ||= client.info
+     begin
+       @_es_info ||= client.info
+     rescue ::Elasticsearch::UnsupportedProductError => e
+       raise Fluent::ConfigError, "Using Elasticsearch client #{client_library_version} is not compatible for your Elasticsearch server. Please check your using elasticsearch gem version and Elasticsearch server."
+     end
      begin
        unless version = @_es_info.dig("version", "number")
          version = @default_elasticsearch_version
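Background for this hunk (#956): from elasticsearch-ruby 7.14 on, client.info raises Elasticsearch::UnsupportedProductError when the server does not identify itself as a compatible Elasticsearch release. Translating it into Fluent::ConfigError turns an opaque startup crash into an actionable configuration error. The shape of the translation, sketched with stand-in classes:

    # ClientError / ConfigError stand in for
    # Elasticsearch::UnsupportedProductError / Fluent::ConfigError.
    class ClientError < StandardError; end
    class ConfigError < StandardError; end

    def fetch_server_info
      raise ClientError, "server is not a supported Elasticsearch product"
    rescue ClientError
      raise ConfigError, "client gem is not compatible with this server; " \
                         "check the elasticsearch gem version against the server"
    end

    begin
      fetch_server_info
    rescue ConfigError => e
      puts e.message   # the user sees a config problem, not a client backtrace
    end
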
@@ -823,6 +827,7 @@ EOC
      bulk_message = Hash.new { |h,k| h[k] = '' }
      header = {}
      meta = {}
+     unpackedMsgArr = {}
 
      tag = chunk.metadata.tag
      chunk_id = dump_unique_id_hex(chunk.unique_id)
@@ -847,9 +852,13 @@ EOC
        RequestInfo.new(host, nil, meta["_index".freeze], meta.delete("_alias".freeze))
      end
 
+     unpackedMsgArr[info] = [] if unpackedMsgArr[info].nil?
+     unpackedMsgArr[info] << {:time => time, :record => record}
+
      if split_request?(bulk_message, info)
        bulk_message.each do |info, msgs|
-         send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
+         send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info, unpackedMsgArr[info]) unless msgs.empty?
+         unpackedMsgArr[info].clear
          msgs.clear
          # Clear bulk_message_count for this info.
          bulk_message_count[info] = 0;
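The new unpackedMsgArr hash mirrors bulk_message key-for-key: both accumulate under the same RequestInfo, and both are flushed and cleared together, so a failing split-out request replays only its own source records. A toy sketch of this accumulate-and-flush-per-key pattern (RequestInfo reduced to a plain string key):

    # Payloads and their source records are kept in lockstep per destination.
    bulk_message = Hash.new { |h, k| h[k] = [] }
    unpacked     = Hash.new { |h, k| h[k] = [] }

    events = [["idx-a", { "m" => 1 }], ["idx-b", { "m" => 2 }],
              ["idx-a", { "m" => 3 }]]

    events.each do |info, record|
      bulk_message[info] << record
      unpacked[info]     << { record: record }
    end

    bulk_message.each do |info, msgs|
      # the real plugin calls send_bulk(msgs, ..., unpacked[info]) here
      puts "#{info}: #{msgs.length} msgs, #{unpacked[info].length} source records"
      unpacked[info].clear
      msgs.clear
    end
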
@@ -872,7 +881,9 @@ EOC
      end
 
      bulk_message.each do |info, msgs|
-       send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
+       send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info, unpackedMsgArr[info]) unless msgs.empty?
+
+       unpackedMsgArr[info].clear
        msgs.clear
      end
    end
@@ -1086,7 +1097,7 @@ EOC
 
    # send_bulk given a specific bulk request, the original tag,
    # chunk, and bulk_message_count
-   def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info)
+   def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info, unpacked_msg_arr)
      _logstash_prefix, _logstash_dateformat, index_name, _type_name, template_name, customize_template, deflector_alias, application_name, _pipeline, ilm_policy_id = extracted_values
      if deflector_alias
        template_installation(deflector_alias, template_name, customize_template, application_name, index_name, ilm_policy_id, info.host)
@@ -1109,7 +1120,7 @@ EOC
 
      if response['errors']
        error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
-       error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+       error.handle_error(response, tag, chunk, bulk_message_count, extracted_values, unpacked_msg_arr)
      end
    rescue RetryStreamError => e
      log.trace "router.emit_stream for retry stream doing..."
lib/fluent/plugin/out_elasticsearch_data_stream.rb CHANGED
@@ -243,8 +243,14 @@ module Fluent::Plugin
      chunk.msgpack_each do |time, record|
        next unless record.is_a? Hash
 
+       if @include_tag_key
+         record[@tag_key] = tag
+       end
+
        begin
-         record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
+         unless record.has_key?("@timestamp")
+           record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
+         end
          bulk_message = append_record_to_messages(CREATE_OP, {}, headers, record, bulk_message)
        rescue => e
          router.emit_error_event(tag, time, record, e)
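Two behavioral fixes land in this hunk: include_tag_key/tag_key are now honored for data streams just as for regular indices (#936), and a record's own @timestamp wins over the event time instead of being overwritten (#968). A compact sketch of the resulting per-record transform (time precision fixed at 3 here purely for the example):

    require 'time'

    # Mirrors the hunk above: tag injection first, then @timestamp only if absent.
    def prepare_record(record, tag, time, include_tag_key: false, tag_key: 'tag')
      record[tag_key] = tag if include_tag_key
      unless record.key?("@timestamp")
        record["@timestamp"] = Time.at(time).iso8601(3)
      end
      record
    end

    p prepare_record({ "message" => "m" }, "app.log", 1_650_000_000,
                     include_tag_key: true)
    # tag and a generated @timestamp are added

    p prepare_record({ "@timestamp" => "2020-01-01T00:00:00Z" }, "app.log",
                     1_650_000_000)
    # the record's existing @timestamp is preserved
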
lib/fluent/plugin/out_elasticsearch_dynamic.rb CHANGED
@@ -28,6 +28,8 @@ module Fluent::Plugin
        @dynamic_config[key] = value.to_s
      }
      # end eval all configs
+
+     log.warn "Elasticsearch dynamic plugin will be deprecated and removed in the future. Please consider to use normal Elasticsearch plugin"
    end
 
    def create_meta_config_map
test/plugin/mock_chunk.dat ADDED (binary msgpack fixture, not shown; read by test_nested_msgpack_each below)
test/plugin/test_elasticsearch_error_handler.rb CHANGED
@@ -2,6 +2,7 @@ require_relative '../helper'
  require 'fluent/plugin/out_elasticsearch'
  require 'fluent/plugin/elasticsearch_error_handler'
  require 'json'
+ require 'msgpack'
 
  class TestElasticsearchErrorHandler < Test::Unit::TestCase
 
@@ -54,6 +55,27 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
      end
    end
 
+   class MockMsgpackChunk
+     def initialize(chunk)
+       @chunk = chunk
+       @factory = MessagePack::Factory.new
+       @factory.register_type(Fluent::EventTime::TYPE, Fluent::EventTime)
+     end
+
+     def msgpack_each
+       @factory.unpacker(@chunk).each { |time, record| yield(time, record) }
+     end
+   end
+
+   class MockUnpackedMsg
+     def initialize(records)
+       @records = records
+     end
+     def each
+       @records.each { |item| yield({:time => item[:time], :record => item[:record]}) }
+     end
+   end
+
    def setup
      Fluent::Test.setup
      @log_device = Fluent::Test::DummyLogDevice.new
@@ -98,8 +120,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
        ]
      }))
      chunk = MockChunk.new(records)
+     unpacked_msg_arr = MockUnpackedMsg.new(records)
      dummy_extracted_values = []
-     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
      assert_equal(1, @plugin.error_events.size)
      expected_log = "failed to parse"
      exception_message = @plugin.error_events.first[:error].message
@@ -140,8 +163,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
        ]
      }))
      chunk = MockChunk.new(records)
+     unpacked_msg_arr = MockUnpackedMsg.new(records)
      dummy_extracted_values = []
-     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
      assert_equal(1, @plugin.error_events.size)
      expected_log = "failed to parse"
      exception_message = @plugin.error_events.first[:error].message
@@ -159,8 +183,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
        "items" : [{}]
      }))
      chunk = MockChunk.new(records)
+     unpacked_msg_arr = MockUnpackedMsg.new(records)
      dummy_extracted_values = []
-     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
      assert_equal(0, @plugin.error_events.size)
      assert_nil(@plugin.error_events[0])
    end
@@ -181,8 +206,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
        ]
      }))
      chunk = MockChunk.new(records)
+     unpacked_msg_arr = MockUnpackedMsg.new(records)
      dummy_extracted_values = []
-     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
      assert_equal(1, @plugin.error_events.size)
      assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
    end
@@ -204,8 +230,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
        ]
      }))
      chunk = MockChunk.new(records)
+     unpacked_msg_arr = MockUnpackedMsg.new(records)
      dummy_extracted_values = []
-     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
      assert_equal(1, @plugin.error_events.size)
      assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
    end
@@ -230,10 +257,11 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
        ]
      }))
 
-     chunk = MockChunk.new(records)
-     dummy_extracted_values = []
+     chunk = MockChunk.new(records)
+     unpacked_msg_arr = MockUnpackedMsg.new(records)
+     dummy_extracted_values = []
      assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
-       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
      end
    end
 
@@ -257,10 +285,11 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
        ]
      }))
 
-     chunk = MockChunk.new(records)
-     dummy_extracted_values = []
+     chunk = MockChunk.new(records)
+     unpacked_msg_arr = MockUnpackedMsg.new(records)
+     dummy_extracted_values = []
      assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
-       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
      end
    end
 
@@ -290,8 +319,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
      begin
        failed = false
        chunk = MockChunk.new(records)
+       unpacked_msg_arr = MockUnpackedMsg.new(records)
        dummy_extracted_values = []
-       handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+       handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values, unpacked_msg_arr)
      rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
        failed = true
        records = [].tap do |records|
@@ -312,6 +342,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
          records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
        end
        chunk = MockChunk.new(records)
+       unpacked_msg_arr = MockUnpackedMsg.new(records)
 
        response = parse_response(%({
          "took" : 1,
@@ -410,7 +441,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
      begin
        failed = false
        dummy_extracted_values = []
-       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values, unpacked_msg_arr)
      rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
        failed = true
        records = [].tap do |records|
@@ -439,6 +470,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
          records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
        end
        chunk = MockChunk.new(records)
+       unpacked_msg_arr = MockUnpackedMsg.new(records)
 
        response = parse_response(%({
          "took" : 1,
@@ -526,7 +558,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
      begin
        failed = false
        dummy_extracted_values = []
-       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values, unpacked_msg_arr)
      rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
        failed = true
        records = [].tap do |records|
@@ -549,6 +581,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
          records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
        end
        chunk = MockChunk.new(records)
+       unpacked_msg_arr = MockUnpackedMsg.new(records)
 
        response = parse_response(%({
          "took" : 1,
@@ -639,7 +672,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
      begin
        failed = false
        dummy_extracted_values = []
-       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values, unpacked_msg_arr)
      rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
        failed = true
        records = [].tap do |records|
@@ -660,4 +693,61 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
      end
      assert_true failed
    end
+
+   def test_nested_msgpack_each
+     cwd = File.dirname(__FILE__)
+     chunk_path = File.join(cwd, 'mock_chunk.dat')
+     chunk_file = File.open(chunk_path, 'rb', 0644)
+     chunk_file.seek(0, IO::SEEK_SET)
+
+     chunk = MockMsgpackChunk.new(chunk_file)
+
+     unpacked_msg_arr = []
+     msg_count = 0
+     count_to_trigger_error_handle = 0
+     chunk.msgpack_each do |time, record|
+       next unless record.is_a? Hash
+
+       unpacked_msg_arr << {:time => time, :record => record}
+       msg_count += 1
+
+       record.each_key do |k|
+         if k != 'aaa' && k != 'bbb' && k != 'ccc' && k != 'log_path'
+           assert_equal(:impossible, k)
+         end
+       end
+
+       if msg_count % 55 == 0
+         if count_to_trigger_error_handle == 1
+           begin
+             response = {}
+             response['errors'] = true
+             response['items'] = []
+             item = {}
+             item['index'] = {}
+             item['index']['status'] = 429
+             item['index']['error'] = {}
+             item['index']['error']['type'] = "es_rejected_execution_exception"
+             abc = 0
+             while abc < unpacked_msg_arr.length
+               abc += 1
+               response['items'] << item
+             end
+
+             dummy_extracted_values = []
+             @handler.handle_error(response, 'atag', chunk, unpacked_msg_arr.length, dummy_extracted_values, unpacked_msg_arr)
+             assert_equal(0, @plugin.error_events.size)
+             assert_nil(@plugin.error_events[0])
+           rescue => e
+             # capture ElasticsearchRequestAbortError, beacuse es_rejected_execution_exception is unrecoverable.
+           end
+         end
+
+         count_to_trigger_error_handle += 1
+         unpacked_msg_arr.clear
+       end # end if
+     end # end chunk.msgpack_each
+
+     chunk_file.close
+   end
  end
test/plugin/test_filter_elasticsearch_genid.rb CHANGED
@@ -132,10 +132,10 @@ class ElasticsearchGenidFilterTest < Test::Unit::TestCase
    end
 
    class UseEntireRecordAsSeedTest < self
-     data("md5" => ["md5", "MuMU0gHOP1cWvvg/J4aEFg=="],
-          "sha1" => ["sha1", "GZ6Iup9Ywyk5spCWtPQbtZnfK0U="],
-          "sha256" => ["sha256", "O4YN0RiXCUAYeaR97UUULRLxgra/R2dvTV47viir5l4="],
-          "sha512" => ["sha512", "FtbwO1xsLUq0KcO0mj0l80rbwFH5rGE3vL+Vgh90+4R/9j+/Ni/ipwhiOoUcetDxj1r5Vf/92B54La+QTu3eMA=="],)
+     data("md5" => ["md5", "OAod7J0DR9s9/rOQnkeSFw=="],
+          "sha1" => ["sha1", "0CT4aMJ4gxMT3TKaYPCYApiVsq8="],
+          "sha256" => ["sha256", "mbAuKF5can0TTj/JBk71AXtOyoVqw5W5gMPUxx6pxLk="],
+          "sha512" => ["sha512", "f7kz5KVuDy+riENePDzqBjGQfbuRNpRBSQMzT2/6hrljXbYtBy3YFmxB86ofIf3zz4ZBao2QM2W7YvcwbRtK1w=="],)
      def test_record
        hash_type, expected = data
        d = create_driver(%[
@@ -151,10 +151,10 @@ class ElasticsearchGenidFilterTest < Test::Unit::TestCase
                     d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
      end
 
-     data("md5" => ["md5", "GJfpWe8ofiGzn97bc9Gh0Q=="],
-          "sha1" => ["sha1", "AVaK67Tz0bEJ8xNEzjOQ6r9fAu4="],
-          "sha256" => ["sha256", "WIXWAuf/Z94Uw95mudloo2bgjhSsSduQIwkKTQsNFgU="],
-          "sha512" => ["sha512", "yjMGGxy8uc7gCrPgm8W6MzJGLFk0GtUwJ6w/91laf6WNywuvG/7T6kNHLagAV8rSW8xzxmtEfyValBO5scuoKw=="],)
+     data("md5" => ["md5", "Hb0jwxofNQP+ufQTKK1U4g=="],
+          "sha1" => ["sha1", "BakTtlotl/u+yOON6YcViTz6nms="],
+          "sha256" => ["sha256", "eLuTCsFqDlk6PfABNyD39r36+yNIBeDTHyNKfJ8fZQw="],
+          "sha512" => ["sha512", "PhPCNGalM4H4xT19DnCBnpwr56lbvCo8wJGyCiH9dWcyhn1nA5l1diYSZlF2fNiq1+wzMqfGvJILIjgQrlAPcg=="],)
      def test_record_with_tag
        hash_type, expected = data
        d = create_driver(%[
@@ -171,10 +171,10 @@ class ElasticsearchGenidFilterTest < Test::Unit::TestCase
                     d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
      end
 
-     data("md5" => ["md5", "5nQSaJ4F1p9rDFign13Lfg=="],
-          "sha1" => ["sha1", "hyo9+0ZFBpizKl2NShs3C8yQcGw="],
-          "sha256" => ["sha256", "romVsZSIksbqYsOSnUzolZQw76ankcy0DgvDZ3CayTo="],
-          "sha512" => ["sha512", "RPU7K2Pt0iVyvV7p5usqcUIIOmfTajD1aa7pkR9qZ89UARH/lpm6ESY9iwuYJj92lxOUuF5OxlEwvV7uXJ07iA=="],)
+     data("md5" => ["md5", "C8vfhC4kecNCNutFCuC6MA=="],
+          "sha1" => ["sha1", "+YWVqUEL90wpKJRrionUJwNgXHg="],
+          "sha256" => ["sha256", "eSqGZqjnO6Uum/4CNfJaolX49+2XKogiGMHGNHiO91Q="],
+          "sha512" => ["sha512", "iVmuD0D+i/WtBwNza09ZXSIW8Xg8/yrUwK/M/EZaCMjz/x5FyyCiVkb1VVKsgNnJy0SYt4w21dhHewu1aXM6HA=="],)
      def test_record_with_time
        hash_type, expected = data
        d = create_driver(%[
@@ -191,10 +191,10 @@ class ElasticsearchGenidFilterTest < Test::Unit::TestCase
                     d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
      end
 
-     data("md5" => ["md5", "zGQF35KlMUibJAcgkgQDtw=="],
-          "sha1" => ["sha1", "1x9RZO1xEuWps090qq4DUIsU9x8="],
-          "sha256" => ["sha256", "eulMz0eF56lBEf31aIs0OG2TGCH/aoPfZbRqfEOkAwk="],
-          "sha512" => ["sha512", "mIiYATtpdUFEFCIZg1FdKssIs7oWY0gJjhSSbet0ddUmqB+CiQAcAMTmrXO6AVSH0vsMvao/8vtC8AsIPfF1fA=="],)
+     data("md5" => ["md5", "lU7d4EiF+2M1zxWcsmBbjg=="],
+          "sha1" => ["sha1", "nghmz1y3KTEFxalfS2/Oe4n4yfQ="],
+          "sha256" => ["sha256", "d0le9UOnUeuGPF/2yEBRM1YzOYeHtxYOE1UU6JgJrvU="],
+          "sha512" => ["sha512", "n7rhisGHUBne6c4Cs9DRMbPror8O5Y/vYajDqAtOaiUTys/Z1EKBMnZQA0iVNFw7joX33cenBW3Yyccct3xSew=="],)
      def test_record_with_tag_and_time
        hash_type, expected = data
        d = create_driver(%[
test/plugin/test_out_elasticsearch.rb CHANGED
@@ -920,6 +920,67 @@ class ElasticsearchOutputTest < Test::Unit::TestCase
      end
    end
 
+   class GetElasticsearchIncompatibleVersionTest < self
+     def create_driver(conf='', client_version="7.14")
+       # For request stub to detect compatibility.
+       @client_version ||= client_version
+       # Ensure original implementation existence.
+       Fluent::Plugin::ElasticsearchOutput.module_eval(<<-CODE)
+         def detect_es_major_version
+           begin
+             @_es_info ||= client.info
+           rescue ::Elasticsearch::UnsupportedProductError => e
+             raise Fluent::ConfigError, "Using Elasticsearch client #{@client_version} is not compatible for your Elasticsearch server. Please check your using elasticsearch gem version and Elasticsearch server."
+           end
+           begin
+             unless version = @_es_info.dig("version", "number")
+               version = @default_elasticsearch_version
+             end
+           rescue NoMethodError => e
+             log.warn "#{@_es_info} can not dig version information. Assuming Elasticsearch #{@default_elasticsearch_version}", error: e
+             version = @default_elasticsearch_version
+           end
+           version.to_i
+         end
+       CODE
+       Fluent::Plugin::ElasticsearchOutput.module_eval(<<-CODE)
+         def client_library_version
+           #{@client_version}
+         end
+       CODE
+       Fluent::Test::Driver::Output.new(Fluent::Plugin::ElasticsearchOutput).configure(conf)
+     end
+
+     def test_incompatible_es_version
+       if Gem::Version.create(::TRANSPORT_CLASS::VERSION) < Gem::Version.create("7.14.0")
+         omit "This test is not effective before elasticsearch 7.14"
+       end
+       config = %{
+         host logs.google.com
+         port 778
+         scheme https
+         path /es/
+         user john
+         password doe
+         verify_es_version_at_startup true
+         max_retry_get_es_version 1
+       }
+
+       connection_resets = 0
+       stub_request(:get, "https://logs.google.com:778/es//").
+         with(basic_auth: ['john', 'doe']) do |req|
+         connection_resets += 1
+         raise ::Elasticsearch::UnsupportedProductError
+       end
+
+       assert_raise(Fluent::ConfigError) do
+         create_driver(config)
+       end
+
+       assert_equal(1, connection_resets)
+     end
+   end
+
    class GetElasticsearchVersionWithFallbackTest < self
      def create_driver(conf='', client_version="\"5.0\"")
        # For request stub to detect compatibility.
test/plugin/test_out_elasticsearch_data_stream.rb CHANGED
@@ -19,7 +19,7 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
      @driver = nil
      log = Fluent::Engine.log
      log.out.logs.slice!(0, log.out.logs.length)
-     @bulk_records = 0
+     @bulk_records = []
    end
 
    def elasticsearch_version
@@ -70,8 +70,13 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
      }
    end
 
+   SAMPLE_RECORD_TIMESTAMP = Time.now.iso8601
    def sample_record
-     {'@timestamp' => Time.now.iso8601, 'message' => 'Sample record'}
+     {'@timestamp' => SAMPLE_RECORD_TIMESTAMP, 'message' => 'Sample record'}
+   end
+
+   def sample_record_no_timestamp
+     {'message' => 'Sample record no timestamp'}
    end
 
    RESPONSE_ACKNOWLEDGED = {"acknowledged": true}
@@ -115,6 +120,17 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
      stub_request(:get, "#{url}/_index_template/#{name}").to_return(:status => [404, TRANSPORT_CLASS::Transport::Errors::NotFound])
    end
 
+
+   def push_bulk_request(req_body)
+     # bulk data must be pair of OP and records
+     # {"create": {}}\nhttp://localhost:9200/_ilm/policy/foo_ilm_bar
+     # {"@timestamp": ...}
+     ops = req_body.split("\n")
+     @bulk_records += ops.values_at(
+       * ops.each_index.select {|i| i.odd? }
+     ).map{ |i| JSON.parse(i) }
+   end
+
    def stub_nonexistent_template_retry?(name="foo_tpl", url="http://localhost:9200")
      stub_request(:get, "#{url}/_index_template/#{name}").
        to_return({ status: 500, body: 'Internal Server Error' }, { status: 404, body: '{}' })
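The push_bulk_request helper above decodes the NDJSON bulk body: bulk requests alternate action lines ({"create": {}}) with document lines, so after splitting on newlines the documents sit at the odd indexes. The old counter (req.body.split("\n").size / 2) could only verify how many records were sent; keeping the parsed documents lets the new tests below assert on record contents such as 'tag' and '@timestamp'. A standalone illustration of the same slicing:

    require 'json'

    body = <<~NDJSON
      {"create": {}}
      {"@timestamp": "2022-06-16T00:00:00Z", "message": "one"}
      {"create": {}}
      {"message": "two"}
    NDJSON

    ops  = body.split("\n")
    docs = ops.values_at(*ops.each_index.select(&:odd?))
              .map { |line| JSON.parse(line) }

    p docs.length          # => 2    (all the old counter could check)
    p docs[0]["message"]   # => "one" (what the new assertions can inspect)
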
@@ -125,19 +141,19 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
      # bulk data must be pair of OP and records
      # {"create": {}}\nhttp://localhost:9200/_ilm/policy/foo_ilm_bar
      # {"@timestamp": ...}
-     @bulk_records += req.body.split("\n").size / 2
+     push_bulk_request(req.body)
    end
    stub_request(:post, "#{url}/#{ilm_name}/_bulk").with do |req|
      # bulk data must be pair of OP and records
      # {"create": {}}\nhttp://localhost:9200/_ilm/policy/foo_ilm_bar
      # {"@timestamp": ...}
-     @bulk_records += req.body.split("\n").size / 2
+     push_bulk_request(req.body)
    end
    stub_request(:post, "#{url}/#{template_name}/_bulk").with do |req|
      # bulk data must be pair of OP and records
      # {"create": {}}\nhttp://localhost:9200/_ilm/policy/foo_ilm_bar
      # {"@timestamp": ...}
-     @bulk_records += req.body.split("\n").size / 2
+     push_bulk_request(req.body)
    end
  end
 
@@ -600,7 +616,7 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
      driver(conf).run(default_tag: 'test') do
        driver.feed(sample_record)
      end
-     assert_equal 1, @bulk_records
+     assert_equal 1, @bulk_records.length
    end
 
    def test_placeholder_params_unset
@@ -619,7 +635,7 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
      driver(conf).run(default_tag: 'test') do
        driver.feed(sample_record)
      end
-     assert_equal 1, @bulk_records
+     assert_equal 1, @bulk_records.length
    end
 
 
@@ -645,7 +661,7 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
      driver(conf).run(default_tag: 'test') do
        driver.feed(sample_record)
      end
-     assert_equal 1, @bulk_records
+     assert_equal 1, @bulk_records.length
    end
 
    def test_custom_record_placeholder
@@ -675,7 +691,7 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
        driver.feed(record)
      end
    end
-   assert_equal keys.count, @bulk_records
+   assert_equal keys.count, @bulk_records.length
  end
 
  def test_bulk_insert_feed
@@ -693,7 +709,7 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
      driver(conf).run(default_tag: 'test') do
        driver.feed(sample_record)
      end
-     assert_equal 1, @bulk_records
+     assert_equal 1, @bulk_records.length
    end
 
    def test_template_retry_install_fails
@@ -813,4 +829,109 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
        body: '{"policy":{"phases":{"hot":{"actions":{"rollover":{"max_age":"15d"}}}}}}',
        times: 1)
    end
+
+   def test_doesnt_add_tag_key_when_not_configured
+     omit REQUIRED_ELASTIC_MESSAGE unless data_stream_supported?
+
+     config = %{
+       data_stream_name foo
+       data_stream_template_name foo_tpl
+       data_stream_ilm_name foo_ilm_policy
+     }
+
+     stub_default
+     stub_bulk_feed
+     driver(config)
+     driver.run(default_tag: 'mytag') do
+       driver.feed(sample_record)
+     end
+
+     assert_equal(1, @bulk_records.length)
+     assert_false(@bulk_records[0].has_key?('tag'))
+   end
+
+
+   def test_adds_tag_key_when_configured
+     omit REQUIRED_ELASTIC_MESSAGE unless data_stream_supported?
+
+     config = %{
+       data_stream_name foo
+       data_stream_template_name foo_tpl
+       data_stream_ilm_name foo_ilm_policy
+       include_tag_key true
+     }
+
+     stub_default
+     stub_bulk_feed
+     driver(config)
+     driver.run(default_tag: 'mytag') do
+       driver.feed(sample_record)
+     end
+
+     assert_equal(1, @bulk_records.length)
+     assert(@bulk_records[0].has_key?('tag'))
+     assert_equal('mytag', @bulk_records[0]['tag'])
+   end
+
+   def test_adds_custom_tag_key_when_configured
+     omit REQUIRED_ELASTIC_MESSAGE unless data_stream_supported?
+
+     config = %{
+       data_stream_name foo
+       data_stream_template_name foo_tpl
+       data_stream_ilm_name foo_ilm_policy
+       include_tag_key true
+       tag_key custom_tag_key
+     }
+
+     stub_default
+     stub_bulk_feed
+     driver(config)
+     driver.run(default_tag: 'mytag') do
+       driver.feed(sample_record)
+     end
+
+     assert_equal(1, @bulk_records.length)
+     assert(@bulk_records[0].has_key?('custom_tag_key'))
+     assert_equal('mytag', @bulk_records[0]['custom_tag_key'])
+   end
+
+   def test_use_record_timestamp_if_present
+     omit REQUIRED_ELASTIC_MESSAGE unless data_stream_supported?
+
+     stub_default
+     stub_bulk_feed
+     conf = config_element(
+       'ROOT', '', {
+         '@type' => ELASTIC_DATA_STREAM_TYPE,
+         'data_stream_name' => 'foo',
+         'data_stream_ilm_name' => 'foo_ilm_policy',
+         'data_stream_template_name' => 'foo_tpl'
+       })
+     driver(conf).run(default_tag: 'test') do
+       driver.feed(sample_record)
+     end
+     assert_equal 1, @bulk_records.length
+     assert(@bulk_records[0].has_key?('@timestamp'))
+     assert_equal SAMPLE_RECORD_TIMESTAMP, @bulk_records[0]['@timestamp']
+   end
+
+   def test_add_timestamp_if_not_present_in_record
+     omit REQUIRED_ELASTIC_MESSAGE unless data_stream_supported?
+
+     stub_default
+     stub_bulk_feed
+     conf = config_element(
+       'ROOT', '', {
+         '@type' => ELASTIC_DATA_STREAM_TYPE,
+         'data_stream_name' => 'foo',
+         'data_stream_ilm_name' => 'foo_ilm_policy',
+         'data_stream_template_name' => 'foo_tpl'
+       })
+     driver(conf).run(default_tag: 'test') do
+       driver.feed(sample_record_no_timestamp)
+     end
+     assert_equal 1, @bulk_records.length
+     assert(@bulk_records[0].has_key?('@timestamp'))
+   end
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-elasticsearch
  version: !ruby/object:Gem::Version
-   version: 5.2.0
+   version: 5.2.3
  platform: ruby
  authors:
  - diogo
@@ -10,7 +10,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-02-21 00:00:00.000000000 Z
+ date: 2022-06-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: fluentd
@@ -151,6 +151,7 @@ files:
  - ".editorconfig"
  - ".github/ISSUE_TEMPLATE/bug_report.md"
  - ".github/ISSUE_TEMPLATE/feature_request.md"
+ - ".github/dependabot.yml"
  - ".github/workflows/issue-auto-closer.yml"
  - ".github/workflows/linux.yml"
  - ".github/workflows/macos.yml"
@@ -187,6 +188,7 @@ files:
  - lib/fluent/plugin/out_elasticsearch_data_stream.rb
  - lib/fluent/plugin/out_elasticsearch_dynamic.rb
  - test/helper.rb
+ - test/plugin/mock_chunk.dat
  - test/plugin/test_alias_template.json
  - test/plugin/test_elasticsearch_error_handler.rb
  - test/plugin/test_elasticsearch_fallback_selector.rb
@@ -222,12 +224,13 @@ required_rubygems_version: !ruby/object:Gem::Requirement
    - !ruby/object:Gem::Version
      version: '0'
  requirements: []
- rubygems_version: 3.2.30
+ rubygems_version: 3.2.32
  signing_key:
  specification_version: 4
  summary: Elasticsearch output plugin for Fluent event collector
  test_files:
  - test/helper.rb
+ - test/plugin/mock_chunk.dat
  - test/plugin/test_alias_template.json
  - test/plugin/test_elasticsearch_error_handler.rb
  - test/plugin/test_elasticsearch_fallback_selector.rb