fluent-plugin-elasticsearch 5.2.2 → 5.2.4
- checksums.yaml +4 -4
- data/.github/dependabot.yml +6 -0
- data/.github/workflows/linux.yml +4 -1
- data/.github/workflows/macos.yml +4 -1
- data/.github/workflows/windows.yml +4 -1
- data/History.md +11 -0
- data/fluent-plugin-elasticsearch.gemspec +2 -1
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +6 -2
- data/lib/fluent/plugin/out_elasticsearch.rb +11 -4
- data/lib/fluent/plugin/out_elasticsearch_data_stream.rb +4 -1
- data/test/plugin/mock_chunk.dat +0 -0
- data/test/plugin/test_elasticsearch_error_handler.rb +105 -15
- data/test/plugin/test_out_elasticsearch_data_stream.rb +47 -3
- metadata +19 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f822334b8b2ed8169045953bd05cb383b6dd2068b513140429777f55e0f8d6bc
+  data.tar.gz: a8741a46f4f9fe0dd990c38844ac53d19fea9eb27cf7b3505f26d29d0ed7b83f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ed5b593be5147c505fae9b563f87e831db8f110887026bab90a2ece444a1462cbd51a079134b62dab3e21c5a57ce78eabc371d9165f3d765ffe8c51493bc1344
+  data.tar.gz: 1fed7a68045103ed5a5bd9b49fe283468a87c45a2a68bf7a4922881deba2e93cedd9923afc046159465b5552d4d0bd4d47b3a86826b6622fcae02974eaa0ac91
data/.github/workflows/linux.yml CHANGED
@@ -2,6 +2,9 @@ name: Testing on Ubuntu
 on:
   - push
   - pull_request
+permissions:
+  contents: read
+
 jobs:
   build:
     runs-on: ${{ matrix.os }}
@@ -13,7 +16,7 @@ jobs:
           - ubuntu-latest
     name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: ruby/setup-ruby@v1
         with:
          ruby-version: ${{ matrix.ruby }}
data/.github/workflows/macos.yml CHANGED
@@ -2,6 +2,9 @@ name: Testing on macOS
 on:
   - push
   - pull_request
+permissions:
+  contents: read
+
 jobs:
   build:
     runs-on: ${{ matrix.os }}
@@ -13,7 +16,7 @@ jobs:
           - macOS-latest
     name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: ruby/setup-ruby@v1
         with:
          ruby-version: ${{ matrix.ruby }}
data/.github/workflows/windows.yml CHANGED
@@ -2,6 +2,9 @@ name: Testing on Windows
 on:
   - push
   - pull_request
+permissions:
+  contents: read
+
 jobs:
   build:
     runs-on: ${{ matrix.os }}
@@ -13,7 +16,7 @@ jobs:
           - windows-latest
     name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - uses: ruby/setup-ruby@v1
        with:
          ruby-version: ${{ matrix.ruby }}
data/History.md CHANGED
@@ -2,6 +2,17 @@

 ### [Unreleased]

+### 5.2.4
+- Pin Faraday 1.10 (#987)
+- Increase errors metric on error response in data stream (#986)
+
+### 5.2.3
+- Bump actions/checkout from 2 to 3 (#978)
+- chore: Included githubactions in the dependabot config (#977)
+- chore: Set permissions for GitHub actions (#972)
+- Remove nested msgpack\_each in handle\_error (#970)
+- do not overwrite @timestamp in data stream if it already exists in the record (#968)
+
 ### 5.2.2
 - Add missing top level class markers (#961)
 - Ensure use_record_as_seed for same records (#960)
data/fluent-plugin-elasticsearch.gemspec CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)

 Gem::Specification.new do |s|
   s.name = 'fluent-plugin-elasticsearch'
-  s.version = '5.2.2'
+  s.version = '5.2.4'
   s.authors = ['diogo', 'pitr', 'Hiroshi Hatake']
   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com', 'cosmo0920.wp@gmail.com']
   s.description = %q{Elasticsearch output plugin for Fluent event collector}
@@ -23,6 +23,7 @@ Gem::Specification.new do |s|
   s.required_ruby_version = Gem::Requirement.new(">= 2.3".freeze)

   s.add_runtime_dependency 'fluentd', '>= 0.14.22'
+  s.add_runtime_dependency "faraday", "~> 1.10"
   s.add_runtime_dependency 'excon', '>= 0'
   s.add_runtime_dependency 'elasticsearch'

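The new dependency uses RubyGems' pessimistic operator: "~> 1.10" admits any Faraday from 1.10 up to, but not including, 2.0, keeping the plugin off Faraday 2.x (presumably the motivation for the pin in #987, since Faraday 2 reorganized its adapter and middleware layout). A quick sanity check of what the constraint accepts, using only the stock Gem::Requirement API:

    # Gem::Requirement and Gem::Version ship with Ruby's bundled RubyGems.
    req = Gem::Requirement.new('~> 1.10')

    req.satisfied_by?(Gem::Version.new('1.10.2')) # => true  (patch releases allowed)
    req.satisfied_by?(Gem::Version.new('1.99.0')) # => true  (still below 2.0)
    req.satisfied_by?(Gem::Version.new('2.0.0'))  # => false (Faraday 2.x excluded)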
data/lib/fluent/plugin/elasticsearch_error_handler.rb CHANGED
@@ -35,7 +35,7 @@ class Fluent::Plugin::ElasticsearchErrorHandler
     end
   end

-  def handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+  def handle_error(response, tag, chunk, bulk_message_count, extracted_values, unpacked_msg_arr)
     items = response['items']
     if items.nil? || !items.is_a?(Array)
       raise ElasticsearchVersionMismatch, "The response format was unrecognized: #{response}"
@@ -48,7 +48,11 @@ class Fluent::Plugin::ElasticsearchErrorHandler
     meta = {}
     header = {}
     affinity_target_indices = @plugin.get_affinity_target_indices(chunk)
-    chunk.msgpack_each do |time, rawrecord|
+
+    unpacked_msg_arr.each do |msg|
+      time = msg[:time]
+      rawrecord = msg[:record]
+
       bulk_message = ''
       next unless rawrecord.is_a? Hash
       begin
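The extra unpacked_msg_arr parameter is the heart of #970: handle_error used to call chunk.msgpack_each itself, nested inside the iteration already running in the output plugin, and now receives the already-unpacked events instead. A minimal sketch of the new flow; FakeChunk is an illustrative stand-in for a Fluentd buffer chunk, not plugin code:

    require 'json'

    # Stand-in for a buffer chunk that yields [time, record] pairs.
    class FakeChunk
      def initialize(events)
        @events = events
      end

      def msgpack_each(&block)
        @events.each(&block)
      end
    end

    chunk = FakeChunk.new([[12_345, { 'message' => 'a' }], [12_346, { 'message' => 'b' }]])

    # The caller unpacks the chunk exactly once...
    unpacked_msg_arr = []
    chunk.msgpack_each { |time, record| unpacked_msg_arr << { time: time, record: record } }

    # ...and the error handler replays the array instead of re-reading the chunk.
    unpacked_msg_arr.each do |msg|
      puts "retry candidate at #{msg[:time]}: #{msg[:record].to_json}"
    end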
data/lib/fluent/plugin/out_elasticsearch.rb CHANGED
@@ -827,6 +827,7 @@ EOC
     bulk_message = Hash.new { |h,k| h[k] = '' }
     header = {}
     meta = {}
+    unpackedMsgArr = {}

     tag = chunk.metadata.tag
     chunk_id = dump_unique_id_hex(chunk.unique_id)
@@ -851,9 +852,13 @@ EOC
        RequestInfo.new(host, nil, meta["_index".freeze], meta.delete("_alias".freeze))
      end

+      unpackedMsgArr[info] = [] if unpackedMsgArr[info].nil?
+      unpackedMsgArr[info] << {:time => time, :record => record}
+
      if split_request?(bulk_message, info)
        bulk_message.each do |info, msgs|
-          send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
+          send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info, unpackedMsgArr[info]) unless msgs.empty?
+          unpackedMsgArr[info].clear
          msgs.clear
          # Clear bulk_message_count for this info.
          bulk_message_count[info] = 0;
@@ -876,7 +881,9 @@ EOC
     end

     bulk_message.each do |info, msgs|
-      send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
+      send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info, unpackedMsgArr[info]) unless msgs.empty?
+
+      unpackedMsgArr[info].clear
       msgs.clear
     end
   end
@@ -1090,7 +1097,7 @@ EOC

   # send_bulk given a specific bulk request, the original tag,
   # chunk, and bulk_message_count
-  def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info)
+  def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info, unpacked_msg_arr)
     _logstash_prefix, _logstash_dateformat, index_name, _type_name, template_name, customize_template, deflector_alias, application_name, _pipeline, ilm_policy_id = extracted_values
     if deflector_alias
       template_installation(deflector_alias, template_name, customize_template, application_name, index_name, ilm_policy_id, info.host)
@@ -1113,7 +1120,7 @@ EOC

     if response['errors']
       error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
-      error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+      error.handle_error(response, tag, chunk, bulk_message_count, extracted_values, unpacked_msg_arr)
     end
   rescue RetryStreamError => e
     log.trace "router.emit_stream for retry stream doing..."
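To make that work, write() now files each event under the RequestInfo it will be sent with, so the matching slice travels into send_bulk next to the bulk body and is cleared once flushed. A sketch of the bookkeeping under the same shape; the plugin initialises buckets lazily with a nil check, while a default-block Hash is used here for brevity:

    # Bucket events per request key, mirroring unpackedMsgArr in write().
    unpacked_msg_arr = Hash.new { |h, k| h[k] = [] }

    events = [
      [:request_a, 12_345, { 'message' => 'x' }],
      [:request_b, 12_346, { 'message' => 'y' }],
    ]

    events.each do |info, time, record|
      unpacked_msg_arr[info] << { time: time, record: record }
    end

    p unpacked_msg_arr[:request_a] # => [{:time=>12345, :record=>{"message"=>"x"}}]

    # After the bulk request for :request_a is flushed, its slice is dropped,
    # mirroring unpackedMsgArr[info].clear in the plugin.
    unpacked_msg_arr[:request_a].clear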
data/lib/fluent/plugin/out_elasticsearch_data_stream.rb CHANGED
@@ -248,7 +248,9 @@ module Fluent::Plugin
       end

       begin
-        record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
+        unless record.has_key?("@timestamp")
+          record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
+        end
         bulk_message = append_record_to_messages(CREATE_OP, {}, headers, record, bulk_message)
       rescue => e
         router.emit_error_event(tag, time, record, e)
@@ -263,6 +265,7 @@ module Fluent::Plugin
       response = client(host).bulk(params)
       if response['errors']
         log.error "Could not bulk insert to Data Stream: #{data_stream_name} #{response}"
+        @num_errors_metrics.inc
       end
     rescue => e
       raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{data_stream_name}): #{e.message}"
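The #968 change reduces to a guard: @timestamp is only synthesised from the event time when the record does not already carry one, and the #986 change increments the plugin's error counter whenever the bulk response reports errors. A standalone sketch of the timestamp guard; time_precision here is an assumed stand-in for the plugin's @time_precision setting:

    require 'time' # for Time#iso8601

    def ensure_timestamp(record, time, time_precision = 3)
      # Leave a caller-supplied @timestamp untouched; otherwise derive one.
      unless record.key?('@timestamp')
        record.merge!('@timestamp' => Time.at(time).iso8601(time_precision))
      end
      record
    end

    now = Time.now.to_i
    p ensure_timestamp({ 'message' => 'no ts' }, now)                   # @timestamp added
    p ensure_timestamp({ '@timestamp' => '2022-01-01T00:00:00Z' }, now) # kept as-is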
data/test/plugin/mock_chunk.dat CHANGED
Binary file
data/test/plugin/test_elasticsearch_error_handler.rb CHANGED
@@ -2,6 +2,7 @@ require_relative '../helper'
 require 'fluent/plugin/out_elasticsearch'
 require 'fluent/plugin/elasticsearch_error_handler'
 require 'json'
+require 'msgpack'

 class TestElasticsearchErrorHandler < Test::Unit::TestCase

@@ -54,6 +55,27 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
     end
   end

+  class MockMsgpackChunk
+    def initialize(chunk)
+      @chunk = chunk
+      @factory = MessagePack::Factory.new
+      @factory.register_type(Fluent::EventTime::TYPE, Fluent::EventTime)
+    end
+
+    def msgpack_each
+      @factory.unpacker(@chunk).each { |time, record| yield(time, record) }
+    end
+  end
+
+  class MockUnpackedMsg
+    def initialize(records)
+      @records = records
+    end
+    def each
+      @records.each { |item| yield({:time => item[:time], :record => item[:record]}) }
+    end
+  end
+
   def setup
     Fluent::Test.setup
     @log_device = Fluent::Test::DummyLogDevice.new
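MockMsgpackChunk relies on MessagePack::Factory, which is what lets the test stream events back out of mock_chunk.dat with Fluent::EventTime decoded as a registered extension type. A minimal round-trip using the same factory/packer/unpacker pattern, with plain integer times standing in for Fluent::EventTime:

    require 'msgpack'
    require 'stringio'

    factory = MessagePack::Factory.new

    # Pack [time, record] pairs into an in-memory buffer, the shape the
    # on-disk chunk file has.
    io = StringIO.new
    packer = factory.packer(io)
    [[12_345, { 'aaa' => 1 }], [12_346, { 'bbb' => 2 }]].each { |event| packer.write(event) }
    packer.flush
    io.rewind

    # Stream them back, as MockMsgpackChunk#msgpack_each does.
    factory.unpacker(io).each { |time, record| p [time, record] }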
@@ -98,8 +120,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
       ]
     }))
     chunk = MockChunk.new(records)
+    unpacked_msg_arr = MockUnpackedMsg.new(records)
     dummy_extracted_values = []
-    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
     assert_equal(1, @plugin.error_events.size)
     expected_log = "failed to parse"
     exception_message = @plugin.error_events.first[:error].message
@@ -140,8 +163,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
       ]
     }))
     chunk = MockChunk.new(records)
+    unpacked_msg_arr = MockUnpackedMsg.new(records)
     dummy_extracted_values = []
-    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
     assert_equal(1, @plugin.error_events.size)
     expected_log = "failed to parse"
     exception_message = @plugin.error_events.first[:error].message
@@ -159,8 +183,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
       "items" : [{}]
     }))
     chunk = MockChunk.new(records)
+    unpacked_msg_arr = MockUnpackedMsg.new(records)
     dummy_extracted_values = []
-    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
     assert_equal(0, @plugin.error_events.size)
     assert_nil(@plugin.error_events[0])
   end
@@ -181,8 +206,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
       ]
     }))
     chunk = MockChunk.new(records)
+    unpacked_msg_arr = MockUnpackedMsg.new(records)
     dummy_extracted_values = []
-    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
     assert_equal(1, @plugin.error_events.size)
     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
   end
@@ -204,8 +230,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
       ]
     }))
     chunk = MockChunk.new(records)
+    unpacked_msg_arr = MockUnpackedMsg.new(records)
     dummy_extracted_values = []
-    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
     assert_equal(1, @plugin.error_events.size)
     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
   end
@@ -230,10 +257,11 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
       ]
     }))

-    chunk = MockChunk.new(records)
-    dummy_extracted_values = []
+    chunk = MockChunk.new(records)
+    unpacked_msg_arr = MockUnpackedMsg.new(records)
+    dummy_extracted_values = []
     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
-      @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+      @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
     end
   end
@@ -257,10 +285,11 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
       ]
     }))

-    chunk = MockChunk.new(records)
-    dummy_extracted_values = []
+    chunk = MockChunk.new(records)
+    unpacked_msg_arr = MockUnpackedMsg.new(records)
+    dummy_extracted_values = []
     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
-      @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+      @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values, unpacked_msg_arr)
     end
   end
@@ -290,8 +319,9 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
     begin
       failed = false
       chunk = MockChunk.new(records)
+      unpacked_msg_arr = MockUnpackedMsg.new(records)
       dummy_extracted_values = []
-      handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+      handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values, unpacked_msg_arr)
     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
       failed = true
       records = [].tap do |records|
@@ -312,6 +342,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
         records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
       end
       chunk = MockChunk.new(records)
+      unpacked_msg_arr = MockUnpackedMsg.new(records)

       response = parse_response(%({
         "took" : 1,
@@ -410,7 +441,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
     begin
       failed = false
       dummy_extracted_values = []
-      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values, unpacked_msg_arr)
     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
       failed = true
       records = [].tap do |records|
@@ -439,6 +470,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
         records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
       end
       chunk = MockChunk.new(records)
+      unpacked_msg_arr = MockUnpackedMsg.new(records)

       response = parse_response(%({
         "took" : 1,
@@ -526,7 +558,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
     begin
       failed = false
       dummy_extracted_values = []
-      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values, unpacked_msg_arr)
     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
       failed = true
       records = [].tap do |records|
@@ -549,6 +581,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
         records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
       end
       chunk = MockChunk.new(records)
+      unpacked_msg_arr = MockUnpackedMsg.new(records)

       response = parse_response(%({
         "took" : 1,
@@ -639,7 +672,7 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
     begin
       failed = false
       dummy_extracted_values = []
-      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values, unpacked_msg_arr)
     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
       failed = true
       records = [].tap do |records|
@@ -660,4 +693,61 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
     end
     assert_true failed
   end
+
+  def test_nested_msgpack_each
+    cwd = File.dirname(__FILE__)
+    chunk_path = File.join(cwd, 'mock_chunk.dat')
+    chunk_file = File.open(chunk_path, 'rb', 0644)
+    chunk_file.seek(0, IO::SEEK_SET)
+
+    chunk = MockMsgpackChunk.new(chunk_file)
+
+    unpacked_msg_arr = []
+    msg_count = 0
+    count_to_trigger_error_handle = 0
+    chunk.msgpack_each do |time, record|
+      next unless record.is_a? Hash
+
+      unpacked_msg_arr << {:time => time, :record => record}
+      msg_count += 1
+
+      record.each_key do |k|
+        if k != 'aaa' && k != 'bbb' && k != 'ccc' && k != 'log_path'
+          assert_equal(:impossible, k)
+        end
+      end
+
+      if msg_count % 55 == 0
+        if count_to_trigger_error_handle == 1
+          begin
+            response = {}
+            response['errors'] = true
+            response['items'] = []
+            item = {}
+            item['index'] = {}
+            item['index']['status'] = 429
+            item['index']['error'] = {}
+            item['index']['error']['type'] = "es_rejected_execution_exception"
+            abc = 0
+            while abc < unpacked_msg_arr.length
+              abc += 1
+              response['items'] << item
+            end
+
+            dummy_extracted_values = []
+            @handler.handle_error(response, 'atag', chunk, unpacked_msg_arr.length, dummy_extracted_values, unpacked_msg_arr)
+            assert_equal(0, @plugin.error_events.size)
+            assert_nil(@plugin.error_events[0])
+          rescue => e
+            # capture ElasticsearchRequestAbortError, because es_rejected_execution_exception is unrecoverable.
+          end
+        end
+
+        count_to_trigger_error_handle += 1
+        unpacked_msg_arr.clear
+      end # end if
+    end # end chunk.msgpack_each
+
+    chunk_file.close
+  end
 end
data/test/plugin/test_out_elasticsearch_data_stream.rb CHANGED
@@ -70,8 +70,13 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
   }
 end

+  SAMPLE_RECORD_TIMESTAMP = Time.now.iso8601
   def sample_record
-    {'@timestamp' => Time.now.iso8601, 'message' => 'Sample record'}
+    {'@timestamp' => SAMPLE_RECORD_TIMESTAMP, 'message' => 'Sample record'}
+  end
+
+  def sample_record_no_timestamp
+    {'message' => 'Sample record no timestamp'}
   end

   RESPONSE_ACKNOWLEDGED = {"acknowledged": true}
@@ -859,7 +864,7 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
     stub_default
     stub_bulk_feed
     driver(config)
-    driver.run(default_tag: 'mytag') do
+    driver.run(default_tag: 'mytag') do
       driver.feed(sample_record)
     end

@@ -882,7 +887,7 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
     stub_default
     stub_bulk_feed
     driver(config)
-    driver.run(default_tag: 'mytag') do
+    driver.run(default_tag: 'mytag') do
       driver.feed(sample_record)
     end

@@ -890,4 +895,43 @@ class ElasticsearchOutputDataStreamTest < Test::Unit::TestCase
     assert(@bulk_records[0].has_key?('custom_tag_key'))
     assert_equal('mytag', @bulk_records[0]['custom_tag_key'])
   end
+
+  def test_use_record_timestamp_if_present
+    omit REQUIRED_ELASTIC_MESSAGE unless data_stream_supported?
+
+    stub_default
+    stub_bulk_feed
+    conf = config_element(
+      'ROOT', '', {
+        '@type' => ELASTIC_DATA_STREAM_TYPE,
+        'data_stream_name' => 'foo',
+        'data_stream_ilm_name' => 'foo_ilm_policy',
+        'data_stream_template_name' => 'foo_tpl'
+      })
+    driver(conf).run(default_tag: 'test') do
+      driver.feed(sample_record)
+    end
+    assert_equal 1, @bulk_records.length
+    assert(@bulk_records[0].has_key?('@timestamp'))
+    assert_equal SAMPLE_RECORD_TIMESTAMP, @bulk_records[0]['@timestamp']
+  end
+
+  def test_add_timestamp_if_not_present_in_record
+    omit REQUIRED_ELASTIC_MESSAGE unless data_stream_supported?
+
+    stub_default
+    stub_bulk_feed
+    conf = config_element(
+      'ROOT', '', {
+        '@type' => ELASTIC_DATA_STREAM_TYPE,
+        'data_stream_name' => 'foo',
+        'data_stream_ilm_name' => 'foo_ilm_policy',
+        'data_stream_template_name' => 'foo_tpl'
+      })
+    driver(conf).run(default_tag: 'test') do
+      driver.feed(sample_record_no_timestamp)
+    end
+    assert_equal 1, @bulk_records.length
+    assert(@bulk_records[0].has_key?('@timestamp'))
+  end
 end
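One detail worth noting: hoisting Time.now.iso8601 into SAMPLE_RECORD_TIMESTAMP freezes a single timestamp for the test run, which is what allows test_use_record_timestamp_if_present to assert exact equality against the record coming back from the stubbed bulk feed. A tiny sketch of the race the constant avoids:

    require 'time'

    # Two separate Time.now calls can straddle a second boundary, so the
    # fed record and the expected value could disagree.
    expected = Time.now.iso8601
    sleep 1
    p expected == Time.now.iso8601 # => false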
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 5.2.2
+  version: 5.2.4
 platform: ruby
 authors:
 - diogo
@@ -10,7 +10,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-
+date: 2022-09-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -26,6 +26,20 @@ dependencies:
   - - ">="
     - !ruby/object:Gem::Version
       version: 0.14.22
+- !ruby/object:Gem::Dependency
+  name: faraday
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.10'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.10'
 - !ruby/object:Gem::Dependency
   name: excon
   requirement: !ruby/object:Gem::Requirement
@@ -151,6 +165,7 @@ files:
 - ".editorconfig"
 - ".github/ISSUE_TEMPLATE/bug_report.md"
 - ".github/ISSUE_TEMPLATE/feature_request.md"
+- ".github/dependabot.yml"
 - ".github/workflows/issue-auto-closer.yml"
 - ".github/workflows/linux.yml"
 - ".github/workflows/macos.yml"
@@ -187,6 +202,7 @@ files:
 - lib/fluent/plugin/out_elasticsearch_data_stream.rb
 - lib/fluent/plugin/out_elasticsearch_dynamic.rb
 - test/helper.rb
+- test/plugin/mock_chunk.dat
 - test/plugin/test_alias_template.json
 - test/plugin/test_elasticsearch_error_handler.rb
 - test/plugin/test_elasticsearch_fallback_selector.rb
@@ -228,6 +244,7 @@ specification_version: 4
 summary: Elasticsearch output plugin for Fluent event collector
 test_files:
 - test/helper.rb
+- test/plugin/mock_chunk.dat
 - test/plugin/test_alias_template.json
 - test/plugin/test_elasticsearch_error_handler.rb
 - test/plugin/test_elasticsearch_fallback_selector.rb