fluent-plugin-input-opensearch 1.1.9

Files changed (52)
  1. checksums.yaml +7 -0
  2. data/.coveralls.yml +1 -0
  3. data/.editorconfig +9 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +29 -0
  5. data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
  6. data/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md +9 -0
  7. data/.github/workflows/coverage.yaml +22 -0
  8. data/.github/workflows/issue-auto-closer.yml +12 -0
  9. data/.github/workflows/linux.yml +26 -0
  10. data/.github/workflows/macos.yml +26 -0
  11. data/.github/workflows/windows.yml +26 -0
  12. data/.gitignore +18 -0
  13. data/CONTRIBUTING.md +24 -0
  14. data/Gemfile +10 -0
  15. data/History.md +67 -0
  16. data/LICENSE.txt +201 -0
  17. data/README.OpenSearchGenID.md +116 -0
  18. data/README.OpenSearchInput.md +314 -0
  19. data/README.Troubleshooting.md +482 -0
  20. data/README.md +1622 -0
  21. data/Rakefile +37 -0
  22. data/fluent-plugin-opensearch.gemspec +39 -0
  23. data/gemfiles/Gemfile.elasticsearch.v6 +12 -0
  24. data/lib/fluent/log-ext.rb +64 -0
  25. data/lib/fluent/plugin/filter_opensearch_genid.rb +103 -0
  26. data/lib/fluent/plugin/in_opensearch.rb +410 -0
  27. data/lib/fluent/plugin/oj_serializer.rb +48 -0
  28. data/lib/fluent/plugin/opensearch_constants.rb +39 -0
  29. data/lib/fluent/plugin/opensearch_error.rb +31 -0
  30. data/lib/fluent/plugin/opensearch_error_handler.rb +182 -0
  31. data/lib/fluent/plugin/opensearch_fallback_selector.rb +36 -0
  32. data/lib/fluent/plugin/opensearch_index_template.rb +155 -0
  33. data/lib/fluent/plugin/opensearch_simple_sniffer.rb +36 -0
  34. data/lib/fluent/plugin/opensearch_tls.rb +96 -0
  35. data/lib/fluent/plugin/out_opensearch.rb +1158 -0
  36. data/lib/fluent/plugin/out_opensearch_data_stream.rb +229 -0
  37. data/test/helper.rb +60 -0
  38. data/test/plugin/datastream_template.json +4 -0
  39. data/test/plugin/test_alias_template.json +9 -0
  40. data/test/plugin/test_filter_opensearch_genid.rb +241 -0
  41. data/test/plugin/test_in_opensearch.rb +500 -0
  42. data/test/plugin/test_index_alias_template.json +11 -0
  43. data/test/plugin/test_index_template.json +25 -0
  44. data/test/plugin/test_oj_serializer.rb +45 -0
  45. data/test/plugin/test_opensearch_error_handler.rb +770 -0
  46. data/test/plugin/test_opensearch_fallback_selector.rb +100 -0
  47. data/test/plugin/test_opensearch_tls.rb +171 -0
  48. data/test/plugin/test_out_opensearch.rb +3980 -0
  49. data/test/plugin/test_out_opensearch_data_stream.rb +746 -0
  50. data/test/plugin/test_template.json +23 -0
  51. data/test/test_log-ext.rb +61 -0
  52. metadata +291 -0
data/test/plugin/test_opensearch_error_handler.rb
@@ -0,0 +1,770 @@
+ # SPDX-License-Identifier: Apache-2.0
+ #
+ # The fluent-plugin-opensearch Contributors require contributions made to
+ # this file be licensed under the Apache-2.0 license or a
+ # compatible open source license.
+ #
+ # Modifications Copyright fluent-plugin-opensearch Contributors. See
+ # GitHub history for details.
+ #
+ # Licensed to Uken Inc. under one or more contributor
+ # license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright
+ # ownership. Uken Inc. licenses this file to you under
+ # the Apache License, Version 2.0 (the "License"); you may
+ # not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ require_relative '../helper'
+ require 'fluent/plugin/out_opensearch'
+ require 'fluent/plugin/opensearch_error_handler'
+ require 'json'
+
+ class TestOpenSearchErrorHandler < Test::Unit::TestCase
+
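+   # Minimal stand-in for the output plugin: it exposes the settings the
+   # error handler reads and records every emitted error event so the tests
+   # can assert on them.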
34
+   class TestPlugin
+     attr_reader :log
+     attr_reader :error_events
+     attr_accessor :unrecoverable_error_types
+     attr_accessor :unrecoverable_record_types
+     attr_accessor :log_os_400_reason
+     attr_accessor :write_operation
+     attr_accessor :emit_error_label_event
+
+     def initialize(log, log_os_400_reason = false)
+       @log = log
+       @write_operation = 'index'
+       @error_events = []
+       @unrecoverable_error_types = ["out_of_memory_error", "rejected_execution_exception"]
+       @unrecoverable_record_types = ["json_parse_exception"]
+       @log_os_400_reason = log_os_400_reason
+       @emit_error_label_event = true
+     end
+
+     def router
+       self
+     end
+
+     def emit_error_event(tag, time, record, e)
+       @error_events << {:tag => tag, :time => time, :record => record, :error => e}
+     end
+
+     def process_message(tag, meta, header, time, record, affinity_target_indices, extracted_values)
+       return [meta, header, record]
+     end
+
+     def get_affinity_target_indices(chunk)
+       indices = Hash.new
+       indices
+     end
+
+     def append_record_to_messages(op, meta, header, record, msgs)
+       if record.has_key?('raise') && record['raise']
+         raise 'process_message'
+       end
+       return true
+     end
+   end
+
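+   # Mimics a buffer chunk: msgpack_each yields the buffered (time, record)
+   # pairs, which is all the error handler needs from a chunk.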
77
+   class MockChunk
+     def initialize(records)
+       @records = records
+       @index = 0
+     end
+
+     def msgpack_each
+       @records.each { |item| yield(item[:time], item[:record]) }
+     end
+   end
+
+   def setup
+     Fluent::Test.setup
+     @log_device = Fluent::Test::DummyLogDevice.new
+     dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+     logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+     @log = Fluent::Log.new(logger)
+     @plugin = TestPlugin.new(@log)
+     @handler = Fluent::Plugin::OpenSearchErrorHandler.new(@plugin)
+   end
+
+   def parse_response(value)
+     JSON.parse(value)
+   end
+
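+   # The TEST400* subclasses below vary the log level and the
+   # log_os_400_reason / emit_error_label_event flags to check when the 400
+   # "reason" is surfaced and when error events are suppressed entirely.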
101
+   class TEST400ResponseReason < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::DEBUG}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::OpenSearchErrorHandler.new(@plugin)
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(1, @plugin.error_events.size)
+       expected_log = "failed to parse"
+       exception_message = @plugin.error_events.first[:error].message
+       assert_true(exception_message.include?(expected_log),
+                   "Exception message '#{exception_message}' does not contain '#{expected_log}'")
+       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+     end
+   end
+
142
+   class TEST400ResponseReasonWithoutErrorLog < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::DEBUG}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::OpenSearchErrorHandler.new(@plugin)
+       @plugin.emit_error_label_event = false
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(0, @plugin.error_events.size)
+       assert_true(@plugin.error_events.empty?)
+     end
+   end
+
180
+   class TEST400ResponseReasonNoDebug < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::OpenSearchErrorHandler.new(@plugin)
+       @plugin.log_os_400_reason = true
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(1, @plugin.error_events.size)
+       expected_log = "failed to parse"
+       exception_message = @plugin.error_events.first[:error].message
+       assert_true(exception_message.include?(expected_log),
+                   "Exception message '#{exception_message}' does not contain '#{expected_log}'")
+       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+     end
+   end
+
222
+   class TEST400ResponseReasonNoDebugAndNoErrorLog < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::OpenSearchErrorHandler.new(@plugin)
+       @plugin.log_os_400_reason = true
+       @plugin.emit_error_label_event = false
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(0, @plugin.error_events.size)
+       assert_true(@plugin.error_events.empty?)
+     end
+   end
+
261
+   def test_nil_items_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [{}]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(0, @plugin.error_events.size)
+     assert_nil(@plugin.error_events[0])
+   end
+
+   def test_blocked_items_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 503,
+             "error" : "ClusterBlockException[blocked by: [SERVICE_UNAVAILABLE/1/state not recovered / initialized];]"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
+
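+   # A 400 is not retried; the failing record is routed to the error stream
+   # (dead-letter-queue style) instead.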
297
+   def test_dlq_400_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 400,
+             "_type" : "bar",
+             "reason":"unrecognized error"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
+
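+   # Error types listed in unrecoverable_error_types abort the whole bulk
+   # request via OpenSearchRequestAbortError.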
320
+   def test_out_of_memory_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 500,
+             "_type" : "bar",
+             "error" : {
+               "type" : "out_of_memory_error",
+               "reason":"Java heap space"
+             }
+           }
+         }
+       ]
+     }))
+
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     assert_raise(Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError) do
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     end
+   end
+
+   def test_rejected_execution_exception_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 429,
+             "_type" : "bar",
+             "error" : {
+               "type" : "rejected_execution_exception",
+               "reason":"rejected execution of org.opensearch.transport.TransportService"
+             }
+           }
+         }
+       ]
+     }))
+
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     assert_raise(Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError) do
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     end
+   end
+
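+   # With es_rejected_execution_exception removed from the unrecoverable
+   # list, the same 429 item is re-queued for retry instead of aborting.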
374
+   def test_es_rejected_execution_exception_responses_as_not_error
+     plugin = TestPlugin.new(@log)
+     plugin.unrecoverable_error_types = ["out_of_memory_error"]
+     handler = Fluent::Plugin::OpenSearchErrorHandler.new(plugin)
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 429,
+             "_type" : "bar",
+             "error" : {
+               "type" : "es_rejected_execution_exception",
+               "reason":"rejected execution of org.opensearch.transport.TransportService"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError, Fluent::Plugin::OpenSearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       # should retry chunk when unrecoverable error is not thrown
+       assert_equal 1, records.length
+     end
+     assert_true failed
+   end
+
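+   # Mixed bulk response: checks which items are re-queued for retry via
+   # RetryStreamError and which are emitted as error events and dropped.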
414
+   def test_retry_error
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0 => true, 4 => true})
+     10.times do |i|
+       records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason":"unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 400,
+             "error" : {
+               "type" : "mapper_parsing_exception",
+               "reason":"failed to parse"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "9",
+             "status" : 500,
+             "error" : {
+               "type" : "json_parse_exception",
+               "reason":"Invalid UTF-8 start byte 0x92\\n at [Source: org.opensearch.transport.netty4.ByteBufStreamInput@204fe9c9; line: 1, column: 81]"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError, Fluent::Plugin::OpenSearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       assert_equal 2, records.length, "Expected retry_stream to contain the two retriable records"
+       assert_equal 2, records[0]['_id'], "Expected record with _id 2 to be in retry_stream"
+       assert_equal 8, records[1]['_id'], "Expected record with _id 8 to be in retry_stream"
+       error_ids = @plugin.error_events.collect { |h| h[:record]['_id'] }
+       assert_equal 4, error_ids.length, "Expected four records to be dropped from retry_stream"
+       assert_equal [5, 6, 7, 9], error_ids, "Expected records 5, 6, 7 and 9 to be dropped from retry_stream"
+       @plugin.error_events.collect { |h| h[:error] }.each do |e|
+         assert_true e.respond_to?(:backtrace)
+       end
+     end
+     assert_true failed
+   end
+
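+   # If any item carries an unrecoverable error type, the whole chunk is
+   # dropped (nothing is re-queued) and the request aborts.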
541
+   def test_unrecoverable_error_included_in_responses
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0 => true, 4 => true, 9 => true})
+     10.times do |i|
+       records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason":"unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 500,
+             "error" : {
+               "type" : "out_of_memory_error",
+               "reason":"Java heap space"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError, Fluent::Plugin::OpenSearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       # should drop entire chunk when unrecoverable error response is replied
+       assert_equal 0, records.length
+     end
+     assert_true failed
+   end
+
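+   # With write_operation 'upsert', a 409 version conflict is retried rather
+   # than dropped.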
650
+   def test_retry_error_upsert
+     @plugin.write_operation = 'upsert'
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0 => true, 4 => true, 9 => true})
+     10.times do |i|
+       records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409,
+             "error" : {
+               "type":"version_conflict_engine_exception",
+               "reason":"document already exists"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason":"unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 400,
+             "error" : {
+               "type" : "mapper_parsing_exception",
+               "reason":"failed to parse"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError, Fluent::Plugin::OpenSearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       assert_equal 3, records.length
+       assert_equal 2, records[0]['_id']
+       # upsert is retried in case of conflict error.
+       assert_equal 3, records[1]['_id']
+       assert_equal 8, records[2]['_id']
+       error_ids = @plugin.error_events.collect { |h| h[:record]['_id'] }
+       assert_equal 3, error_ids.length
+       assert_equal [5, 6, 7], error_ids
+       @plugin.error_events.collect { |h| h[:error] }.each do |e|
+         assert_true e.respond_to?(:backtrace)
+       end
+     end
+     assert_true failed
+   end
+ end
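
For orientation, every test above drives the same core interaction, sketched below (a minimal sketch reusing the TestPlugin and MockChunk doubles from this file; bulk_response stands in for whatever parsed bulk-API reply the case under test needs):

    plugin  = TestPlugin.new(log)
    handler = Fluent::Plugin::OpenSearchErrorHandler.new(plugin)
    chunk   = MockChunk.new([{time: 123, record: {"foo" => "bar"}}])
    # Depending on the per-item statuses in bulk_response, handle_error either
    # returns, raises OpenSearchRequestAbortError / RetryStreamError, or emits
    # error events on the plugin.
    handler.handle_error(bulk_response, 'atag', chunk, 1, [])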