fluent-plugin-opensearch 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. checksums.yaml +7 -0
  2. data/.coveralls.yml +1 -0
  3. data/.editorconfig +9 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
  5. data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
  6. data/.github/workflows/coverage.yaml +22 -0
  7. data/.github/workflows/issue-auto-closer.yml +12 -0
  8. data/.github/workflows/linux.yml +26 -0
  9. data/.github/workflows/macos.yml +26 -0
  10. data/.github/workflows/windows.yml +26 -0
  11. data/.gitignore +18 -0
  12. data/CONTRIBUTING.md +24 -0
  13. data/Gemfile +10 -0
  14. data/History.md +6 -0
  15. data/ISSUE_TEMPLATE.md +26 -0
  16. data/LICENSE.txt +201 -0
  17. data/PULL_REQUEST_TEMPLATE.md +9 -0
  18. data/README.OpenSearchGenID.md +116 -0
  19. data/README.OpenSearchInput.md +291 -0
  20. data/README.Troubleshooting.md +482 -0
  21. data/README.md +1556 -0
  22. data/Rakefile +37 -0
  23. data/fluent-plugin-opensearch.gemspec +38 -0
  24. data/gemfiles/Gemfile.elasticsearch.v6 +12 -0
  25. data/lib/fluent/log-ext.rb +64 -0
  26. data/lib/fluent/plugin/filter_opensearch_genid.rb +103 -0
  27. data/lib/fluent/plugin/in_opensearch.rb +351 -0
  28. data/lib/fluent/plugin/oj_serializer.rb +48 -0
  29. data/lib/fluent/plugin/opensearch_constants.rb +39 -0
  30. data/lib/fluent/plugin/opensearch_error.rb +31 -0
  31. data/lib/fluent/plugin/opensearch_error_handler.rb +166 -0
  32. data/lib/fluent/plugin/opensearch_fallback_selector.rb +36 -0
  33. data/lib/fluent/plugin/opensearch_index_template.rb +155 -0
  34. data/lib/fluent/plugin/opensearch_simple_sniffer.rb +36 -0
  35. data/lib/fluent/plugin/opensearch_tls.rb +96 -0
  36. data/lib/fluent/plugin/out_opensearch.rb +1124 -0
  37. data/lib/fluent/plugin/out_opensearch_data_stream.rb +214 -0
  38. data/test/helper.rb +61 -0
  39. data/test/plugin/test_alias_template.json +9 -0
  40. data/test/plugin/test_filter_opensearch_genid.rb +241 -0
  41. data/test/plugin/test_in_opensearch.rb +493 -0
  42. data/test/plugin/test_index_alias_template.json +11 -0
  43. data/test/plugin/test_index_template.json +25 -0
  44. data/test/plugin/test_oj_serializer.rb +45 -0
  45. data/test/plugin/test_opensearch_error_handler.rb +689 -0
  46. data/test/plugin/test_opensearch_fallback_selector.rb +100 -0
  47. data/test/plugin/test_opensearch_tls.rb +171 -0
  48. data/test/plugin/test_out_opensearch.rb +3953 -0
  49. data/test/plugin/test_out_opensearch_data_stream.rb +474 -0
  50. data/test/plugin/test_template.json +23 -0
  51. data/test/test_log-ext.rb +61 -0
  52. metadata +262 -0
data/test/plugin/test_opensearch_error_handler.rb
@@ -0,0 +1,689 @@
+ # SPDX-License-Identifier: Apache-2.0
+ #
+ # The fluent-plugin-opensearch Contributors require contributions made to
+ # this file be licensed under the Apache-2.0 license or a
+ # compatible open source license.
+ #
+ # Modifications Copyright fluent-plugin-opensearch Contributors. See
+ # GitHub history for details.
+ #
+ # Licensed to Uken Inc. under one or more contributor
+ # license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright
+ # ownership. Uken Inc. licenses this file to you under
+ # the Apache License, Version 2.0 (the "License"); you may
+ # not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ require_relative '../helper'
+ require 'fluent/plugin/out_opensearch'
+ require 'fluent/plugin/opensearch_error_handler'
+ require 'json'
+
+ class TestOpenSearchErrorHandler < Test::Unit::TestCase
+
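+   # TestPlugin is a minimal stand-in for the output plugin: it exposes the
+   # accessors the error handler consults (write_operation,
+   # unrecoverable_error_types, log_os_400_reason) and collects everything
+   # routed through emit_error_event so tests can assert on it.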
+   class TestPlugin
+     attr_reader :log
+     attr_reader :error_events
+     attr_accessor :unrecoverable_error_types
+     attr_accessor :log_os_400_reason
+     attr_accessor :write_operation
+     def initialize(log, log_os_400_reason = false)
+       @log = log
+       @write_operation = 'index'
+       @error_events = []
+       @unrecoverable_error_types = ["out_of_memory_error", "rejected_execution_exception"]
+       @log_os_400_reason = log_os_400_reason
+     end
+
+     def router
+       self
+     end
+
+     def emit_error_event(tag, time, record, e)
+       @error_events << {:tag => tag, :time => time, :record => record, :error => e}
+     end
+
+     def process_message(tag, meta, header, time, record, affinity_target_indices, extracted_values)
+       return [meta, header, record]
+     end
+
+     def get_affinity_target_indices(chunk)
+       indices = Hash.new
+       indices
+     end
+
+     def append_record_to_messages(op, meta, header, record, msgs)
+       if record.has_key?('raise') && record['raise']
+         raise 'process_message'
+       end
+       return true
+     end
+   end
+
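+   # MockChunk mimics a buffer chunk: msgpack_each yields (time, record)
+   # pairs the way a real Fluentd chunk does.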
+   class MockChunk
+     def initialize(records)
+       @records = records
+       @index = 0
+     end
+     def msgpack_each
+       @records.each { |item| yield(item[:time], item[:record]) }
+     end
+   end
+
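+   # Each test gets a fresh handler wired to a TestPlugin whose logger sits
+   # at INFO; the 400-reason test classes below override this setup.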
+   def setup
+     Fluent::Test.setup
+     @log_device = Fluent::Test::DummyLogDevice.new
+     dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+     logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+     @log = Fluent::Log.new(logger)
+     @plugin = TestPlugin.new(@log)
+     @handler = Fluent::Plugin::OpenSearchErrorHandler.new(@plugin)
+   end
+
+   def parse_response(value)
+     JSON.parse(value)
+   end
+
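+   # At DEBUG log level, the reason for a 400 mapper_parsing_exception is
+   # included in the emitted error event's message.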
+   class TEST400ResponseReason < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::DEBUG}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::OpenSearchErrorHandler.new(@plugin)
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(1, @plugin.error_events.size)
+       expected_log = "failed to parse"
+       exception_message = @plugin.error_events.first[:error].message
+       assert_true(exception_message.include?(expected_log),
+                   "Expected exception message '#{exception_message}' to include '#{expected_log}'")
+       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+     end
+   end
+
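+   # Same scenario at INFO level: log_os_400_reason = true keeps the 400
+   # reason in the error message even without debug logging.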
+   class TEST400ResponseReasonNoDebug < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::OpenSearchErrorHandler.new(@plugin)
+       @plugin.log_os_400_reason = true
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(1, @plugin.error_events.size)
+       expected_log = "failed to parse"
+       exception_message = @plugin.error_events.first[:error].message
+       assert_true(exception_message.include?(expected_log),
+                   "Expected exception message '#{exception_message}' to include '#{expected_log}'")
+       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+     end
+   end
+
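+   # An empty item hash in the bulk response yields no error event at all.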
+   def test_nil_items_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [{}]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(0, @plugin.error_events.size)
+     assert_nil(@plugin.error_events[0])
+   end
+
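+   # A 503 ClusterBlockException, where "error" is a plain string rather
+   # than a hash, still produces a routable error event.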
+   def test_blocked_items_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 503,
+             "error" : "ClusterBlockException[blocked by: [SERVICE_UNAVAILABLE/1/state not recovered / initialized];]"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
+
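+   # A 400 carrying only a bare "reason" (no structured "error" hash) is
+   # emitted as an error event rather than retried.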
+   def test_dlq_400_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 400,
+             "_type" : "bar",
+             "reason" : "unrecognized error"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
+
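+   # out_of_memory_error is listed in unrecoverable_error_types, so the
+   # handler aborts the whole request with OpenSearchRequestAbortError.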
+   def test_out_of_memory_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 500,
+             "_type" : "bar",
+             "error" : {
+               "type" : "out_of_memory_error",
+               "reason" : "Java heap space"
+             }
+           }
+         }
+       ]
+     }))
+
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     assert_raise(Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError) do
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     end
+   end
+
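+   # rejected_execution_exception (HTTP 429) is unrecoverable by default
+   # and aborts the request the same way.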
+   def test_rejected_execution_exception_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 429,
+             "_type" : "bar",
+             "error" : {
+               "type" : "rejected_execution_exception",
+               "reason" : "rejected execution of org.opensearch.transport.TransportService"
+             }
+           }
+         }
+       ]
+     }))
+
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     assert_raise(Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError) do
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     end
+   end
+
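+   # Once es_rejected_execution_exception is removed from the unrecoverable
+   # list, the 429 lands in the retry stream instead of aborting.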
+   def test_es_rejected_execution_exception_responses_as_not_error
+     plugin = TestPlugin.new(@log)
+     plugin.unrecoverable_error_types = ["out_of_memory_error"]
+     handler = Fluent::Plugin::OpenSearchErrorHandler.new(plugin)
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 429,
+             "_type" : "bar",
+             "error" : {
+               "type" : "es_rejected_execution_exception",
+               "reason" : "rejected execution of org.opensearch.transport.TransportService"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError, Fluent::Plugin::OpenSearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       # the chunk should be retried when no unrecoverable error is thrown
+       assert_equal 1, records.length
+     end
+     assert_true failed
+   end
+
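+   # Mixed bulk response: the retriable 500s (_id 2 and 8) come back on the
+   # retry stream, while the 400s, a missing error type and a
+   # json_parse_exception (_id 5, 6, 7, 9) are dropped to the error stream.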
+   def test_retry_error
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0 => true, 4 => true})
+     10.times do |i|
+       records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason" : "unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 400,
+             "error" : {
+               "type" : "mapper_parsing_exception",
+               "reason" : "failed to parse"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "9",
+             "status" : 500,
+             "error" : {
+               "type" : "json_parse_exception",
+               "reason" : "Invalid UTF-8 start byte 0x92\\n at [Source: org.opensearch.transport.netty4.ByteBufStreamInput@204fe9c9; line: 1, column: 81]"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError, Fluent::Plugin::OpenSearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       assert_equal 2, records.length, "Expected retry_stream to contain 2 records"
+       assert_equal 2, records[0]['_id'], "Expected record with _id 2 to be in retry_stream"
+       assert_equal 8, records[1]['_id'], "Expected record with _id 8 to be in retry_stream"
+       error_ids = @plugin.error_events.collect { |h| h[:record]['_id'] }
+       assert_equal 4, error_ids.length, "Expected 4 records to be dropped from retry_stream"
+       assert_equal [5, 6, 7, 9], error_ids, "Expected records 5, 6, 7 and 9 to be dropped from retry_stream"
+       @plugin.error_events.collect { |h| h[:error] }.each do |e|
+         assert_true e.respond_to?(:backtrace)
+       end
+     end
+     assert_true failed
+   end
+
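+   # If any item reports an unrecoverable error type, the entire chunk is
+   # abandoned: the retry stream comes back empty.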
+   def test_unrecoverable_error_included_in_responses
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0 => true, 4 => true, 9 => true})
+     10.times do |i|
+       records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason" : "unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 500,
+             "error" : {
+               "type" : "out_of_memory_error",
+               "reason" : "Java heap space"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError, Fluent::Plugin::OpenSearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       # the entire chunk should be dropped when an unrecoverable error is returned
+       assert_equal 0, records.length
+     end
+     assert_true failed
+   end
+
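+   # Under 'upsert', a 409 version conflict becomes retriable: _id 3 joins
+   # the retry stream alongside the retriable 500s.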
569
+ def test_retry_error_upsert
570
+ @plugin.write_operation = 'upsert'
571
+ records = []
572
+ error_records = Hash.new(false)
573
+ error_records.merge!({0=>true, 4=>true, 9=>true})
574
+ 10.times do |i|
575
+ records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
576
+ end
577
+ chunk = MockChunk.new(records)
578
+
579
+ response = parse_response(%({
580
+ "took" : 1,
581
+ "errors" : true,
582
+ "items" : [
583
+ {
584
+ "update" : {
585
+ "_index" : "foo",
586
+ "_type" : "bar",
587
+ "_id" : "1",
588
+ "status" : 201
589
+ }
590
+ },
591
+ {
592
+ "update" : {
593
+ "_index" : "foo",
594
+ "_type" : "bar",
595
+ "_id" : "2",
596
+ "status" : 500,
597
+ "error" : {
598
+ "type" : "some unrecognized type",
599
+ "reason":"unrecognized error"
600
+ }
601
+ }
602
+ },
603
+ {
604
+ "update" : {
605
+ "_index" : "foo",
606
+ "_type" : "bar",
607
+ "_id" : "3",
608
+ "status" : 409,
609
+ "error" : {
610
+ "type":"version_conflict_engine_exception",
611
+ "reason":"document already exists"
612
+ }
613
+ }
614
+ },
615
+ {
616
+ "update" : {
617
+ "_index" : "foo",
618
+ "_type" : "bar",
619
+ "_id" : "5",
620
+ "status" : 500,
621
+ "error" : {
622
+ "reason":"unrecognized error - no type field"
623
+ }
624
+ }
625
+ },
626
+ {
627
+ "update" : {
628
+ "_index" : "foo",
629
+ "_type" : "bar",
630
+ "_id" : "6",
631
+ "status" : 400,
632
+ "error" : {
633
+ "type" : "mapper_parsing_exception",
634
+ "reason":"failed to parse"
635
+ }
636
+ }
637
+ },
638
+ {
639
+ "update" : {
640
+ "_index" : "foo",
641
+ "_type" : "bar",
642
+ "_id" : "7",
643
+ "status" : 400,
644
+ "error" : {
645
+ "type" : "some unrecognized type",
646
+ "reason":"unrecognized error"
647
+ }
648
+ }
649
+ },
650
+ {
651
+ "update" : {
652
+ "_index" : "foo",
653
+ "_type" : "bar",
654
+ "_id" : "8",
655
+ "status" : 500,
656
+ "error" : {
657
+ "type" : "some unrecognized type",
658
+ "reason":"unrecognized error"
659
+ }
660
+ }
661
+ }
662
+ ]
663
+ }))
664
+
665
+ begin
666
+ failed = false
667
+ dummy_extracted_values = []
668
+ @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
669
+ rescue Fluent::Plugin::OpenSearchErrorHandler::OpenSearchRequestAbortError, Fluent::Plugin::OpenSearchOutput::RetryStreamError=>e
670
+ failed = true
671
+ records = [].tap do |records|
672
+ next unless e.respond_to?(:retry_stream)
673
+ e.retry_stream.each {|time, record| records << record}
674
+ end
675
+ assert_equal 3, records.length
676
+ assert_equal 2, records[0]['_id']
677
+ # upsert is retried in case of conflict error.
678
+ assert_equal 3, records[1]['_id']
679
+ assert_equal 8, records[2]['_id']
680
+ error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
681
+ assert_equal 3, error_ids.length
682
+ assert_equal [5, 6, 7], error_ids
683
+ @plugin.error_events.collect {|h| h[:error]}.each do |e|
684
+ assert_true e.respond_to?(:backtrace)
685
+ end
686
+ end
687
+ assert_true failed
688
+ end
689
+ end