fluent-plugin-elasticsearch 1.9.4 → 5.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. checksums.yaml +5 -5
  2. data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
  3. data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
  4. data/.github/workflows/issue-auto-closer.yml +12 -0
  5. data/.github/workflows/linux.yml +26 -0
  6. data/.github/workflows/macos.yml +26 -0
  7. data/.github/workflows/windows.yml +26 -0
  8. data/.travis.yml +33 -6
  9. data/CONTRIBUTING.md +24 -0
  10. data/Gemfile +4 -1
  11. data/History.md +445 -1
  12. data/ISSUE_TEMPLATE.md +19 -0
  13. data/README.ElasticsearchGenID.md +116 -0
  14. data/README.ElasticsearchInput.md +293 -0
  15. data/README.Troubleshooting.md +692 -0
  16. data/README.md +1013 -38
  17. data/appveyor.yml +20 -0
  18. data/fluent-plugin-elasticsearch.gemspec +15 -9
  19. data/{Gemfile.v0.12 → gemfiles/Gemfile.elasticsearch.v6} +6 -5
  20. data/lib/fluent/log-ext.rb +38 -0
  21. data/lib/fluent/plugin/default-ilm-policy.json +14 -0
  22. data/lib/fluent/plugin/elasticsearch_constants.rb +13 -0
  23. data/lib/fluent/plugin/elasticsearch_error.rb +5 -0
  24. data/lib/fluent/plugin/elasticsearch_error_handler.rb +129 -0
  25. data/lib/fluent/plugin/elasticsearch_fallback_selector.rb +9 -0
  26. data/lib/fluent/plugin/elasticsearch_index_lifecycle_management.rb +67 -0
  27. data/lib/fluent/plugin/elasticsearch_index_template.rb +186 -12
  28. data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb +10 -0
  29. data/lib/fluent/plugin/elasticsearch_tls.rb +70 -0
  30. data/lib/fluent/plugin/filter_elasticsearch_genid.rb +77 -0
  31. data/lib/fluent/plugin/in_elasticsearch.rb +325 -0
  32. data/lib/fluent/plugin/oj_serializer.rb +22 -0
  33. data/lib/fluent/plugin/out_elasticsearch.rb +1008 -267
  34. data/lib/fluent/plugin/out_elasticsearch_data_stream.rb +218 -0
  35. data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +232 -214
  36. data/test/plugin/test_alias_template.json +9 -0
  37. data/test/plugin/test_elasticsearch_error_handler.rb +646 -0
  38. data/test/plugin/test_elasticsearch_fallback_selector.rb +74 -0
  39. data/test/plugin/test_elasticsearch_index_lifecycle_management.rb +66 -0
  40. data/test/plugin/test_elasticsearch_tls.rb +145 -0
  41. data/test/plugin/test_filter_elasticsearch_genid.rb +215 -0
  42. data/test/plugin/test_in_elasticsearch.rb +459 -0
  43. data/test/plugin/test_index_alias_template.json +11 -0
  44. data/test/plugin/test_index_template.json +25 -0
  45. data/test/plugin/test_oj_serializer.rb +19 -0
  46. data/test/plugin/test_out_elasticsearch.rb +5029 -387
  47. data/test/plugin/test_out_elasticsearch_data_stream.rb +337 -0
  48. data/test/plugin/test_out_elasticsearch_dynamic.rb +681 -208
  49. data/test/test_log-ext.rb +35 -0
  50. metadata +97 -19
data/test/plugin/test_elasticsearch_error_handler.rb
@@ -0,0 +1,646 @@
+ require_relative '../helper'
+ require 'fluent/plugin/out_elasticsearch'
+ require 'fluent/plugin/elasticsearch_error_handler'
+ require 'json'
+
+ class TestElasticsearchErrorHandler < Test::Unit::TestCase
+
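+   # Minimal stand-in for the ES output plugin: it acts as its own router and
+   # collects records emitted to the error stream so tests can inspect them.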
+   class TestPlugin
+     attr_reader :log
+     attr_reader :error_events
+     attr_accessor :unrecoverable_error_types
+     attr_accessor :log_es_400_reason
+     attr_accessor :write_operation
+     def initialize(log, log_es_400_reason = false)
+       @log = log
+       @write_operation = 'index'
+       @error_events = []
+       @unrecoverable_error_types = ["out_of_memory_error", "es_rejected_execution_exception"]
+       @log_es_400_reason = log_es_400_reason
+     end
+
+     def router
+       self
+     end
+
+     def emit_error_event(tag, time, record, e)
+       @error_events << {:tag => tag, :time => time, :record => record, :error => e}
+     end
+
+     def process_message(tag, meta, header, time, record, extracted_values)
+       return [meta, header, record]
+     end
+
+     def append_record_to_messages(op, meta, header, record, msgs)
+       if record.has_key?('raise') && record['raise']
+         # simulate a record that fails during processing
+         raise 'process_message'
+       end
+       return true
+     end
+   end
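+
+   # Stub of a buffer chunk: msgpack_each simply replays the in-memory records,
+   # which is the only chunk API the error handler relies on.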
+   class MockChunk
+     def initialize(records)
+       @records = records
+       @index = 0
+     end
+
+     def msgpack_each
+       @records.each { |item| yield(item[:time], item[:record]) }
+     end
+   end
+
+   def setup
+     Fluent::Test.setup
+     @log_device = Fluent::Test::DummyLogDevice.new
+     dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+     logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+     @log = Fluent::Log.new(logger)
+     @plugin = TestPlugin.new(@log)
+     @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
+   end
+
+   def parse_response(value)
+     JSON.parse(value)
+   end
+
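+   # Both subclasses replay the same 400 "mapper_parsing_exception" fixture;
+   # one runs the logger at DEBUG, the other enables log_es_400_reason so the
+   # reason is surfaced even at INFO level.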
+   class TEST400ResponseReason < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::DEBUG}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(1, @plugin.error_events.size)
+       expected_log = "failed to parse"
+       exception_message = @plugin.error_events.first[:error].message
+       assert_true(exception_message.include?(expected_log),
+                   "Exception message '#{exception_message}' does not contain '#{expected_log}'")
+       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+     end
+   end
+
+   class TEST400ResponseReasonNoDebug < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
+       @plugin.log_es_400_reason = true
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(1, @plugin.error_events.size)
+       expected_log = "failed to parse"
+       exception_message = @plugin.error_events.first[:error].message
+       assert_true(exception_message.include?(expected_log),
+                   "Exception message '#{exception_message}' does not contain '#{expected_log}'")
+       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+     end
+   end
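+
+   # An empty item hash carries no operation or status, so the handler emits
+   # no error event for it.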
+   def test_nil_items_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [{}]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(0, @plugin.error_events.size)
+     assert_nil(@plugin.error_events[0])
+   end
+
+   def test_blocked_items_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 503,
+             "error" : "ClusterBlockException[blocked by: [SERVICE_UNAVAILABLE/1/state not recovered / initialized];]"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
+
+   def test_dlq_400_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 400,
+             "_type" : "bar",
+             "reason" : "unrecognized error"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
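+
+   # out_of_memory_error is listed in unrecoverable_error_types, so the handler
+   # must abort the request rather than retry or route the record to @error.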
+   def test_out_of_memory_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 500,
+             "_type" : "bar",
+             "error" : {
+               "type" : "out_of_memory_error",
+               "reason" : "Java heap space"
+             }
+           }
+         }
+       ]
+     }))
+
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     end
+   end
+
+   def test_es_rejected_execution_exception_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 429,
+             "_type" : "bar",
+             "error" : {
+               "type" : "es_rejected_execution_exception",
+               "reason" : "rejected execution of org.elasticsearch.transport.TransportService"
+             }
+           }
+         }
+       ]
+     }))
+
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     end
+   end
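+
+   # With es_rejected_execution_exception removed from unrecoverable_error_types,
+   # the same 429 rejection becomes retryable and the record is handed back
+   # through RetryStreamError#retry_stream.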
+   def test_es_rejected_execution_exception_responses_as_not_error
+     plugin = TestPlugin.new(@log)
+     plugin.unrecoverable_error_types = ["out_of_memory_error"]
+     handler = Fluent::Plugin::ElasticsearchErrorHandler.new(plugin)
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 429,
+             "_type" : "bar",
+             "error" : {
+               "type" : "es_rejected_execution_exception",
+               "reason" : "rejected execution of org.elasticsearch.transport.TransportService"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       # should retry chunk when unrecoverable error is not thrown
+       assert_equal 1, records.length
+     end
+     assert_true failed
+   end
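+
+   # Mixed bulk response across ten records: 201 and 409 count as handled,
+   # 500s that carry an error type are retried (ids 2 and 8), while 400s and a
+   # 500 without an error type are routed to the error stream (ids 5, 6, 7).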
+   def test_retry_error
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0 => true, 4 => true, 9 => true})
+     10.times do |i|
+       records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason" : "unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 400,
+             "error" : {
+               "type" : "mapper_parsing_exception",
+               "reason" : "failed to parse"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       assert_equal 2, records.length
+       assert_equal 2, records[0]['_id']
+       assert_equal 8, records[1]['_id']
+       error_ids = @plugin.error_events.collect { |h| h[:record]['_id'] }
+       assert_equal 3, error_ids.length
+       assert_equal [5, 6, 7], error_ids
+       @plugin.error_events.collect { |h| h[:error] }.each do |e|
+         assert_true e.respond_to?(:backtrace)
+       end
+     end
+     assert_true failed
+   end
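+
+   # If any item in the response is unrecoverable (out_of_memory_error on id 6
+   # here), the handler aborts and the whole chunk is dropped: nothing is
+   # retried via retry_stream.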
+   def test_unrecoverable_error_included_in_responses
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0 => true, 4 => true, 9 => true})
+     10.times do |i|
+       records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason" : "unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 500,
+             "error" : {
+               "type" : "out_of_memory_error",
+               "reason" : "Java heap space"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       # should drop entire chunk when unrecoverable error response is replied
+       assert_equal 0, records.length
+     end
+     assert_true failed
+   end
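+
+   # Same response shape via the update/upsert path: a 409 version conflict is
+   # additionally retried (id 3), since re-running an upsert can succeed.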
+   def test_retry_error_upsert
+     @plugin.write_operation = 'upsert'
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0 => true, 4 => true, 9 => true})
+     10.times do |i|
+       records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409,
+             "error" : {
+               "type" : "version_conflict_engine_exception",
+               "reason" : "document already exists"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason" : "unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 400,
+             "error" : {
+               "type" : "mapper_parsing_exception",
+               "reason" : "failed to parse"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         },
+         {
+           "update" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason" : "unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError => e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each { |time, record| records << record }
+       end
+       assert_equal 3, records.length
+       assert_equal 2, records[0]['_id']
+       # upsert is retried in case of conflict error.
+       assert_equal 3, records[1]['_id']
+       assert_equal 8, records[2]['_id']
+       error_ids = @plugin.error_events.collect { |h| h[:record]['_id'] }
+       assert_equal 3, error_ids.length
+       assert_equal [5, 6, 7], error_ids
+       @plugin.error_events.collect { |h| h[:error] }.each do |e|
+         assert_true e.respond_to?(:backtrace)
+       end
+     end
+     assert_true failed
+   end
+ end