fluent-plugin-elasticsearch2 3.5.5 → 3.5.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
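Only one line changes between these versions: the second require now loads the renamed output plugin source, out_elasticsearch2 instead of out_elasticsearch, presumably tracking the forked gem's name. The remaining 524 lines of the test file are identical, so the whole diff reduces to this (version comments are illustrative, not part of the package):

    # 3.5.5
    require 'fluent/plugin/out_elasticsearch'
    # 3.5.6
    require 'fluent/plugin/out_elasticsearch2'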
@@ -1,525 +1,525 @@
 require 'helper'
-require 'fluent/plugin/out_elasticsearch'
+require 'fluent/plugin/out_elasticsearch2'
 require 'fluent/plugin/elasticsearch_error_handler'
 require 'json'
 
 class TestElasticsearchErrorHandler < Test::Unit::TestCase
 
   class TestPlugin
     attr_reader :log
     attr_reader :write_operation, :error_events
     attr_accessor :unrecoverable_error_types
     attr_accessor :log_es_400_reason
     def initialize(log, log_es_400_reason = false)
       @log = log
       @write_operation = 'index'
       @error_events = []
       @unrecoverable_error_types = ["out_of_memory_error", "es_rejected_execution_exception"]
       @log_es_400_reason = log_es_400_reason
     end
 
     def router
       self
     end
 
     def emit_error_event(tag, time, record, e)
       @error_events << {:tag => tag, :time=>time, :record=>record, :error=>e}
     end
 
     def process_message(tag, meta, header, time, record, extracted_values)
       return [meta, header, record]
     end
 
     def append_record_to_messages(op, meta, header, record, msgs)
       if record.has_key?('raise') && record['raise']
         raise Exception('process_message')
       end
       return true
     end
   end
 
   class MockChunk
     def initialize(records)
       @records = records
       @index = 0
     end
     def msgpack_each
       @records.each { |item| yield(item[:time],item[:record]) }
     end
   end
 
   def setup
     Fluent::Test.setup
     @log_device = Fluent::Test::DummyLogDevice.new
     dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
     logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
     @log = Fluent::Log.new(logger)
     @plugin = TestPlugin.new(@log)
     @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
   end
 
   def parse_response(value)
     JSON.parse(value)
   end
 
   class TEST400ResponseReason < self
     def setup
       Fluent::Test.setup
       @log_device = Fluent::Test::DummyLogDevice.new
       dl_opts = {:log_level => ServerEngine::DaemonLogger::DEBUG}
       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
       @log = Fluent::Log.new(logger)
       @plugin = TestPlugin.new(@log)
       @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
     end
 
     def test_400_responses_reason_log
       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
       response = parse_response(%({
         "took" : 0,
         "errors" : true,
         "items" : [
           {
             "create" : {
               "_index" : "foo",
               "status" : 400,
               "error" : {
                 "type" : "mapper_parsing_exception",
                 "reason" : "failed to parse"
               }
             }
           }
         ]
       }))
       chunk = MockChunk.new(records)
       dummy_extracted_values = []
       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
       assert_equal(1, @plugin.error_events.size)
       expected_log = "failed to parse"
       exception_message = @plugin.error_events.first[:error].message
       assert_true(exception_message.include?(expected_log),
                   "Exception do not contain '#{exception_message}' '#{expected_log}'")
       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
     end
   end
 
   class TEST400ResponseReasonNoDebug < self
     def setup
       Fluent::Test.setup
       @log_device = Fluent::Test::DummyLogDevice.new
       dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
       @log = Fluent::Log.new(logger)
       @plugin = TestPlugin.new(@log)
       @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
       @plugin.log_es_400_reason = true
     end
 
     def test_400_responses_reason_log
       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
       response = parse_response(%({
         "took" : 0,
         "errors" : true,
         "items" : [
           {
             "create" : {
               "_index" : "foo",
               "status" : 400,
               "error" : {
                 "type" : "mapper_parsing_exception",
                 "reason" : "failed to parse"
               }
             }
           }
         ]
       }))
       chunk = MockChunk.new(records)
       dummy_extracted_values = []
       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
       assert_equal(1, @plugin.error_events.size)
       expected_log = "failed to parse"
       exception_message = @plugin.error_events.first[:error].message
       assert_true(exception_message.include?(expected_log),
                   "Exception do not contain '#{exception_message}' '#{expected_log}'")
       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
     end
   end
 
   def test_nil_items_responses
     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
     response = parse_response(%({
       "took" : 0,
       "errors" : true,
       "items" : [{}]
     }))
     chunk = MockChunk.new(records)
     dummy_extracted_values = []
     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
     assert_equal(0, @plugin.error_events.size)
     assert_nil(@plugin.error_events[0])
   end
 
   def test_blocked_items_responses
     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
     response = parse_response(%({
       "took" : 0,
       "errors" : true,
       "items" : [
         {
           "create" : {
             "_index" : "foo",
             "status" : 503,
             "error" : "ClusterBlockException[blocked by: [SERVICE_UNAVAILABLE/1/state not recovered / initialized];]"
           }
         }
       ]
     }))
     chunk = MockChunk.new(records)
     dummy_extracted_values = []
     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
     assert_equal(1, @plugin.error_events.size)
     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
   end
 
   def test_dlq_400_responses
     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
     response = parse_response(%({
       "took" : 0,
       "errors" : true,
       "items" : [
         {
           "create" : {
             "_index" : "foo",
             "status" : 400,
             "_type" : "bar",
             "reason":"unrecognized error"
           }
         }
       ]
     }))
     chunk = MockChunk.new(records)
     dummy_extracted_values = []
     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
     assert_equal(1, @plugin.error_events.size)
     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
   end
 
   def test_out_of_memory_responses
     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
     response = parse_response(%({
       "took" : 0,
       "errors" : true,
       "items" : [
         {
           "create" : {
             "_index" : "foo",
             "status" : 500,
             "_type" : "bar",
             "error" : {
               "type" : "out_of_memory_error",
               "reason":"Java heap space"
             }
           }
         }
       ]
     }))
 
     chunk = MockChunk.new(records)
     dummy_extracted_values = []
     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
     end
   end
 
   def test_es_rejected_execution_exception_responses
     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
     response = parse_response(%({
       "took" : 0,
       "errors" : true,
       "items" : [
         {
           "create" : {
             "_index" : "foo",
             "status" : 429,
             "_type" : "bar",
             "error" : {
               "type" : "es_rejected_execution_exception",
               "reason":"rejected execution of org.elasticsearch.transport.TransportService"
             }
           }
         }
       ]
     }))
 
     chunk = MockChunk.new(records)
     dummy_extracted_values = []
     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
     end
   end
 
   def test_es_rejected_execution_exception_responses_as_not_error
     plugin = TestPlugin.new(@log)
     plugin.unrecoverable_error_types = ["out_of_memory_error"]
     handler = Fluent::Plugin::ElasticsearchErrorHandler.new(plugin)
     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
     response = parse_response(%({
       "took" : 0,
       "errors" : true,
       "items" : [
         {
           "create" : {
             "_index" : "foo",
             "status" : 429,
             "_type" : "bar",
             "error" : {
               "type" : "es_rejected_execution_exception",
               "reason":"rejected execution of org.elasticsearch.transport.TransportService"
             }
           }
         }
       ]
     }))
 
     begin
       failed = false
       chunk = MockChunk.new(records)
       dummy_extracted_values = []
       handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
       failed = true
       records = [].tap do |records|
         next unless e.respond_to?(:retry_stream)
         e.retry_stream.each {|time, record| records << record}
       end
       # should retry chunk when unrecoverable error is not thrown
       assert_equal 1, records.length
     end
     assert_true failed
   end
 
   def test_retry_error
     records = []
     error_records = Hash.new(false)
     error_records.merge!({0=>true, 4=>true, 9=>true})
     10.times do |i|
       records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
     end
     chunk = MockChunk.new(records)
 
     response = parse_response(%({
       "took" : 1,
       "errors" : true,
       "items" : [
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "1",
             "status" : 201
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "2",
             "status" : 500,
             "error" : {
               "type" : "some unrecognized type",
               "reason":"unrecognized error"
             }
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "3",
             "status" : 409
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "5",
             "status" : 500,
             "error" : {
               "reason":"unrecognized error - no type field"
             }
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "6",
             "status" : 400,
             "error" : {
               "type" : "mapper_parsing_exception",
               "reason":"failed to parse"
             }
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "7",
             "status" : 400,
             "error" : {
               "type" : "some unrecognized type",
               "reason":"unrecognized error"
             }
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "8",
             "status" : 500,
             "error" : {
               "type" : "some unrecognized type",
               "reason":"unrecognized error"
             }
           }
         }
       ]
     }))
 
     begin
       failed = false
       dummy_extracted_values = []
       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
       failed = true
       records = [].tap do |records|
         next unless e.respond_to?(:retry_stream)
         e.retry_stream.each {|time, record| records << record}
       end
       assert_equal 2, records.length
       assert_equal 2, records[0]['_id']
       assert_equal 8, records[1]['_id']
       error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
       assert_equal 3, error_ids.length
       assert_equal [5, 6, 7], error_ids
       @plugin.error_events.collect {|h| h[:error]}.each do |e|
         assert_true e.respond_to?(:backtrace)
       end
     end
     assert_true failed
 
   end
 
   def test_unrecoverable_error_included_in_responses
     records = []
     error_records = Hash.new(false)
     error_records.merge!({0=>true, 4=>true, 9=>true})
     10.times do |i|
       records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
     end
     chunk = MockChunk.new(records)
 
     response = parse_response(%({
       "took" : 1,
       "errors" : true,
       "items" : [
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "1",
             "status" : 201
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "2",
             "status" : 500,
             "error" : {
               "type" : "some unrecognized type",
               "reason":"unrecognized error"
             }
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "3",
             "status" : 409
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "5",
             "status" : 500,
             "error" : {
               "reason":"unrecognized error - no type field"
             }
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "6",
             "status" : 500,
             "_type" : "bar",
             "error" : {
               "type" : "out_of_memory_error",
               "reason":"Java heap space"
             }
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "7",
             "status" : 400,
             "error" : {
               "type" : "some unrecognized type",
               "reason":"unrecognized error"
             }
           }
         },
         {
           "create" : {
             "_index" : "foo",
             "_type" : "bar",
             "_id" : "8",
             "status" : 500,
             "error" : {
               "type" : "some unrecognized type",
               "reason":"unrecognized error"
             }
           }
         }
       ]
     }))
 
     begin
       failed = false
       dummy_extracted_values = []
       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
       failed = true
       records = [].tap do |records|
         next unless e.respond_to?(:retry_stream)
         e.retry_stream.each {|time, record| records << record}
       end
       # should drop entire chunk when unrecoverable error response is replied
       assert_equal 0, records.length
     end
     assert_true failed
 
   end
 
 end
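Taken together, these tests pin down the handler's three outcomes: per-record error events routed through emit_error_event, a Fluent::Plugin::ElasticsearchOutput::RetryStreamError whose retry_stream carries the records to resubmit, and a Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError that abandons the chunk when a type listed in unrecoverable_error_types comes back. A minimal sketch of the rescue idiom the tests rely on, using names from the file above (the plugin, records, and response values are illustrative):

    handler = Fluent::Plugin::ElasticsearchErrorHandler.new(plugin)
    chunk = MockChunk.new(records)
    begin
      handler.handle_error(response, 'atag', chunk, response['items'].length, [])
    rescue Fluent::Plugin::ElasticsearchOutput::RetryStreamError => e
      # recoverable: collect the failed records so they can be resubmitted
      retryable = []
      e.retry_stream.each { |time, record| retryable << record }
    rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError
      # unrecoverable: the request is aborted and the chunk is not retried
    end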