fluent-plugin-elasticsearch 3.5.3 → 3.5.4

data/test/helper.rb CHANGED
@@ -1,24 +1,24 @@
require 'simplecov'
SimpleCov.start do
  add_filter do |src|
    !(src.filename =~ /^#{SimpleCov.root}\/lib/)
  end
end

require 'coveralls'
Coveralls.wear!

# Needs to be after simplecov but before test/unit, because fluentd sets the default
# encoding to ASCII-8BIT, but Coveralls might load git data which could contain a
# UTF-8 character.
at_exit do
  Encoding.default_internal = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_internal)
  Encoding.default_external = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_external)
end

require 'test/unit'
require 'fluent/test'
require 'minitest/pride'

require 'webmock/test_unit'
WebMock.disable_net_connect!
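The last two lines above matter for test isolation: with real network access disabled, every Elasticsearch call a test makes must be stubbed. As a minimal sketch (the URL and response body here are illustrative, not taken from this gem's suite), a test would register a stub such as:

  stub_request(:post, "http://localhost:9200/_bulk").
    to_return(:status => 200,
              :body => '{"took":0,"errors":false,"items":[]}',
              :headers => {'Content-Type' => 'application/json'})

Any unstubbed request then fails fast with WebMock::NetConnectNotAllowedError instead of touching a live cluster.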
@@ -1,9 +1,9 @@
{
  "order": 5,
  "template": "--index_prefix-----appid---*",
  "settings": {},
  "mappings": {},
  "aliases": {
    "--appid---alias": {}
  }
}
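The --index_prefix-- and --appid-- tokens in this fixture are placeholders meant to be substituted before the template is registered with Elasticsearch; in this plugin that is the role of the customize_template option used together with template_name and template_file. A hypothetical output section using such a fixture (host, paths, and substitution values are illustrative) could look like:

  <match app.**>
    @type elasticsearch
    host localhost
    port 9200
    template_name myapp_alias_template
    template_file /etc/fluent/alias_template.json
    customize_template {"--index_prefix--": "logs", "--appid--": "myapp"}
  </match>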
@@ -1,503 +1,503 @@
require 'helper'
require 'fluent/plugin/out_elasticsearch'
require 'fluent/plugin/elasticsearch_error_handler'
require 'json'

class TestElasticsearchErrorHandler < Test::Unit::TestCase

  class TestPlugin
    attr_reader :log
    attr_reader :write_operation, :error_events
    attr_accessor :unrecoverable_error_types
    attr_accessor :log_es_400_reason

    def initialize(log, log_es_400_reason = false)
      @log = log
      @write_operation = 'index'
      @error_events = []
      @unrecoverable_error_types = ["out_of_memory_error", "es_rejected_execution_exception"]
      @log_es_400_reason = log_es_400_reason
    end

    def router
      self
    end

    def emit_error_event(tag, time, record, e)
      @error_events << {:tag => tag, :time => time, :record => record, :error => e}
    end

    def process_message(tag, meta, header, time, record, extracted_values)
      return [meta, header, record]
    end

    def append_record_to_messages(op, meta, header, record, msgs)
      if record.has_key?('raise') && record['raise']
        raise Exception.new('process_message')
      end
      return true
    end
  end

  class MockChunk
    def initialize(records)
      @records = records
      @index = 0
    end

    def msgpack_each
      @records.each { |item| yield(item[:time], item[:record]) }
    end
  end

  def setup
    Fluent::Test.setup
    @log_device = Fluent::Test::DummyLogDevice.new
    dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
    logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
    @log = Fluent::Log.new(logger)
    @plugin = TestPlugin.new(@log)
    @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
  end

  def parse_response(value)
    JSON.parse(value)
  end

  class TEST400ResponseReason < self
    def setup
      Fluent::Test.setup
      @log_device = Fluent::Test::DummyLogDevice.new
      dl_opts = {:log_level => ServerEngine::DaemonLogger::DEBUG}
      logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
      @log = Fluent::Log.new(logger)
      @plugin = TestPlugin.new(@log)
      @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
    end

    def test_400_responses_reason_log
      records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
      response = parse_response(%({
        "took" : 0,
        "errors" : true,
        "items" : [
          {
            "create" : {
              "_index" : "foo",
              "status" : 400,
              "error" : {
                "type" : "mapper_parsing_exception",
                "reason" : "failed to parse"
              }
            }
          }
        ]
      }))
      chunk = MockChunk.new(records)
      dummy_extracted_values = []
      @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
      assert_equal(1, @plugin.error_events.size)
      expected_log = "failed to parse"
      exception_message = @plugin.error_events.first[:error].message
      assert_true(exception_message.include?(expected_log),
                  "Exception message '#{exception_message}' does not contain '#{expected_log}'")
      assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
    end
  end

  class TEST400ResponseReasonNoDebug < self
    def setup
      Fluent::Test.setup
      @log_device = Fluent::Test::DummyLogDevice.new
      dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
      logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
      @log = Fluent::Log.new(logger)
      @plugin = TestPlugin.new(@log)
      @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
      @plugin.log_es_400_reason = true
    end

    def test_400_responses_reason_log
      records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
      response = parse_response(%({
        "took" : 0,
        "errors" : true,
        "items" : [
          {
            "create" : {
              "_index" : "foo",
              "status" : 400,
              "error" : {
                "type" : "mapper_parsing_exception",
                "reason" : "failed to parse"
              }
            }
          }
        ]
      }))
      chunk = MockChunk.new(records)
      dummy_extracted_values = []
      @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
      assert_equal(1, @plugin.error_events.size)
      expected_log = "failed to parse"
      exception_message = @plugin.error_events.first[:error].message
      assert_true(exception_message.include?(expected_log),
                  "Exception message '#{exception_message}' does not contain '#{expected_log}'")
      assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
    end
  end

  def test_nil_items_responses
    records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
    response = parse_response(%({
      "took" : 0,
      "errors" : true,
      "items" : [{}]
    }))
    chunk = MockChunk.new(records)
    dummy_extracted_values = []
    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
    assert_equal(0, @plugin.error_events.size)
    assert_nil(@plugin.error_events[0])
  end

  def test_dlq_400_responses
    records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
    response = parse_response(%({
      "took" : 0,
      "errors" : true,
      "items" : [
        {
          "create" : {
            "_index" : "foo",
            "status" : 400,
            "_type" : "bar",
            "reason" : "unrecognized error"
          }
        }
      ]
    }))
    chunk = MockChunk.new(records)
    dummy_extracted_values = []
    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
    assert_equal(1, @plugin.error_events.size)
    assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
  end

  def test_out_of_memory_responses
    records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
    response = parse_response(%({
      "took" : 0,
      "errors" : true,
      "items" : [
        {
          "create" : {
            "_index" : "foo",
            "status" : 500,
            "_type" : "bar",
            "error" : {
              "type" : "out_of_memory_error",
              "reason" : "Java heap space"
            }
          }
        }
      ]
    }))

    chunk = MockChunk.new(records)
    dummy_extracted_values = []
    assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
      @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
    end
  end

  def test_es_rejected_execution_exception_responses
    records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
    response = parse_response(%({
      "took" : 0,
      "errors" : true,
      "items" : [
        {
          "create" : {
            "_index" : "foo",
            "status" : 429,
            "_type" : "bar",
            "error" : {
              "type" : "es_rejected_execution_exception",
              "reason" : "rejected execution of org.elasticsearch.transport.TransportService"
            }
          }
        }
      ]
    }))

    chunk = MockChunk.new(records)
    dummy_extracted_values = []
    assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
      @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
    end
  end

  def test_es_rejected_execution_exception_responses_as_not_error
    plugin = TestPlugin.new(@log)
    plugin.unrecoverable_error_types = ["out_of_memory_error"]
    handler = Fluent::Plugin::ElasticsearchErrorHandler.new(plugin)
    records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
    response = parse_response(%({
      "took" : 0,
      "errors" : true,
      "items" : [
        {
          "create" : {
            "_index" : "foo",
            "status" : 429,
            "_type" : "bar",
            "error" : {
              "type" : "es_rejected_execution_exception",
              "reason" : "rejected execution of org.elasticsearch.transport.TransportService"
            }
          }
        }
      ]
    }))

    begin
      failed = false
      chunk = MockChunk.new(records)
      dummy_extracted_values = []
      handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
    rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError => e
      failed = true
      records = [].tap do |records|
        next unless e.respond_to?(:retry_stream)
        e.retry_stream.each { |time, record| records << record }
      end
      # should retry the chunk when no unrecoverable error is thrown
      assert_equal 1, records.length
    end
    assert_true failed
  end

  def test_retry_error
    records = []
    error_records = Hash.new(false)
    error_records.merge!({0 => true, 4 => true, 9 => true})
    10.times do |i|
      records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
    end
    chunk = MockChunk.new(records)

    response = parse_response(%({
      "took" : 1,
      "errors" : true,
      "items" : [
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "1",
            "status" : 201
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "2",
            "status" : 500,
            "error" : {
              "type" : "some unrecognized type",
              "reason" : "unrecognized error"
            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "3",
            "status" : 409
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "5",
            "status" : 500,
            "error" : {
              "reason" : "unrecognized error - no type field"
            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "6",
            "status" : 400,
            "error" : {
              "type" : "mapper_parsing_exception",
              "reason" : "failed to parse"
            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "7",
            "status" : 400,
            "error" : {
              "type" : "some unrecognized type",
              "reason" : "unrecognized error"
            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "8",
            "status" : 500,
            "error" : {
              "type" : "some unrecognized type",
              "reason" : "unrecognized error"
            }
          }
        }
      ]
    }))

    begin
      failed = false
      dummy_extracted_values = []
      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
    rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError => e
      failed = true
      records = [].tap do |records|
        next unless e.respond_to?(:retry_stream)
        e.retry_stream.each { |time, record| records << record }
      end
      assert_equal 2, records.length
      assert_equal 2, records[0]['_id']
      assert_equal 8, records[1]['_id']
      error_ids = @plugin.error_events.collect { |h| h[:record]['_id'] }
      assert_equal 3, error_ids.length
      assert_equal [5, 6, 7], error_ids
      @plugin.error_events.collect { |h| h[:error] }.each do |e|
        assert_true e.respond_to?(:backtrace)
      end
    end
    assert_true failed
  end

  def test_unrecoverable_error_included_in_responses
    records = []
    error_records = Hash.new(false)
    error_records.merge!({0 => true, 4 => true, 9 => true})
    10.times do |i|
      records << {time: 12345, record: {"message" => "record #{i}", "_id" => i, "raise" => error_records[i]}}
    end
    chunk = MockChunk.new(records)

    response = parse_response(%({
      "took" : 1,
      "errors" : true,
      "items" : [
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "1",
            "status" : 201
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "2",
            "status" : 500,
            "error" : {
              "type" : "some unrecognized type",
              "reason" : "unrecognized error"
            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "3",
            "status" : 409
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "5",
            "status" : 500,
            "error" : {
              "reason" : "unrecognized error - no type field"
            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "6",
            "status" : 500,
            "error" : {
              "type" : "out_of_memory_error",
              "reason" : "Java heap space"
            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "7",
            "status" : 400,
            "error" : {
              "type" : "some unrecognized type",
              "reason" : "unrecognized error"
            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
            "_id" : "8",
            "status" : 500,
            "error" : {
              "type" : "some unrecognized type",
              "reason" : "unrecognized error"
            }
          }
        }
      ]
    }))

    begin
      failed = false
      dummy_extracted_values = []
      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
    rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError => e
      failed = true
      records = [].tap do |records|
        next unless e.respond_to?(:retry_stream)
        e.retry_stream.each { |time, record| records << record }
      end
      # should drop the entire chunk when an unrecoverable error response is returned
      assert_equal 0, records.length
    end
    assert_true failed
  end

end
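The behaviours exercised above correspond to two user-facing options of the output plugin: log_es_400_reason (log the Elasticsearch "reason" field for 400 responses even when the logger is not at DEBUG level) and unrecoverable_error_types (bulk error types that abort the request instead of being retried or routed to the error stream). As a sketch, with illustrative host and tag values, an output section exercising both might read:

  <match app.**>
    @type elasticsearch
    host localhost
    port 9200
    log_es_400_reason true
    unrecoverable_error_types ["out_of_memory_error"]
  </match>

Removing es_rejected_execution_exception from unrecoverable_error_types, as test_es_rejected_execution_exception_responses_as_not_error does, turns a 429 bulk rejection from a hard abort into a retryable failure.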