fluent-plugin-elasticsearch 3.5.4 → 3.5.5

@@ -1,24 +1,24 @@
- require 'simplecov'
- SimpleCov.start do
-   add_filter do |src|
-     !(src.filename =~ /^#{SimpleCov.root}\/lib/)
-   end
- end
-
- require 'coveralls'
- Coveralls.wear!
-
- # needs to be after simplecov but before test/unit, because fluentd sets default
- # encoding to ASCII-8BIT, but coverall might load git data which could contain a
- # UTF-8 character
- at_exit do
-   Encoding.default_internal = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_internal)
-   Encoding.default_external = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_external)
- end
-
- require 'test/unit'
- require 'fluent/test'
- require 'minitest/pride'
-
- require 'webmock/test_unit'
- WebMock.disable_net_connect!
+ require 'simplecov'
+ SimpleCov.start do
+   add_filter do |src|
+     !(src.filename =~ /^#{SimpleCov.root}\/lib/)
+   end
+ end
+
+ require 'coveralls'
+ Coveralls.wear!
+
+ # needs to be after simplecov but before test/unit, because fluentd sets default
+ # encoding to ASCII-8BIT, but coverall might load git data which could contain a
+ # UTF-8 character
+ at_exit do
+   Encoding.default_internal = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_internal)
+   Encoding.default_external = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_external)
+ end
+
+ require 'test/unit'
+ require 'fluent/test'
+ require 'minitest/pride'
+
+ require 'webmock/test_unit'
+ WebMock.disable_net_connect!
@@ -1,9 +1,9 @@
- {
-   "order": 5,
-   "template": "--index_prefix-----appid---*",
-   "settings": {},
-   "mappings": {},
-   "aliases": {
-     "--appid---alias": {}
-   }
+ {
+   "order": 5,
+   "template": "--index_prefix-----appid---*",
+   "settings": {},
+   "mappings": {},
+   "aliases": {
+     "--appid---alias": {}
+   }
  }
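
Note on the fixture above: the --index_prefix-- and --appid-- markers are placeholders rather than literal index names; they have the shape consumed by the plugin's customize_template option, which substitutes user-supplied values into the template body before it is registered with Elasticsearch. A minimal Ruby sketch of that kind of substitution, under that assumption (expand_template is a hypothetical helper, not the plugin's actual code):

    # Replace every "--key--" marker in the raw template with its value.
    def expand_template(raw, substitutions)
      substitutions.reduce(raw) { |tpl, (marker, value)| tpl.gsub(marker, value) }
    end

    raw = '{"template": "--index_prefix-----appid---*", "aliases": {"--appid---alias": {}}}'
    puts expand_template(raw, '--index_prefix--' => 'logs', '--appid--' => 'myapp')
    # prints {"template": "logs-myapp-*", "aliases": {"myapp-alias": {}}}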
@@ -1,503 +1,525 @@
- require 'helper'
- require 'fluent/plugin/out_elasticsearch'
- require 'fluent/plugin/elasticsearch_error_handler'
- require 'json'
-
- class TestElasticsearchErrorHandler < Test::Unit::TestCase
-
-   class TestPlugin
-     attr_reader :log
-     attr_reader :write_operation, :error_events
-     attr_accessor :unrecoverable_error_types
-     attr_accessor :log_es_400_reason
-     def initialize(log, log_es_400_reason = false)
-       @log = log
-       @write_operation = 'index'
-       @error_events = []
-       @unrecoverable_error_types = ["out_of_memory_error", "es_rejected_execution_exception"]
-       @log_es_400_reason = log_es_400_reason
-     end
-
-     def router
-       self
-     end
-
-     def emit_error_event(tag, time, record, e)
-       @error_events << {:tag => tag, :time=>time, :record=>record, :error=>e}
-     end
-
-     def process_message(tag, meta, header, time, record, extracted_values)
-       return [meta, header, record]
-     end
-
-     def append_record_to_messages(op, meta, header, record, msgs)
-       if record.has_key?('raise') && record['raise']
-         raise Exception('process_message')
-       end
-       return true
-     end
-   end
-
-   class MockChunk
-     def initialize(records)
-       @records = records
-       @index = 0
-     end
-     def msgpack_each
-       @records.each { |item| yield(item[:time],item[:record]) }
-     end
-   end
-
-   def setup
-     Fluent::Test.setup
-     @log_device = Fluent::Test::DummyLogDevice.new
-     dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
-     logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
-     @log = Fluent::Log.new(logger)
-     @plugin = TestPlugin.new(@log)
-     @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
-   end
-
-   def parse_response(value)
-     JSON.parse(value)
-   end
-
-   class TEST400ResponseReason < self
-     def setup
-       Fluent::Test.setup
-       @log_device = Fluent::Test::DummyLogDevice.new
-       dl_opts = {:log_level => ServerEngine::DaemonLogger::DEBUG}
-       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
-       @log = Fluent::Log.new(logger)
-       @plugin = TestPlugin.new(@log)
-       @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
-     end
-
-     def test_400_responses_reason_log
-       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-       response = parse_response(%({
-         "took" : 0,
-         "errors" : true,
-         "items" : [
-           {
-             "create" : {
-               "_index" : "foo",
-               "status" : 400,
-               "error" : {
-                 "type" : "mapper_parsing_exception",
-                 "reason" : "failed to parse"
-               }
-             }
-           }
-         ]
-       }))
-       chunk = MockChunk.new(records)
-       dummy_extracted_values = []
-       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
-       assert_equal(1, @plugin.error_events.size)
-       expected_log = "failed to parse"
-       exception_message = @plugin.error_events.first[:error].message
-       assert_true(exception_message.include?(expected_log),
-                   "Exception do not contain '#{exception_message}' '#{expected_log}'")
-       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
-     end
-   end
-
-   class TEST400ResponseReasonNoDebug < self
-     def setup
-       Fluent::Test.setup
-       @log_device = Fluent::Test::DummyLogDevice.new
-       dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
-       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
-       @log = Fluent::Log.new(logger)
-       @plugin = TestPlugin.new(@log)
-       @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
-       @plugin.log_es_400_reason = true
-     end
-
-     def test_400_responses_reason_log
-       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-       response = parse_response(%({
-         "took" : 0,
-         "errors" : true,
-         "items" : [
-           {
-             "create" : {
-               "_index" : "foo",
-               "status" : 400,
-               "error" : {
-                 "type" : "mapper_parsing_exception",
-                 "reason" : "failed to parse"
-               }
-             }
-           }
-         ]
-       }))
-       chunk = MockChunk.new(records)
-       dummy_extracted_values = []
-       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
-       assert_equal(1, @plugin.error_events.size)
-       expected_log = "failed to parse"
-       exception_message = @plugin.error_events.first[:error].message
-       assert_true(exception_message.include?(expected_log),
-                   "Exception do not contain '#{exception_message}' '#{expected_log}'")
-       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
-     end
-   end
-
-   def test_nil_items_responses
-     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-     response = parse_response(%({
-       "took" : 0,
-       "errors" : true,
-       "items" : [{}]
-     }))
-     chunk = MockChunk.new(records)
-     dummy_extracted_values = []
-     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
-     assert_equal(0, @plugin.error_events.size)
-     assert_nil(@plugin.error_events[0])
-   end
-
-   def test_dlq_400_responses
-     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-     response = parse_response(%({
-       "took" : 0,
-       "errors" : true,
-       "items" : [
-         {
-           "create" : {
-             "_index" : "foo",
-             "status" : 400,
-             "_type" : "bar",
-             "reason":"unrecognized error"
-           }
-         }
-       ]
-     }))
-     chunk = MockChunk.new(records)
-     dummy_extracted_values = []
-     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
-     assert_equal(1, @plugin.error_events.size)
-     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
-   end
-
-   def test_out_of_memory_responses
-     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-     response = parse_response(%({
-       "took" : 0,
-       "errors" : true,
-       "items" : [
-         {
-           "create" : {
-             "_index" : "foo",
-             "status" : 500,
-             "_type" : "bar",
-             "error" : {
-               "type" : "out_of_memory_error",
-               "reason":"Java heap space"
-             }
-           }
-         }
-       ]
-     }))
-
-     chunk = MockChunk.new(records)
-     dummy_extracted_values = []
-     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
-       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
-     end
-   end
-
-   def test_es_rejected_execution_exception_responses
-     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-     response = parse_response(%({
-       "took" : 0,
-       "errors" : true,
-       "items" : [
-         {
-           "create" : {
-             "_index" : "foo",
-             "status" : 429,
-             "_type" : "bar",
-             "error" : {
-               "type" : "es_rejected_execution_exception",
-               "reason":"rejected execution of org.elasticsearch.transport.TransportService"
-             }
-           }
-         }
-       ]
-     }))
-
-     chunk = MockChunk.new(records)
-     dummy_extracted_values = []
-     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
-       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
-     end
-   end
-
-   def test_es_rejected_execution_exception_responses_as_not_error
-     plugin = TestPlugin.new(@log)
-     plugin.unrecoverable_error_types = ["out_of_memory_error"]
-     handler = Fluent::Plugin::ElasticsearchErrorHandler.new(plugin)
-     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-     response = parse_response(%({
-       "took" : 0,
-       "errors" : true,
-       "items" : [
-         {
-           "create" : {
-             "_index" : "foo",
-             "status" : 429,
-             "_type" : "bar",
-             "error" : {
-               "type" : "es_rejected_execution_exception",
-               "reason":"rejected execution of org.elasticsearch.transport.TransportService"
-             }
-           }
-         }
-       ]
-     }))
-
-     begin
-       failed = false
-       chunk = MockChunk.new(records)
-       dummy_extracted_values = []
-       handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
-     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
-       failed = true
-       records = [].tap do |records|
-         next unless e.respond_to?(:retry_stream)
-         e.retry_stream.each {|time, record| records << record}
-       end
-       # should retry chunk when unrecoverable error is not thrown
-       assert_equal 1, records.length
-     end
-     assert_true failed
-   end
-
-   def test_retry_error
-     records = []
-     error_records = Hash.new(false)
-     error_records.merge!({0=>true, 4=>true, 9=>true})
-     10.times do |i|
-       records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
-     end
-     chunk = MockChunk.new(records)
-
-     response = parse_response(%({
-       "took" : 1,
-       "errors" : true,
-       "items" : [
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "1",
-             "status" : 201
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "2",
-             "status" : 500,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "3",
-             "status" : 409
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "5",
-             "status" : 500,
-             "error" : {
-               "reason":"unrecognized error - no type field"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "6",
-             "status" : 400,
-             "error" : {
-               "type" : "mapper_parsing_exception",
-               "reason":"failed to parse"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "7",
-             "status" : 400,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "8",
-             "status" : 500,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         }
-       ]
-     }))
-
-     begin
-       failed = false
-       dummy_extracted_values = []
-       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
-     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
-       failed = true
-       records = [].tap do |records|
-         next unless e.respond_to?(:retry_stream)
-         e.retry_stream.each {|time, record| records << record}
-       end
-       assert_equal 2, records.length
-       assert_equal 2, records[0]['_id']
-       assert_equal 8, records[1]['_id']
-       error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
-       assert_equal 3, error_ids.length
-       assert_equal [5, 6, 7], error_ids
-       @plugin.error_events.collect {|h| h[:error]}.each do |e|
-         assert_true e.respond_to?(:backtrace)
-       end
-     end
-     assert_true failed
-
-   end
-
-   def test_unrecoverable_error_included_in_responses
-     records = []
-     error_records = Hash.new(false)
-     error_records.merge!({0=>true, 4=>true, 9=>true})
-     10.times do |i|
-       records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
-     end
-     chunk = MockChunk.new(records)
-
-     response = parse_response(%({
-       "took" : 1,
-       "errors" : true,
-       "items" : [
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "1",
-             "status" : 201
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "2",
-             "status" : 500,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "3",
-             "status" : 409
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "5",
-             "status" : 500,
-             "error" : {
-               "reason":"unrecognized error - no type field"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "6",
-             "status" : 500,
-             "_type" : "bar",
-             "error" : {
-               "type" : "out_of_memory_error",
-               "reason":"Java heap space"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "7",
-             "status" : 400,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "8",
-             "status" : 500,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         }
-       ]
-     }))
-
-     begin
-       failed = false
-       dummy_extracted_values = []
-       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
-     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
-       failed = true
-       records = [].tap do |records|
-         next unless e.respond_to?(:retry_stream)
-         e.retry_stream.each {|time, record| records << record}
-       end
-       # should drop entire chunk when unrecoverable error response is replied
-       assert_equal 0, records.length
-     end
-     assert_true failed
-
-   end
-
- end
+ require 'helper'
+ require 'fluent/plugin/out_elasticsearch'
+ require 'fluent/plugin/elasticsearch_error_handler'
+ require 'json'
+
+ class TestElasticsearchErrorHandler < Test::Unit::TestCase
+
+   class TestPlugin
+     attr_reader :log
+     attr_reader :write_operation, :error_events
+     attr_accessor :unrecoverable_error_types
+     attr_accessor :log_es_400_reason
+     def initialize(log, log_es_400_reason = false)
+       @log = log
+       @write_operation = 'index'
+       @error_events = []
+       @unrecoverable_error_types = ["out_of_memory_error", "es_rejected_execution_exception"]
+       @log_es_400_reason = log_es_400_reason
+     end
+
+     def router
+       self
+     end
+
+     def emit_error_event(tag, time, record, e)
+       @error_events << {:tag => tag, :time=>time, :record=>record, :error=>e}
+     end
+
+     def process_message(tag, meta, header, time, record, extracted_values)
+       return [meta, header, record]
+     end
+
+     def append_record_to_messages(op, meta, header, record, msgs)
+       if record.has_key?('raise') && record['raise']
+         raise Exception('process_message')
+       end
+       return true
+     end
+   end
+
+   class MockChunk
+     def initialize(records)
+       @records = records
+       @index = 0
+     end
+     def msgpack_each
+       @records.each { |item| yield(item[:time],item[:record]) }
+     end
+   end
+
+   def setup
+     Fluent::Test.setup
+     @log_device = Fluent::Test::DummyLogDevice.new
+     dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+     logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+     @log = Fluent::Log.new(logger)
+     @plugin = TestPlugin.new(@log)
+     @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
+   end
+
+   def parse_response(value)
+     JSON.parse(value)
+   end
+
+   class TEST400ResponseReason < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::DEBUG}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(1, @plugin.error_events.size)
+       expected_log = "failed to parse"
+       exception_message = @plugin.error_events.first[:error].message
+       assert_true(exception_message.include?(expected_log),
+                   "Exception do not contain '#{exception_message}' '#{expected_log}'")
+       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+     end
+   end
+
+   class TEST400ResponseReasonNoDebug < self
+     def setup
+       Fluent::Test.setup
+       @log_device = Fluent::Test::DummyLogDevice.new
+       dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+       logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+       @log = Fluent::Log.new(logger)
+       @plugin = TestPlugin.new(@log)
+       @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
+       @plugin.log_es_400_reason = true
+     end
+
+     def test_400_responses_reason_log
+       records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+       response = parse_response(%({
+         "took" : 0,
+         "errors" : true,
+         "items" : [
+           {
+             "create" : {
+               "_index" : "foo",
+               "status" : 400,
+               "error" : {
+                 "type" : "mapper_parsing_exception",
+                 "reason" : "failed to parse"
+               }
+             }
+           }
+         ]
+       }))
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+       assert_equal(1, @plugin.error_events.size)
+       expected_log = "failed to parse"
+       exception_message = @plugin.error_events.first[:error].message
+       assert_true(exception_message.include?(expected_log),
+                   "Exception do not contain '#{exception_message}' '#{expected_log}'")
+       assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+     end
+   end
+
+   def test_nil_items_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [{}]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(0, @plugin.error_events.size)
+     assert_nil(@plugin.error_events[0])
+   end
+
+   def test_blocked_items_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 503,
+             "error" : "ClusterBlockException[blocked by: [SERVICE_UNAVAILABLE/1/state not recovered / initialized];]"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
+
+   def test_dlq_400_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 400,
+             "_type" : "bar",
+             "reason":"unrecognized error"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
+
+   def test_out_of_memory_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 500,
+             "_type" : "bar",
+             "error" : {
+               "type" : "out_of_memory_error",
+               "reason":"Java heap space"
+             }
+           }
+         }
+       ]
+     }))
+
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     end
+   end
+
+   def test_es_rejected_execution_exception_responses
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 429,
+             "_type" : "bar",
+             "error" : {
+               "type" : "es_rejected_execution_exception",
+               "reason":"rejected execution of org.elasticsearch.transport.TransportService"
+             }
+           }
+         }
+       ]
+     }))
+
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
+       @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     end
+   end
+
+   def test_es_rejected_execution_exception_responses_as_not_error
+     plugin = TestPlugin.new(@log)
+     plugin.unrecoverable_error_types = ["out_of_memory_error"]
+     handler = Fluent::Plugin::ElasticsearchErrorHandler.new(plugin)
+     records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 429,
+             "_type" : "bar",
+             "error" : {
+               "type" : "es_rejected_execution_exception",
+               "reason":"rejected execution of org.elasticsearch.transport.TransportService"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       chunk = MockChunk.new(records)
+       dummy_extracted_values = []
+       handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each {|time, record| records << record}
+       end
+       # should retry chunk when unrecoverable error is not thrown
+       assert_equal 1, records.length
+     end
+     assert_true failed
+   end
+
+   def test_retry_error
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0=>true, 4=>true, 9=>true})
+     10.times do |i|
+       records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason":"unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 400,
+             "error" : {
+               "type" : "mapper_parsing_exception",
+               "reason":"failed to parse"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each {|time, record| records << record}
+       end
+       assert_equal 2, records.length
+       assert_equal 2, records[0]['_id']
+       assert_equal 8, records[1]['_id']
+       error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
+       assert_equal 3, error_ids.length
+       assert_equal [5, 6, 7], error_ids
+       @plugin.error_events.collect {|h| h[:error]}.each do |e|
+         assert_true e.respond_to?(:backtrace)
+       end
+     end
+     assert_true failed
+
+   end
+
+   def test_unrecoverable_error_included_in_responses
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0=>true, 4=>true, 9=>true})
+     10.times do |i|
+       records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason":"unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 500,
+             "_type" : "bar",
+             "error" : {
+               "type" : "out_of_memory_error",
+               "reason":"Java heap space"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
+       failed = true
+       records = [].tap do |records|
+         next unless e.respond_to?(:retry_stream)
+         e.retry_stream.each {|time, record| records << record}
+       end
+       # should drop entire chunk when unrecoverable error response is replied
+       assert_equal 0, records.length
+     end
+     assert_true failed
+
+   end
+
+ end
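
The substantive change in this release is the new test_blocked_items_responses case above: a bulk item's "error" field may arrive as a plain string (here a ClusterBlockException with status 503) instead of the usual {"type": ..., "reason": ...} hash, and the handler is expected to emit a regular error event for such items rather than raise. A minimal Ruby sketch of the shape distinction the test pins down (unrecoverable? is an illustration, not the handler's actual code):

    # A string-valued "error" carries no "type" key, so it can never match
    # unrecoverable_error_types; only hash-shaped errors can abort the request.
    def unrecoverable?(error, unrecoverable_types)
      error.is_a?(Hash) && unrecoverable_types.include?(error['type'])
    end

    types = ['out_of_memory_error', 'es_rejected_execution_exception']
    p unrecoverable?({ 'type' => 'out_of_memory_error', 'reason' => 'Java heap space' }, types)  # true
    p unrecoverable?('ClusterBlockException[blocked by: [SERVICE_UNAVAILABLE/1/state not recovered / initialized];]', types)  # false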