fluent-plugin-elasticsearch 2.9.2 → 2.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/History.md +3 -0
- data/fluent-plugin-elasticsearch.gemspec +1 -1
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +34 -23
- data/lib/fluent/plugin/out_elasticsearch.rb +24 -7
- data/test/plugin/test_elasticsearch_error_handler.rb +77 -176
- data/test/plugin/test_out_elasticsearch.rb +65 -72
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f778421b8290ea1db16ad57f1c0be9a35e656d201e5b52c502ae9301c60a67c3
+  data.tar.gz: 7e72d88f792ac7a959faa393e91fcc08917ada672860730352219b5d26b167b4
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8222728bf41a377029aa620f94f7d773529d4dbae47081158022c49b8ebb08aed224d93208db1d4933663ea5226decd1e99096c9890a0f59a58d22e8aca9d5ae
+  data.tar.gz: fe857ad374b96cfc4eeedb02642fc1bebfb4da43f0cba397789db9f74a6df6704ffb1c57b65524b00866e4b540c97130c387f6252bbf35fa1215d95c7f3c1f1a
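The SHA256/SHA512 values above are digests of the metadata.gz and data.tar.gz members inside the published .gem archive, not of the .gem file itself. A minimal Ruby sketch for checking a locally fetched copy against the SHA256 metadata.gz value (the filename follows the `gem fetch` naming convention and is assumed here, not stated in the diff):

require 'digest'
require 'rubygems/package'

# A .gem is a tar archive; checksums.yaml records digests of its
# metadata.gz and data.tar.gz members.
expected = 'f778421b8290ea1db16ad57f1c0be9a35e656d201e5b52c502ae9301c60a67c3'
File.open('fluent-plugin-elasticsearch-2.10.0.gem', 'rb') do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless entry.full_name == 'metadata.gz'
    actual = Digest::SHA256.hexdigest(entry.read)
    puts(actual == expected ? 'metadata.gz SHA256 matches' : "mismatch: #{actual}")
  end
end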
data/fluent-plugin-elasticsearch.gemspec
CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
 Gem::Specification.new do |s|
   s.name = 'fluent-plugin-elasticsearch'
-  s.version = '2.9.2'
+  s.version = '2.10.0'
   s.authors = ['diogo', 'pitr']
   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
   s.description = %q{Elasticsearch output plugin for Fluent event collector}
data/lib/fluent/plugin/elasticsearch_error_handler.rb
CHANGED
@@ -1,23 +1,41 @@
+require 'fluent/event'
 require_relative 'elasticsearch_constants'
 
 class Fluent::Plugin::ElasticsearchErrorHandler
   include Fluent::Plugin::ElasticsearchConstants
 
-  attr_accessor :records, :bulk_message_count
-  class BulkIndexQueueFull < StandardError; end
-  class ElasticsearchOutOfMemory < StandardError; end
+  attr_accessor :bulk_message_count
   class ElasticsearchVersionMismatch < StandardError; end
-  class UnrecognizedElasticsearchError < StandardError; end
   class ElasticsearchError < StandardError; end
-  def initialize(plugin, records = 0, bulk_message_count = 0)
+
+  def initialize(plugin)
     @plugin = plugin
-    @records = records
-    @bulk_message_count = bulk_message_count
   end
 
-  def handle_error(response)
+  def handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+    items = response['items']
+    if items.nil? || !items.is_a?(Array)
+      raise ElasticsearchVersionMismatch, "The response format was unrecognized: #{response}"
+    end
+    if bulk_message_count != items.length
+      raise ElasticsearchError, "The number of records submitted #{bulk_message_count} do not match the number returned #{items.length}. Unable to process bulk response."
+    end
+    retry_stream = Fluent::MultiEventStream.new
     stats = Hash.new(0)
-
+    meta = {}
+    header = {}
+    chunk.msgpack_each do |time, rawrecord|
+      bulk_message = ''
+      next unless rawrecord.is_a? Hash
+      begin
+        # we need a deep copy for process_message to alter
+        processrecord = Marshal.load(Marshal.dump(rawrecord))
+        @plugin.process_message(tag, meta, header, time, processrecord, bulk_message, extracted_values)
+      rescue => e
+        stats[:bad_chunk_record] += 1
+        next
+      end
+      item = items.shift
       if item.has_key?(@plugin.write_operation)
        write_operation = @plugin.write_operation
      elsif INDEX_OP == @plugin.write_operation && item.has_key?(CREATE_OP)
@@ -41,13 +59,19 @@ class Fluent::Plugin::ElasticsearchErrorHandler
         stats[:successes] += 1
       when CREATE_OP == write_operation && 409 == status
         stats[:duplicates] += 1
+      when 400 == status
+        stats[:bad_argument] += 1
+        @plugin.router.emit_error_event(tag, time, rawrecord, '400 - Rejected by Elasticsearch')
       else
         if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
           type = item[write_operation]['error']['type']
+          stats[type] += 1
+          retry_stream.add(time, rawrecord)
         else
           # When we don't have a type field, something changed in the API
           # expected return values (ES 2.x)
           stats[:errors_bad_resp] += 1
+          @plugin.router.emit_error_event(tag, time, rawrecord, "#{status} - No error type provided in the response")
           next
         end
         stats[type] += 1
@@ -58,19 +82,6 @@ class Fluent::Plugin::ElasticsearchErrorHandler
       stats.each_pair { |key, value| msg << "#{value} #{key}" }
       @plugin.log.debug msg.join(', ')
     end
-
-    when stats[:errors_bad_resp] > 0
-      @plugin.log.on_debug { @plugin.log.debug("Unable to parse response from elasticsearch, likely an API version mismatch: #{response}") }
-      raise ElasticsearchVersionMismatch, "Unable to parse error response from Elasticsearch, likely an API version mismatch. Add '@log_level debug' to your config to see the full response"
-    when stats[:successes] + stats[:duplicates] == bulk_message_count
-      @plugin.log.info("retry succeeded - successes=#{stats[:successes]} duplicates=#{stats[:duplicates]}")
-    when stats['es_rejected_execution_exception'] > 0
-      raise BulkIndexQueueFull, 'Bulk index queue is full, retrying'
-    when stats['out_of_memory_error'] > 0
-      raise ElasticsearchOutOfMemory, 'Elasticsearch has exhausted its heap, retrying'
-    else
-      @plugin.log.on_debug { @plugin.log.debug("Elasticsearch errors returned, retrying: #{response}") }
-      raise ElasticsearchError, "Elasticsearch returned errors, retrying. Add '@log_level debug' to your config to see the full response"
-    end
+    raise Fluent::Plugin::ElasticsearchOutput::RetryStreamError.new(retry_stream) unless retry_stream.empty?
   end
 end
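The rewritten handle_error walks the chunk with msgpack_each while shifting entries off response['items'], so the Nth record submitted is matched with the Nth response item; that lockstep pairing is also why the handler refuses to proceed when bulk_message_count differs from items.length. A standalone sketch of the pairing idea, using illustrative records/items data rather than the plugin's actual types:

# Classify bulk-response items in lockstep with the submitted records.
records = [{ 'msg' => 'a' }, { 'msg' => 'b' }, { 'msg' => 'c' }]
items = [
  { 'create' => { 'status' => 201 } },
  { 'create' => { 'status' => 429, 'error' => { 'type' => 'es_rejected_execution_exception' } } },
  { 'create' => { 'status' => 400, 'error' => { 'type' => 'mapper_parsing_exception' } } }
]

# If the counts diverge, errors would be attributed to the wrong
# records, which is why the handler raises ElasticsearchError instead.
raise 'record/item count mismatch' unless records.length == items.length

retryable, rejected = [], []
records.each do |record|
  item = items.shift # responses come back in submission order
  status = item['create']['status']
  case
  when (200..299).include?(status) then next # indexed fine
  when status == 400 then rejected << record # permanent: emit as error event
  else retryable << record                   # transient: goes into the retry stream
  end
end
p retryable: retryable, rejected: rejected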
data/lib/fluent/plugin/out_elasticsearch.rb
CHANGED
@@ -10,6 +10,7 @@ rescue LoadError
 end
 
 require 'fluent/plugin/output'
+require 'fluent/event'
 require_relative 'elasticsearch_constants'
 require_relative 'elasticsearch_error_handler'
 require_relative 'elasticsearch_index_template'
@@ -18,6 +19,16 @@ module Fluent::Plugin
   class ElasticsearchOutput < Output
     class ConnectionFailure < StandardError; end
 
+    # RetryStreamError privides a stream to be
+    # put back in the pipeline for cases where a bulk request
+    # failed (e.g some records succeed while others failed)
+    class RetryStreamError < StandardError
+      attr_reader :retry_stream
+      def initialize(retry_stream)
+        @retry_stream = retry_stream
+      end
+    end
+
     helpers :event_emitter, :compat_parameters, :record_accessor
 
     Fluent::Plugin.register_output('elasticsearch', self)
@@ -375,26 +386,26 @@ EOC
     end
 
     def write(chunk)
+      bulk_message_count = 0
       bulk_message = ''
       header = {}
       meta = {}
 
       tag = chunk.metadata.tag
       extracted_values = expand_placeholders(chunk.metadata)
-      @error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
       @last_seen_major_version = detect_es_major_version rescue DEFAULT_ELASTICSEARCH_VERSION
 
       chunk.msgpack_each do |time, record|
-        @error.records += 1
         next unless record.is_a? Hash
-
        begin
           process_message(tag, meta, header, time, record, bulk_message, extracted_values)
+          bulk_message_count += 1
        rescue => e
           router.emit_error_event(tag, time, record, e)
         end
       end
-      send_bulk(bulk_message) unless bulk_message.empty?
+
+      send_bulk(bulk_message, tag, chunk, bulk_message_count, extracted_values) unless bulk_message.empty?
       bulk_message.clear
     end
 
@@ -479,7 +490,6 @@ EOC
       end
 
       append_record_to_messages(@write_operation, meta, header, record, bulk_message)
-      @error.bulk_message_count += 1
     end
 
     # returns [parent, child_key] of child described by path array in record's tree
@@ -489,11 +499,18 @@ EOC
       [parent_object, path[-1]]
     end
 
-    def send_bulk(data)
+    # send_bulk given a specific bulk request, the original tag,
+    # chunk, and bulk_message_count
+    def send_bulk(data, tag, chunk, bulk_message_count, extracted_values)
       retries = 0
       begin
         response = client.bulk body: data
-        @error.handle_error(response) if response['errors']
+        if response['errors']
+          error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
+          error.handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+        end
+      rescue RetryStreamError => e
+        router.emit_stream(tag, e.retry_stream)
      rescue *client.transport.host_unreachable_exceptions => e
         if retries < 2
           retries += 1
data/test/plugin/test_elasticsearch_error_handler.rb
CHANGED
@@ -1,4 +1,5 @@
 require 'helper'
+require 'fluent/plugin/out_elasticsearch'
 require 'fluent/plugin/elasticsearch_error_handler'
 require 'json'
 
@@ -6,10 +7,35 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
 
   class TestPlugin
     attr_reader :log
-    attr_reader :write_operation
+    attr_reader :write_operation, :error_events
     def initialize(log)
       @log = log
       @write_operation = 'index'
+      @error_events = Fluent::MultiEventStream.new
+    end
+
+    def router
+      self
+    end
+
+    def emit_error_event(tag, time, record, e)
+      @error_events.add(time, record)
+    end
+
+    def process_message(tag, meta, header, time, record, bulk_message, extracted_values)
+      if record.has_key?('raise') && record['raise']
+        raise Exception('process_message')
+      end
+    end
+  end
+
+  class MockChunk
+    def initialize(records)
+      @records = records
+      @index = 0
+    end
+    def msgpack_each
+      @records.each { |item| yield(item[:time],item[:record]) }
     end
   end
 
@@ -27,7 +53,8 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
     JSON.parse(value)
   end
 
-  def
+  def test_dlq_400_responses
+    records = [{time: 123, record: {"foo" => "bar"}}]
     response = parse_response(%({
       "took" : 0,
       "errors" : true,
@@ -35,148 +62,45 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
        {
          "create" : {
            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 500,
-            "error" : {
-              "type" : "some unrecognized type",
-              "reason":"unrecognized error"
-            }
-          }
-        },
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 500,
-            "error" : {
-              "type" : "some unrecognized type",
-              "reason":"unrecognized error"
-            }
-          }
-        },
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 201
-          }
-        },
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 409
-          }
-        },
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
            "status" : 400,
-            "
-            "type" : "some unrecognized type",
+            "_type" : "bar",
            "reason":"unrecognized error"
            }
-          }
          }
        ]
-
-
-
-
-
-
+      }))
+    chunk = MockChunk.new(records)
+    dummy_extracted_values = []
+    @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+    assert_equal(1, @plugin.error_events.instance_variable_get(:@time_array).size)
   end
 
-  def
-
-
-
-
-    "
-    {
-      "create" : {
-        "_index" : "foo",
-        "_type" : "bar",
-        "_id" : "abc",
-        "status" : 500,
-        "error" : {
-          "reason":"some error to cause version mismatch"
-        }
-      }
-    }
-    }
-    ]
-    }
-    ))
-
-    assert_raise Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchVersionMismatch do
-      @handler.handle_error(response)
+  def test_retry_error
+    records = []
+    error_records = Hash.new(false)
+    error_records.merge!({0=>true, 4=>true, 9=>true})
+    10.times do |i|
+      records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
     end
+    chunk = MockChunk.new(records)
 
-  end
-
-  def test_retry_with_successes_and_duplicates
-    response = parse_response(%(
-    {
-      "took" : 0,
-      "errors" : true,
-      "items" : [
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 409,
-            "error" : {
-              "reason":"duplicate ID"
-            }
-          }
-        },
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 201
-          }
-        }
-      ]
-    }
-    ))
-
-    @plugin.instance_variable_set(:@write_operation, 'create')
-    @handler.instance_variable_set(:@bulk_message_count, 2)
-    @handler.handle_error(response)
-    assert_match /retry succeeded - successes=1 duplicates=1/, @log.out.logs[0]
-  end
-
-  def test_bulk_rejection_errors
     response = parse_response(%({
-      "took" :
+      "took" : 1,
       "errors" : true,
       "items" : [
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
-            "_id" : "
-            "status" :
-            "error" : {
-              "type" : "some unrecognized type",
-              "reason":"unrecognized error"
-            }
+            "_id" : "1",
+            "status" : 201
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
-            "_id" : "
+            "_id" : "2",
            "status" : 500,
            "error" : {
              "type" : "some unrecognized type",
@@ -188,50 +112,39 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
-            "_id" : "
-            "status" :
+            "_id" : "3",
+            "status" : 409
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
-            "_id" : "
-            "status" :
+            "_id" : "5",
+            "status" : 500,
+            "error" : {
+              "reason":"unrecognized error - no type field"
+            }
          }
        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
-            "_id" : "
+            "_id" : "6",
            "status" : 429,
            "error" : {
              "type" : "es_rejected_execution_exception",
-              "reason":"
+              "reason":"unable to fulfill request at this time, try again later"
            }
          }
-        }
-      ]
-    }))
-
-    assert_raise Fluent::Plugin::ElasticsearchErrorHandler::BulkIndexQueueFull do
-      @handler.handle_error(response)
-    end
-
-  end
-
-  def test_out_of_memory_errors
-    response = parse_response(%({
-      "took" : 0,
-      "errors" : true,
-      "items" : [
+        },
        {
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
-            "_id" : "
-            "status" :
+            "_id" : "7",
+            "status" : 400,
            "error" : {
              "type" : "some unrecognized type",
              "reason":"unrecognized error"
@@ -242,48 +155,36 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
          "create" : {
            "_index" : "foo",
            "_type" : "bar",
-            "_id" : "
+            "_id" : "8",
            "status" : 500,
            "error" : {
              "type" : "some unrecognized type",
              "reason":"unrecognized error"
            }
          }
-        },
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 201
-          }
-        },
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 409
-          }
-        },
-        {
-          "create" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "abc",
-            "status" : 400,
-            "error" : {
-              "type" : "out_of_memory_error",
-              "reason":"Elasticsearch exhausted its heap"
-            }
-          }
        }
      ]
    }))
 
-
-
+    begin
+      failed = false
+      dummy_extracted_values = []
+      @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+    rescue Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
+      failed = true
+      records = [].tap do |records|
+        e.retry_stream.each {|time, record| records << record}
+      end
+      assert_equal 3, records.length
+      assert_equal 2, records[0]['_id']
+      assert_equal 6, records[1]['_id']
+      assert_equal 8, records[2]['_id']
+      errors = @plugin.error_events.collect {|time, record| record}
+      assert_equal 2, errors.length
+      assert_equal 5, errors[0]['_id']
+      assert_equal 7, errors[1]['_id']
    end
+    assert_true failed
 
  end
 
data/test/plugin/test_out_elasticsearch.rb
CHANGED
@@ -170,33 +170,6 @@ class ElasticsearchOutput < Test::Unit::TestCase
     stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 500, error), :headers => { 'Content-Type' => 'json' } } })
   end
 
-  def stub_elastic_unrecognized_error(url="http://localhost:9200/_bulk")
-    error = {
-      "status" => 500,
-      "type" => "some-other-type",
-      "reason" => "some-other-reason"
-    }
-    stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 504, error), :headers => { 'Content-Type' => 'json' } } })
-  end
-
-  def stub_elastic_version_mismatch(url="http://localhost:9200/_bulk")
-    error = {
-      "status" => 500,
-      "category" => "some-other-type",
-      "reason" => "some-other-reason"
-    }
-    stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 500, error), :headers => { 'Content-Type' => 'json' } } })
-  end
-
-  def stub_elastic_index_to_create(url="http://localhost:9200/_bulk")
-    error = {
-      "category" => "some-other-type",
-      "reason" => "some-other-reason",
-      "type" => "some-other-type"
-    }
-    stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 0, 500, error), :headers => { 'Content-Type' => 'json' } } })
-  end
-
   def stub_elastic_unexpected_response_op(url="http://localhost:9200/_bulk")
     error = {
       "category" => "some-other-type",
@@ -1748,51 +1721,70 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_bulk_error
     stub_elastic_ping
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    stub_request(:post, 'http://localhost:9200/_bulk')
+      .to_return(lambda do |req|
+      { :status => 200,
+        :headers => { 'Content-Type' => 'json' },
+        :body => %({
+          "took" : 1,
+          "errors" : true,
+          "items" : [
+            {
+              "create" : {
+                "_index" : "foo",
+                "_type" : "bar",
+                "_id" : "abc",
+                "status" : 500,
+                "error" : {
+                  "type" : "some unrecognized type",
+                  "reason":"some error to cause version mismatch"
+                }
+              }
+            },
+            {
+              "create" : {
+                "_index" : "foo",
+                "_type" : "bar",
+                "_id" : "abc",
+                "status" : 201
+              }
+            },
+            {
+              "create" : {
+                "_index" : "foo",
+                "_type" : "bar",
+                "_id" : "abc",
+                "status" : 500,
+                "error" : {
+                  "type" : "some unrecognized type",
+                  "reason":"some error to cause version mismatch"
+                }
+              }
+            },
+            {
+              "create" : {
+                "_index" : "foo",
+                "_type" : "bar",
+                "_id" : "abc",
+                "_id" : "abc",
+                "status" : 409
+              }
+            }
+          ]
+        })
+      }
+    end)
+
+    driver.run(default_tag: 'test') do
+      driver.feed(1, sample_record)
+      driver.feed(2, sample_record)
+      driver.feed(3, sample_record)
+      driver.feed(4, sample_record)
+    end
+
+    expect = [['test', 1, sample_record],
+              ['test', 3, sample_record]]
+    assert_equal expect, driver.events
   end
 
   def test_update_should_not_write_if_theres_no_id
@@ -1979,4 +1971,5 @@ class ElasticsearchOutput < Test::Unit::TestCase
     end
     assert(index_cmds[0].has_key?("create"))
   end
+
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 2.9.2
+  version: 2.10.0
 platform: ruby
 authors:
 - diogo
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-
+date: 2018-05-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd