fluent-plugin-elasticsearch 1.15.2 → 1.16.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: dc1ee6d297a64d46320b2b48b8528faf9a377f1a9af9274c083f36e0d7f48c88
4
- data.tar.gz: ff6de82c88a71a3f8a3c47951fadf0b555fe4118feb18fe911c4ef7b66178218
3
+ metadata.gz: 15e80a669ab0e24b83a279e1cbcc3240b76560b445319b7af7b2f719523b86e1
4
+ data.tar.gz: df01b4f813ffeba65619b778d418384b1cf3df3323929cf2921cf1273a336738
5
5
  SHA512:
6
- metadata.gz: 3123a75aeee15020ec775457d36bfe6eaf52ba22309163c37859ca096556605dc400a98495f587f954e2e7b4d4ad2aa9cf9b7010c9c75158d911feaa6e181fa9
7
- data.tar.gz: 506fc86e5930c18c38992d477d7865363bd351851e141879c67834dca05828f710795fc63725a6f5bc7fde9732660fa97a5caec45d6361b6c974e391fa1ef36c
6
+ metadata.gz: 2e44489b1f603ae61d9566c4cd0d6c16a7cec6ee5c701053f884ee284945fa2f48407674108223ec6c4c7127daabcaf17b6eb534b06d8c26339dd3aaba2fa908
7
+ data.tar.gz: cee4380e5d0aecfd9305bdbc68eaf3c2dbc9fef6b0e8f88f05ad4870a33d3b330983118a6ebe492ac4645805ec23c7acc270b91d37d6a222ed03ebc5ae28c6a5
data/History.md CHANGED
@@ -2,6 +2,9 @@
2
2
 
3
3
  ### [Unreleased]
4
4
 
5
+ ### 1.16.0
6
+ - evaluate bulk request failures and reroute failed messages (#405)
7
+
5
8
  ### 1.15.2
6
9
  - handle case where stats not processed in order; add testing (#410)
7
10
 
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = 'fluent-plugin-elasticsearch'
6
- s.version = '1.15.2'
6
+ s.version = '1.16.0'
7
7
  s.authors = ['diogo', 'pitr']
8
8
  s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
9
9
  s.description = %q{Elasticsearch output plugin for Fluent event collector}
@@ -1,23 +1,41 @@
1
+ require 'fluent/event'
1
2
  require_relative 'elasticsearch_constants'
2
3
 
3
4
  class Fluent::ElasticsearchErrorHandler
4
5
  include Fluent::ElasticsearchConstants
5
6
 
6
- attr_accessor :records, :bulk_message_count
7
- class BulkIndexQueueFull < StandardError; end
8
- class ElasticsearchOutOfMemory < StandardError; end
7
+ attr_accessor :bulk_message_count
9
8
  class ElasticsearchVersionMismatch < StandardError; end
10
- class UnrecognizedElasticsearchError < StandardError; end
11
9
  class ElasticsearchError < StandardError; end
12
- def initialize(plugin, records = 0, bulk_message_count = 0)
10
+
11
+ def initialize(plugin)
13
12
  @plugin = plugin
14
- @records = records
15
- @bulk_message_count = bulk_message_count
16
13
  end
17
14
 
18
- def handle_error(response)
15
+ def handle_error(response, tag, chunk, bulk_message_count)
16
+ items = response['items']
17
+ if items.nil? || !items.is_a?(Array)
18
+ raise ElasticsearchVersionMismatch, "The response format was unrecognized: #{response}"
19
+ end
20
+ if bulk_message_count != items.length
21
+ raise ElasticsearchError, "The number of records submitted #{bulk_message_count} do not match the number returned #{items.length}. Unable to process bulk response."
22
+ end
23
+ retry_stream = Fluent::MultiEventStream.new
19
24
  stats = Hash.new(0)
20
- response['items'].each do |item|
25
+ meta = {}
26
+ header = {}
27
+ chunk.msgpack_each do |time, rawrecord|
28
+ bulk_message = ''
29
+ next unless rawrecord.is_a? Hash
30
+ begin
31
+ # we need a deep copy for process_message to alter
32
+ processrecord = Marshal.load(Marshal.dump(rawrecord))
33
+ @plugin.process_message(tag, meta, header, time, processrecord, bulk_message)
34
+ rescue => e
35
+ stats[:bad_chunk_record] += 1
36
+ next
37
+ end
38
+ item = items.shift
21
39
  if item.has_key?(@plugin.write_operation)
22
40
  write_operation = @plugin.write_operation
23
41
  elsif INDEX_OP == @plugin.write_operation && item.has_key?(CREATE_OP)
@@ -41,13 +59,19 @@ class Fluent::ElasticsearchErrorHandler
41
59
  stats[:successes] += 1
42
60
  when CREATE_OP == write_operation && 409 == status
43
61
  stats[:duplicates] += 1
62
+ when 400 == status
63
+ stats[:bad_argument] += 1
64
+ @plugin.router.emit_error_event(tag, time, rawrecord, '400 - Rejected by Elasticsearch')
44
65
  else
45
66
  if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
46
67
  type = item[write_operation]['error']['type']
68
+ stats[type] += 1
69
+ retry_stream.add(time, rawrecord)
47
70
  else
48
71
  # When we don't have a type field, something changed in the API
49
72
  # expected return values (ES 2.x)
50
73
  stats[:errors_bad_resp] += 1
74
+ @plugin.router.emit_error_event(tag, time, rawrecord, "#{status} - No error type provided in the response")
51
75
  next
52
76
  end
53
77
  stats[type] += 1
@@ -58,19 +82,6 @@ class Fluent::ElasticsearchErrorHandler
58
82
  stats.each_pair { |key, value| msg << "#{value} #{key}" }
59
83
  @plugin.log.debug msg.join(', ')
60
84
  end
61
- case
62
- when stats[:errors_bad_resp] > 0
63
- @plugin.log.on_debug { @plugin.log.debug("Unable to parse response from elasticsearch, likely an API version mismatch: #{response}") }
64
- raise ElasticsearchVersionMismatch, "Unable to parse error response from Elasticsearch, likely an API version mismatch. Add '@log_level debug' to your config to see the full response"
65
- when stats[:successes] + stats[:duplicates] == bulk_message_count
66
- @plugin.log.info("retry succeeded - successes=#{stats[:successes]} duplicates=#{stats[:duplicates]}")
67
- when stats['es_rejected_execution_exception'] > 0
68
- raise BulkIndexQueueFull, 'Bulk index queue is full, retrying'
69
- when stats['out_of_memory_error'] > 0
70
- raise ElasticsearchOutOfMemory, 'Elasticsearch has exhausted its heap, retrying'
71
- else
72
- @plugin.log.on_debug { @plugin.log.debug("Elasticsearch errors returned, retrying: #{response}") }
73
- raise ElasticsearchError, "Elasticsearch returned errors, retrying. Add '@log_level debug' to your config to see the full response"
74
- end
85
+ raise Fluent::ElasticsearchOutput::RetryStreamError.new(retry_stream) unless retry_stream.empty?
75
86
  end
76
87
  end
@@ -10,6 +10,7 @@ rescue LoadError
10
10
  end
11
11
 
12
12
  require 'fluent/output'
13
+ require 'fluent/event'
13
14
  require_relative 'elasticsearch_constants'
14
15
  require_relative 'elasticsearch_error_handler'
15
16
  require_relative 'elasticsearch_index_template'
@@ -17,6 +18,16 @@ require_relative 'elasticsearch_index_template'
17
18
  class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
18
19
  class ConnectionFailure < StandardError; end
19
20
 
21
+ # RetryStreamError privides a stream to be
22
+ # put back in the pipeline for cases where a bulk request
23
+ # failed (e.g some records succeed while others failed)
24
+ class RetryStreamError < StandardError
25
+ attr_reader :retry_stream
26
+ def initialize(retry_stream)
27
+ @retry_stream = retry_stream
28
+ end
29
+ end
30
+
20
31
  Fluent::Plugin.register_output('elasticsearch', self)
21
32
 
22
33
  config_param :host, :string, :default => 'localhost'
@@ -314,22 +325,21 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
314
325
  end
315
326
 
316
327
  def write_objects(tag, chunk)
328
+ bulk_message_count = 0
317
329
  bulk_message = ''
318
330
  header = {}
319
331
  meta = {}
320
- @error = Fluent::ElasticsearchErrorHandler.new(self)
321
-
322
332
  chunk.msgpack_each do |time, record|
323
- @error.records += 1
324
333
  next unless record.is_a? Hash
325
334
  begin
326
335
  process_message(tag, meta, header, time, record, bulk_message)
336
+ bulk_message_count += 1
327
337
  rescue=>e
328
338
  router.emit_error_event(tag, time, record, e)
329
339
  end
330
340
  end
331
341
 
332
- send_bulk(bulk_message) unless bulk_message.empty?
342
+ send_bulk(bulk_message, tag, chunk, bulk_message_count) unless bulk_message.empty?
333
343
  bulk_message.clear
334
344
  end
335
345
 
@@ -398,7 +408,6 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
398
408
  end
399
409
 
400
410
  append_record_to_messages(@write_operation, meta, header, record, bulk_message)
401
- @error.bulk_message_count += 1
402
411
  end
403
412
 
404
413
  # returns [parent, child_key] of child described by path array in record's tree
@@ -408,11 +417,18 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
408
417
  [parent_object, path[-1]]
409
418
  end
410
419
 
411
- def send_bulk(data)
420
+ # send_bulk given a specific bulk request, the original tag,
421
+ # chunk, and bulk_message_count
422
+ def send_bulk(data, tag, chunk, bulk_message_count)
412
423
  retries = 0
413
424
  begin
414
425
  response = client.bulk body: data
415
- @error.handle_error(response) if response['errors']
426
+ if response['errors']
427
+ error = Fluent::ElasticsearchErrorHandler.new(self)
428
+ error.handle_error(response, tag, chunk, bulk_message_count)
429
+ end
430
+ rescue RetryStreamError => e
431
+ router.emit_stream(tag, e.retry_stream)
416
432
  rescue *client.transport.host_unreachable_exceptions => e
417
433
  if retries < 2
418
434
  retries += 1
@@ -1,4 +1,5 @@
1
1
  require 'helper'
2
+ require 'fluent/plugin/out_elasticsearch'
2
3
  require 'fluent/plugin/elasticsearch_error_handler'
3
4
  require 'json'
4
5
 
@@ -6,10 +7,35 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
6
7
 
7
8
  class TestPlugin
8
9
  attr_reader :log
9
- attr_reader :write_operation
10
+ attr_reader :write_operation, :error_events
10
11
  def initialize(log)
11
12
  @log = log
12
13
  @write_operation = 'index'
14
+ @error_events = Fluent::MultiEventStream.new
15
+ end
16
+
17
+ def router
18
+ self
19
+ end
20
+
21
+ def emit_error_event(tag, time, record, e)
22
+ @error_events.add(time, record)
23
+ end
24
+
25
+ def process_message(tag, meta, header, time, record, bulk_message)
26
+ if record.has_key?('raise') && record['raise']
27
+ raise StandardError, 'process_message'
28
+ end
29
+ end
30
+ end
31
+
32
+ class MockChunk
33
+ def initialize(records)
34
+ @records = records
35
+ @index = 0
36
+ end
37
+ def msgpack_each
38
+ @records.each { |item| yield(item[:time],item[:record]) }
13
39
  end
14
40
  end
15
41
 
@@ -31,7 +57,8 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
31
57
  JSON.parse(value)
32
58
  end
33
59
 
34
- def test_errors
60
+ def test_dlq_400_responses
61
+ records = [{time: 123, record: {"foo" => "bar"}}]
35
62
  response = parse_response(%({
36
63
  "took" : 0,
37
64
  "errors" : true,
@@ -39,148 +66,44 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
39
66
  {
40
67
  "create" : {
41
68
  "_index" : "foo",
42
- "_type" : "bar",
43
- "_id" : "abc",
44
- "status" : 500,
45
- "error" : {
46
- "type" : "some unrecognized type",
47
- "reason":"unrecognized error"
48
- }
49
- }
50
- },
51
- {
52
- "create" : {
53
- "_index" : "foo",
54
- "_type" : "bar",
55
- "_id" : "abc",
56
- "status" : 500,
57
- "error" : {
58
- "type" : "some unrecognized type",
59
- "reason":"unrecognized error"
60
- }
61
- }
62
- },
63
- {
64
- "create" : {
65
- "_index" : "foo",
66
- "_type" : "bar",
67
- "_id" : "abc",
68
- "status" : 201
69
- }
70
- },
71
- {
72
- "create" : {
73
- "_index" : "foo",
74
- "_type" : "bar",
75
- "_id" : "abc",
76
- "status" : 409
77
- }
78
- },
79
- {
80
- "create" : {
81
- "_index" : "foo",
82
- "_type" : "bar",
83
- "_id" : "abc",
84
69
  "status" : 400,
85
- "error" : {
86
- "type" : "some unrecognized type",
70
+ "_type" : "bar",
87
71
  "reason":"unrecognized error"
88
72
  }
89
- }
90
73
  }
91
74
  ]
92
- }))
93
-
94
- assert_raise Fluent::ElasticsearchErrorHandler::ElasticsearchError do
95
- @handler.handle_error(response)
96
- end
97
-
75
+ }))
76
+ chunk = MockChunk.new(records)
77
+ @handler.handle_error(response, 'atag', chunk, records.length)
78
+ assert_equal(1, @plugin.error_events.instance_variable_get(:@time_array).size)
98
79
  end
99
80
 
100
- def test_elasticsearch_version_mismatch_raises_error
101
- response = parse_response(%(
102
- {
103
- "took" : 0,
104
- "errors" : true,
105
- "items" : [
106
- {
107
- "create" : {
108
- "_index" : "foo",
109
- "_type" : "bar",
110
- "_id" : "abc",
111
- "status" : 500,
112
- "error" : {
113
- "reason":"some error to cause version mismatch"
114
- }
115
- }
116
- }
117
- ]
118
- }
119
- ))
120
-
121
- assert_raise Fluent::ElasticsearchErrorHandler::ElasticsearchVersionMismatch do
122
- @handler.handle_error(response)
81
+ def test_retry_error
82
+ records = []
83
+ error_records = Hash.new(false)
84
+ error_records.merge!({0=>true, 4=>true, 9=>true})
85
+ 10.times do |i|
86
+ records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
123
87
  end
88
+ chunk = MockChunk.new(records)
124
89
 
125
- end
126
-
127
- def test_retry_with_successes_and_duplicates
128
- response = parse_response(%(
129
- {
130
- "took" : 0,
131
- "errors" : true,
132
- "items" : [
133
- {
134
- "create" : {
135
- "_index" : "foo",
136
- "_type" : "bar",
137
- "_id" : "abc",
138
- "status" : 409,
139
- "error" : {
140
- "reason":"duplicate ID"
141
- }
142
- }
143
- },
144
- {
145
- "create" : {
146
- "_index" : "foo",
147
- "_type" : "bar",
148
- "_id" : "abc",
149
- "status" : 201
150
- }
151
- }
152
- ]
153
- }
154
- ))
155
-
156
- @plugin.instance_variable_set(:@write_operation, 'create')
157
- @handler.instance_variable_set(:@bulk_message_count, 2)
158
- @handler.handle_error(response)
159
- assert_match /retry succeeded - successes=1 duplicates=1/, @log.out.logs[0]
160
- end
161
-
162
- def test_bulk_rejection_errors
163
90
  response = parse_response(%({
164
- "took" : 0,
91
+ "took" : 1,
165
92
  "errors" : true,
166
93
  "items" : [
167
94
  {
168
95
  "create" : {
169
96
  "_index" : "foo",
170
97
  "_type" : "bar",
171
- "_id" : "abc",
172
- "status" : 500,
173
- "error" : {
174
- "type" : "some unrecognized type",
175
- "reason":"unrecognized error"
176
- }
98
+ "_id" : "1",
99
+ "status" : 201
177
100
  }
178
101
  },
179
102
  {
180
103
  "create" : {
181
104
  "_index" : "foo",
182
105
  "_type" : "bar",
183
- "_id" : "abc",
106
+ "_id" : "2",
184
107
  "status" : 500,
185
108
  "error" : {
186
109
  "type" : "some unrecognized type",
@@ -192,50 +115,39 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
192
115
  "create" : {
193
116
  "_index" : "foo",
194
117
  "_type" : "bar",
195
- "_id" : "abc",
196
- "status" : 201
118
+ "_id" : "3",
119
+ "status" : 409
197
120
  }
198
121
  },
199
122
  {
200
123
  "create" : {
201
124
  "_index" : "foo",
202
125
  "_type" : "bar",
203
- "_id" : "abc",
204
- "status" : 409
126
+ "_id" : "5",
127
+ "status" : 500,
128
+ "error" : {
129
+ "reason":"unrecognized error - no type field"
130
+ }
205
131
  }
206
132
  },
207
133
  {
208
134
  "create" : {
209
135
  "_index" : "foo",
210
136
  "_type" : "bar",
211
- "_id" : "abc",
137
+ "_id" : "6",
212
138
  "status" : 429,
213
139
  "error" : {
214
140
  "type" : "es_rejected_execution_exception",
215
- "reason":"Elasticsearch could not process bulk index request"
141
+ "reason":"unable to fulfill request at this time, try again later"
216
142
  }
217
143
  }
218
- }
219
- ]
220
- }))
221
-
222
- assert_raise Fluent::ElasticsearchErrorHandler::BulkIndexQueueFull do
223
- @handler.handle_error(response)
224
- end
225
-
226
- end
227
-
228
- def test_out_of_memory_errors
229
- response = parse_response(%({
230
- "took" : 0,
231
- "errors" : true,
232
- "items" : [
144
+ },
233
145
  {
234
146
  "create" : {
235
147
  "_index" : "foo",
236
148
  "_type" : "bar",
237
- "_id" : "abc",
238
- "status" : 500,
149
+ "_id" : "7",
150
+ "status" : 400,
239
151
  "error" : {
240
152
  "type" : "some unrecognized type",
241
153
  "reason":"unrecognized error"
@@ -246,48 +158,35 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
246
158
  "create" : {
247
159
  "_index" : "foo",
248
160
  "_type" : "bar",
249
- "_id" : "abc",
161
+ "_id" : "8",
250
162
  "status" : 500,
251
163
  "error" : {
252
164
  "type" : "some unrecognized type",
253
165
  "reason":"unrecognized error"
254
166
  }
255
167
  }
256
- },
257
- {
258
- "create" : {
259
- "_index" : "foo",
260
- "_type" : "bar",
261
- "_id" : "abc",
262
- "status" : 201
263
- }
264
- },
265
- {
266
- "create" : {
267
- "_index" : "foo",
268
- "_type" : "bar",
269
- "_id" : "abc",
270
- "status" : 409
271
- }
272
- },
273
- {
274
- "create" : {
275
- "_index" : "foo",
276
- "_type" : "bar",
277
- "_id" : "abc",
278
- "status" : 400,
279
- "error" : {
280
- "type" : "out_of_memory_error",
281
- "reason":"Elasticsearch exhausted its heap"
282
- }
283
- }
284
168
  }
285
169
  ]
286
170
  }))
287
171
 
288
- assert_raise Fluent::ElasticsearchErrorHandler::ElasticsearchOutOfMemory do
289
- @handler.handle_error(response)
172
+ begin
173
+ failed = false
174
+ @handler.handle_error(response, 'atag', chunk, response['items'].length)
175
+ rescue Fluent::ElasticsearchOutput::RetryStreamError=>e
176
+ failed = true
177
+ records = [].tap do |records|
178
+ e.retry_stream.each {|time, record| records << record}
179
+ end
180
+ assert_equal 3, records.length
181
+ assert_equal 2, records[0]['_id']
182
+ assert_equal 6, records[1]['_id']
183
+ assert_equal 8, records[2]['_id']
184
+ errors = @plugin.error_events.collect {|time, record| record}
185
+ assert_equal 2, errors.length
186
+ assert_equal 5, errors[0]['_id']
187
+ assert_equal 7, errors[1]['_id']
290
188
  end
189
+ assert_true failed
291
190
 
292
191
  end
293
192
 
@@ -150,33 +150,6 @@ class ElasticsearchOutput < Test::Unit::TestCase
150
150
  stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 500, error), :headers => { 'Content-Type' => 'json' } } })
151
151
  end
152
152
 
153
- def stub_elastic_unrecognized_error(url="http://localhost:9200/_bulk")
154
- error = {
155
- "status" => 500,
156
- "type" => "some-other-type",
157
- "reason" => "some-other-reason"
158
- }
159
- stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 504, error), :headers => { 'Content-Type' => 'json' } } })
160
- end
161
-
162
- def stub_elastic_version_mismatch(url="http://localhost:9200/_bulk")
163
- error = {
164
- "status" => 500,
165
- "category" => "some-other-type",
166
- "reason" => "some-other-reason"
167
- }
168
- stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 1, 500, error), :headers => { 'Content-Type' => 'json' } } })
169
- end
170
-
171
- def stub_elastic_index_to_create(url="http://localhost:9200/_bulk")
172
- error = {
173
- "category" => "some-other-type",
174
- "reason" => "some-other-reason",
175
- "type" => "some-other-type"
176
- }
177
- stub_request(:post, url).to_return(lambda { |req| { :status => 200, :body => make_response_body(req, 0, 500, error), :headers => { 'Content-Type' => 'json' } } })
178
- end
179
-
180
153
  def stub_elastic_unexpected_response_op(url="http://localhost:9200/_bulk")
181
154
  error = {
182
155
  "category" => "some-other-type",
@@ -1377,47 +1350,68 @@ class ElasticsearchOutput < Test::Unit::TestCase
1377
1350
 
1378
1351
  def test_bulk_error
1379
1352
  stub_elastic_ping
1380
- stub_elastic_bulk_error
1381
-
1382
- assert_raise(Fluent::ElasticsearchErrorHandler::ElasticsearchError) {
1383
- driver.emit(sample_record)
1384
- driver.emit(sample_record)
1385
- driver.emit(sample_record)
1386
- driver.run
1387
- }
1388
- end
1389
-
1390
- def test_bulk_error_version_mismatch
1391
- stub_elastic_ping
1392
- stub_elastic_version_mismatch
1393
-
1394
- assert_raise(Fluent::ElasticsearchErrorHandler::ElasticsearchVersionMismatch) {
1395
- driver.emit(sample_record)
1396
- driver.emit(sample_record)
1397
- driver.emit(sample_record)
1398
- driver.run
1399
- }
1400
- end
1401
-
1402
- def test_bulk_index_into_a_create
1403
- stub_elastic_ping
1404
- stub_elastic_index_to_create
1405
-
1406
- assert_raise(Fluent::ElasticsearchErrorHandler::ElasticsearchError) {
1407
- driver.emit(sample_record)
1408
- driver.run
1409
- }
1410
- assert(index_cmds[0].has_key?("create"))
1411
- end
1412
-
1413
- def test_bulk_unexpected_response_op
1414
- stub_elastic_ping
1415
- stub_elastic_unexpected_response_op
1416
-
1417
- assert_raise(Fluent::ElasticsearchErrorHandler::ElasticsearchVersionMismatch) {
1418
- driver.emit(sample_record)
1419
- driver.run
1420
- }
1353
+ stub_request(:post, 'http://localhost:9200/_bulk')
1354
+ .to_return(lambda do |req|
1355
+ { :status => 200,
1356
+ :headers => { 'Content-Type' => 'json' },
1357
+ :body => %({
1358
+ "took" : 1,
1359
+ "errors" : true,
1360
+ "items" : [
1361
+ {
1362
+ "create" : {
1363
+ "_index" : "foo",
1364
+ "_type" : "bar",
1365
+ "_id" : "abc",
1366
+ "status" : 500,
1367
+ "error" : {
1368
+ "type" : "some unrecognized type",
1369
+ "reason":"some error to cause version mismatch"
1370
+ }
1371
+ }
1372
+ },
1373
+ {
1374
+ "create" : {
1375
+ "_index" : "foo",
1376
+ "_type" : "bar",
1377
+ "_id" : "abc",
1378
+ "status" : 201
1379
+ }
1380
+ },
1381
+ {
1382
+ "create" : {
1383
+ "_index" : "foo",
1384
+ "_type" : "bar",
1385
+ "_id" : "abc",
1386
+ "status" : 500,
1387
+ "error" : {
1388
+ "type" : "some unrecognized type",
1389
+ "reason":"some error to cause version mismatch"
1390
+ }
1391
+ }
1392
+ },
1393
+ {
1394
+ "create" : {
1395
+ "_index" : "foo",
1396
+ "_type" : "bar",
1397
+ "_id" : "abc",
1398
+ "_id" : "abc",
1399
+ "status" : 409
1400
+ }
1401
+ }
1402
+ ]
1403
+ })
1404
+ }
1405
+ end)
1406
+
1407
+ driver.emit(sample_record, 1)
1408
+ driver.emit(sample_record, 2)
1409
+ driver.emit(sample_record, 3)
1410
+ driver.emit(sample_record, 4)
1411
+
1412
+ driver.expect_emit('test', 1, sample_record)
1413
+ driver.expect_emit('test', 3, sample_record)
1414
+ driver.run
1421
1415
  end
1422
1416
 
1423
1417
  def test_update_should_not_write_if_theres_no_id
@@ -1592,4 +1586,5 @@ class ElasticsearchOutput < Test::Unit::TestCase
1592
1586
  driver.run
1593
1587
  assert(index_cmds[0].has_key?("create"))
1594
1588
  end
1589
+
1595
1590
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-elasticsearch
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.15.2
4
+ version: 1.16.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - diogo
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2018-04-24 00:00:00.000000000 Z
12
+ date: 2018-05-02 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: fluentd