fluent-plugin-elasticsearch 2.8.6 → 2.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 282a2bfc80d90378c04ec44f035aeb941f2c98ed22b3a2f1b1add371ecf20aab
-  data.tar.gz: 6853f9d7ea05f2bc553636d597eb00d8074dbbac78d62ffca2042e35fedaefd5
+  metadata.gz: 4e6ce96c7388d21261953d86138c5f385bfb6ba79dbec118fc611d14c90a8866
+  data.tar.gz: d9df64b807a27f60ed81a1b451a7916403c6d434a5af5699006dd404ad218151
 SHA512:
-  metadata.gz: 195428011038d2a6249704c76b16bc0eb361b9f648b6c7e40cdde16ad7dee664e926e9ac71e18f840462ec5cd146a8abb039ec9ca77ad35bcf18fdff266d37dc
-  data.tar.gz: 2fb58796f3f38972b43347e9d686251722943882de537b7a206d2bba9cabc1052fc53009486e4d6a40ae19c30217dae5d2d41aaf2cdcc2226a072c6c61841850
+  metadata.gz: 8449503bc7d12055ce8c1755a2656cf86f7066666561a9ddc4c2fe3fc15ba4934f94508dc55bad42a4286af7fc7d8fb1f628554571f860640ae8b3cdf6fe0cdc
+  data.tar.gz: c803b54c1c517942cd199710ef9dde9ac428989c7e2ab29c24e02d18fd9b4187d37bf468df9a86cda295359bd9201fea79a87a197b9bcda34814bb576d9efa56
data/History.md CHANGED
@@ -2,6 +2,9 @@
 
 ### [Unreleased]
 
+### 2.9.0
+- DLQ revisited v1 uplifted #398, #402 (#404)
+
 ### 2.8.6
 - auth: Fix missing auth tokens after reloading connections (#394)
 
fluent-plugin-elasticsearch.gemspec CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
 Gem::Specification.new do |s|
   s.name = 'fluent-plugin-elasticsearch'
-  s.version = '2.8.6'
+  s.version = '2.9.0'
   s.authors = ['diogo', 'pitr']
   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
   s.description = %q{Elasticsearch output plugin for Fluent event collector}
lib/fluent/plugin/elasticsearch_error_handler.rb CHANGED
@@ -16,12 +16,7 @@ class Fluent::Plugin::ElasticsearchErrorHandler
   end
 
   def handle_error(response)
-    errors = Hash.new(0)
-    errors_bad_resp = 0
-    errors_unrecognized = 0
-    successes = 0
-    duplicates = 0
-    bad_arguments = 0
+    stats = Hash.new(0)
     response['items'].each do |item|
       if item.has_key?(@plugin.write_operation)
         write_operation = @plugin.write_operation
@@ -30,7 +25,7 @@ class Fluent::Plugin::ElasticsearchErrorHandler
       else
         # When we don't have an expected ops field, something changed in the API
         # expected return values (ES 2.x)
-        errors_bad_resp += 1
+        stats[:errors_bad_resp] += 1
         next
       end
       if item[write_operation].has_key?('status')
@@ -38,58 +33,44 @@ class Fluent::Plugin::ElasticsearchErrorHandler
       else
         # When we don't have a status field, something changed in the API
         # expected return values (ES 2.x)
-        errors_bad_resp += 1
+        stats[:errors_bad_resp] += 1
         next
       end
       case
+      when [200, 201].include?(status)
+        stats[:successes] += 1
       when CREATE_OP == write_operation && 409 == status
-        duplicates += 1
-      when 400 == status
-        bad_arguments += 1
-        @plugin.log.debug "Elasticsearch rejected document: #{item}"
-      when [429, 500].include?(status)
+        stats[:duplicates] += 1
+      else
         if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
           type = item[write_operation]['error']['type']
         else
           # When we don't have a type field, something changed in the API
           # expected return values (ES 2.x)
-          errors_bad_resp += 1
+          stats[:errors_bad_resp] += 1
           next
         end
-        errors[type] += 1
-      when [200, 201].include?(status)
-        successes += 1
-      else
-        errors_unrecognized += 1
+        stats[type] += 1
       end
     end
-    if errors_bad_resp > 0
-      msg = "Unable to parse error response from Elasticsearch, likely an API version mismatch #{response}"
-      @plugin.log.error msg
-      raise ElasticsearchVersionMismatch, msg
-    end
-    if bad_arguments > 0
-      @plugin.log.warn "Elasticsearch rejected #{bad_arguments} documents due to invalid field arguments"
-    end
-    if duplicates > 0
-      @plugin.log.info "Encountered #{duplicates} duplicate(s) of #{successes} indexing chunk, ignoring"
-    end
-    msg = "Indexed (op = #{@plugin.write_operation}) #{successes} successfully, #{duplicates} duplicate(s), #{bad_arguments} bad argument(s), #{errors_unrecognized} unrecognized error(s)"
-    errors.each_key do |key|
-      msg << ", #{errors[key]} #{key} error(s)"
+    if stats[:errors_bad_resp] > 0
+      @plugin.log.on_debug { @plugin.log.debug("Unable to parse response from elasticsearch, likely an API version mismatch: #{response}") }
+      raise ElasticsearchVersionMismatch, "Unable to parse error response from Elasticsearch, likely an API version mismatch. Add '@log_level debug' to your config to see the full response"
     end
-    @plugin.log.debug msg
-    if errors_unrecognized > 0
-      raise UnrecognizedElasticsearchError, "Unrecognized elasticsearch errors returned, retrying #{response}"
+    @plugin.log.on_debug do
+      msg = ["Indexed (op = #{@plugin.write_operation})"]
+      stats.each_pair { |key, value| msg << "#{value} #{key}" }
+      @plugin.log.debug msg.join(', ')
     end
-    errors.each_key do |key|
+    stats.each_key do |key|
       case key
       when 'out_of_memory_error'
-        raise ElasticsearchOutOfMemory, "Elasticsearch has exhausted its heap, retrying"
+        raise ElasticsearchOutOfMemory, 'Elasticsearch has exhausted its heap, retrying'
       when 'es_rejected_execution_exception'
-        raise BulkIndexQueueFull, "Bulk index queue is full, retrying"
+        raise BulkIndexQueueFull, 'Bulk index queue is full, retrying'
       else
-        raise ElasticsearchError, "Elasticsearch errors returned, retrying #{response}"
+        @plugin.log.on_debug { @plugin.log.debug("Elasticsearch errors returned, retrying: #{response}") }
+        raise ElasticsearchError, "Elasticsearch returned errors, retrying. Add '@log_level debug' to your config to see the full response"
       end
     end
   end
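
Two idioms in the rewritten handler are worth noting. First, six ad-hoc counters collapse into a single zero-defaulting Hash, keyed by symbols for the fixed buckets and by the Elasticsearch error `type` string for everything else. Second, log lines that interpolate the full bulk response move inside `log.on_debug` blocks, so the (potentially huge) string is never built unless the log level is debug. A standalone sketch of both patterns, using Ruby's stock Logger in place of fluentd's logger (illustrative, not the plugin's code):

    require 'logger'

    log = Logger.new($stdout)
    log.level = Logger::INFO

    stats = Hash.new(0) # missing keys read as 0, so `+= 1` needs no setup
    [201, 201, 409, 500].each do |status|
      case
      when [200, 201].include?(status) then stats[:successes] += 1
      when 409 == status               then stats[:duplicates] += 1
      else                                  stats['es_rejected_execution_exception'] += 1
      end
    end

    # Logger's block form mirrors fluentd's log.on_debug: the block (and its
    # string interpolation) is only evaluated when debug logging is enabled.
    log.debug { 'Indexed, ' + stats.map { |k, v| "#{v} #{k}" }.join(', ') } # skipped at INFO
    log.info(stats.inspect) # {:successes=>2, :duplicates=>1, "es_rejected_execution_exception"=>1}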
lib/fluent/plugin/out_elasticsearch.rb CHANGED
@@ -380,7 +380,7 @@ EOC
     meta = {}
 
     tag = chunk.metadata.tag
-    logstash_prefix, index_name, type_name = expand_placeholders(chunk.metadata)
+    extracted_values = expand_placeholders(chunk.metadata)
     @error = Fluent::Plugin::ElasticsearchErrorHandler.new(self)
     @last_seen_major_version = detect_es_major_version rescue DEFAULT_ELASTICSEARCH_VERSION
 
@@ -388,89 +388,98 @@ EOC
       @error.records += 1
       next unless record.is_a? Hash
 
-      if @flatten_hashes
-        record = flatten_record(record)
+      begin
+        process_message(tag, meta, header, time, record, bulk_message, extracted_values)
+      rescue => e
+        router.emit_error_event(tag, time, record, e)
       end
+    end
+    send_bulk(bulk_message) unless bulk_message.empty?
+    bulk_message.clear
+  end
 
-      if @hash_config
-        record = generate_hash_id_key(record)
-      end
+  def process_message(tag, meta, header, time, record, bulk_message, extracted_values)
+    logstash_prefix, index_name, type_name = extracted_values
 
-      dt = nil
-      if @logstash_format || @include_timestamp
-        if record.has_key?(TIMESTAMP_FIELD)
-          rts = record[TIMESTAMP_FIELD]
-          dt = parse_time(rts, time, tag)
-        elsif record.has_key?(@time_key)
-          rts = record[@time_key]
-          dt = parse_time(rts, time, tag)
-          record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
-        else
-          dt = Time.at(time).to_datetime
-          record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
-        end
-      end
+    if @flatten_hashes
+      record = flatten_record(record)
+    end
 
-      target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
-      if target_index_parent && target_index_parent[target_index_child_key]
-        target_index = target_index_parent.delete(target_index_child_key)
-      elsif @logstash_format
-        dt = dt.new_offset(0) if @utc_index
-        target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
+    if @hash_config
+      record = generate_hash_id_key(record)
+    end
+
+    dt = nil
+    if @logstash_format || @include_timestamp
+      if record.has_key?(TIMESTAMP_FIELD)
+        rts = record[TIMESTAMP_FIELD]
+        dt = parse_time(rts, time, tag)
+      elsif record.has_key?(@time_key)
+        rts = record[@time_key]
+        dt = parse_time(rts, time, tag)
+        record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision) unless @time_key_exclude_timestamp
       else
-        target_index = index_name
+        dt = Time.at(time).to_datetime
+        record[TIMESTAMP_FIELD] = dt.iso8601(@time_precision)
      end
+    end
 
-      # Change target_index to lower-case since Elasticsearch doesn't
-      # allow upper-case characters in index names.
-      target_index = target_index.downcase
-      if @include_tag_key
-        record[@tag_key] = tag
-      end
+    target_index_parent, target_index_child_key = @target_index_key ? get_parent_of(record, @target_index_key) : nil
+    if target_index_parent && target_index_parent[target_index_child_key]
+      target_index = target_index_parent.delete(target_index_child_key)
+    elsif @logstash_format
+      dt = dt.new_offset(0) if @utc_index
+      target_index = "#{logstash_prefix}#{@logstash_prefix_separator}#{dt.strftime(@logstash_dateformat)}"
+    else
+      target_index = index_name
+    end
 
-      target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
-      if target_type_parent && target_type_parent[target_type_child_key]
-        target_type = target_type_parent.delete(target_type_child_key)
-        if @last_seen_major_version == 6
-          log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
-          target_type = type_name
-        elsif @last_seen_major_version >= 7
-          log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
-          target_type = '_doc'.freeze
-        end
+    # Change target_index to lower-case since Elasticsearch doesn't
+    # allow upper-case characters in index names.
+    target_index = target_index.downcase
+    if @include_tag_key
+      record[@tag_key] = tag
+    end
+
+    target_type_parent, target_type_child_key = @target_type_key ? get_parent_of(record, @target_type_key) : nil
+    if target_type_parent && target_type_parent[target_type_child_key]
+      target_type = target_type_parent.delete(target_type_child_key)
+      if @last_seen_major_version == 6
+        log.warn "Detected ES 6.x: `@type_name` will be used as the document `_type`."
+        target_type = type_name
+      elsif @last_seen_major_version >= 7
+        log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
+        target_type = '_doc'.freeze
+      end
+    else
+      if @last_seen_major_version >= 7 && target_type != DEFAULT_TYPE_NAME_ES_7x
+        log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
+        target_type = '_doc'.freeze
       else
-        if @last_seen_major_version >= 7 && target_type != DEFAULT_TYPE_NAME_ES_7x
-          log.warn "Detected ES 7.x or above: `_doc` will be used as the document `_type`."
-          target_type = '_doc'.freeze
-        else
-          target_type = type_name
-        end
+        target_type = type_name
      end
+    end
 
-      meta.clear
-      meta["_index".freeze] = target_index
-      meta["_type".freeze] = target_type
-
-      if @pipeline
-        meta["pipeline".freeze] = @pipeline
-      end
+    meta.clear
+    meta["_index".freeze] = target_index
+    meta["_type".freeze] = target_type
 
-      @meta_config_map.each do |record_accessor, meta_key|
-        if raw_value = record_accessor.call(record)
-          meta[meta_key] = raw_value
-        end
-      end
+    if @pipeline
+      meta["pipeline".freeze] = @pipeline
+    end
 
-      if @remove_keys
-        @remove_keys.each { |key| record.delete(key) }
+    @meta_config_map.each do |record_accessor, meta_key|
+      if raw_value = record_accessor.call(record)
+        meta[meta_key] = raw_value
      end
+    end
 
-      append_record_to_messages(@write_operation, meta, header, record, bulk_message)
-      @error.bulk_message_count += 1
+    if @remove_keys
+      @remove_keys.each { |key| record.delete(key) }
     end
 
-    send_bulk(bulk_message) unless bulk_message.empty?
-    bulk_message.clear
+    append_record_to_messages(@write_operation, meta, header, record, bulk_message)
+    @error.bulk_message_count += 1
   end
 
   # returns [parent, child_key] of child described by path array in record's tree
@@ -484,10 +493,7 @@ EOC
     retries = 0
     begin
       response = client.bulk body: data
-      if response['errors']
-        @error.handle_error(response)
-        log.error "Could not push log to Elasticsearch: #{response}"
-      end
+      @error.handle_error(response) if response['errors']
     rescue *client.transport.host_unreachable_exceptions => e
       if retries < 2
         retries += 1
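
The write-path change above is the substance of the "DLQ revisited" entry in History.md: each record is now processed inside its own begin/rescue, and a record that cannot be processed is handed to `router.emit_error_event`, which sends it to Fluentd's error stream (routable via a `<label @ERROR>` section) instead of failing, and endlessly retrying, the entire chunk. A self-contained sketch of that control flow, with a stub standing in for Fluentd's router and JSON serialization standing in for `process_message` (both illustrative):

    require 'json'

    # Stub with the same call shape as fluentd's router.emit_error_event.
    router = Object.new
    def router.emit_error_event(tag, time, record, error)
      puts "-> @ERROR stream: #{record.inspect} (#{error.class})"
    end

    records = [{ 'msg' => 'ok' }, { 'msg' => "\255" }, { 'msg' => 'also ok' }]
    bulk_message = []

    records.each do |record|
      begin
        # "\255" is a lone 0xAD byte, invalid UTF-8; generating the bulk body
        # raises here, just as it does for the bad chunk in the new test below.
        bulk_message << JSON.generate(record)
      rescue => e
        router.emit_error_event('test', Time.now.to_i, record, e)
      end
    end

    puts "#{bulk_message.size}/#{records.size} records made it into the bulk request"
    # => 2/3 -- only the malformed record was diverted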
test/plugin/test_elasticsearch_error_handler.rb ADDED
@@ -0,0 +1,122 @@
+require 'helper'
+require 'fluent/plugin/elasticsearch_error_handler'
+require 'json'
+
+class TestElasticsearchErrorHandler < Test::Unit::TestCase
+
+  class TestPlugin
+    attr_reader :log
+    def initialize(log)
+      @log = log
+    end
+
+    def write_operation
+      'index'
+    end
+  end
+
+  def setup
+    Fluent::Test.setup
+    @log = Fluent::Engine.log
+    plugin = TestPlugin.new(@log)
+    @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(plugin)
+  end
+
+  def parse_response(value)
+    JSON.parse(value)
+  end
+
+  def test_errors
+    response = parse_response(%({
+      "took" : 0,
+      "errors" : true,
+      "items" : [
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 500,
+            "error" : {
+              "type" : "some unrecognized type",
+              "reason":"some error to cause version mismatch"
+            }
+          }
+        },
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 500,
+            "error" : {
+              "type" : "some unrecognized type",
+              "reason":"some error to cause version mismatch"
+            }
+          }
+        },
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 201
+          }
+        },
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 409
+          }
+        },
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 400,
+            "error" : {
+              "type" : "some unrecognized type",
+              "reason":"some error to cause version mismatch"
+            }
+          }
+        }
+      ]
+    }))
+
+    assert_raise Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchError do
+      @handler.handle_error(response)
+    end
+
+  end
+
+  def test_elasticsearch_version_mismatch_raises_error
+    response = parse_response(%(
+      {
+        "took" : 0,
+        "errors" : true,
+        "items" : [
+          {
+            "create" : {
+              "_index" : "foo",
+              "_type" : "bar",
+              "_id" : "abc",
+              "status" : 500,
+              "error" : {
+                "reason":"some error to cause version mismatch"
+              }
+            }
+          }
+        ]
+      }
+    ))
+
+    assert_raise Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchVersionMismatch do
+      @handler.handle_error(response)
+    end
+
+  end
+
+end
test/plugin/test_out_elasticsearch.rb CHANGED
@@ -154,6 +154,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def stub_elastic_bulk_rejected(url="http://localhost:9200/_bulk")
     error = {
+      "status" => 500,
       "type" => "es_rejected_execution_exception",
       "reason" => "rejected execution of org.elasticsearch.transport.TransportService$4@1a34d37a on EsThreadPoolExecutor[bulk, queue capacity = 50, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@312a2162[Running, pool size = 32, active threads = 32, queued tasks = 50, completed tasks = 327053]]"
     }
@@ -162,6 +163,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def stub_elastic_out_of_memory(url="http://localhost:9200/_bulk")
     error = {
+      "status" => 500,
       "type" => "out_of_memory_error",
       "reason" => "Java heap space"
     }
@@ -170,6 +172,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def stub_elastic_unrecognized_error(url="http://localhost:9200/_bulk")
     error = {
+      "status" => 500,
       "type" => "some-other-type",
       "reason" => "some-other-reason"
     }
@@ -178,6 +181,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def stub_elastic_version_mismatch(url="http://localhost:9200/_bulk")
     error = {
+      "status" => 500,
       "category" => "some-other-type",
       "reason" => "some-other-reason"
     }
@@ -201,6 +205,11 @@ class ElasticsearchOutput < Test::Unit::TestCase
     stub_request(:post, url).to_return(lambda { |req| bodystr = make_response_body(req, 0, 500, error); body = JSON.parse(bodystr); body['items'][0]['unknown'] = body['items'][0].delete('create'); { :status => 200, :body => body.to_json, :headers => { 'Content-Type' => 'json' } } })
   end
 
+  def assert_logs_include(logs, msg)
+    matches = logs.grep /#{msg}/
+    assert_equal(1, matches.length, "Logs do not contain '#{msg}' '#{logs}'")
+  end
+
   def test_configure
     config = %{
       host logs.google.com
@@ -660,6 +669,18 @@ class ElasticsearchOutput < Test::Unit::TestCase
     assert_requested(elastic_request)
   end
 
+  def test_write_message_with_bad_chunk
+    driver.configure("target_index_key bad_value\n@log_level debug\n")
+    stub_elastic_ping
+    stub_elastic
+    driver.run(default_tag: 'test') do
+      driver.feed({'bad_value'=>"\255"})
+    end
+    error_log = driver.error_events.map {|e| e.last.message }
+
+    assert_logs_include(error_log, /(input string invalid)|(invalid byte sequence in UTF-8)/)
+  end
+
   def test_writes_to_default_index
     stub_elastic_ping
     stub_elastic
@@ -1724,23 +1745,6 @@ class ElasticsearchOutput < Test::Unit::TestCase
1724
1745
  assert_equal(connection_resets, 1)
1725
1746
  end
1726
1747
 
1727
- def test_bulk_bad_arguments
1728
- driver = driver('@log_level debug')
1729
-
1730
- stub_elastic_ping
1731
- stub_elastic_bad_argument
1732
-
1733
- driver.run(default_tag: 'test', shutdown: false) do
1734
- driver.feed(sample_record)
1735
- driver.feed(sample_record)
1736
- driver.feed(sample_record)
1737
- end
1738
-
1739
- matches = driver.logs.grep /Elasticsearch rejected document:/
1740
- assert_equal(1, matches.length, "Message 'Elasticsearch rejected document: ...' was not emitted")
1741
- matches = driver.logs.grep /documents due to invalid field arguments/
1742
- assert_equal(1, matches.length, "Message 'Elasticsearch rejected # documents due to invalid field arguments ...' was not emitted")
1743
- end
1744
1748
 
1745
1749
  def test_bulk_error
1746
1750
  stub_elastic_ping
@@ -1768,45 +1772,6 @@ class ElasticsearchOutput < Test::Unit::TestCase
     }
   end
 
-  def test_bulk_error_unrecognized_error
-    stub_elastic_ping
-    stub_elastic_unrecognized_error
-
-    assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::UnrecognizedElasticsearchError) {
-      driver.run(default_tag: 'test', shutdown: false) do
-        driver.feed(sample_record)
-        driver.feed(sample_record)
-        driver.feed(sample_record)
-      end
-    }
-  end
-
-  def test_bulk_error_out_of_memory
-    stub_elastic_ping
-    stub_elastic_out_of_memory
-
-    assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchOutOfMemory) {
-      driver.run(default_tag: 'test', shutdown: false) do
-        driver.feed(sample_record)
-        driver.feed(sample_record)
-        driver.feed(sample_record)
-      end
-    }
-  end
-
-  def test_bulk_error_queue_full
-    stub_elastic_ping
-    stub_elastic_bulk_rejected
-
-    assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::BulkIndexQueueFull) {
-      driver.run(default_tag: 'test', shutdown: false) do
-        driver.feed(sample_record)
-        driver.feed(sample_record)
-        driver.feed(sample_record)
-      end
-    }
-  end
-
   def test_bulk_index_into_a_create
     stub_elastic_ping
     stub_elastic_index_to_create
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 2.8.6
+  version: 2.9.0
 platform: ruby
 authors:
 - diogo
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-04-10 00:00:00.000000000 Z
+date: 2018-04-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -151,6 +151,7 @@ files:
 - lib/fluent/plugin/out_elasticsearch.rb
 - lib/fluent/plugin/out_elasticsearch_dynamic.rb
 - test/helper.rb
+- test/plugin/test_elasticsearch_error_handler.rb
 - test/plugin/test_filter_elasticsearch_genid.rb
 - test/plugin/test_out_elasticsearch.rb
 - test/plugin/test_out_elasticsearch_dynamic.rb
@@ -181,6 +182,7 @@ specification_version: 4
 summary: Elasticsearch output plugin for Fluent event collector
 test_files:
 - test/helper.rb
+- test/plugin/test_elasticsearch_error_handler.rb
 - test/plugin/test_filter_elasticsearch_genid.rb
 - test/plugin/test_out_elasticsearch.rb
 - test/plugin/test_out_elasticsearch_dynamic.rb