fluent-plugin-elasticsearch 2.11.8 → 2.11.9

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 7a1df6327a4af9049ed3fb5bbb10a153a79a183d1309996f00abcc20d8410108
- data.tar.gz: 8129c3a35a66eca22611fdaaebe36bb9f4122ff06f3b51e1ae29675125894c88
+ metadata.gz: 85a3883a0d6e6ca5ba98a945cd6a786e2aa9e6f65e4869809e4c8ccae5f25c1d
+ data.tar.gz: 29dec4932b60be332820bb039329c173102d4f7c4e292c3ef7d4092c992d422c
  SHA512:
- metadata.gz: 1e1c2f01409ca7f181e75c88c6b1f32d289259504a5cda50e3d99429e952b8364c2957e10aa77e2dc8f8b3c2cf7d832efe5bc88493e6537a1160b39ba1c2d833
- data.tar.gz: baf563a878fa159061e8232a5b9916b205aa6b9d00b2a5f67d0eb1c306decab30fd7a302e8da5478eb3200c62cf4de52ccf888b7fdda75d5c3bb688a1160de93
+ metadata.gz: 243fbca142efbcca3f98933ec49fb1cb760f32dfa1c54fd1b37d5a809003bab9db296e7d5eafb8409e13617db72673579d89cd62d6abcf30d1bfefd36b4d755d
+ data.tar.gz: b37685ecda626db6426ffec6ede2139486cb34a3e83a9ef6e94b96a8609142fa9bd38f1b8b42265dce5387f992e2636583d3b1e87625f3aa4007e2157958b1ec
data/History.md CHANGED
@@ -2,6 +2,11 @@
 
  ### [Unreleased]
 
+ ### 2.11.9
+ - Use ConnectionRetryFailure in plugin-specific retrying for consistency (#468)
+ - Remove outdated hash_id_key generation code (#466)
+ - Tweak behavior for UnrecoverableError and #detect_es_major_version (#465)
+
  ### 2.11.8
  - Serialize requests with Oj (#464)
 
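Taken together, the 2.11.9 entries reshape the plugin's error handling. A minimal sketch of the resulting exception hierarchy (class names are taken from the diffs below; Fluent::UnrecoverableError is fluentd's marker for errors that should not be retried):

    # Transient connection problems stay retriable inside the plugin:
    class ConnectionFailure < StandardError; end
    # Raised only once the plugin's own retries are exhausted:
    class ConnectionRetryFailure < Fluent::UnrecoverableError; end
    # Raised when a bulk response item carries a fatal error type:
    class ElasticsearchRequestAbortError < Fluent::UnrecoverableError; end
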
data/README.md CHANGED
@@ -715,6 +715,8 @@ The value for option `buffer_chunk_limit` should not exceed value `http.max_cont
 
  **Note**: If you use or evaluate Fluentd v0.14, you can use the `<buffer>` directive to specify buffer configuration, too. For more detail, please refer to the [buffer configuration options for v0.14](https://docs.fluentd.org/v0.14/articles/buffer-plugin-overview#configuration-parameters)
 
+ **Note**: If you use `disable_retry_limit` in v0.12 or `retry_forever` in v0.14 or later, be aware that the buffer can consume memory without limit.
+
  ### Hash flattening
 
  Elasticsearch will complain if you send object and concrete values to the same field. For example, you might have logs that look like this, from different places:
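For readers who want to try the v0.14 form that the new note refers to, a minimal configuration sketch follows (the match pattern, host, and port are placeholders, not part of this diff):

    <match my.logs.**>
      @type elasticsearch
      host localhost
      port 9200
      <buffer>
        retry_forever true   # never give up a chunk; buffer growth is unbounded
      </buffer>
    </match>
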
fluent-plugin-elasticsearch.gemspec CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
  Gem::Specification.new do |s|
    s.name = 'fluent-plugin-elasticsearch'
-   s.version = '2.11.8'
+   s.version = '2.11.9'
    s.authors = ['diogo', 'pitr']
    s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
    s.description = %q{Elasticsearch output plugin for Fluent event collector}
lib/fluent/plugin/elasticsearch_error_handler.rb CHANGED
@@ -8,12 +8,21 @@ class Fluent::Plugin::ElasticsearchErrorHandler
  attr_accessor :bulk_message_count
  class ElasticsearchVersionMismatch < Fluent::UnrecoverableError; end
  class ElasticsearchSubmitMismatch < Fluent::UnrecoverableError; end
+ class ElasticsearchRequestAbortError < Fluent::UnrecoverableError; end
  class ElasticsearchError < StandardError; end
 
  def initialize(plugin)
    @plugin = plugin
  end
 
+ def unrecoverable_error_types
+   ["out_of_memory_error", "es_rejected_execution_exception"]
+ end
+
+ def unrecoverable_error?(type)
+   unrecoverable_error_types.include?(type)
+ end
+
  def handle_error(response, tag, chunk, bulk_message_count, extracted_values)
    items = response['items']
    if items.nil? || !items.is_a?(Array)
@@ -70,6 +79,9 @@ class Fluent::Plugin::ElasticsearchErrorHandler
    type = item[write_operation]['error']['type']
    stats[type] += 1
    retry_stream.add(time, rawrecord)
+   if unrecoverable_error?(type)
+     raise ElasticsearchRequestAbortError, "Rejected Elasticsearch due to #{type}"
+   end
  else
    # When we don't have a type field, something changed in the API
    # expected return values (ES 2.x)
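The net effect of the two hunks above: when a bulk response item reports one of the listed error types, the handler aborts the whole request instead of merely re-queueing the record. A usage sketch (assuming a handler instance constructed the way the tests further down construct one):

    handler.unrecoverable_error?("out_of_memory_error")              # => true, abort the chunk
    handler.unrecoverable_error?("es_rejected_execution_exception")  # => true, abort the chunk
    handler.unrecoverable_error?("mapper_parsing_exception")         # => false, goes to the retry stream
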
lib/fluent/plugin/out_elasticsearch.rb CHANGED
@@ -22,7 +22,8 @@ end
 
  module Fluent::Plugin
    class ElasticsearchOutput < Output
-     class ConnectionFailure < Fluent::UnrecoverableError; end
+     class ConnectionFailure < StandardError; end
+     class ConnectionRetryFailure < Fluent::UnrecoverableError; end
 
      # MissingIdFieldError is raised for records that do not
      # include the field for the unique record identifier
@@ -199,9 +200,6 @@ EOC
    @password = URI.encode_www_form_component(m["password"])
  end
 
- if @hash_config
-   raise Fluent::ConfigError, "@hash_config.hash_id_key and id_key must be equal." unless @hash_config.hash_id_key == @id_key
- end
  @transport_logger = nil
  if @with_transporter_log
    @transport_logger = log
@@ -209,7 +207,13 @@ EOC
    log.warn "Consider to specify log_level with @log_level." unless log_level
  end
 
- @last_seen_major_version = detect_es_major_version rescue DEFAULT_ELASTICSEARCH_VERSION
+ @last_seen_major_version =
+   begin
+     detect_es_major_version
+   rescue ConnectionFailure
+     log.warn "Could not connect Elasticsearch. Assuming Elasticsearch 5."
+     DEFAULT_ELASTICSEARCH_VERSION
+   end
  if @last_seen_major_version == 6 && @type_name != DEFAULT_TYPE_NAME_ES_7x
    log.info "Detected ES 6.x: ES 7.x will only accept `_doc` in type_name."
  end
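The motivation for this hunk (#465) is a general Ruby pitfall: the postfix rescue modifier silently swallows every StandardError, so the old one-liner hid all failures, not just connection problems, behind the default version. A self-contained illustration, deliberately using stand-in calls rather than the plugin's own methods:

    # A bare rescue modifier catches *any* StandardError and gives no hint why:
    value = Integer("oops") rescue 0        # => 0, the ArgumentError vanishes

    # An explicit begin/rescue names the exception it expects and can log it:
    value =
      begin
        Integer("oops")
      rescue ArgumentError => e
        warn "falling back: #{e.message}"   # the cause is at least visible
        0
      end
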
@@ -528,10 +532,6 @@ EOC
    record = flatten_record(record)
  end
 
- if @hash_config
-   record = generate_hash_id_key(record)
- end
-
  dt = nil
  if @logstash_format || @include_timestamp
    if record.has_key?(TIMESTAMP_FIELD)
@@ -637,7 +637,7 @@ EOC
      sleep 2**retries
      retry
    end
-   raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
+   raise ConnectionRetryFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
  rescue Exception
    @_es = nil if @reconnect_on_error
    @_es_info = nil if @reconnect_on_error
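For orientation, the raise above sits at the end of a plugin-level retry loop that backs off exponentially before giving up. A condensed sketch of that pattern (client, max_retries, and RETRIABLE_ERRORS are stand-ins for code outside this hunk, so treat the names as approximate):

    retries = 0
    begin
      client.bulk(body: bulk_message)        # push the serialized chunk
    rescue *RETRIABLE_ERRORS => e            # connection-level failures only
      if retries < max_retries
        retries += 1
        sleep 2**retries                     # exponential backoff: 2s, 4s, 8s...
        retry
      end
      # Retries exhausted: tell fluentd this chunk must not be retried again.
      raise ConnectionRetryFailure,
            "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
    end
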
lib/fluent/plugin/out_elasticsearch_dynamic.rb CHANGED
@@ -136,10 +136,6 @@ module Fluent::Plugin
  chunk.msgpack_each do |time, record|
    next unless record.is_a? Hash
 
-   if @hash_config
-     record = generate_hash_id_key(record)
-   end
-
    begin
      # evaluate all configurations here
      DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
@@ -238,7 +234,7 @@ module Fluent::Plugin
      sleep 2**retries
      retry
    end
-   raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
+   raise ConnectionRetryFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
  rescue Exception
    @_es = nil if @reconnect_on_error
    raise
test/plugin/test_elasticsearch_error_handler.rb CHANGED
@@ -81,6 +81,60 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
    assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
  end
 
+ def test_out_of_memory_responses
+   records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+   response = parse_response(%({
+     "took" : 0,
+     "errors" : true,
+     "items" : [
+       {
+         "create" : {
+           "_index" : "foo",
+           "status" : 500,
+           "_type" : "bar",
+           "error" : {
+             "type" : "out_of_memory_error",
+             "reason":"Java heap space"
+           }
+         }
+       }
+     ]
+   }))
+
+   chunk = MockChunk.new(records)
+   dummy_extracted_values = []
+   assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+   end
+ end
+
+ def test_es_rejected_execution_exception_responses
+   records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
+   response = parse_response(%({
+     "took" : 0,
+     "errors" : true,
+     "items" : [
+       {
+         "create" : {
+           "_index" : "foo",
+           "status" : 429,
+           "_type" : "bar",
+           "error" : {
+             "type" : "es_rejected_execution_exception",
+             "reason":"rejected execution of org.elasticsearch.transport.TransportService"
+           }
+         }
+       }
+     ]
+   }))
+
+   chunk = MockChunk.new(records)
+   dummy_extracted_values = []
+   assert_raise(Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError) do
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+   end
+ end
+
  def test_retry_error
    records = []
    error_records = Hash.new(false)
@@ -138,10 +192,10 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
      "_index" : "foo",
      "_type" : "bar",
      "_id" : "6",
-     "status" : 429,
+     "status" : 400,
      "error" : {
-       "type" : "es_rejected_execution_exception",
-       "reason":"unable to fulfill request at this time, try again later"
+       "type" : "mapper_parsing_exception",
+       "reason":"failed to parse"
      }
    }
  },
@@ -176,19 +230,18 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
    failed = false
    dummy_extracted_values = []
    @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
- rescue Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
+ rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
    failed = true
    records = [].tap do |records|
+     next unless e.respond_to?(:retry_stream)
      e.retry_stream.each {|time, record| records << record}
    end
-   assert_equal 3, records.length
+   assert_equal 2, records.length
    assert_equal 2, records[0]['_id']
-   assert_equal 6, records[1]['_id']
-   assert_equal 8, records[2]['_id']
+   assert_equal 8, records[1]['_id']
    error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
-   assert_equal 2, error_ids.length
-   assert_equal 5, error_ids[0]
-   assert_equal 7, error_ids[1]
+   assert_equal 3, error_ids.length
+   assert_equal [5, 6, 7], error_ids
    @plugin.error_events.collect {|h| h[:error]}.each do |e|
      assert_true e.respond_to?(:backtrace)
    end
@@ -197,4 +250,113 @@ class TestElasticsearchErrorHandler < Test::Unit::TestCase
 
  end
 
+ def test_unrecoverable_error_included_in_responses
+   records = []
+   error_records = Hash.new(false)
+   error_records.merge!({0=>true, 4=>true, 9=>true})
+   10.times do |i|
+     records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
+   end
+   chunk = MockChunk.new(records)
+
+   response = parse_response(%({
+     "took" : 1,
+     "errors" : true,
+     "items" : [
+       {
+         "create" : {
+           "_index" : "foo",
+           "_type" : "bar",
+           "_id" : "1",
+           "status" : 201
+         }
+       },
+       {
+         "create" : {
+           "_index" : "foo",
+           "_type" : "bar",
+           "_id" : "2",
+           "status" : 500,
+           "error" : {
+             "type" : "some unrecognized type",
+             "reason":"unrecognized error"
+           }
+         }
+       },
+       {
+         "create" : {
+           "_index" : "foo",
+           "_type" : "bar",
+           "_id" : "3",
+           "status" : 409
+         }
+       },
+       {
+         "create" : {
+           "_index" : "foo",
+           "_type" : "bar",
+           "_id" : "5",
+           "status" : 500,
+           "error" : {
+             "reason":"unrecognized error - no type field"
+           }
+         }
+       },
+       {
+         "create" : {
+           "_index" : "foo",
+           "_type" : "bar",
+           "_id" : "6",
+           "status" : 500,
+           "_type" : "bar",
+           "error" : {
+             "type" : "out_of_memory_error",
+             "reason":"Java heap space"
+           }
+         }
+       },
+       {
+         "create" : {
+           "_index" : "foo",
+           "_type" : "bar",
+           "_id" : "7",
+           "status" : 400,
+           "error" : {
+             "type" : "some unrecognized type",
+             "reason":"unrecognized error"
+           }
+         }
+       },
+       {
+         "create" : {
+           "_index" : "foo",
+           "_type" : "bar",
+           "_id" : "8",
+           "status" : 500,
+           "error" : {
+             "type" : "some unrecognized type",
+             "reason":"unrecognized error"
+           }
+         }
+       }
+     ]
+   }))
+
+   begin
+     failed = false
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+   rescue Fluent::Plugin::ElasticsearchErrorHandler::ElasticsearchRequestAbortError, Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
+     failed = true
+     records = [].tap do |records|
+       next unless e.respond_to?(:retry_stream)
+       e.retry_stream.each {|time, record| records << record}
+     end
+     # should drop the entire chunk when an unrecoverable error response is returned
+     assert_equal 0, records.length
+   end
+   assert_true failed
+
+ end
+
  end
test/plugin/test_out_elasticsearch.rb CHANGED
@@ -69,6 +69,10 @@ class ElasticsearchOutput < Test::Unit::TestCase
    stub_request(:post, url).to_return(:status => [503, "Service Unavailable"])
  end
 
+ def stub_elastic_timeout(url="http://localhost:9200/_bulk")
+   stub_request(:post, url).to_timeout
+ end
+
  def stub_elastic_with_store_index_command_counts(url="http://localhost:9200/_bulk")
    if @index_command_counts == nil
      @index_command_counts = {}
@@ -1821,6 +1825,24 @@ class ElasticsearchOutput < Test::Unit::TestCase
    }
  end
 
+ def test_request_forever
+   stub_elastic_ping
+   stub_elastic
+   driver.configure(Fluent::Config::Element.new(
+     'ROOT', '', {
+       '@type' => 'elasticsearch',
+     }, [
+       Fluent::Config::Element.new('buffer', '', {
+         'retry_forever' => true
+       }, [])
+     ]
+   ))
+   stub_elastic_timeout
+   driver.run(default_tag: 'test', timeout: 10) do
+     driver.feed(sample_record)
+   end
+ end
+
  def test_connection_failed_retry
    connection_resets = 0
 
test/plugin/test_out_elasticsearch_dynamic.rb CHANGED
@@ -62,6 +62,10 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
    stub_request(:post, url).to_return(:status => [503, "Service Unavailable"])
  end
 
+ def stub_elastic_timeout(url="http://localhost:9200/_bulk")
+   stub_request(:post, url).to_timeout
+ end
+
  def stub_elastic_with_store_index_command_counts(url="http://localhost:9200/_bulk")
    if @index_command_counts == nil
      @index_command_counts = {}
@@ -843,6 +847,24 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
    }
  end
 
+ def test_request_forever
+   stub_elastic_ping
+   stub_elastic
+   driver.configure(Fluent::Config::Element.new(
+     'ROOT', '', {
+       '@type' => 'elasticsearch',
+     }, [
+       Fluent::Config::Element.new('buffer', '', {
+         'retry_forever' => true
+       }, [])
+     ]
+   ))
+   stub_elastic_timeout
+   driver.run(default_tag: 'test', timeout: 10) do
+     driver.feed(sample_record)
+   end
+ end
+
  def test_tag_parts_index_error_event
    stub_elastic_ping
    stub_elastic
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-elasticsearch
  version: !ruby/object:Gem::Version
-   version: 2.11.8
+   version: 2.11.9
  platform: ruby
  authors:
  - diogo
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-09-06 00:00:00.000000000 Z
+ date: 2018-09-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: fluentd