fluent-plugin-elasticsearch 1.9.0.rc.1 → 1.9.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: ac0ae4b2396f3c809f7cb7e59fe8dbe1b26bf516
- data.tar.gz: 6354f35f3a2ef593c5d1e4e3bf5213f6d0d17efa
+ metadata.gz: 9b756e1f1c69c5e12941b6d96242c64a09553e65
+ data.tar.gz: b9153e7eaf0b4b3532dd89eb9f7c2cb5f58533a0
  SHA512:
- metadata.gz: 55af560dafa19e24f62819c95944673113ec8b9541f0bf338938fadfabf8b9cad48850c201f829365761739278fdde2356887eef1ebe13ae62f267b7946efc6b
- data.tar.gz: 71cd49d06e9e664d0143117669b8aab67efa228271b952bc2cae296ab69011fbe749553611f306a2cf44d7e7001ca5b08da7d23ddd8bc7892225d292f6f0553e
+ metadata.gz: 942deb8934cecb911d1f1751760ac1484bc303bbb03079953c66e0eff9a24644c7a0df1517af5436ceca4bc0afed068b4d8a502250492be400e1e412332ddc47
+ data.tar.gz: a4cc8d9c20624d9773859cf1546a446e575f4486ec8f80d27dbfa323108c183e2c39cf75650c31cf96fb4c447469227d936682b2311c07090e5f5a500d191670
data/History.md CHANGED
@@ -2,6 +2,10 @@
 
  ### [Unreleased]
 
+ ### 1.9.0
+ - add `time_parse_error_tag` (#211)
+ - add `reconnect_on_error` (#214)
+
  ### 1.9.0.rc.1
  - Optimize output plugins (#203)
 
data/README.md CHANGED
@@ -41,6 +41,8 @@ Note: For Amazon Elasticsearch Service please consider using [fluent-plugin-aws-
  + [remove_keys_on_update](#remove_keys_on_update)
  + [remove_keys_on_update_key](#remove_keys_on_update_key)
  + [write_operation](#write_operation)
+ + [time_parse_error_tag](#time_parse_error_tag)
+ + [reconnect_on_error](#reconnect_on_error)
  + [Client/host certificate options](#clienthost-certificate-options)
  + [Proxy Support](#proxy-support)
  + [Buffered output options](#buffered-output-options)
@@ -392,6 +394,21 @@ The write_operation can be any of:
 
  **Please note, id is required in create, update, and upsert scenario. Without id, the message will be dropped.**
 
+ ### time_parse_error_tag
+
+ With `logstash_format true`, the Elasticsearch plugin parses the timestamp field to generate the index name. If the record has an invalid timestamp value, the plugin emits an error event to the `@ERROR` label with the tag configured by `time_parse_error_tag`.
+
+ The default value is `Fluent::ElasticsearchOutput::TimeParser.error` for backward compatibility. A `::`-separated tag is not ideal for tag routing because some plugins assume tags are separated by `.`. We recommend setting this parameter explicitly, e.g. `time_parse_error_tag es_plugin.output.time.error`.
+ The default will be changed to a `.`-separated tag in a future release.
+
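As an illustration, a minimal sketch of routing these error events, assuming a hypothetical source tag `my.logs` and the recommended error tag from above; the `<label @ERROR>` block is standard Fluentd routing, and the file destination is purely illustrative:

```
<match my.logs>
  @type elasticsearch
  logstash_format true
  time_parse_error_tag es_plugin.output.time.error
</match>

<label @ERROR>
  # records whose timestamp could not be parsed arrive here with the configured tag
  <match es_plugin.output.time.**>
    @type file
    path /var/log/fluent/es-time-parse-errors
  </match>
</label>
```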
+ ### reconnect_on_error
+ Indicates that the plugin should reset its connection on any error (reconnect on the next send).
+ By default it reconnects only on "host unreachable" exceptions.
+ We recommend setting this to `true` when using Elasticsearch Shield.
+ ```
+ reconnect_on_error true # defaults to false
+ ```
+
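Because the raised exception still propagates, Fluentd's usual buffered-output retry is what re-sends the failed chunk; with `reconnect_on_error true` that retry simply starts from a freshly built client. A hedged configuration sketch (the host, tag, and buffer/retry values are illustrative, not defaults of this release):

```
<match my.logs>
  @type elasticsearch
  host localhost
  port 9200
  reconnect_on_error true   # drop the cached client on any error
  buffer_type memory
  flush_interval 10s        # how often buffered chunks are flushed
  retry_wait 5s             # delay before a failed chunk is retried
</match>
```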
  ### Client/host certificate options
 
  Need to verify ElasticSearch's certificate? You can use the following parameter to specify a CA instead of using an environment variable.
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
  Gem::Specification.new do |s|
  s.name = 'fluent-plugin-elasticsearch'
- s.version = '1.9.0.rc.1'
+ s.version = '1.9.0'
  s.authors = ['diogo', 'pitr']
  s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
  s.description = %q{ElasticSearch output plugin for Fluent event collector}
@@ -58,17 +58,18 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
  config_param :templates, :hash, :default => nil
  config_param :include_tag_key, :bool, :default => false
  config_param :tag_key, :string, :default => 'tag'
+ config_param :time_parse_error_tag, :string, :default => 'Fluent::ElasticsearchOutput::TimeParser.error'
+ config_param :reconnect_on_error, :bool, :default => false
 
  include Fluent::ElasticsearchIndexTemplate
 
  def initialize
  super
- @time_parser = TimeParser.new(@time_key_format, @router)
  end
 
  def configure(conf)
  super
- @time_parser = TimeParser.new(@time_key_format, @router)
+ @time_parser = create_time_parser
 
  if @remove_keys
  @remove_keys = @remove_keys.split(/\s*,\s*/)
@@ -110,40 +111,32 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
  result
  end
 
- def start
- super
- end
-
  # once fluent v0.14 is released we might be able to use
  # Fluent::Parser::TimeParser, but it doesn't quite do what we want - if gives
  # [sec,nsec] where as we want something we can call `strftime` on...
- class TimeParser
- def initialize(time_key_format, router)
- @time_key_format = time_key_format
- @router = router
- @parser = if time_key_format
- begin
- # Strptime doesn't support all formats, but for those it does it's
- # blazingly fast.
- strptime = Strptime.new(time_key_format)
- Proc.new { |value| strptime.exec(value).to_datetime }
- rescue
- # Can happen if Strptime doesn't recognize the format; or
- # if strptime couldn't be required (because it's not installed -- it's
- # ruby 2 only)
- Proc.new { |value| DateTime.strptime(value, time_key_format) }
- end
- else
- Proc.new { |value| DateTime.parse(value) }
+ def create_time_parser
+ if @time_key_format
+ begin
+ # Strptime doesn't support all formats, but for those it does it's
+ # blazingly fast.
+ strptime = Strptime.new(@time_key_format)
+ Proc.new { |value| strptime.exec(value).to_datetime }
+ rescue
+ # Can happen if Strptime doesn't recognize the format; or
+ # if strptime couldn't be required (because it's not installed -- it's
+ # ruby 2 only)
+ Proc.new { |value| DateTime.strptime(value, @time_key_format) }
  end
+ else
+ Proc.new { |value| DateTime.parse(value) }
  end
+ end
 
- def parse(value, event_time)
- @parser.call(value)
- rescue => e
- @router.emit_error_event("Fluent::ElasticsearchOutput::TimeParser.error", Fluent::Engine.now, {'time' => event_time, 'format' => @time_key_format, 'value' => value }, e)
- return Time.at(event_time).to_datetime
- end
+ def parse_time(value, event_time, tag)
+ @time_parser.call(value)
+ rescue => e
+ router.emit_error_event(@time_parse_error_tag, Fluent::Engine.now, {'tag' => tag, 'time' => event_time, 'format' => @time_key_format, 'value' => value}, e)
+ return Time.at(event_time).to_datetime
  end
 
  def client
@@ -215,10 +208,6 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
  end.join(', ')
  end
 
- def shutdown
- super
- end
-
  BODY_DELIMITER = "\n".freeze
  UPDATE_OP = "update".freeze
  UPSERT_OP = "upsert".freeze
@@ -302,11 +291,12 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
  target_index = target_index_parent.delete(target_index_child_key)
  elsif @logstash_format
  if record.has_key?(TIMESTAMP_FIELD)
- dt = record[TIMESTAMP_FIELD]
- dt = @time_parser.parse(record[TIMESTAMP_FIELD], time)
+ rts = record[TIMESTAMP_FIELD]
+ dt = parse_time(rts, time, tag)
  elsif record.has_key?(@time_key)
- dt = @time_parser.parse(record[@time_key], time)
- record[TIMESTAMP_FIELD] = record[@time_key] unless time_key_exclude_timestamp
+ rts = record[@time_key]
+ dt = parse_time(rts, time, tag)
+ record[TIMESTAMP_FIELD] = rts unless @time_key_exclude_timestamp
  else
  dt = Time.at(time).to_datetime
  record[TIMESTAMP_FIELD] = dt.to_s
@@ -370,6 +360,9 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
  retry
  end
  raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
+ rescue Exception
+ @_es = nil if @reconnect_on_error
+ raise
  end
  end
  end
@@ -16,6 +16,7 @@ class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
  config_param :reload_on_failure, :string, :default => "false"
  config_param :resurrect_after, :string, :default => "60"
  config_param :ssl_verify, :string, :default => "true"
+ config_param :reconnect_on_error, :bool, :default => false
 
  def configure(conf)
  super
@@ -207,6 +208,9 @@ class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
  retry
  end
  raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
+ rescue Exception
+ @_es = nil if @reconnect_on_error
+ raise
  end
  end
 
@@ -367,8 +367,8 @@ class ElasticsearchOutput < Test::Unit::TestCase
  end
 
  def test_writes_to_target_index_key_logstash
- driver.configure("target_index_key @target_index\n")
- driver.configure("logstash_format true\n")
+ driver.configure("target_index_key @target_index
+ logstash_format true")
  time = Time.parse Date.today.to_s
  stub_elastic_ping
  stub_elastic
@@ -378,8 +378,8 @@ class ElasticsearchOutput < Test::Unit::TestCase
  end
 
  def test_writes_to_target_index_key_logstash_uppercase
- driver.configure("target_index_key @target_index\n")
- driver.configure("logstash_format true\n")
+ driver.configure("target_index_key @target_index
+ logstash_format true")
  time = Time.parse Date.today.to_s
  stub_elastic_ping
  stub_elastic
@@ -400,8 +400,8 @@ class ElasticsearchOutput < Test::Unit::TestCase
  end
 
  def test_writes_to_target_index_key_fallack_logstash
- driver.configure("target_index_key @target_index\n")
- driver.configure("logstash_format true\n")
+ driver.configure("target_index_key @target_index\n
+ logstash_format true")
  time = Time.parse Date.today.to_s
  logstash_index = "logstash-#{time.getutc.strftime("%Y.%m.%d")}"
  stub_elastic_ping
@@ -441,8 +441,8 @@ class ElasticsearchOutput < Test::Unit::TestCase
  end
 
  def test_writes_to_target_type_key_fallack_to_type_name
- driver.configure("target_type_key @target_type\n")
- driver.configure("type_name mytype\n")
+ driver.configure("target_type_key @target_type
+ type_name mytype")
  stub_elastic_ping
  stub_elastic
  driver.emit(sample_record)
@@ -724,9 +724,13 @@ class ElasticsearchOutput < Test::Unit::TestCase
  assert_equal(index_cmds[1]['@timestamp'], ts)
  end
 
- def test_uses_custom_time_key_format_logs_an_error
+ data(:default => nil,
+ :custom_tag => 'es_plugin.output.time.error')
+ def test_uses_custom_time_key_format_logs_an_error(tag_for_error)
+ tag_config = tag_for_error ? "time_parse_error_tag #{tag_for_error}" : ''
+ tag_for_error = 'Fluent::ElasticsearchOutput::TimeParser.error' if tag_for_error.nil?
  driver.configure("logstash_format true
- time_key_format %Y-%m-%dT%H:%M:%S.%N%z\n")
+ time_key_format %Y-%m-%dT%H:%M:%S.%N%z\n#{tag_config}\n")
  stub_elastic_ping
  stub_elastic
 
@@ -737,7 +741,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
  driver.run
 
  log = driver.instance.router.emit_error_handler.log
- errors = log.out.logs.grep /tag="Fluent::ElasticsearchOutput::TimeParser.error"/
+ errors = log.out.logs.grep /tag="#{tag_for_error}"/
  assert_equal(1, errors.length, "Error was logged for timestamp parse failure")
 
  assert_equal(index, index_cmds[0]['index']['_index'])
@@ -910,6 +914,54 @@ class ElasticsearchOutput < Test::Unit::TestCase
  assert_equal(connection_resets, 3)
  end
 
+ def test_reconnect_on_error_enabled
+ connection_resets = 0
+
+ stub_elastic_ping(url="http://localhost:9200").with do |req|
+ connection_resets += 1
+ end
+
+ stub_request(:post, "http://localhost:9200/_bulk").with do |req|
+ raise ZeroDivisionError, "any not host_unreachable_exceptions exception"
+ end
+
+ driver.configure("reconnect_on_error true\n")
+ driver.emit(sample_record)
+
+ assert_raise(ZeroDivisionError) {
+ driver.run
+ }
+
+ assert_raise(ZeroDivisionError) {
+ driver.run
+ }
+ assert_equal(connection_resets, 2)
+ end
+
+ def test_reconnect_on_error_disabled
+ connection_resets = 0
+
+ stub_elastic_ping(url="http://localhost:9200").with do |req|
+ connection_resets += 1
+ end
+
+ stub_request(:post, "http://localhost:9200/_bulk").with do |req|
+ raise ZeroDivisionError, "any not host_unreachable_exceptions exception"
+ end
+
+ driver.configure("reconnect_on_error false\n")
+ driver.emit(sample_record)
+
+ assert_raise(ZeroDivisionError) {
+ driver.run
+ }
+
+ assert_raise(ZeroDivisionError) {
+ driver.run
+ }
+ assert_equal(connection_resets, 1)
+ end
+
  def test_update_should_not_write_if_theres_no_id
  driver.configure("write_operation update\n")
  stub_elastic_ping
@@ -538,6 +538,54 @@ class ElasticsearchOutputDynamic < Test::Unit::TestCase
  assert_equal(connection_resets, 3)
  end
 
+ def test_reconnect_on_error_enabled
+ connection_resets = 0
+
+ stub_elastic_ping(url="http://localhost:9200").with do |req|
+ connection_resets += 1
+ end
+
+ stub_request(:post, "http://localhost:9200/_bulk").with do |req|
+ raise ZeroDivisionError, "any not host_unreachable_exceptions exception"
+ end
+
+ driver.configure("reconnect_on_error true\n")
+ driver.emit(sample_record)
+
+ assert_raise(ZeroDivisionError) {
+ driver.run
+ }
+
+ assert_raise(ZeroDivisionError) {
+ driver.run
+ }
+ assert_equal(connection_resets, 2)
+ end
+
+ def test_reconnect_on_error_disabled
+ connection_resets = 0
+
+ stub_elastic_ping(url="http://localhost:9200").with do |req|
+ connection_resets += 1
+ end
+
+ stub_request(:post, "http://localhost:9200/_bulk").with do |req|
+ raise ZeroDivisionError, "any not host_unreachable_exceptions exception"
+ end
+
+ driver.configure("reconnect_on_error false\n")
+ driver.emit(sample_record)
+
+ assert_raise(ZeroDivisionError) {
+ driver.run
+ }
+
+ assert_raise(ZeroDivisionError) {
+ driver.run
+ }
+ assert_equal(connection_resets, 1)
+ end
+
  def test_update_should_not_write_if_theres_no_id
  driver.configure("write_operation update\n")
  stub_elastic_ping
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-elasticsearch
  version: !ruby/object:Gem::Version
- version: 1.9.0.rc.1
+ version: 1.9.0
  platform: ruby
  authors:
  - diogo
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-11-03 00:00:00.000000000 Z
+ date: 2016-11-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd
@@ -151,9 +151,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: '2.0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">"
+ - - ">="
  - !ruby/object:Gem::Version
- version: 1.3.1
+ version: '0'
  requirements: []
  rubyforge_project:
  rubygems_version: 2.5.1