fluent-plugin-elasticsearch 3.5.4 → 3.5.5

data/Rakefile CHANGED
@@ -1,11 +1,11 @@
- require 'bundler/gem_tasks'
- require 'rake/testtask'
-
- Rake::TestTask.new(:test) do |test|
-   test.libs << 'test'
-   test.pattern = 'test/**/test_*.rb'
-   test.verbose = true
-   test.warning = false
- end
-
- task :default => :test
+ require 'bundler/gem_tasks'
+ require 'rake/testtask'
+
+ Rake::TestTask.new(:test) do |test|
+   test.libs << 'test'
+   test.pattern = 'test/**/test_*.rb'
+   test.verbose = true
+   test.warning = false
+ end
+
+ task :default => :test
@@ -1,30 +1,30 @@
- version: '{build}'
- install:
-   - SET PATH=C:\Ruby%ruby_version%\bin;%PATH%
-   - "%devkit%\\devkitvars.bat"
-   - IF EXIST "%devkit%\\bin\\ridk.cmd" ridk.cmd enable
-   - ruby --version
-   - gem --version
-   - bundle install
- build: off
- test_script:
-   - bundle exec rake test
-
- # https://www.appveyor.com/docs/installed-software/#ruby
- environment:
-   matrix:
-     - ruby_version: "25-x64"
-       devkit: C:\Ruby23-x64\DevKit
-     - ruby_version: "25"
-       devkit: C:\Ruby23\DevKit
-     - ruby_version: "24-x64"
-       devkit: C:\Ruby23-x64\DevKit
-     - ruby_version: "24"
-       devkit: C:\Ruby23\DevKit
-     - ruby_version: "23-x64"
-       devkit: C:\Ruby23-x64\DevKit
-     - ruby_version: "22-x64"
-       devkit: C:\Ruby23-x64\DevKit
- matrix:
-   allow_failures:
-     - ruby_version: "21"
+ version: '{build}'
+ install:
+   - SET PATH=C:\Ruby%ruby_version%\bin;%PATH%
+   - "%devkit%\\devkitvars.bat"
+   - IF EXIST "%devkit%\\bin\\ridk.cmd" ridk.cmd enable
+   - ruby --version
+   - gem --version
+   - bundle install
+ build: off
+ test_script:
+   - bundle exec rake test
+
+ # https://www.appveyor.com/docs/installed-software/#ruby
+ environment:
+   matrix:
+     - ruby_version: "25-x64"
+       devkit: C:\Ruby23-x64\DevKit
+     - ruby_version: "25"
+       devkit: C:\Ruby23\DevKit
+     - ruby_version: "24-x64"
+       devkit: C:\Ruby23-x64\DevKit
+     - ruby_version: "24"
+       devkit: C:\Ruby23\DevKit
+     - ruby_version: "23-x64"
+       devkit: C:\Ruby23-x64\DevKit
+     - ruby_version: "22-x64"
+       devkit: C:\Ruby23-x64\DevKit
+ matrix:
+   allow_failures:
+     - ruby_version: "21"
@@ -1,31 +1,31 @@
- # -*- encoding: utf-8 -*-
- $:.push File.expand_path('../lib', __FILE__)
-
- Gem::Specification.new do |s|
-   s.name = 'fluent-plugin-elasticsearch'
-   s.version = '3.5.4'
-   s.authors = ['diogo', 'pitr']
-   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
-   s.description = %q{Elasticsearch output plugin for Fluent event collector}
-   s.summary = s.description
-   s.homepage = 'https://github.com/uken/fluent-plugin-elasticsearch'
-   s.license = 'Apache-2.0'
-
-   s.files = `git ls-files`.split($/)
-   s.executables = s.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
-   s.test_files = s.files.grep(%r{^(test|spec|features)/})
-   s.require_paths = ['lib']
-
-   s.required_ruby_version = Gem::Requirement.new(">= 2.0".freeze)
-
-   s.add_runtime_dependency 'fluentd', '>= 0.14.22'
-   s.add_runtime_dependency 'excon', '>= 0'
-   s.add_runtime_dependency 'elasticsearch'
-
-
-   s.add_development_dependency 'rake', '>= 0'
-   s.add_development_dependency 'webmock', '~> 1'
-   s.add_development_dependency 'test-unit', '~> 3.1.0'
-   s.add_development_dependency 'minitest', '~> 5.8'
-   s.add_development_dependency 'flexmock', '~> 2.0'
- end
+ # -*- encoding: utf-8 -*-
+ $:.push File.expand_path('../lib', __FILE__)
+
+ Gem::Specification.new do |s|
+   s.name = 'fluent-plugin-elasticsearch'
+   s.version = '3.5.5'
+   s.authors = ['diogo', 'pitr']
+   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
+   s.description = %q{Elasticsearch output plugin for Fluent event collector}
+   s.summary = s.description
+   s.homepage = 'https://github.com/uken/fluent-plugin-elasticsearch'
+   s.license = 'Apache-2.0'
+
+   s.files = `git ls-files`.split($/)
+   s.executables = s.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+   s.require_paths = ['lib']
+
+   s.required_ruby_version = Gem::Requirement.new(">= 2.0".freeze)
+
+   s.add_runtime_dependency 'fluentd', '>= 0.14.22'
+   s.add_runtime_dependency 'excon', '>= 0'
+   s.add_runtime_dependency 'elasticsearch'
+
+
+   s.add_development_dependency 'rake', '>= 0'
+   s.add_development_dependency 'webmock', '~> 1'
+   s.add_development_dependency 'test-unit', '~> 3.1.0'
+   s.add_development_dependency 'minitest', '~> 5.8'
+   s.add_development_dependency 'flexmock', '~> 2.0'
+ end
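
The only content change in the gemspec is the version bump from 3.5.4 to 3.5.5. For reference, a Gemfile entry that would pick up this release might look like the sketch below; the pessimistic constraint is illustrative, not something the gem itself prescribes.

    # Gemfile (illustrative): stay on the 3.5.x series, picking up 3.5.5 and later patch releases
    source 'https://rubygems.org'

    gem 'fluent-plugin-elasticsearch', '~> 3.5.5'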
@@ -1,38 +1,38 @@
- require 'fluent/log'
- # For elasticsearch-ruby v7.0.0 or later
- # logger for Elasticsearch::Loggable required the following methods:
- #
- # * debug?
- # * info?
- # * warn?
- # * error?
- # * fatal?
-
- module Fluent
-   class Log
-     # Elasticsearch::Loggable does not request trace? method.
-     # def trace?
-     #   @level <= LEVEL_TRACE
-     # end
-
-     def debug?
-       @level <= LEVEL_DEBUG
-     end
-
-     def info?
-       @level <= LEVEL_INFO
-     end
-
-     def warn?
-       @level <= LEVEL_WARN
-     end
-
-     def error?
-       @level <= LEVEL_ERROR
-     end
-
-     def fatal?
-       @level <= LEVEL_FATAL
-     end
-   end
- end
+ require 'fluent/log'
+ # For elasticsearch-ruby v7.0.0 or later
+ # logger for Elasticsearch::Loggable required the following methods:
+ #
+ # * debug?
+ # * info?
+ # * warn?
+ # * error?
+ # * fatal?
+
+ module Fluent
+   class Log
+     # Elasticsearch::Loggable does not request trace? method.
+     # def trace?
+     #   @level <= LEVEL_TRACE
+     # end
+
+     def debug?
+       @level <= LEVEL_DEBUG
+     end
+
+     def info?
+       @level <= LEVEL_INFO
+     end
+
+     def warn?
+       @level <= LEVEL_WARN
+     end
+
+     def error?
+       @level <= LEVEL_ERROR
+     end
+
+     def fatal?
+       @level <= LEVEL_FATAL
+     end
+   end
+ end
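
These predicate methods exist so that Fluentd's own Fluent::Log can be handed to elasticsearch-ruby v7.0.0 or later, which checks debug?, info?, warn?, error? and fatal? before emitting a message at that level. Below is a minimal sketch of that contract using Ruby's standard Logger; it is illustrative only (the method name log_request is made up, and this is not the elasticsearch-ruby internals).

    require 'logger'

    # Callers guard log formatting behind the level predicate; Fluent::Log now answers
    # these predicates just like ::Logger does.
    def log_request(logger, method, path, took_ms)
      return unless logger.debug?                      # skipped entirely above debug level
      logger.debug("#{method.upcase} #{path} (#{took_ms} ms)")
    end

    log_request(Logger.new($stdout), :post, '/_bulk', 12)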
@@ -1,13 +1,13 @@
- module Fluent
-   module Plugin
-     module ElasticsearchConstants
-       BODY_DELIMITER = "\n".freeze
-       UPDATE_OP = "update".freeze
-       UPSERT_OP = "upsert".freeze
-       CREATE_OP = "create".freeze
-       INDEX_OP = "index".freeze
-       ID_FIELD = "_id".freeze
-       TIMESTAMP_FIELD = "@timestamp".freeze
-     end
-   end
- end
+ module Fluent
+   module Plugin
+     module ElasticsearchConstants
+       BODY_DELIMITER = "\n".freeze
+       UPDATE_OP = "update".freeze
+       UPSERT_OP = "upsert".freeze
+       CREATE_OP = "create".freeze
+       INDEX_OP = "index".freeze
+       ID_FIELD = "_id".freeze
+       TIMESTAMP_FIELD = "@timestamp".freeze
+     end
+   end
+ end
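
These constants encode the shape of an Elasticsearch bulk request: newline-delimited JSON in which each operation header line is followed by its document, with _id and @timestamp as the well-known field names. A minimal sketch of that format follows, assuming the gem is installed so the module can be required; the plugin's own append_record_to_messages builds the real payload, including ids, routing and the other options.

    require 'json'
    require 'fluent/plugin/elasticsearch_constants'

    include Fluent::Plugin::ElasticsearchConstants

    # One bulk "line pair": an index action header followed by the document itself.
    header = { INDEX_OP => { '_index' => 'fluentd', ID_FIELD => 'abc123' } }
    record = { TIMESTAMP_FIELD => '2019-07-01T00:00:00+00:00', 'message' => 'hello' }

    bulk_body = [header.to_json, record.to_json].join(BODY_DELIMITER) + BODY_DELIMITER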
@@ -1,5 +1,5 @@
- require 'fluent/error'
-
- class Fluent::Plugin::ElasticsearchError
-   class RetryableOperationExhaustedFailure < Fluent::UnrecoverableError; end
- end
+ require 'fluent/error'
+
+ class Fluent::Plugin::ElasticsearchError
+   class RetryableOperationExhaustedFailure < Fluent::UnrecoverableError; end
+ end
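
RetryableOperationExhaustedFailure subclasses Fluent::UnrecoverableError, which is what tells Fluentd's retry machinery to stop retrying a chunk and hand it to a secondary/backup path instead of looping forever. A minimal sketch of that relationship follows; DemoExhaustedFailure is a made-up stand-in, not part of the gem.

    require 'fluent/error'

    # Any subclass of Fluent::UnrecoverableError is treated as "do not retry" by Fluentd.
    class DemoExhaustedFailure < Fluent::UnrecoverableError; end

    begin
      raise DemoExhaustedFailure, 'retries against the cluster exhausted'
    rescue Fluent::UnrecoverableError => e
      puts "unrecoverable: #{e.message}"   # a running Fluentd would back up the chunk, not retry it
    end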
@@ -1,122 +1,127 @@
- require 'fluent/event'
- require 'fluent/error'
- require_relative 'elasticsearch_constants'
-
- class Fluent::Plugin::ElasticsearchErrorHandler
-   include Fluent::Plugin::ElasticsearchConstants
-
-   attr_accessor :bulk_message_count
-   class ElasticsearchVersionMismatch < Fluent::UnrecoverableError; end
-   class ElasticsearchSubmitMismatch < Fluent::UnrecoverableError; end
-   class ElasticsearchRequestAbortError < Fluent::UnrecoverableError; end
-   class ElasticsearchError < StandardError; end
-
-   def initialize(plugin)
-     @plugin = plugin
-   end
-
-   def unrecoverable_error_types
-     @plugin.unrecoverable_error_types
-   end
-
-   def unrecoverable_error?(type)
-     unrecoverable_error_types.include?(type)
-   end
-
-   def log_es_400_reason(&block)
-     if @plugin.log_es_400_reason
-       block.call
-     else
-       @plugin.log.on_debug &block
-     end
-   end
-
-   def handle_error(response, tag, chunk, bulk_message_count, extracted_values)
-     items = response['items']
-     if items.nil? || !items.is_a?(Array)
-       raise ElasticsearchVersionMismatch, "The response format was unrecognized: #{response}"
-     end
-     if bulk_message_count != items.length
-       raise ElasticsearchSubmitMismatch, "The number of records submitted #{bulk_message_count} do not match the number returned #{items.length}. Unable to process bulk response."
-     end
-     retry_stream = Fluent::MultiEventStream.new
-     stats = Hash.new(0)
-     meta = {}
-     header = {}
-     chunk.msgpack_each do |time, rawrecord|
-       bulk_message = ''
-       next unless rawrecord.is_a? Hash
-       begin
-         # we need a deep copy for process_message to alter
-         processrecord = Marshal.load(Marshal.dump(rawrecord))
-         meta, header, record = @plugin.process_message(tag, meta, header, time, processrecord, extracted_values)
-         next unless @plugin.append_record_to_messages(@plugin.write_operation, meta, header, record, bulk_message)
-       rescue => e
-         stats[:bad_chunk_record] += 1
-         next
-       end
-       item = items.shift
-       if item.is_a?(Hash) && item.has_key?(@plugin.write_operation)
-         write_operation = @plugin.write_operation
-       elsif INDEX_OP == @plugin.write_operation && item.is_a?(Hash) && item.has_key?(CREATE_OP)
-         write_operation = CREATE_OP
-       elsif item.nil?
-         stats[:errors_nil_resp] += 1
-         next
-       else
-         # When we don't have an expected ops field, something changed in the API
-         # expected return values (ES 2.x)
-         stats[:errors_bad_resp] += 1
-         next
-       end
-       if item[write_operation].has_key?('status')
-         status = item[write_operation]['status']
-       else
-         # When we don't have a status field, something changed in the API
-         # expected return values (ES 2.x)
-         stats[:errors_bad_resp] += 1
-         next
-       end
-       case
-       when [200, 201].include?(status)
-         stats[:successes] += 1
-       when CREATE_OP == write_operation && 409 == status
-         stats[:duplicates] += 1
-       when 400 == status
-         stats[:bad_argument] += 1
-         reason = ""
-         log_es_400_reason do
-           if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
-             reason = " [error type]: #{item[write_operation]['error']['type']}"
-           end
-           if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('reason')
-             reason += " [reason]: \'#{item[write_operation]['error']['reason']}\'"
-           end
-         end
-         @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("400 - Rejected by Elasticsearch#{reason}"))
-       else
-         if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
-           type = item[write_operation]['error']['type']
-           stats[type] += 1
-           retry_stream.add(time, rawrecord)
-           if unrecoverable_error?(type)
-             raise ElasticsearchRequestAbortError, "Rejected Elasticsearch due to #{type}"
-           end
-         else
-           # When we don't have a type field, something changed in the API
-           # expected return values (ES 2.x)
-           stats[:errors_bad_resp] += 1
-           @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("#{status} - No error type provided in the response"))
-           next
-         end
-         stats[type] += 1
-       end
-     end
-     @plugin.log.on_debug do
-       msg = ["Indexed (op = #{@plugin.write_operation})"]
-       stats.each_pair { |key, value| msg << "#{value} #{key}" }
-       @plugin.log.debug msg.join(', ')
-     end
-     raise Fluent::Plugin::ElasticsearchOutput::RetryStreamError.new(retry_stream) unless retry_stream.empty?
-   end
- end
+ require 'fluent/event'
+ require 'fluent/error'
+ require_relative 'elasticsearch_constants'
+
+ class Fluent::Plugin::ElasticsearchErrorHandler
+   include Fluent::Plugin::ElasticsearchConstants
+
+   attr_accessor :bulk_message_count
+   class ElasticsearchVersionMismatch < Fluent::UnrecoverableError; end
+   class ElasticsearchSubmitMismatch < Fluent::UnrecoverableError; end
+   class ElasticsearchRequestAbortError < Fluent::UnrecoverableError; end
+   class ElasticsearchError < StandardError; end
+
+   def initialize(plugin)
+     @plugin = plugin
+   end
+
+   def unrecoverable_error_types
+     @plugin.unrecoverable_error_types
+   end
+
+   def unrecoverable_error?(type)
+     unrecoverable_error_types.include?(type)
+   end
+
+   def log_es_400_reason(&block)
+     if @plugin.log_es_400_reason
+       block.call
+     else
+       @plugin.log.on_debug &block
+     end
+   end
+
+   def handle_error(response, tag, chunk, bulk_message_count, extracted_values)
+     items = response['items']
+     if items.nil? || !items.is_a?(Array)
+       raise ElasticsearchVersionMismatch, "The response format was unrecognized: #{response}"
+     end
+     if bulk_message_count != items.length
+       raise ElasticsearchSubmitMismatch, "The number of records submitted #{bulk_message_count} do not match the number returned #{items.length}. Unable to process bulk response."
+     end
+     retry_stream = Fluent::MultiEventStream.new
+     stats = Hash.new(0)
+     meta = {}
+     header = {}
+     chunk.msgpack_each do |time, rawrecord|
+       bulk_message = ''
+       next unless rawrecord.is_a? Hash
+       begin
+         # we need a deep copy for process_message to alter
+         processrecord = Marshal.load(Marshal.dump(rawrecord))
+         meta, header, record = @plugin.process_message(tag, meta, header, time, processrecord, extracted_values)
+         next unless @plugin.append_record_to_messages(@plugin.write_operation, meta, header, record, bulk_message)
+       rescue => e
+         stats[:bad_chunk_record] += 1
+         next
+       end
+       item = items.shift
+       if item.is_a?(Hash) && item.has_key?(@plugin.write_operation)
+         write_operation = @plugin.write_operation
+       elsif INDEX_OP == @plugin.write_operation && item.is_a?(Hash) && item.has_key?(CREATE_OP)
+         write_operation = CREATE_OP
+       elsif item.nil?
+         stats[:errors_nil_resp] += 1
+         next
+       else
+         # When we don't have an expected ops field, something changed in the API
+         # expected return values (ES 2.x)
+         stats[:errors_bad_resp] += 1
+         next
+       end
+       if item[write_operation].has_key?('status')
+         status = item[write_operation]['status']
+       else
+         # When we don't have a status field, something changed in the API
+         # expected return values (ES 2.x)
+         stats[:errors_bad_resp] += 1
+         next
+       end
+       case
+       when [200, 201].include?(status)
+         stats[:successes] += 1
+       when CREATE_OP == write_operation && 409 == status
+         stats[:duplicates] += 1
+       when 400 == status
+         stats[:bad_argument] += 1
+         reason = ""
+         log_es_400_reason do
+           if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
+             reason = " [error type]: #{item[write_operation]['error']['type']}"
+           end
+           if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('reason')
+             reason += " [reason]: \'#{item[write_operation]['error']['reason']}\'"
+           end
+         end
+         @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("400 - Rejected by Elasticsearch#{reason}"))
+       else
+         if item[write_operation]['error'].is_a?(String)
+           reason = item[write_operation]['error']
+           stats[:errors_block_resp] += 1
+           @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("#{status} - #{reason}"))
+           next
+         elsif item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
+           type = item[write_operation]['error']['type']
+           stats[type] += 1
+           retry_stream.add(time, rawrecord)
+           if unrecoverable_error?(type)
+             raise ElasticsearchRequestAbortError, "Rejected Elasticsearch due to #{type}"
+           end
+         else
+           # When we don't have a type field, something changed in the API
+           # expected return values (ES 2.x)
+           stats[:errors_bad_resp] += 1
+           @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("#{status} - No error type provided in the response"))
+           next
+         end
+         stats[type] += 1
+       end
+     end
+     @plugin.log.on_debug do
+       msg = ["Indexed (op = #{@plugin.write_operation})"]
+       stats.each_pair { |key, value| msg << "#{value} #{key}" }
+       @plugin.log.debug msg.join(', ')
+     end
+     raise Fluent::Plugin::ElasticsearchOutput::RetryStreamError.new(retry_stream) unless retry_stream.empty?
+   end
+ end
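
The behavioral change in this file for 3.5.5 is the new first branch in the case/else arm: when a bulk response item carries a plain String under 'error' (instead of the usual Hash with 'type' and 'reason'), the record is counted as :errors_block_resp and emitted as an error event rather than failing on a missing 'type' key or being retried. The sketch below shows the kind of response item that branch is aimed at; the payload is illustrative, not a captured Elasticsearch response.

    # Hypothetical bulk response item whose 'error' is a String, e.g. when writes are blocked.
    item = {
      'index' => {
        '_index' => 'fluentd',
        'status' => 403,
        'error'  => 'ClusterBlockException[blocked by: [FORBIDDEN/8/index write (api)]]'
      }
    }

    error = item['index']['error']
    if error.is_a?(String)
      # 3.5.5: counted under :errors_block_resp and routed to the error stream, not retried
    elsif error.is_a?(Hash) && error.key?('type')
      # pre-existing path: typed errors are tallied per type and added to the retry stream
    end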