fluent-plugin-elasticsearch 1.18.2 → 2.0.0.rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/.gitignore +0 -1
- data/.travis.yml +0 -1
- data/History.md +4 -96
- data/README.md +23 -162
- data/fluent-plugin-elasticsearch.gemspec +5 -5
- data/lib/fluent/plugin/elasticsearch_index_template.rb +3 -8
- data/lib/fluent/plugin/out_elasticsearch.rb +314 -420
- data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +206 -220
- data/test/plugin/test_out_elasticsearch.rb +303 -806
- data/test/plugin/test_out_elasticsearch_dynamic.rb +180 -257
- metadata +13 -24
- data/Gemfile.v0.12 +0 -11
- data/lib/fluent/log-ext.rb +0 -38
- data/lib/fluent/plugin/elasticsearch_constants.rb +0 -11
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +0 -89
- data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb +0 -10
- data/lib/fluent/plugin/filter_elasticsearch_genid.rb +0 -25
- data/test/plugin/test_elasticsearch_error_handler.rb +0 -264
- data/test/plugin/test_filter_elasticsearch_genid.rb +0 -40
- data/test/test_log-ext.rb +0 -33
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 1.18.2
+  version: 2.0.0.rc.1
 platform: ruby
 authors:
 - diogo
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2017-09-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -17,14 +17,14 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.14.8
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.14.8
 - !ruby/object:Gem::Dependency
   name: excon
   requirement: !ruby/object:Gem::Requirement
@@ -115,15 +115,15 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 2.
+        version: '2.0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 2.
-description:
+        version: '2.0'
+description: ElasticSearch output plugin for Fluent event collector
 email:
 - pitr.vern@gmail.com
 - me@diogoterror.com
@@ -136,7 +136,6 @@ files:
 - ".gitignore"
 - ".travis.yml"
 - Gemfile
-- Gemfile.v0.12
 - History.md
 - ISSUE_TEMPLATE.md
 - LICENSE.txt
@@ -144,24 +143,16 @@ files:
 - README.md
 - Rakefile
 - fluent-plugin-elasticsearch.gemspec
-- lib/fluent/log-ext.rb
-- lib/fluent/plugin/elasticsearch_constants.rb
-- lib/fluent/plugin/elasticsearch_error_handler.rb
 - lib/fluent/plugin/elasticsearch_index_template.rb
-- lib/fluent/plugin/elasticsearch_simple_sniffer.rb
-- lib/fluent/plugin/filter_elasticsearch_genid.rb
 - lib/fluent/plugin/out_elasticsearch.rb
 - lib/fluent/plugin/out_elasticsearch_dynamic.rb
 - test/helper.rb
-- test/plugin/test_elasticsearch_error_handler.rb
-- test/plugin/test_filter_elasticsearch_genid.rb
 - test/plugin/test_out_elasticsearch.rb
 - test/plugin/test_out_elasticsearch_dynamic.rb
 - test/plugin/test_template.json
-- test/test_log-ext.rb
 homepage: https://github.com/uken/fluent-plugin-elasticsearch
 licenses:
--
+- MIT
 metadata: {}
 post_install_message:
 rdoc_options: []
@@ -174,19 +165,17 @@ required_ruby_version: !ruby/object:Gem::Requirement
     version: '2.0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - "
+  - - ">"
     - !ruby/object:Gem::Version
-      version:
+      version: 1.3.1
 requirements: []
-
+rubyforge_project:
+rubygems_version: 2.6.11
 signing_key:
 specification_version: 4
-summary:
+summary: ElasticSearch output plugin for Fluent event collector
 test_files:
 - test/helper.rb
-- test/plugin/test_elasticsearch_error_handler.rb
-- test/plugin/test_filter_elasticsearch_genid.rb
 - test/plugin/test_out_elasticsearch.rb
 - test/plugin/test_out_elasticsearch_dynamic.rb
 - test/plugin/test_template.json
-- test/test_log-ext.rb
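The bump to a prerelease version and the tightened `required_rubygems_version` (`> 1.3.1`) both follow from RubyGems' prerelease rules. A short illustrative snippet against the standard rubygems API; nothing below comes from the package itself:

require 'rubygems'

rc = Gem::Version.new('2.0.0.rc.1')
rc.prerelease?                  # => true; the "rc" segment marks a prerelease
rc < Gem::Version.new('2.0.0')  # => true; prereleases sort before the final release

# The new fluentd floor declared above:
req = Gem::Requirement.new('>= 0.14.8')
req.satisfied_by?(Gem::Version.new('0.12.0'))  # => false; fluentd v0.12 no longer qualifies
req.satisfied_by?(Gem::Version.new('0.14.8'))  # => true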
data/Gemfile.v0.12 DELETED
@@ -1,11 +0,0 @@
-source 'https://rubygems.org'
-
-# Specify your gem's dependencies in fluent-plugin-elasticsearch.gemspec
-gem 'fluentd', '~> 0.12.0'
-
-gemspec
-
-
-gem 'simplecov', require: false
-gem 'coveralls', require: false
-gem 'strptime', require: false if RUBY_ENGINE == "ruby" && RUBY_VERSION =~ /^2/
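With Gemfile.v0.12 gone, the 2.x line builds only against the fluentd v0.14 plugin API. A minimal consumer Gemfile sketch for this release candidate; the pins are illustrative, not shipped by the package:

# Gemfile (sketch): prerelease versions must be pinned explicitly,
# or installed with `gem install fluent-plugin-elasticsearch --pre`.
source 'https://rubygems.org'

gem 'fluentd', '>= 0.14.8'
gem 'fluent-plugin-elasticsearch', '2.0.0.rc.1'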
data/lib/fluent/log-ext.rb DELETED
@@ -1,38 +0,0 @@
-require 'fluent/log'
-# For elasticsearch-ruby v7.0.0 or later
-# logger for Elasticsearch::Loggable required the following methods:
-#
-# * debug?
-# * info?
-# * warn?
-# * error?
-# * fatal?
-
-module Fluent
-  class Log
-    # Elasticsearch::Loggable does not request trace? method.
-    # def trace?
-    #   @level <= LEVEL_TRACE
-    # end
-
-    def debug?
-      @level <= LEVEL_DEBUG
-    end
-
-    def info?
-      @level <= LEVEL_INFO
-    end
-
-    def warn?
-      @level <= LEVEL_WARN
-    end
-
-    def error?
-      @level <= LEVEL_ERROR
-    end
-
-    def fatal?
-      @level <= LEVEL_FATAL
-    end
-  end
-end
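This removed extension patched level predicates (debug? through fatal?) onto Fluent::Log so it would satisfy the Logger-like interface elasticsearch-ruby expects. A standalone sketch of the guard pattern those predicates enable, using only the stdlib Logger:

require 'logger'

logger = Logger.new($stdout)
logger.level = Logger::INFO

# At INFO the predicate is false, so the expensive string is never built:
logger.debug("bulk request body: #{'x' * 1_000_000}") if logger.debug?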
data/lib/fluent/plugin/elasticsearch_constants.rb DELETED
@@ -1,11 +0,0 @@
-module Fluent
-  module ElasticsearchConstants
-    BODY_DELIMITER = "\n".freeze
-    UPDATE_OP = "update".freeze
-    UPSERT_OP = "upsert".freeze
-    CREATE_OP = "create".freeze
-    INDEX_OP = "index".freeze
-    ID_FIELD = "_id".freeze
-    TIMESTAMP_FIELD = "@timestamp".freeze
-  end
-end
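These constants captured the framing of the Elasticsearch _bulk API: newline-delimited JSON action/source pairs with a trailing newline. A standalone sketch of that framing; the record and index name are made up:

require 'json'

BODY_DELIMITER = "\n".freeze  # same value the removed module froze
record = { "message" => "hello", "@timestamp" => "2017-09-08T00:00:00Z" }
lines  = [{ "index" => { "_index" => "fluentd" } }.to_json, record.to_json]
body   = lines.join(BODY_DELIMITER) + BODY_DELIMITER  # _bulk bodies must end with a newline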
data/lib/fluent/plugin/elasticsearch_error_handler.rb DELETED
@@ -1,89 +0,0 @@
-require 'fluent/event'
-require_relative 'elasticsearch_constants'
-
-class Fluent::ElasticsearchErrorHandler
-  include Fluent::ElasticsearchConstants
-
-  attr_accessor :bulk_message_count
-  class ElasticsearchVersionMismatch < StandardError; end
-  class ElasticsearchError < StandardError; end
-
-  def initialize(plugin)
-    @plugin = plugin
-  end
-
-  def handle_error(response, tag, chunk, bulk_message_count)
-    items = response['items']
-    if items.nil? || !items.is_a?(Array)
-      raise ElasticsearchVersionMismatch, "The response format was unrecognized: #{response}"
-    end
-    if bulk_message_count != items.length
-      raise ElasticsearchError, "The number of records submitted #{bulk_message_count} do not match the number returned #{items.length}. Unable to process bulk response."
-    end
-    retry_stream = Fluent::MultiEventStream.new
-    stats = Hash.new(0)
-    meta = {}
-    header = {}
-    chunk.msgpack_each do |time, rawrecord|
-      bulk_message = ''
-      next unless rawrecord.is_a? Hash
-      begin
-        # we need a deep copy for process_message to alter
-        processrecord = Marshal.load(Marshal.dump(rawrecord))
-        next unless @plugin.process_message(tag, meta, header, time, processrecord, bulk_message)
-      rescue => e
-        stats[:bad_chunk_record] += 1
-        next
-      end
-      item = items.shift
-      if item.has_key?(@plugin.write_operation)
-        write_operation = @plugin.write_operation
-      elsif INDEX_OP == @plugin.write_operation && item.has_key?(CREATE_OP)
-        write_operation = CREATE_OP
-      elsif UPSERT_OP == @plugin.write_operation && item.has_key?(UPDATE_OP)
-        write_operation = UPDATE_OP
-      else
-        # When we don't have an expected ops field, something changed in the API
-        # expected return values (ES 2.x)
-        stats[:errors_bad_resp] += 1
-        next
-      end
-      if item[write_operation].has_key?('status')
-        status = item[write_operation]['status']
-      else
-        # When we don't have a status field, something changed in the API
-        # expected return values (ES 2.x)
-        stats[:errors_bad_resp] += 1
-        next
-      end
-      case
-      when [200, 201].include?(status)
-        stats[:successes] += 1
-      when CREATE_OP == write_operation && 409 == status
-        stats[:duplicates] += 1
-      when 400 == status
-        stats[:bad_argument] += 1
-        @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new('400 - Rejected by Elasticsearch'))
-      else
-        type = item[write_operation].fetch('error', {})['type']
-        if type
-          stats[type] += 1
-          retry_stream.add(time, rawrecord)
-        else
-          # When we don't have a type field, something changed in the API
-          # expected return values (ES 2.x)
-          stats[:errors_bad_resp] += 1
-          @plugin.router.emit_error_event(tag, time, rawrecord, ElasticsearchError.new("#{status} - No error type provided in the response"))
-          next
-        end
-        stats[type] += 1
-      end
-    end
-    @plugin.log.on_debug do
-      msg = ["Indexed (op = #{@plugin.write_operation})"]
-      stats.each_pair { |key, value| msg << "#{value} #{key}" }
-      @plugin.log.debug msg.join(', ')
-    end
-    raise Fluent::ElasticsearchOutput::RetryStreamError.new(retry_stream) unless retry_stream.empty?
-  end
-end
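The core of the removed handler is its per-item triage: 2xx counts as success, a create that returns 409 is a duplicate, 400 is dead-lettered via emit_error_event, and any other status with a typed error is re-queued for retry. A condensed paraphrase of that decision table, not a drop-in replacement for the deleted class:

# Returns what the handler did with each bulk item, instead of mutating streams.
def triage(write_operation, status, error_type)
  if [200, 201].include?(status)
    :success
  elsif write_operation == 'create' && status == 409
    :duplicate            # record already indexed; dropped silently
  elsif status == 400
    :emit_error_event     # rejected by Elasticsearch; sent to the error stream
  elsif error_type
    :retry                # typed server-side error; record re-enqueued
  else
    :bad_response         # response shape changed; counted and skipped
  end
end

triage('create', 409, nil)                               # => :duplicate
triage('index', 429, 'es_rejected_execution_exception')  # => :retry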
data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb DELETED
@@ -1,10 +0,0 @@
-require 'elasticsearch'
-
-class Fluent::ElasticsearchSimpleSniffer < Elasticsearch::Transport::Transport::Sniffer
-
-  def hosts
-    @transport.logger.debug "In Fluent::ElasticsearchSimpleSniffer hosts #{@transport.hosts}" if @transport.logger
-    @transport.hosts
-  end
-
-end
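This sniffer short-circuits host discovery: when elasticsearch-ruby reloads connections, it gets back the statically configured hosts instead of whatever addresses the cluster advertises. Wiring it in looks roughly like the sketch below; :sniffer_class is believed to be the elasticsearch-transport option that the 1.x plugin surfaced as sniffer_class_name, and the host is a placeholder:

# Sketch (assumed wiring): a custom sniffer keeps reload_connections from
# replacing the host list with the cluster's published addresses.
require 'elasticsearch'

client = Elasticsearch::Client.new(
  hosts: ['http://localhost:9200'],                  # placeholder host
  reload_connections: true,                          # periodic reload still runs...
  sniffer_class: Fluent::ElasticsearchSimpleSniffer  # ...but yields these hosts unchanged
)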
data/lib/fluent/plugin/filter_elasticsearch_genid.rb DELETED
@@ -1,25 +0,0 @@
-require 'securerandom'
-require 'base64'
-require 'fluent/filter'
-
-module Fluent
-  class ElasticsearchGenidFilter < Filter
-    Fluent::Plugin.register_filter('elasticsearch_genid', self)
-
-    config_param :hash_id_key, :string, :default => '_hash'
-
-    def initialize
-      super
-    end
-
-    def configure(conf)
-      super
-    end
-
-    def filter(tag, time, record)
-      record[@hash_id_key] = Base64.strict_encode64(SecureRandom.uuid)
-      record
-    end
-
-  end
-end
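The filter stamped each record with a random, Base64-encoded UUID under hash_id_key so the output plugin could use it as the document _id (via id_key) and make retried bulk writes overwrite rather than duplicate. Its per-record effect, shown outside the Fluentd plugin API:

# Standalone equivalent of the removed filter body.
require 'securerandom'
require 'base64'

record = { "message" => "hello" }  # example record, not from the package
record["_hash"] = Base64.strict_encode64(SecureRandom.uuid)
# Paired with `id_key _hash` in the output, replays overwrite the same
# document instead of indexing it twice.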
data/test/plugin/test_elasticsearch_error_handler.rb DELETED
@@ -1,264 +0,0 @@
-require 'helper'
-require 'fluent/plugin/out_elasticsearch'
-require 'fluent/plugin/elasticsearch_error_handler'
-require 'json'
-
-class TestElasticsearchErrorHandler < Test::Unit::TestCase
-
-  class TestPlugin
-    attr_reader :log
-    attr_reader :error_events
-    attr_accessor :write_operation
-    def initialize(log)
-      @log = log
-      @write_operation = 'index'
-      @error_events = []
-    end
-
-    def router
-      self
-    end
-
-    def emit_error_event(tag, time, record, e)
-      @error_events << {:tag => tag, :time=>time, :record=>record, :error=>e}
-    end
-
-    def process_message(tag, meta, header, time, record, bulk_message)
-      if record.has_key?('raise') && record['raise']
-        raise Exception('process_message')
-      end
-      return true
-    end
-  end
-
-  class MockChunk
-    def initialize(records)
-      @records = records
-      @index = 0
-    end
-    def msgpack_each
-      @records.each { |item| yield(item[:time],item[:record]) }
-    end
-  end
-
-  def setup
-    Fluent::Test.setup
-    @log_device = Fluent::Test::DummyLogDevice.new
-    if defined?(ServerEngine::DaemonLogger)
-      dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
-      logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
-      @log = Fluent::Log.new(logger)
-    else
-      @log = Fluent::Log.new(@log_device, Fluent::Log::LEVEL_INFO)
-    end
-    @plugin = TestPlugin.new(@log)
-    @handler = Fluent::ElasticsearchErrorHandler.new(@plugin)
-  end
-
-  def parse_response(value)
-    JSON.parse(value)
-  end
-
-  def test_dlq_400_responses
-    records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-    response = parse_response(%({
-      "took" : 0,
-      "errors" : true,
-      "items" : [
-        {
-          "create" : {
-            "_index" : "foo",
-            "status" : 400,
-            "_type" : "bar",
-            "reason":"unrecognized error"
-          }
-        }
-      ]
-    }))
-    chunk = MockChunk.new(records)
-    @handler.handle_error(response, 'atag', chunk, records.length)
-    assert_equal(1, @plugin.error_events.size)
-    assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
-  end
-
-  def create_mock_response(method)
-    parse_response(%({
-      "took" : 1,
-      "errors" : true,
-      "items" : [
-        {
-          "#{method}" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "1",
-            "status" : 201
-          }
-        },
-        {
-          "#{method}" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "2",
-            "status" : 500,
-            "error" : {
-              "type" : "some unrecognized type",
-              "reason":"unrecognized error"
-            }
-          }
-        },
-        {
-          "#{method}" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "3",
-            "status" : 409,
-            "error" : {
-              "type":"version_conflict_engine_exception",
-              "reason":"document already exists"
-            }
-          }
-        },
-        {
-          "#{method}" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "5",
-            "status" : 500,
-            "error" : {
-              "reason":"unrecognized error - no type field"
-            }
-          }
-        },
-        {
-          "#{method}" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "6",
-            "status" : 429,
-            "error" : {
-              "type" : "es_rejected_execution_exception",
-              "reason":"unable to fulfill request at this time, try again later"
-            }
-          }
-        },
-        {
-          "#{method}" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "7",
-            "status" : 400,
-            "error" : {
-              "type" : "some unrecognized type",
-              "reason":"unrecognized error"
-            }
-          }
-        },
-        {
-          "#{method}" : {
-            "_index" : "foo",
-            "_type" : "bar",
-            "_id" : "8",
-            "status" : 500,
-            "error" : {
-              "type" : "some unrecognized type",
-              "reason":"unrecognized error"
-            }
-          }
-        }
-      ]
-    }))
-  end
-
-  def test_retry_error_index
-    records = []
-    error_records = Hash.new(false)
-    error_records.merge!({0=>true, 4=>true, 9=>true})
-    10.times do |i|
-      records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
-    end
-    chunk = MockChunk.new(records)
-
-    response = create_mock_response('create')
-    begin
-      failed = false
-      @handler.handle_error(response, 'atag', chunk, response['items'].length)
-    rescue Fluent::ElasticsearchOutput::RetryStreamError=>e
-      failed = true
-      records = [].tap do |records|
-        e.retry_stream.each {|time, record| records << record}
-      end
-      assert_equal 3, records.length
-      assert_equal 2, records[0]['_id']
-      assert_equal 6, records[1]['_id']
-      assert_equal 8, records[2]['_id']
-      error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
-      assert_equal 2, error_ids.length
-      assert_equal 5, error_ids[0]
-      assert_equal 7, error_ids[1]
-      @plugin.error_events.collect {|h| h[:error]}.each do |e|
-        assert_true e.respond_to?(:backtrace)
-      end
-    end
-    assert_true failed
-
-  end
-
-  def test_retry_error_upsert
-    @plugin.write_operation = 'upsert'
-    records = []
-    error_records = Hash.new(false)
-    error_records.merge!({0=>true, 4=>true, 9=>true})
-    10.times do |i|
-      records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
-    end
-    chunk = MockChunk.new(records)
-
-    response = create_mock_response('update')
-    begin
-      failed = false
-      @handler.handle_error(response, 'atag', chunk, response['items'].length)
-    rescue Fluent::ElasticsearchOutput::RetryStreamError=>e
-      failed = true
-      records = [].tap do |records|
-        e.retry_stream.each {|time, record| records << record}
-      end
-      assert_equal 4, records.length
-      assert_equal 2, records[0]['_id']
-      # upsert is retried in case of conflict error.
-      assert_equal 3, records[1]['_id']
-      assert_equal 6, records[2]['_id']
-      assert_equal 8, records[3]['_id']
-      error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
-      assert_equal 2, error_ids.length
-      assert_equal 5, error_ids[0]
-      assert_equal 7, error_ids[1]
-      @plugin.error_events.collect {|h| h[:error]}.each do |e|
-        assert_true e.respond_to?(:backtrace)
-      end
-    end
-    assert_true failed
-  end
-
-  def test_old_es_1_X_responses
-    records = [{time: 123, record: {"foo" => "bar", '_id' => 'abc'}}]
-    response = parse_response(%({
-      "took" : 0,
-      "errors" : true,
-      "items" : [
-        {
-          "create" : {
-            "_index" : "foo",
-            "status" : 429,
-            "_type" : "bar",
-            "error" : "some unrecognized error"
-          }
-        }
-      ]
-    }))
-    chunk = MockChunk.new(records)
-    @handler.handle_error(response, 'atag', chunk, records.length)
-    assert_equal(1, @plugin.error_events.size)
-    assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
-  end
-
-end