fluent-plugin-elasticsearch 1.14.0 → 1.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/History.md +4 -0
- data/README.md +0 -12
- data/fluent-plugin-elasticsearch.gemspec +1 -1
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +21 -40
- data/lib/fluent/plugin/out_elasticsearch.rb +2 -43
- data/test/plugin/test_elasticsearch_error_handler.rb +122 -0
- data/test/plugin/test_out_elasticsearch.rb +6 -103
- metadata +4 -4
- data/lib/fluent/plugin/dead_letter_queue_drop_handler.rb +0 -10
- data/lib/fluent/plugin/dead_letter_queue_file_handler.rb +0 -14
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 709ac4334da5ddeae76a8cd290cb54b3d972458515e8e2855c1c56f216a078ca
+  data.tar.gz: 17cb0aae657c8d2bafea64e95432483667ffa3e2d1ae531a0dd34880ce580347
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 157d3d0d15eef38c8632c4d1189bfa651ad2902300ce0f20cb56ad1a75bbb0ac6f7546a3ec93e8769d14d9feaf774cab075f049c97218de01c2bd52e89fb7ff5
+  data.tar.gz: 11cb945b1898b87921ea642a873b47352080d667d864965c933481c6601b45cbdac1246ce9e9d996e2a4a5d18f46bdb462a664ab001f8337a801c329aa627f36
data/History.md
CHANGED
data/README.md
CHANGED

@@ -49,7 +49,6 @@ Note: For Amazon Elasticsearch Service please consider using [fluent-plugin-aws-
   + [time_parse_error_tag](#time_parse_error_tag)
   + [reconnect_on_error](#reconnect_on_error)
   + [with_transporter_log](#with_transporter_log)
- + [dlq_handler](#dlq_handler)
   + [Client/host certificate options](#clienthost-certificate-options)
   + [Proxy Support](#proxy-support)
   + [Buffered output options](#buffered-output-options)

@@ -475,17 +474,6 @@ We recommend to set this true if you start to debug this plugin.
 with_transporter_log true
 ```
 
-### dlq_handler
-Adds an error handler for processing corrupt messages from message buffers.
-There are [known cases](https://bugzilla.redhat.com/show_bug.cgi?id=1562004) where
-fluentd is stuck processing messages because the file buffer is corrupt. Fluentd
-is unable to clear faulty buffer chunks.
-
-```
-dlq_handler {'type':'drop'} #default is to log and drop messages
-dlq_handler {'type':'file', 'dir':'/tmp/fluentd/dlq', 'max_files':5, 'max_file_size':104857600}
-```
-
 ### Client/host certificate options
 
 Need to verify Elasticsearch's certificate? You can use the following parameter to specify a CA instead of using an environment variable.
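The removed `dlq_handler` option has no direct replacement in the README text; as the `out_elasticsearch.rb` changes further down show, records that fail processing are now handed to Fluentd's standard error stream via `router.emit_error_event`. A minimal sketch of capturing such records with Fluentd's built-in `@ERROR` label follows; the output type and path here are illustrative assumptions, not something this diff prescribes:

```
<label @ERROR>
  <match **>
    @type file
    path /var/log/fluent/failed_records
  </match>
</label>
```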
data/fluent-plugin-elasticsearch.gemspec
CHANGED

@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
 Gem::Specification.new do |s|
   s.name          = 'fluent-plugin-elasticsearch'
-  s.version       = '1.14.0'
+  s.version       = '1.15.0'
   s.authors       = ['diogo', 'pitr']
   s.email         = ['pitr.vern@gmail.com', 'me@diogoterror.com']
   s.description   = %q{Elasticsearch output plugin for Fluent event collector}
data/lib/fluent/plugin/elasticsearch_error_handler.rb
CHANGED

@@ -16,12 +16,7 @@ class Fluent::ElasticsearchErrorHandler
   end
 
   def handle_error(response)
-    errors = Hash.new(0)
-    errors_bad_resp = 0
-    errors_unrecognized = 0
-    successes = 0
-    duplicates = 0
-    bad_arguments = 0
+    stats = Hash.new(0)
     response['items'].each do |item|
       if item.has_key?(@plugin.write_operation)
         write_operation = @plugin.write_operation

@@ -30,7 +25,7 @@ class Fluent::ElasticsearchErrorHandler
       else
         # When we don't have an expected ops field, something changed in the API
         # expected return values (ES 2.x)
-        errors_bad_resp += 1
+        stats[:errors_bad_resp] += 1
         next
       end
       if item[write_operation].has_key?('status')

@@ -38,58 +33,44 @@ class Fluent::ElasticsearchErrorHandler
       else
         # When we don't have a status field, something changed in the API
         # expected return values (ES 2.x)
-        errors_bad_resp += 1
+        stats[:errors_bad_resp] += 1
         next
       end
       case
+      when [200, 201].include?(status)
+        stats[:successes] += 1
       when CREATE_OP == write_operation && 409 == status
-        duplicates += 1
-      when 400 == status
-        bad_arguments += 1
-        @plugin.log.debug "Elasticsearch rejected document: #{item}"
-      when [429, 500].include?(status)
+        stats[:duplicates] += 1
+      else
         if item[write_operation].has_key?('error') && item[write_operation]['error'].has_key?('type')
           type = item[write_operation]['error']['type']
         else
           # When we don't have a type field, something changed in the API
           # expected return values (ES 2.x)
-          errors_bad_resp += 1
+          stats[:errors_bad_resp] += 1
           next
         end
-        errors[type] += 1
-      when [200, 201].include?(status)
-        successes += 1
-      else
-        errors_unrecognized += 1
+        stats[type] += 1
       end
     end
-    if errors_bad_resp > 0
-      msg = "Unable to parse error response from Elasticsearch, likely an API version mismatch #{response}"
-      @plugin.log.error msg
-      raise ElasticsearchVersionMismatch, msg
-    end
-    if bad_arguments > 0
-      @plugin.log.warn "Elasticsearch rejected #{bad_arguments} documents due to invalid field arguments"
-    end
-    if duplicates > 0
-      @plugin.log.info "Encountered #{duplicates} duplicate(s) of #{successes} indexing chunk, ignoring"
-    end
-    msg = "Indexed (op = #{@plugin.write_operation}) #{successes} successfully, #{duplicates} duplicate(s), #{bad_arguments} bad argument(s), #{errors_unrecognized} unrecognized error(s)"
-    errors.each_key do |key|
-      msg << ", #{errors[key]} #{key} error(s)"
+    if stats[:errors_bad_resp] > 0
+      @plugin.log.on_debug { @plugin.log.debug("Unable to parse response from elasticsearch, likely an API version mismatch: #{response}") }
+      raise ElasticsearchVersionMismatch, "Unable to parse error response from Elasticsearch, likely an API version mismatch. Add '@log_level debug' to your config to see the full response"
     end
-    @plugin.log.debug msg
-    if errors_unrecognized > 0
-      raise UnrecognizedElasticsearchError, msg
+    @plugin.log.on_debug do
+      msg = ["Indexed (op = #{@plugin.write_operation})"]
+      stats.each_pair { |key, value| msg << "#{value} #{key}" }
+      @plugin.log.debug msg.join(', ')
     end
-    errors.each_key do |key|
+    stats.each_key do |key|
       case key
       when 'out_of_memory_error'
-        raise ElasticsearchOutOfMemory, msg
+        raise ElasticsearchOutOfMemory, 'Elasticsearch has exhausted its heap, retrying'
       when 'es_rejected_execution_exception'
-        raise BulkIndexQueueFull, msg
+        raise BulkIndexQueueFull, 'Bulk index queue is full, retrying'
       else
-        raise ElasticsearchError, msg
+        @plugin.log.on_debug { @plugin.log.debug("Elasticsearch errors returned, retrying: #{response}") }
+        raise ElasticsearchError, "Elasticsearch returned errors, retrying. Add '@log_level debug' to your config to see the full response"
       end
     end
   end
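The rewrite above collapses five ad-hoc counters into a single tally hash. A small, runnable sketch of that idiom, using hypothetical bulk-response items shaped like Elasticsearch's `items` array:

```
# Hash.new(0) returns 0 for missing keys, so outcomes can be counted
# without declaring one counter variable per error type.
stats = Hash.new(0)

# Hypothetical bulk-response items (not taken from a real response).
items = [
  { 'index' => { 'status' => 201 } },
  { 'index' => { 'status' => 409 } },
  { 'index' => { 'status' => 500,
                 'error' => { 'type' => 'es_rejected_execution_exception' } } },
]

items.each do |item|
  status = item['index']['status']
  case
  when [200, 201].include?(status)
    stats[:successes] += 1
  when 409 == status
    stats[:duplicates] += 1
  else
    # Failures are keyed by the error type string itself.
    stats[item['index']['error']['type']] += 1
  end
end

stats.each_pair { |key, value| puts "#{value} #{key}" }
# Prints: 1 successes, 1 duplicates, 1 es_rejected_execution_exception
```

Keying failures by their `type` string is what lets the handler match `'out_of_memory_error'` and `'es_rejected_execution_exception'` by name afterwards and raise the corresponding retryable exception.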
data/lib/fluent/plugin/out_elasticsearch.rb
CHANGED

@@ -69,7 +69,6 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
   config_param :reconnect_on_error, :bool, :default => false
   config_param :pipeline, :string, :default => nil
   config_param :with_transporter_log, :bool, :default => false
-  config_param :dlq_handler, :hash, :default => { 'type' =>'drop' }
 
   include Fluent::ElasticsearchIndexTemplate
   include Fluent::ElasticsearchConstants

@@ -131,43 +130,6 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
       log.warn "Consider to specify log_level with @log_level." unless log_level
     end
 
-    configure_dlq_handler
-
-  end
-
-  def configure_dlq_handler
-    dlq_type = @dlq_handler && @dlq_handler.is_a?(Hash) ? dlq_type = @dlq_handler['type'] : nil
-    return unless dlq_type
-
-    case dlq_type.downcase
-    when 'drop'
-      log.info('Configuring the DROP dead letter queue handler')
-      require_relative 'dead_letter_queue_drop_handler'
-      extend Fluent::DeadLetterQueueDropHandler
-    when 'file'
-      log.info("Configuring the File dead letter queue handler: ")
-      dir = @dlq_handler['dir'] || '/var/lib/fluentd/dlq'
-      shift_age = @dlq_handler['max_files'] || 0
-      shift_size = @dlq_handler['max_file_size'] || 1048576
-      log.info("Configuring the File dead letter queue handler: ")
-      log.info("  Directory: #{dir}")
-      log.info("  Max number of DLQ files: #{shift_age}")
-      log.info("  Max file size: #{shift_size}")
-      unless Dir.exists?(dir)
-        Dir.mkdir(dir)
-        log.info("Created DLQ directory: '#{dir}'")
-      end
-      require 'logger'
-      require 'json'
-      file = File.join(dir, 'dlq')
-      @dlq_file = Logger.new(file, shift_age, shift_size)
-      @dlq_file.level = Logger::INFO
-      @dlq_file.formatter = proc { |severity, datetime, progname, msg| "#{msg.dump}\n" }
-      log.info ("Created DLQ file #{file}")
-
-      require_relative 'dead_letter_queue_file_handler'
-      extend Fluent::DeadLetterQueueFileHandler
-    end
   end
 
   def create_meta_config_map

@@ -363,7 +325,7 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
         begin
           process_message(tag, meta, header, time, record, bulk_message)
         rescue=>e
-          handle_chunk_error(self, tag, e, time, record)
+          router.emit_error_event(tag, time, record, e)
         end
       end
 

@@ -450,10 +412,7 @@ class Fluent::ElasticsearchOutput < Fluent::ObjectBufferedOutput
     retries = 0
     begin
       response = client.bulk body: data
-      if response['errors']
-        @error.handle_error(response)
-        log.error "Could not push log to Elasticsearch: #{response}"
-      end
+      @error.handle_error(response) if response['errors']
     rescue *client.transport.host_unreachable_exceptions => e
       if retries < 2
         retries += 1
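Dropping the DLQ machinery leaves error handling to Fluentd itself: the rescue branch now calls `router.emit_error_event`, which routes the failed record to the `@ERROR` label. A runnable sketch of that pattern, with a stub router standing in for Fluentd's real one (all names here are illustrative):

```
require 'json'

# Stand-in for Fluentd's router; the real one routes the event to @ERROR.
class StubRouter
  def emit_error_event(tag, time, record, error)
    puts "error stream <- tag=#{tag} time=#{time} record=#{record.inspect} (#{error.class}: #{error.message})"
  end
end

router = StubRouter.new
tag    = 'app.logs'
time   = Time.now.to_i
record = { 'bad_value' => "\xAD" } # invalid UTF-8, like the "\255" in the test below

begin
  # Serialization fails on the malformed byte sequence.
  JSON.generate(record)
rescue => e
  # Instead of failing the whole chunk, divert the single bad record.
  router.emit_error_event(tag, time, record, e)
end
```

One effect visible in the test updates below: a chunk no longer stalls on one corrupt record, since only the offending record is diverted.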
data/test/plugin/test_elasticsearch_error_handler.rb
ADDED

@@ -0,0 +1,122 @@
+require 'helper'
+require 'fluent/plugin/elasticsearch_error_handler'
+require 'json'
+
+class TestElasticsearchErrorHandler < Test::Unit::TestCase
+
+  class TestPlugin
+    attr_reader :log
+    def initialize(log)
+      @log = log
+    end
+
+    def write_operation
+      'index'
+    end
+  end
+
+  def setup
+    Fluent::Test.setup
+    @log = Fluent::Engine.log
+    plugin = TestPlugin.new(@log)
+    @handler = Fluent::ElasticsearchErrorHandler.new(plugin)
+  end
+
+  def parse_response(value)
+    JSON.parse(value)
+  end
+
+  def test_errors
+    response = parse_response(%({
+      "took" : 0,
+      "errors" : true,
+      "items" : [
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 500,
+            "error" : {
+              "type" : "some unrecognized type",
+              "reason":"some error to cause version mismatch"
+            }
+          }
+        },
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 500,
+            "error" : {
+              "type" : "some unrecognized type",
+              "reason":"some error to cause version mismatch"
+            }
+          }
+        },
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 201
+          }
+        },
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 409
+          }
+        },
+        {
+          "create" : {
+            "_index" : "foo",
+            "_type" : "bar",
+            "_id" : "abc",
+            "status" : 400,
+            "error" : {
+              "type" : "some unrecognized type",
+              "reason":"some error to cause version mismatch"
+            }
+          }
+        }
+      ]
+    }))
+
+    assert_raise Fluent::ElasticsearchErrorHandler::ElasticsearchError do
+      @handler.handle_error(response)
+    end
+
+  end
+
+  def test_elasticsearch_version_mismatch_raises_error
+    response = parse_response(%(
+      {
+        "took" : 0,
+        "errors" : true,
+        "items" : [
+          {
+            "create" : {
+              "_index" : "foo",
+              "_type" : "bar",
+              "_id" : "abc",
+              "status" : 500,
+              "error" : {
+                "reason":"some error to cause version mismatch"
+              }
+            }
+          }
+        ]
+      }
+    ))
+
+    assert_raise Fluent::ElasticsearchErrorHandler::ElasticsearchVersionMismatch do
+      @handler.handle_error(response)
+    end
+
+  end
+
+end
data/test/plugin/test_out_elasticsearch.rb
CHANGED

@@ -134,6 +134,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def stub_elastic_bulk_rejected(url="http://localhost:9200/_bulk")
     error = {
+      "status" => 500,
       "type" => "es_rejected_execution_exception",
       "reason" => "rejected execution of org.elasticsearch.transport.TransportService$4@1a34d37a on EsThreadPoolExecutor[bulk, queue capacity = 50, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@312a2162[Running, pool size = 32, active threads = 32, queued tasks = 50, completed tasks = 327053]]"
     }

@@ -142,6 +143,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def stub_elastic_out_of_memory(url="http://localhost:9200/_bulk")
     error = {
+      "status" => 500,
       "type" => "out_of_memory_error",
       "reason" => "Java heap space"
     }

@@ -150,6 +152,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def stub_elastic_unrecognized_error(url="http://localhost:9200/_bulk")
     error = {
+      "status" => 500,
      "type" => "some-other-type",
       "reason" => "some-other-reason"
     }

@@ -158,6 +161,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def stub_elastic_version_mismatch(url="http://localhost:9200/_bulk")
     error = {
+      "status" => 500,
       "category" => "some-other-type",
       "reason" => "some-other-reason"
     }

@@ -208,38 +212,6 @@ class ElasticsearchOutput < Test::Unit::TestCase
     assert_nil instance.client_cert
     assert_nil instance.client_key_pass
     assert_false instance.with_transporter_log
-    assert_not_nil instance.dlq_handler
-    assert_equal 'drop', instance.dlq_handler['type']
-  end
-
-  def test_configure_with_dlq_file_handler
-    require 'tmpdir'
-    dir = Dir.mktmpdir
-    config = %Q{
-      host logs.google.com
-      port 777
-      scheme https
-      path /es/
-      user john
-      password doe
-      dlq_handler {"type":"file", "dir":"#{dir}"}
-    }
-    instance = driver('test', config).instance
-
-    assert_equal 'logs.google.com', instance.host
-    assert_equal 777, instance.port
-    assert_equal 'https', instance.scheme
-    assert_equal '/es/', instance.path
-    assert_equal 'john', instance.user
-    assert_equal 'doe', instance.password
-    assert_equal :TLSv1, instance.ssl_version
-    assert_nil instance.client_key
-    assert_nil instance.client_cert
-    assert_nil instance.client_key_pass
-    assert_false instance.with_transporter_log
-    assert_not_nil instance.dlq_handler
-    assert_equal 'file', instance.dlq_handler['type']
-    assert_true Dir.exists?(dir)
   end
 
   def test_template_already_present

@@ -629,28 +601,14 @@ class ElasticsearchOutput < Test::Unit::TestCase
     assert_requested(elastic_request)
   end
 
-  def
+  def test_write_message_with_bad_chunk
     driver.configure("target_index_key bad_value\n")
     log = driver.instance.router.emit_error_handler.log
     stub_elastic_ping
     stub_elastic
     driver.emit({'bad_value'=>"\255"})
     driver.run
-    assert_logs_include(log.out.logs,
-  end
-
-  def test_write_message_with_dlq_file_handler
-    log = driver.instance.router.emit_error_handler.log
-    dir = Dir.mktmpdir
-    driver.configure("dlq_handler {\"type\":\"file\", \"dir\":\"#{dir}\"}\n
-                      target_index_key bad_value\n
-                     ")
-    stub_elastic_ping
-    stub_elastic
-    driver.emit({'bad_value'=>"\255"})
-    driver.run
-    logs = File.readlines(File.join(dir,'dlq'))
-    assert_logs_include(logs, 'invalid')
+    assert_logs_include(log.out.logs, /(input string invalid)|(invalid byte sequence in UTF-8)/)
   end
 
   def test_writes_to_default_index

@@ -1417,25 +1375,6 @@ class ElasticsearchOutput < Test::Unit::TestCase
     assert_equal(connection_resets, 1)
   end
 
-  def test_bulk_bad_arguments
-    log = driver.instance.router.emit_error_handler.log
-    log.level = 'debug'
-    driver = driver('@log_level debug')
-
-    stub_elastic_ping
-    stub_elastic_bad_argument
-
-    driver.emit(sample_record)
-    driver.emit(sample_record)
-    driver.emit(sample_record)
-    driver.run
-
-    matches = log.out.logs.grep /Elasticsearch rejected document:/
-    assert_equal(1, matches.length, "Message 'Elasticsearch rejected document: ...' was not emitted")
-    matches = log.out.logs.grep /documents due to invalid field arguments/
-    assert_equal(1, matches.length, "Message 'Elasticsearch rejected # documents due to invalid field arguments ...' was not emitted")
-  end
-
   def test_bulk_error
     stub_elastic_ping
     stub_elastic_bulk_error

@@ -1460,42 +1399,6 @@ class ElasticsearchOutput < Test::Unit::TestCase
     }
   end
 
-  def test_bulk_error_unrecognized_error
-    stub_elastic_ping
-    stub_elastic_unrecognized_error
-
-    assert_raise(Fluent::ElasticsearchErrorHandler::UnrecognizedElasticsearchError) {
-      driver.emit(sample_record)
-      driver.emit(sample_record)
-      driver.emit(sample_record)
-      driver.run
-    }
-  end
-
-  def test_bulk_error_out_of_memory
-    stub_elastic_ping
-    stub_elastic_out_of_memory
-
-    assert_raise(Fluent::ElasticsearchErrorHandler::ElasticsearchOutOfMemory) {
-      driver.emit(sample_record)
-      driver.emit(sample_record)
-      driver.emit(sample_record)
-      driver.run
-    }
-  end
-
-  def test_bulk_error_queue_full
-    stub_elastic_ping
-    stub_elastic_bulk_rejected
-
-    assert_raise(Fluent::ElasticsearchErrorHandler::BulkIndexQueueFull) {
-      driver.emit(sample_record)
-      driver.emit(sample_record)
-      driver.emit(sample_record)
-      driver.run
-    }
-  end
-
   def test_bulk_index_into_a_create
     stub_elastic_ping
     stub_elastic_index_to_create
metadata
CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 1.14.0
+  version: 1.15.0
 platform: ruby
 authors:
 - diogo

@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-04-
+date: 2018-04-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd

@@ -144,8 +144,6 @@ files:
 - README.md
 - Rakefile
 - fluent-plugin-elasticsearch.gemspec
-- lib/fluent/plugin/dead_letter_queue_drop_handler.rb
-- lib/fluent/plugin/dead_letter_queue_file_handler.rb
 - lib/fluent/plugin/elasticsearch_constants.rb
 - lib/fluent/plugin/elasticsearch_error_handler.rb
 - lib/fluent/plugin/elasticsearch_index_template.rb

@@ -153,6 +151,7 @@ files:
 - lib/fluent/plugin/out_elasticsearch.rb
 - lib/fluent/plugin/out_elasticsearch_dynamic.rb
 - test/helper.rb
+- test/plugin/test_elasticsearch_error_handler.rb
 - test/plugin/test_filter_elasticsearch_genid.rb
 - test/plugin/test_out_elasticsearch.rb
 - test/plugin/test_out_elasticsearch_dynamic.rb

@@ -183,6 +182,7 @@ specification_version: 4
 summary: Elasticsearch output plugin for Fluent event collector
 test_files:
 - test/helper.rb
+- test/plugin/test_elasticsearch_error_handler.rb
 - test/plugin/test_filter_elasticsearch_genid.rb
 - test/plugin/test_out_elasticsearch.rb
 - test/plugin/test_out_elasticsearch_dynamic.rb
data/lib/fluent/plugin/dead_letter_queue_drop_handler.rb
REMOVED

@@ -1,10 +0,0 @@
-
-module Fluent::DeadLetterQueueDropHandler
-  def handle_chunk_error(out_plugin, tag, error, time, record)
-    begin
-      log.error("Dropping record from '#{tag}': error:#{error} time:#{time} record:#{record}")
-    rescue=>e
-      log.error("Error while trying to log and drop message from chunk '#{tag}' #{e.message}")
-    end
-  end
-end
data/lib/fluent/plugin/dead_letter_queue_file_handler.rb
REMOVED

@@ -1,14 +0,0 @@
-# encoding: UTF-8
-
-module Fluent::DeadLetterQueueFileHandler
-
-  def handle_chunk_error(out_plugin, tag, error, time, record)
-    begin
-      @dlq_file.info({processed_at: Time.now.utc, tag: tag, error: "#{error.message}", time: time, record: record}.to_json)
-    rescue=>e
-      log.error("Error while trying to log and drop message from chunk '#{tag}' #{e.message}")
-    end
-  end
-
-end
-