fluent-plugin-elasticsearch 2.10.3 → 2.10.4

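The diff below touches three test files in the gem: judging from their requires and class names, these are likely test/helper.rb, test/plugin/test_elasticsearch_error_handler.rb, and test/plugin/test_filter_elasticsearch_genid.rb. In each hunk the removed and re-added lines are content-identical as rendered here, which suggests the 2.10.3 → 2.10.4 change to these files is limited to whitespace or line endings.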
@@ -1,24 +1,24 @@
- require 'simplecov'
- SimpleCov.start do
-   add_filter do |src|
-     !(src.filename =~ /^#{SimpleCov.root}\/lib/)
-   end
- end
-
- require 'coveralls'
- Coveralls.wear!
-
- # needs to be after simplecov but before test/unit, because fluentd sets default
- # encoding to ASCII-8BIT, but coverall might load git data which could contain a
- # UTF-8 character
- at_exit do
-   Encoding.default_internal = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_internal)
-   Encoding.default_external = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_external)
- end
-
- require 'test/unit'
- require 'fluent/test'
- require 'minitest/pride'
-
- require 'webmock/test_unit'
- WebMock.disable_net_connect!
+ require 'simplecov'
+ SimpleCov.start do
+   add_filter do |src|
+     !(src.filename =~ /^#{SimpleCov.root}\/lib/)
+   end
+ end
+
+ require 'coveralls'
+ Coveralls.wear!
+
+ # needs to be after simplecov but before test/unit, because fluentd sets default
+ # encoding to ASCII-8BIT, but coverall might load git data which could contain a
+ # UTF-8 character
+ at_exit do
+   Encoding.default_internal = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_internal)
+   Encoding.default_external = 'UTF-8' if defined?(Encoding) && Encoding.respond_to?(:default_external)
+ end
+
+ require 'test/unit'
+ require 'fluent/test'
+ require 'minitest/pride'
+
+ require 'webmock/test_unit'
+ WebMock.disable_net_connect!
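As shown above, the shared test helper loads SimpleCov and Coveralls before test/unit, restores UTF-8 default encodings at exit (fluentd sets the default encoding to ASCII-8BIT), and calls WebMock.disable_net_connect! so the suite never talks to a live Elasticsearch node.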
@@ -1,195 +1,195 @@
- require 'helper'
- require 'fluent/plugin/out_elasticsearch'
- require 'fluent/plugin/elasticsearch_error_handler'
- require 'json'
-
- class TestElasticsearchErrorHandler < Test::Unit::TestCase
-
-   class TestPlugin
-     attr_reader :log
-     attr_reader :write_operation, :error_events
-     def initialize(log)
-       @log = log
-       @write_operation = 'index'
-       @error_events = []
-     end
-
-     def router
-       self
-     end
-
-     def emit_error_event(tag, time, record, e)
-       @error_events << {:tag => tag, :time=>time, :record=>record, :error=>e}
-     end
-
-     def process_message(tag, meta, header, time, record, bulk_message, extracted_values)
-       if record.has_key?('raise') && record['raise']
-         raise Exception('process_message')
-       end
-     end
-   end
-
-   class MockChunk
-     def initialize(records)
-       @records = records
-       @index = 0
-     end
-     def msgpack_each
-       @records.each { |item| yield(item[:time],item[:record]) }
-     end
-   end
-
-   def setup
-     Fluent::Test.setup
-     @log_device = Fluent::Test::DummyLogDevice.new
-     dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
-     logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
-     @log = Fluent::Log.new(logger)
-     @plugin = TestPlugin.new(@log)
-     @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
-   end
-
-   def parse_response(value)
-     JSON.parse(value)
-   end
-
-   def test_dlq_400_responses
-     records = [{time: 123, record: {"foo" => "bar"}}]
-     response = parse_response(%({
-       "took" : 0,
-       "errors" : true,
-       "items" : [
-         {
-           "create" : {
-             "_index" : "foo",
-             "status" : 400,
-             "_type" : "bar",
-             "reason":"unrecognized error"
-           }
-         }
-       ]
-     }))
-     chunk = MockChunk.new(records)
-     dummy_extracted_values = []
-     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
-     assert_equal(1, @plugin.error_events.size)
-     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
-   end
-
-   def test_retry_error
-     records = []
-     error_records = Hash.new(false)
-     error_records.merge!({0=>true, 4=>true, 9=>true})
-     10.times do |i|
-       records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
-     end
-     chunk = MockChunk.new(records)
-
-     response = parse_response(%({
-       "took" : 1,
-       "errors" : true,
-       "items" : [
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "1",
-             "status" : 201
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "2",
-             "status" : 500,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "3",
-             "status" : 409
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "5",
-             "status" : 500,
-             "error" : {
-               "reason":"unrecognized error - no type field"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "6",
-             "status" : 429,
-             "error" : {
-               "type" : "es_rejected_execution_exception",
-               "reason":"unable to fulfill request at this time, try again later"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "7",
-             "status" : 400,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         },
-         {
-           "create" : {
-             "_index" : "foo",
-             "_type" : "bar",
-             "_id" : "8",
-             "status" : 500,
-             "error" : {
-               "type" : "some unrecognized type",
-               "reason":"unrecognized error"
-             }
-           }
-         }
-       ]
-     }))
-
-     begin
-       failed = false
-       dummy_extracted_values = []
-       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
-     rescue Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
-       failed = true
-       records = [].tap do |records|
-         e.retry_stream.each {|time, record| records << record}
-       end
-       assert_equal 3, records.length
-       assert_equal 2, records[0]['_id']
-       assert_equal 6, records[1]['_id']
-       assert_equal 8, records[2]['_id']
-       error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
-       assert_equal 2, error_ids.length
-       assert_equal 5, error_ids[0]
-       assert_equal 7, error_ids[1]
-       @plugin.error_events.collect {|h| h[:error]}.each do |e|
-         assert_true e.respond_to?(:backtrace)
-       end
-     end
-     assert_true failed
-
-   end
-
- end
+ require 'helper'
+ require 'fluent/plugin/out_elasticsearch'
+ require 'fluent/plugin/elasticsearch_error_handler'
+ require 'json'
+
+ class TestElasticsearchErrorHandler < Test::Unit::TestCase
+
+   class TestPlugin
+     attr_reader :log
+     attr_reader :write_operation, :error_events
+     def initialize(log)
+       @log = log
+       @write_operation = 'index'
+       @error_events = []
+     end
+
+     def router
+       self
+     end
+
+     def emit_error_event(tag, time, record, e)
+       @error_events << {:tag => tag, :time=>time, :record=>record, :error=>e}
+     end
+
+     def process_message(tag, meta, header, time, record, bulk_message, extracted_values)
+       if record.has_key?('raise') && record['raise']
+         raise Exception('process_message')
+       end
+     end
+   end
+
+   class MockChunk
+     def initialize(records)
+       @records = records
+       @index = 0
+     end
+     def msgpack_each
+       @records.each { |item| yield(item[:time],item[:record]) }
+     end
+   end
+
+   def setup
+     Fluent::Test.setup
+     @log_device = Fluent::Test::DummyLogDevice.new
+     dl_opts = {:log_level => ServerEngine::DaemonLogger::INFO}
+     logger = ServerEngine::DaemonLogger.new(@log_device, dl_opts)
+     @log = Fluent::Log.new(logger)
+     @plugin = TestPlugin.new(@log)
+     @handler = Fluent::Plugin::ElasticsearchErrorHandler.new(@plugin)
+   end
+
+   def parse_response(value)
+     JSON.parse(value)
+   end
+
+   def test_dlq_400_responses
+     records = [{time: 123, record: {"foo" => "bar"}}]
+     response = parse_response(%({
+       "took" : 0,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "status" : 400,
+             "_type" : "bar",
+             "reason":"unrecognized error"
+           }
+         }
+       ]
+     }))
+     chunk = MockChunk.new(records)
+     dummy_extracted_values = []
+     @handler.handle_error(response, 'atag', chunk, records.length, dummy_extracted_values)
+     assert_equal(1, @plugin.error_events.size)
+     assert_true(@plugin.error_events[0][:error].respond_to?(:backtrace))
+   end
+
+   def test_retry_error
+     records = []
+     error_records = Hash.new(false)
+     error_records.merge!({0=>true, 4=>true, 9=>true})
+     10.times do |i|
+       records << {time: 12345, record: {"message"=>"record #{i}","_id"=>i,"raise"=>error_records[i]}}
+     end
+     chunk = MockChunk.new(records)
+
+     response = parse_response(%({
+       "took" : 1,
+       "errors" : true,
+       "items" : [
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "1",
+             "status" : 201
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "2",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "3",
+             "status" : 409
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "5",
+             "status" : 500,
+             "error" : {
+               "reason":"unrecognized error - no type field"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "6",
+             "status" : 429,
+             "error" : {
+               "type" : "es_rejected_execution_exception",
+               "reason":"unable to fulfill request at this time, try again later"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "7",
+             "status" : 400,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         },
+         {
+           "create" : {
+             "_index" : "foo",
+             "_type" : "bar",
+             "_id" : "8",
+             "status" : 500,
+             "error" : {
+               "type" : "some unrecognized type",
+               "reason":"unrecognized error"
+             }
+           }
+         }
+       ]
+     }))
+
+     begin
+       failed = false
+       dummy_extracted_values = []
+       @handler.handle_error(response, 'atag', chunk, response['items'].length, dummy_extracted_values)
+     rescue Fluent::Plugin::ElasticsearchOutput::RetryStreamError=>e
+       failed = true
+       records = [].tap do |records|
+         e.retry_stream.each {|time, record| records << record}
+       end
+       assert_equal 3, records.length
+       assert_equal 2, records[0]['_id']
+       assert_equal 6, records[1]['_id']
+       assert_equal 8, records[2]['_id']
+       error_ids = @plugin.error_events.collect {|h| h[:record]['_id']}
+       assert_equal 2, error_ids.length
+       assert_equal 5, error_ids[0]
+       assert_equal 7, error_ids[1]
+       @plugin.error_events.collect {|h| h[:error]}.each do |e|
+         assert_true e.respond_to?(:backtrace)
+       end
+     end
+     assert_true failed
+
+   end
+
+ end
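Read together, the assertions above pin down how handle_error splits a bulk response: successful items (201) and version conflicts (409) are dropped; items rejected with 429 or failed with a 500 that carries an error type (_id 2, 6, 8) are collected into a retry stream and re-raised as Fluent::Plugin::ElasticsearchOutput::RetryStreamError; items failed with a 400 or with a 500 lacking an error type (_id 5, 7) are emitted as error events on the router.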
@@ -1,44 +1,44 @@
- require 'helper'
- require 'date'
- require 'fluent/test/helpers'
- require 'json'
- require 'fluent/test/driver/filter'
- require 'flexmock/test_unit'
- require 'fluent/plugin/filter_elasticsearch_genid'
-
- class ElasticsearchGenidFilterTest < Test::Unit::TestCase
-   include FlexMock::TestCase
-   include Fluent::Test::Helpers
-
-   def setup
-     Fluent::Test.setup
-   end
-
-   def create_driver(conf='')
-     Fluent::Test::Driver::Filter.new(Fluent::Plugin::ElasticsearchGenidFilter).configure(conf)
-   end
-
-   def sample_record
-     {'age' => 26, 'request_id' => '42', 'parent_id' => 'parent', 'routing_id' => 'routing'}
-   end
-
-   def test_configure
-     d = create_driver
-     assert_equal '_hash', d.instance.hash_id_key
-   end
-
-   data("default" => {"hash_id_key" => "_hash"},
-        "custom_key" => {"hash_id_key" => "_edited"},
-       )
-   def test_filter(data)
-     d = create_driver("hash_id_key #{data["hash_id_key"]}")
-     flexmock(SecureRandom).should_receive(:uuid)
-       .and_return("13a0c028-bf7c-4ae2-ad03-ec09a40006df")
-     time = event_time("2017-10-15 15:00:23.34567890 UTC")
-     d.run(default_tag: 'test') do
-       d.feed(time, sample_record)
-     end
-     assert_equal(Base64.strict_encode64(SecureRandom.uuid),
-                  d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
-   end
- end
+ require 'helper'
+ require 'date'
+ require 'fluent/test/helpers'
+ require 'json'
+ require 'fluent/test/driver/filter'
+ require 'flexmock/test_unit'
+ require 'fluent/plugin/filter_elasticsearch_genid'
+
+ class ElasticsearchGenidFilterTest < Test::Unit::TestCase
+   include FlexMock::TestCase
+   include Fluent::Test::Helpers
+
+   def setup
+     Fluent::Test.setup
+   end
+
+   def create_driver(conf='')
+     Fluent::Test::Driver::Filter.new(Fluent::Plugin::ElasticsearchGenidFilter).configure(conf)
+   end
+
+   def sample_record
+     {'age' => 26, 'request_id' => '42', 'parent_id' => 'parent', 'routing_id' => 'routing'}
+   end
+
+   def test_configure
+     d = create_driver
+     assert_equal '_hash', d.instance.hash_id_key
+   end
+
+   data("default" => {"hash_id_key" => "_hash"},
+        "custom_key" => {"hash_id_key" => "_edited"},
+       )
+   def test_filter(data)
+     d = create_driver("hash_id_key #{data["hash_id_key"]}")
+     flexmock(SecureRandom).should_receive(:uuid)
+       .and_return("13a0c028-bf7c-4ae2-ad03-ec09a40006df")
+     time = event_time("2017-10-15 15:00:23.34567890 UTC")
+     d.run(default_tag: 'test') do
+       d.feed(time, sample_record)
+     end
+     assert_equal(Base64.strict_encode64(SecureRandom.uuid),
+                  d.filtered.map {|e| e.last}.first[d.instance.hash_id_key])
+   end
+ end
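The genid filter test stubs SecureRandom.uuid with flexmock and asserts that the filter stores the Base64-encoded UUID under the configured hash_id_key ('_hash' by default, '_edited' in the parameterized case). As a minimal, hypothetical usage sketch (not part of this diff, and the tag pattern is a placeholder), the filter is typically paired with the output plugin's id_key option so the generated hash becomes the Elasticsearch document _id:

    # hypothetical fluentd configuration sketch
    <filter myapp.**>
      @type elasticsearch_genid
      hash_id_key _hash          # field the filter adds, as exercised by the test above
    </filter>

    <match myapp.**>
      @type elasticsearch
      id_key _hash               # use the generated hash as the document _id
      remove_keys _hash          # optionally drop the helper field before indexing
    </match>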