fluentd 0.14.5-x86-mingw32 → 0.14.7-x86-mingw32
- checksums.yaml +4 -4
- data/ChangeLog +55 -0
- data/bin/fluent-binlog-reader +7 -0
- data/example/in_dummy_with_compression.conf +23 -0
- data/lib/fluent/agent.rb +8 -12
- data/lib/fluent/command/binlog_reader.rb +234 -0
- data/lib/fluent/command/fluentd.rb +17 -1
- data/lib/fluent/compat/file_util.rb +1 -1
- data/lib/fluent/compat/output.rb +5 -1
- data/lib/fluent/config/configure_proxy.rb +18 -3
- data/lib/fluent/config/element.rb +1 -1
- data/lib/fluent/config/section.rb +1 -1
- data/lib/fluent/config/v1_parser.rb +1 -1
- data/lib/fluent/env.rb +1 -0
- data/lib/fluent/event.rb +49 -2
- data/lib/fluent/event_router.rb +6 -2
- data/lib/fluent/label.rb +8 -0
- data/lib/fluent/log.rb +30 -1
- data/lib/fluent/plugin.rb +1 -1
- data/lib/fluent/plugin/base.rb +3 -0
- data/lib/fluent/plugin/buf_file.rb +2 -2
- data/lib/fluent/plugin/buf_memory.rb +1 -1
- data/lib/fluent/plugin/buffer.rb +12 -2
- data/lib/fluent/plugin/buffer/chunk.rb +68 -7
- data/lib/fluent/plugin/buffer/file_chunk.rb +4 -4
- data/lib/fluent/plugin/buffer/memory_chunk.rb +4 -4
- data/lib/fluent/plugin/compressable.rb +91 -0
- data/lib/fluent/plugin/filter_grep.rb +4 -4
- data/lib/fluent/plugin/formatter.rb +2 -2
- data/lib/fluent/plugin/formatter_json.rb +2 -1
- data/lib/fluent/plugin/formatter_out_file.rb +3 -30
- data/lib/fluent/plugin/in_forward.rb +6 -5
- data/lib/fluent/plugin/in_monitor_agent.rb +7 -21
- data/lib/fluent/plugin/in_syslog.rb +1 -1
- data/lib/fluent/plugin/in_tail.rb +11 -2
- data/lib/fluent/plugin/multi_output.rb +63 -3
- data/lib/fluent/plugin/out_exec.rb +1 -1
- data/lib/fluent/plugin/out_file.rb +5 -1
- data/lib/fluent/plugin/out_forward.rb +17 -5
- data/lib/fluent/plugin/out_stdout.rb +2 -1
- data/lib/fluent/plugin/output.rb +205 -19
- data/lib/fluent/plugin/parser.rb +5 -49
- data/lib/fluent/plugin/parser_apache2.rb +1 -1
- data/lib/fluent/plugin/parser_json.rb +4 -4
- data/lib/fluent/plugin/parser_multiline.rb +5 -5
- data/lib/fluent/plugin/parser_regexp.rb +1 -2
- data/lib/fluent/plugin/parser_syslog.rb +2 -2
- data/lib/fluent/plugin/storage_local.rb +2 -1
- data/lib/fluent/plugin_helper.rb +1 -0
- data/lib/fluent/plugin_helper/compat_parameters.rb +39 -21
- data/lib/fluent/plugin_helper/extract.rb +92 -0
- data/lib/fluent/plugin_helper/inject.rb +10 -12
- data/lib/fluent/plugin_helper/thread.rb +23 -3
- data/lib/fluent/registry.rb +1 -1
- data/lib/fluent/root_agent.rb +2 -1
- data/lib/fluent/supervisor.rb +28 -8
- data/lib/fluent/test/base.rb +0 -7
- data/lib/fluent/test/driver/base.rb +1 -0
- data/lib/fluent/test/driver/output.rb +3 -0
- data/lib/fluent/test/helpers.rb +18 -0
- data/lib/fluent/test/input_test.rb +4 -2
- data/lib/fluent/test/log.rb +3 -1
- data/lib/fluent/time.rb +232 -1
- data/lib/fluent/timezone.rb +1 -1
- data/lib/fluent/version.rb +1 -1
- data/test/command/test_binlog_reader.rb +351 -0
- data/test/config/test_config_parser.rb +6 -0
- data/test/config/test_configurable.rb +47 -1
- data/test/helper.rb +0 -1
- data/test/plugin/test_buffer.rb +22 -2
- data/test/plugin/test_buffer_chunk.rb +34 -4
- data/test/plugin/test_buffer_file_chunk.rb +73 -0
- data/test/plugin/test_buffer_memory_chunk.rb +73 -0
- data/test/plugin/test_compressable.rb +81 -0
- data/test/plugin/test_formatter_json.rb +14 -1
- data/test/plugin/test_in_forward.rb +67 -3
- data/test/plugin/test_in_monitor_agent.rb +17 -1
- data/test/plugin/test_in_tail.rb +8 -8
- data/test/plugin/test_out_file.rb +0 -8
- data/test/plugin/test_out_forward.rb +85 -0
- data/test/plugin/test_out_secondary_file.rb +20 -12
- data/test/plugin/test_out_stdout.rb +11 -10
- data/test/plugin/test_output.rb +234 -0
- data/test/plugin/test_output_as_buffered.rb +223 -0
- data/test/plugin/test_output_as_buffered_compress.rb +165 -0
- data/test/plugin/test_parser_json.rb +8 -0
- data/test/plugin/test_parser_regexp.rb +1 -1
- data/test/plugin_helper/test_child_process.rb +2 -2
- data/test/plugin_helper/test_extract.rb +195 -0
- data/test/plugin_helper/test_inject.rb +0 -7
- data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
- data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
- data/test/test_event.rb +186 -0
- data/test/test_event_router.rb +1 -1
- data/test/test_formatter.rb +0 -7
- data/test/test_log.rb +121 -0
- data/test/test_plugin_classes.rb +62 -0
- data/test/test_root_agent.rb +125 -0
- data/test/test_supervisor.rb +25 -2
- data/test/test_time_formatter.rb +103 -7
- data/test/test_time_parser.rb +211 -0
- metadata +22 -4
- data/test/plugin/test_parser_time.rb +0 -46
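Several of the files above (lib/fluent/plugin/compressable.rb, lib/fluent/plugin/buffer/chunk.rb, example/in_dummy_with_compression.conf, and the new *_compress tests) revolve around gzip compression of buffer chunks. A minimal sketch of the Compressable mixin as the tests further below use it; the compress call is an assumption based on the presence of test/plugin/test_compressable.rb, while decompress appears verbatim in the diffs that follow.

require 'fluent/plugin/compressable'

include Fluent::Plugin::Compressable  # mixes the compress/decompress helpers into this context

original = 'x' * 1024
gzipped  = compress(original)         # assumed counterpart of decompress
decompress(gzipped) == original       # => true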
data/test/plugin/test_output_as_buffered.rb
@@ -56,6 +56,47 @@ module FluentPluginOutputAsBufferedTest
       super
     end
   end
+  class DummyStandardBufferedOutput < DummyBareOutput
+    def initialize
+      super
+      @prefer_delayed_commit = nil
+      @write = nil
+      @try_write = nil
+    end
+    def prefer_delayed_commit
+      @prefer_delayed_commit ? @prefer_delayed_commit.call : false
+    end
+    def write(chunk)
+      @write ? @write.call(chunk) : nil
+    end
+    def try_write(chunk)
+      @try_write ? @try_write.call(chunk) : nil
+    end
+  end
+  class DummyCustomFormatBufferedOutput < DummyBareOutput
+    def initialize
+      super
+      @format_type_is_msgpack = nil
+      @prefer_delayed_commit = nil
+      @write = nil
+      @try_write = nil
+    end
+    def format(tag, time, record)
+      @format ? @format.call(tag, time, record) : [tag, time, record].to_json
+    end
+    def formatted_to_msgpack_binary
+      @format_type_is_msgpack ? @format_type_is_msgpack.call : false
+    end
+    def prefer_delayed_commit
+      @prefer_delayed_commit ? @prefer_delayed_commit.call : false
+    end
+    def write(chunk)
+      @write ? @write.call(chunk) : nil
+    end
+    def try_write(chunk)
+      @try_write ? @try_write.call(chunk) : nil
+    end
+  end
   class DummyFullFeatureOutput < DummyBareOutput
     def initialize
       super
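DummyCustomFormatBufferedOutput above mirrors what a real custom-format plugin would do to keep its chunks iterable. A minimal sketch under that assumption; the plugin class and registered name are hypothetical and only for illustration (see the tests added further below for the behavior being exercised).

require 'msgpack'
require 'fluent/plugin/output'

module Fluent::Plugin
  # Hypothetical plugin: it packs entries itself but reports the payload as
  # msgpack, so the core can still offer chunk.each inside #write.
  class MsgpackFormatExampleOutput < Output
    Fluent::Plugin.register_output('msgpack_format_example', self)

    def format(tag, time, record)
      [tag, time, record].to_msgpack
    end

    def formatted_to_msgpack_binary
      true
    end

    def write(chunk)
      chunk.each do |tag, time, record|
        log.debug "flushing #{tag} #{time} #{record}"
      end
    end
  end
end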
data/test/plugin/test_output_as_buffered.rb
@@ -94,6 +135,8 @@ class BufferedOutputTest < Test::Unit::TestCase
     when :sync then FluentPluginOutputAsBufferedTest::DummySyncOutput.new
     when :buffered then FluentPluginOutputAsBufferedTest::DummyAsyncOutput.new
     when :delayed then FluentPluginOutputAsBufferedTest::DummyDelayedOutput.new
+    when :standard then FluentPluginOutputAsBufferedTest::DummyStandardBufferedOutput.new
+    when :custom then FluentPluginOutputAsBufferedTest::DummyCustomFormatBufferedOutput.new
     when :full then FluentPluginOutputAsBufferedTest::DummyFullFeatureOutput.new
     else
       raise ArgumentError, "unknown type: #{type}"
data/test/plugin/test_output_as_buffered.rb
@@ -125,6 +168,186 @@ class BufferedOutputTest < Test::Unit::TestCase
     Timecop.return
   end
 
+  sub_test_case 'chunk feature in #write for output plugins' do
+    setup do
+      @stored_global_logger = $log
+      $log = Fluent::Test::TestLogger.new
+      @hash = {
+        'flush_mode' => 'immediate',
+        'flush_thread_interval' => '0.01',
+        'flush_thread_burst_interval' => '0.01',
+      }
+    end
+
+    teardown do
+      $log = @stored_global_logger
+    end
+
+    test 'plugin using standard format can iterate chunk for time, record in #write' do
+      events_from_chunk = []
+      @i = create_output(:standard)
+      @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
+      @i.register(:prefer_delayed_commit){ false }
+      @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:write, e] }
+      @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:try_write, e] }
+      @i.start
+      @i.after_start
+
+      events = [
+        [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
+        [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
+      ]
+
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+
+      waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
+
+      assert_equal 2, events_from_chunk.size
+      2.times.each do |i|
+        assert_equal :write, events_from_chunk[i][0]
+        assert_equal events, events_from_chunk[i][1]
+      end
+    end
+
+    test 'plugin using standard format can iterate chunk for time, record in #try_write' do
+      events_from_chunk = []
+      @i = create_output(:standard)
+      @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
+      @i.register(:prefer_delayed_commit){ true }
+      @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:write, e] }
+      @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:try_write, e] }
+      @i.start
+      @i.after_start
+
+      events = [
+        [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
+        [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
+      ]
+
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+
+      waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
+
+      assert_equal 2, events_from_chunk.size
+      2.times.each do |i|
+        assert_equal :try_write, events_from_chunk[i][0]
+        assert_equal events, events_from_chunk[i][1]
+      end
+    end
+
+    test 'plugin using custom format cannot iterate chunk in #write' do
+      events_from_chunk = []
+      @i = create_output(:custom)
+      @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
+      @i.register(:prefer_delayed_commit){ false }
+      @i.register(:format){ |tag, time, record| [tag,time,record].to_json }
+      @i.register(:format_type_is_msgpack){ false }
+      @i.register(:write){ |chunk| assert !(chunk.respond_to?(:each)) }
+      @i.register(:try_write){ |chunk| assert !(chunk.respond_to?(:each)) }
+      @i.start
+      @i.after_start
+
+      events = [
+        [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
+        [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
+      ]
+
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+
+      assert_equal 0, events_from_chunk.size
+    end
+
+    test 'plugin using custom format cannot iterate chunk in #try_write' do
+      events_from_chunk = []
+      @i = create_output(:custom)
+      @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
+      @i.register(:prefer_delayed_commit){ true }
+      @i.register(:format){ |tag, time, record| [tag,time,record].to_json }
+      @i.register(:format_type_is_msgpack){ false }
+      @i.register(:write){ |chunk| assert !(chunk.respond_to?(:each)) }
+      @i.register(:try_write){ |chunk| assert !(chunk.respond_to?(:each)) }
+      @i.start
+      @i.after_start
+
+      events = [
+        [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
+        [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
+      ]
+
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+
+      assert_equal 0, events_from_chunk.size
+    end
+
+    test 'plugin using custom format can iterate chunk in #write if #format returns msgpack' do
+      events_from_chunk = []
+      @i = create_output(:custom)
+      @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
+      @i.register(:prefer_delayed_commit){ false }
+      @i.register(:format){ |tag, time, record| [tag,time,record].to_msgpack }
+      @i.register(:format_type_is_msgpack){ true }
+      @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:write, e] }
+      @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:try_write, e] }
+      @i.start
+      @i.after_start
+
+      events = [
+        [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
+        [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
+      ]
+
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+
+      waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
+
+      assert_equal 2, events_from_chunk.size
+      2.times.each do |i|
+        assert_equal :write, events_from_chunk[i][0]
+        each_pushed = events_from_chunk[i][1]
+        assert_equal 2, each_pushed.size
+        assert_equal 'test.tag', each_pushed[0][0]
+        assert_equal 'test.tag', each_pushed[1][0]
+        assert_equal events, each_pushed.map{|tag,time,record| [time,record]}
+      end
+    end
+
+    test 'plugin using custom format can iterate chunk in #try_write if #format returns msgpack' do
+      events_from_chunk = []
+      @i = create_output(:custom)
+      @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
+      @i.register(:prefer_delayed_commit){ true }
+      @i.register(:format){ |tag, time, record| [tag,time,record].to_msgpack }
+      @i.register(:format_type_is_msgpack){ true }
+      @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:write, e] }
+      @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:try_write, e] }
+      @i.start
+      @i.after_start
+
+      events = [
+        [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
+        [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
+      ]
+
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+      @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
+
+      waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
+
+      assert_equal 2, events_from_chunk.size
+      2.times.each do |i|
+        assert_equal :try_write, events_from_chunk[i][0]
+        each_pushed = events_from_chunk[i][1]
+        assert_equal 2, each_pushed.size
+        assert_equal 'test.tag', each_pushed[0][0]
+        assert_equal 'test.tag', each_pushed[1][0]
+        assert_equal events, each_pushed.map{|tag,time,record| [time,record]}
+      end
+    end
+  end
+
   sub_test_case 'buffered output configured with many chunk keys' do
     setup do
       @stored_global_logger = $log
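For plugin authors, the tests above boil down to this: with the standard format, the chunk handed to #write or #try_write responds to #each and yields time and record; with a custom #format it does not, unless the payload is msgpack and formatted_to_msgpack_binary returns true. A minimal sketch of the standard-format case; the plugin name is hypothetical, and chunk.metadata.tag assumes the buffer is keyed by tag.

require 'fluent/plugin/output'

module Fluent::Plugin
  class EachExampleOutput < Output   # hypothetical example plugin
    Fluent::Plugin.register_output('each_example', self)

    def write(chunk)
      tag = chunk.metadata.tag  # populated only when 'tag' is among the chunk keys
      chunk.each do |time, record|
        log.info "#{tag} #{time} #{record}"
      end
    end
  end
end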
data/test/plugin/test_output_as_buffered_compress.rb (new file)
@@ -0,0 +1,165 @@
+require_relative '../helper'
+require 'fluent/plugin/output'
+require 'fluent/plugin/buffer'
+require 'fluent/plugin/compressable'
+require 'fluent/event'
+
+require 'timeout'
+
+module FluentPluginOutputAsBufferedCompressTest
+  class DummyBareOutput < Fluent::Plugin::Output
+    def register(name, &block)
+      instance_variable_set("@#{name}", block)
+    end
+  end
+
+  class DummyAsyncOutput < DummyBareOutput
+    def initialize
+      super
+      @format = @write = nil
+    end
+    def write(chunk)
+      @write ? @write.call(chunk) : nil
+    end
+  end
+
+  class DummyAsyncOutputWithFormat < DummyBareOutput
+    def initialize
+      super
+      @format = nil
+    end
+    def write(chunk)
+      @write ? @write.call(chunk) : nil
+    end
+    def format(tag, time, record)
+      @format ? @format.call(tag, time, record) : [tag, time, record].to_json
+    end
+  end
+end
+
+class BufferedOutputCompressTest < Test::Unit::TestCase
+  include Fluent::Plugin::Compressable
+
+  def create_output(type=:async)
+    case type
+    when :async then FluentPluginOutputAsBufferedCompressTest::DummyAsyncOutput.new
+    when :async_with_format then FluentPluginOutputAsBufferedCompressTest::DummyAsyncOutputWithFormat.new
+    else
+      raise ArgumentError, "unknown type: #{type}"
+    end
+  end
+
+  def waiting(seconds)
+    begin
+      Timeout.timeout(seconds) do
+        yield
+      end
+    rescue Timeout::Error
+      STDERR.print(*@i.log.out.logs)
+      raise
+    end
+  end
+
+  def dummy_event_stream
+    Fluent::ArrayEventStream.new(
+      [
+        [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
+        [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
+        [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
+      ]
+    )
+  end
+
+  TMP_DIR = File.expand_path('../../tmp/test_output_as_buffered_compress', __FILE__)
+
+  setup do
+    FileUtils.rm_r TMP_DIR rescue nil
+    FileUtils.mkdir_p TMP_DIR
+  end
+
+  teardown do
+    if @i
+      @i.stop unless @i.stopped?
+      @i.before_shutdown unless @i.before_shutdown?
+      @i.shutdown unless @i.shutdown?
+      @i.after_shutdown unless @i.after_shutdown?
+      @i.close unless @i.closed?
+      @i.terminate unless @i.terminated?
+    end
+  end
+
+  data(
+    handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+    handle_stream_with_standard_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+    handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+    handle_stream_with_standard_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+  )
+  test 'call a standard format when output plugin adds data to chunk' do |buffer_config|
+    @i = create_output(:async)
+    @i.configure(config_element('ROOT','', {}, [buffer_config]))
+    @i.start
+    @i.after_start
+
+    io = StringIO.new
+    es = dummy_event_stream
+    expected = es.map { |e| e }
+    compressed_data = ''
+
+    assert_equal :gzip, @i.buffer.compress
+
+    @i.register(:write) do |c|
+      compressed_data = c.instance_variable_get(:@chunk)
+      if compressed_data.is_a?(File)
+        compressed_data.seek(0, IO::SEEK_SET)
+        compressed_data = compressed_data.read
+      end
+      c.write_to(io)
+    end
+
+    @i.emit_events('tag', es)
+    @i.enqueue_thread_wait
+    @i.flush_thread_wakeup
+    waiting(4) { Thread.pass until io.size > 0 }
+
+    assert_equal expected, Fluent::MessagePackEventStream.new(decompress(compressed_data)).map { |t, r| [t, r] }
+    assert_equal expected, Fluent::MessagePackEventStream.new(io.string).map { |t, r| [t, r] }
+  end
+
+  data(
+    handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+    handle_stream_with_custom_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+    handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+    handle_stream_with_custom_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+  )
+  test 'call a custom format when output plugin adds data to chunk' do |buffer_config|
+    @i = create_output(:async_with_format)
+    @i.configure(config_element('ROOT','', {}, [buffer_config]))
+    @i.start
+    @i.after_start
+
+    io = StringIO.new
+    es = dummy_event_stream
+    expected = es.map { |e| "#{e[1]}\n" }.join # e[1] is record
+    compressed_data = ''
+
+    assert_equal :gzip, @i.buffer.compress
+
+    @i.register(:format) { |tag, time, record| "#{record}\n" }
+    @i.register(:write) { |c|
+      compressed_data = c.instance_variable_get(:@chunk)
+      if compressed_data.is_a?(File)
+        compressed_data.seek(0, IO::SEEK_SET)
+        compressed_data = compressed_data.read
+      end
+      c.write_to(io)
+    }
+
+    @i.emit_events('tag', es)
+    @i.enqueue_thread_wait
+    @i.flush_thread_wakeup
+    waiting(4) { Thread.pass until io.size > 0 }
+
+    assert_equal expected, decompress(compressed_data)
+    assert_equal expected, io.string
+  end
+end
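The assertions above also pin down the behavior under compress gzip: the chunk's stored payload (@chunk) is gzipped, while chunk.write_to(io) hands the plugin decompressed bytes (the tests parse io.string directly without gunzipping). A minimal sketch of a write method that relies on that transparent decompression; the plugin class, registered name, and path parameter are hypothetical.

require 'fluent/plugin/output'

module Fluent::Plugin
  class PassthroughExampleOutput < Output   # hypothetical example plugin
    Fluent::Plugin.register_output('passthrough_example', self)

    config_param :path, :string

    def write(chunk)
      File.open(@path, 'ab') do |f|
        chunk.write_to(f)  # writes decompressed content even when the buffer uses compress gzip
      end
    end
  end
end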
data/test/plugin/test_parser_json.rb
@@ -56,6 +56,14 @@ class JsonParserTest < ::Test::Unit::TestCase
     }
   end
 
+  data('oj' => 'oj', 'yajl' => 'yajl')
+  def test_parse_with_colon_string(data)
+    @parser.configure('json_parser' => data)
+    @parser.instance.parse('{"time":1362020400,"log":":message"}') { |time, record|
+      assert_equal(record['log'], ':message')
+    }
+  end
+
   data('oj' => 'oj', 'yajl' => 'yajl')
   def test_parse_with_invalid_time(data)
     @parser.configure('json_parser' => data)
data/test/plugin/test_parser_regexp.rb
@@ -44,7 +44,7 @@ class RegexpParserTest < ::Test::Unit::TestCase
   end
 
   def test_parse_with_configure
-    # Specify conf by configure method
+    # Specify conf by configure method instead of initializer
     regexp = Regexp.new(%q!^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] \[(?<date>[^\]]*)\] "(?<flag>\S+)(?: +(?<path>[^ ]*) +\S*)?" (?<code>[^ ]*) (?<size>[^ ]*)$!)
     conf = {
       'time_format' => "%d/%b/%Y:%H:%M:%S %z",
data/test/plugin_helper/test_child_process.rb
@@ -213,7 +213,7 @@ class ChildProcessTest < Test::Unit::TestCase
     end
     sleep TEST_WAIT_INTERVAL_FOR_BLOCK_RUNNING until m.locked? || ran
     sleep TEST_WAIT_INTERVAL_FOR_LOOP * 10
-    @d.stop # nothing
+    @d.stop # nothing occurs
    @d.shutdown
 
     assert{ ary.size > 5 }
data/test/plugin_helper/test_child_process.rb
@@ -249,7 +249,7 @@ class ChildProcessTest < Test::Unit::TestCase
 
     assert_equal [], @d.log.out.logs
 
-    @d.stop # nothing
+    @d.stop # nothing occurs
     sleep TEST_WAIT_INTERVAL_FOR_LOOP * 5
     lines1 = ary.size
     assert{ lines1 > 1 }