fluentd 0.14.4 → 0.14.5
Potentially problematic release: this version of fluentd might be problematic.
- checksums.yaml +4 -4
- data/ChangeLog +18 -0
- data/example/in_forward.conf +3 -0
- data/example/in_forward_client.conf +37 -0
- data/example/in_forward_shared_key.conf +15 -0
- data/example/in_forward_users.conf +24 -0
- data/example/out_forward.conf +13 -13
- data/example/out_forward_client.conf +109 -0
- data/example/out_forward_shared_key.conf +36 -0
- data/example/out_forward_users.conf +65 -0
- data/example/{out_buffered_null.conf → out_null.conf} +10 -6
- data/example/secondary_file.conf +41 -0
- data/lib/fluent/agent.rb +3 -1
- data/lib/fluent/plugin/buffer.rb +5 -1
- data/lib/fluent/plugin/in_forward.rb +300 -50
- data/lib/fluent/plugin/in_tail.rb +41 -85
- data/lib/fluent/plugin/multi_output.rb +4 -0
- data/lib/fluent/plugin/out_forward.rb +326 -209
- data/lib/fluent/plugin/out_null.rb +37 -0
- data/lib/fluent/plugin/out_secondary_file.rb +128 -0
- data/lib/fluent/plugin/out_stdout.rb +38 -2
- data/lib/fluent/plugin/output.rb +13 -5
- data/lib/fluent/root_agent.rb +1 -1
- data/lib/fluent/test/startup_shutdown.rb +33 -0
- data/lib/fluent/version.rb +1 -1
- data/test/plugin/test_in_forward.rb +906 -441
- data/test/plugin/test_in_monitor_agent.rb +4 -0
- data/test/plugin/test_in_tail.rb +681 -663
- data/test/plugin/test_out_forward.rb +150 -208
- data/test/plugin/test_out_null.rb +85 -9
- data/test/plugin/test_out_secondary_file.rb +432 -0
- data/test/plugin/test_out_stdout.rb +143 -45
- data/test/test_root_agent.rb +42 -0
- metadata +14 -9
- data/lib/fluent/plugin/out_buffered_null.rb +0 -59
- data/lib/fluent/plugin/out_buffered_stdout.rb +0 -70
- data/test/plugin/test_out_buffered_null.rb +0 -79
- data/test/plugin/test_out_buffered_stdout.rb +0 -122
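
The headline addition is the new out_secondary_file plugin (data/lib/fluent/plugin/out_secondary_file.rb, with an example config in data/example/secondary_file.conf), which dumps chunks the primary output could not flush into local files. For orientation before the test diffs below, here is a rough sketch of the kind of configuration those tests exercise; the directory/basename/compress/append parameters come from the new test file, while the plugin type name, the surrounding forward output, and the concrete paths are assumptions for illustration, not taken from this diff:

    <match app.**>
      @type forward
      # primary destination settings omitted (illustrative only)
      <secondary>
        @type secondary_file            # assumed registered type name for out_secondary_file
        directory /var/log/fluent/error # hypothetical path; must be writable
        basename dump.bin               # default basename, per the tests below
        compress gzip                   # gzip or text (default)
        append false                    # false -> dump.bin.0, dump.bin.1, ... per flush
      </secondary>
    </match>

The 'should only use in secondary' test below shows that the plugin refuses to be configured outside a <secondary> section.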
data/test/plugin/test_out_null.rb

@@ -11,19 +11,95 @@ class NullOutputTest < Test::Unit::TestCase
     Fluent::Test::Driver::Output.new(Fluent::Plugin::NullOutput).configure(conf)
   end

-
-
-
+  sub_test_case 'non-buffered' do
+    test 'configure' do
+      assert_nothing_raised do
+        create_driver
+      end
+    end
+
+    test 'process' do
+      d = create_driver
+      assert_nothing_raised do
+        d.run do
+          d.feed("test", Fluent::EventTime.now, {"test" => "null"})
+        end
+      end
+      assert_equal([], d.events(tag: "test"))
     end
   end

-
-
-
-d.
-
+  sub_test_case 'buffered' do
+    test 'default chunk limit size is 100' do
+      d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
+      assert_equal 10 * 1024, d.instance.buffer_config.chunk_limit_size
+      assert d.instance.buffer_config.flush_at_shutdown
+      assert_equal ['tag'], d.instance.buffer_config.chunk_keys
+      assert d.instance.chunk_key_tag
+      assert !d.instance.chunk_key_time
+      assert_equal [], d.instance.chunk_keys
+    end
+
+    test 'writes standard formattted chunks' do
+      d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
+      t = event_time("2016-05-23 00:22:13 -0800")
+      d.run(default_tag: 'test', flush: true) do
+        d.feed(t, {"message" => "null null null"})
+        d.feed(t, {"message" => "null null"})
+        d.feed(t, {"message" => "null"})
       end
+
+      assert_equal 3, d.instance.emit_count
+      assert_equal 3, d.instance.emit_records
+    end
+
+    test 'check for chunk passed to #write' do
+      d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
+      data = []
+      d.instance.feed_proc = ->(chunk){ data << [chunk.unique_id, chunk.metadata.tag, chunk.read] }
+
+      t = event_time("2016-05-23 00:22:13 -0800")
+      d.run(default_tag: 'test', flush: true) do
+        d.feed(t, {"message" => "null null null"})
+        d.feed(t, {"message" => "null null"})
+        d.feed(t, {"message" => "null"})
+      end
+
+      assert_equal 1, data.size
+      _, tag, binary = data.first
+      events = []
+      Fluent::MessagePackFactory.unpacker.feed_each(binary){|obj| events << obj }
+      assert_equal 'test', tag
+      assert_equal [ [t, {"message" => "null null null"}], [t, {"message" => "null null"}], [t, {"message" => "null"}] ], events
+    end
+
+    test 'check for chunk passed to #try_write' do
+      d = create_driver(config_element("ROOT", "", {}, [config_element("buffer")]))
+      data = []
+      d.instance.feed_proc = ->(chunk){ data << [chunk.unique_id, chunk.metadata.tag, chunk.read] }
+      d.instance.delayed = true
+
+      t = event_time("2016-05-23 00:22:13 -0800")
+      d.run(default_tag: 'test', flush: true, shutdown: false) do
+        d.feed(t, {"message" => "null null null"})
+        d.feed(t, {"message" => "null null"})
+        d.feed(t, {"message" => "null"})
+      end
+
+      assert_equal 1, data.size
+      chunk_id, tag, binary = data.first
+      events = []
+      Fluent::MessagePackFactory.unpacker.feed_each(binary){|obj| events << obj }
+      assert_equal 'test', tag
+      assert_equal [ [t, {"message" => "null null null"}], [t, {"message" => "null null"}], [t, {"message" => "null"}] ], events
+
+      assert_equal [chunk_id], d.instance.buffer.dequeued.keys
+
+      d.instance.commit_write(chunk_id)
+
+      assert_equal [], d.instance.buffer.dequeued.keys
+
+      d.instance_shutdown
     end
-    assert_equal([], d.events(tag: "test"))
   end
 end
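
The rewritten test above reflects that out_null (and out_stdout) now absorb the removed out_buffered_null / out_buffered_stdout plugins: the same plugin runs non-buffered by default and switches to buffered mode when a <buffer> section is present (see the renamed example data/example/out_null.conf in the file list). A minimal sketch of the buffered form, assuming the standard null type name; the match pattern is illustrative:

    <match dev.**>
      @type null
      <buffer>
        # defaults asserted in the test above:
        # chunk_limit_size 10k, flush_at_shutdown true, chunk key 'tag'
      </buffer>
    </match>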
data/test/plugin/test_out_secondary_file.rb (new file)

@@ -0,0 +1,432 @@
+require_relative '../helper'
+require 'time'
+require 'fileutils'
+require 'fluent/event'
+require 'fluent/unique_id'
+require 'fluent/plugin/buffer'
+require 'fluent/plugin/out_secondary_file'
+require 'fluent/plugin/buffer/memory_chunk'
+require 'fluent/test/driver/output'
+
+class FileOutputSecondaryTest < Test::Unit::TestCase
+  include Fluent::UniqueId::Mixin
+
+  def setup
+    Fluent::Test.setup
+    FileUtils.rm_rf(TMP_DIR)
+    FileUtils.mkdir_p(TMP_DIR)
+  end
+
+  TMP_DIR = File.expand_path(File.dirname(__FILE__) + "/../tmp/out_secondary_file#{ENV['TEST_ENV_NUMBER']}")
+
+  CONFIG = %[
+    directory #{TMP_DIR}
+    basename out_file_test
+    compress gzip
+  ]
+
+  class DummyOutput < Fluent::Plugin::Output
+    def write(chunk); end
+  end
+
+  def create_primary(buffer_cofig = config_element('buffer'))
+    DummyOutput.new.configure(config_element('ROOT','',{}, [buffer_cofig]))
+  end
+
+  def create_driver(conf = CONFIG, primary = create_primary)
+    c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
+    c.instance.acts_as_secondary(primary)
+    c.configure(conf)
+  end
+
+  sub_test_case 'configture' do
+    test 'default configuration' do
+      d = create_driver %[directory #{TMP_DIR}]
+      assert_equal 'dump.bin', d.instance.basename
+      assert_equal TMP_DIR, d.instance.directory
+      assert_equal :text, d.instance.compress
+      assert_equal false, d.instance.append
+    end
+
+    test 'should be configurable' do
+      d = create_driver %[
+        directory #{TMP_DIR}
+        basename out_file_test
+        compress gzip
+        append true
+      ]
+      assert_equal 'out_file_test', d.instance.basename
+      assert_equal TMP_DIR, d.instance.directory
+      assert_equal :gzip, d.instance.compress
+      assert_equal true, d.instance.append
+    end
+
+    test 'should only use in secondary' do
+      c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
+      assert_raise Fluent::ConfigError.new("This plugin can only be used in the <secondary> section") do
+        c.configure(CONFIG)
+      end
+    end
+
+    test 'basename should not include `/`' do
+      assert_raise Fluent::ConfigError.new("basename should not include `/`") do
+        create_driver %[
+          directory #{TMP_DIR}
+          basename out/file
+        ]
+      end
+    end
+
+    test 'directory should be writable' do
+      assert_nothing_raised do
+        create_driver %[directory #{TMP_DIR}/test_dir/foo/bar/]
+      end
+
+      assert_nothing_raised do
+        FileUtils.mkdir_p("#{TMP_DIR}/test_dir")
+        File.chmod(0777, "#{TMP_DIR}/test_dir")
+        create_driver %[directory #{TMP_DIR}/test_dir/foo/bar/]
+      end
+
+      assert_raise Fluent::ConfigError.new("out_secondary_file: `#{TMP_DIR}/test_dir/foo/bar/` should be writable") do
+        FileUtils.mkdir_p("#{TMP_DIR}/test_dir")
+        File.chmod(0555, "#{TMP_DIR}/test_dir")
+        create_driver %[directory #{TMP_DIR}/test_dir/foo/bar/]
+      end
+    end
+
+    test 'should be passed directory' do
+      assert_raise Fluent::ConfigError do
+        create_driver %[]
+      end
+
+      assert_nothing_raised do
+        create_driver %[directory #{TMP_DIR}/test_dir/foo/bar/]
+      end
+    end
+  end
+
+  def check_gzipped_result(path, expect)
+    # Zlib::GzipReader has a bug of concatenated file: https://bugs.ruby-lang.org/issues/9790
+    # Following code from https://www.ruby-forum.com/topic/971591#979520
+    result = ""
+    File.open(path, "rb") { |io|
+      loop do
+        gzr = Zlib::GzipReader.new(io)
+        result << gzr.read
+        unused = gzr.unused
+        gzr.finish
+        break if unused.nil?
+        io.pos -= unused.length
+      end
+    }
+
+    assert_equal expect, result
+  end
+
+  def create_chunk(primary, metadata, es)
+    primary.buffer.generate_chunk(metadata).tap do |c|
+      c.concat(es.to_msgpack_stream, es.size) # to_msgpack_stream is standard_format
+      c.commit
+    end
+  end
+
+  sub_test_case 'write' do
+    setup do
+      @record = { 'key' => 'value' }
+      @time = event_time
+      @es = Fluent::OneEventStream.new(@time, @record)
+      @primary = create_primary
+      metadata = @primary.buffer.new_metadata
+      @chunk = create_chunk(@primary, metadata, @es)
+    end
+
+    test 'should output compressed file when compress option is gzip' do
+      d = create_driver(CONFIG, @primary)
+      path = d.instance.write(@chunk)
+
+      assert_equal "#{TMP_DIR}/out_file_test.0.gz", path
+      check_gzipped_result(path, @es.to_msgpack_stream.force_encoding('ASCII-8BIT'))
+    end
+
+    test 'should output plain text when compress option is default(text)' do
+      d = create_driver(%[
+        directory #{TMP_DIR}/
+        basename out_file_test
+      ], @primary)
+
+      path = d.instance.write(@chunk)
+
+      assert_equal "#{TMP_DIR}/out_file_test.0", path
+      assert_equal File.read(path), @es.to_msgpack_stream.force_encoding('ASCII-8BIT')
+    end
+
+    test 'path should be incremental when append option is false' do
+      d = create_driver(CONFIG, @primary)
+      packed_value = @es.to_msgpack_stream.force_encoding('ASCII-8BIT')
+
+      5.times do |i|
+        path = d.instance.write(@chunk)
+        assert_equal "#{TMP_DIR}/out_file_test.#{i}.gz", path
+        check_gzipped_result(path, packed_value)
+      end
+    end
+
+    test 'path should be unchanged when append option is true' do
+      d = create_driver(CONFIG + %[append true], @primary)
+      packed_value = @es.to_msgpack_stream.force_encoding('ASCII-8BIT')
+
+      [*1..5].each do |i|
+        path = d.instance.write(@chunk)
+        assert_equal "#{TMP_DIR}/out_file_test.gz", path
+        check_gzipped_result(path, packed_value * i)
+      end
+    end
+  end
+
+  sub_test_case 'Syntax of placeholders' do
+    data(
+      tag: '${tag}',
+      tag_index: '${tag[0]}',
+      tag_index1: '${tag[10]}',
+      variable: '${key1}',
+      variable2: '${key@value}',
+      variable3: '${key_value}',
+      variable4: '${key.value}',
+      variable5: '${key-value}',
+      variable6: '${KEYVALUE}',
+      variable7: '${tags}',
+      variable8: '${tag${key}', # matched ${key}
+    )
+    test 'matches with a valid placeholder' do |path|
+      assert Fluent::Plugin::SecondaryFileOutput::PLACEHOLDER_REGEX.match(path)
+    end
+
+    data(
+      invalid_tag: 'tag',
+      invalid_tag2: '{tag}',
+      invalid_tag3: '${tag',
+      invalid_tag4: '${tag0]}',
+      invalid_tag5: '${tag[]]}',
+      invalid_variable: '${key[0]}',
+      invalid_variable2: '${key{key2}}',
+    )
+    test "doesn't match with an invalid placeholder" do |path|
+      assert !Fluent::Plugin::SecondaryFileOutput::PLACEHOLDER_REGEX.match(path)
+    end
+  end
+
+  sub_test_case 'path' do
+    setup do
+      @record = { 'key' => 'value' }
+      @time = event_time
+      @es = Fluent::OneEventStream.new(@time, @record)
+      primary = create_primary
+      m = primary.buffer.new_metadata
+      @c = create_chunk(primary, m, @es)
+    end
+
+    test 'normal path when compress option is gzip' do
+      d = create_driver
+      path = d.instance.write(@c)
+      assert_equal "#{TMP_DIR}/out_file_test.0.gz", path
+    end
+
+    test 'normal path when compress option is default' do
+      d = create_driver %[
+        directory #{TMP_DIR}
+        basename out_file_test
+      ]
+      path = d.instance.write(@c)
+      assert_equal "#{TMP_DIR}/out_file_test.0", path
+    end
+
+    test 'normal path when append option is true' do
+      d = create_driver %[
+        directory #{TMP_DIR}
+        append true
+      ]
+      path = d.instance.write(@c)
+      assert_equal "#{TMP_DIR}/dump.bin", path
+    end
+
+    data(
+      invalid_tag: [/tag/, '${tag}'],
+      invalid_tag0: [/tag\[0\]/, '${tag[0]}'],
+      invalid_variable: [/dummy/, '${dummy}'],
+      invalid_timeformat: [/time/, '%Y%m%d'],
+    )
+    test 'raise an error when basename includes incompatible placeholder' do |(expected_message, invalid_basename)|
+      c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
+      c.instance.acts_as_secondary(DummyOutput.new)
+
+      assert_raise_message(expected_message) do
+        c.configure %[
+          directory #{TMP_DIR}/
+          basename #{invalid_basename}
+          compress gzip
+        ]
+      end
+    end
+
+    data(
+      invalid_tag: [/tag/, '${tag}'],
+      invalid_tag0: [/tag\[0\]/, '${tag[0]}'],
+      invalid_variable: [/dummy/, '${dummy}'],
+      invalid_timeformat: [/time/, '%Y%m%d'],
+    )
+    test 'raise an error when directory includes incompatible placeholder' do |(expected_message, invalid_directory)|
+      c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
+      c.instance.acts_as_secondary(DummyOutput.new)
+
+      assert_raise_message(expected_message) do
+        c.configure %[
+          directory #{invalid_directory}/
+          compress gzip
+        ]
+      end
+    end
+
+    test 'basename includes tag' do
+      primary = create_primary(config_element('buffer', 'tag'))
+
+      d = create_driver(%[
+        directory #{TMP_DIR}/
+        basename cool_${tag}
+        compress gzip
+      ], primary)
+
+      m = primary.buffer.new_metadata(tag: 'test.dummy')
+      c = create_chunk(primary, m, @es)
+
+      path = d.instance.write(c)
+      assert_equal "#{TMP_DIR}/cool_test.dummy.0.gz", path
+    end
+
+    test 'basename includes /tag[\d+]/' do
+      primary = create_primary(config_element('buffer', 'tag'))
+
+      d = create_driver(%[
+        directory #{TMP_DIR}/
+        basename cool_${tag[0]}_${tag[1]}
+        compress gzip
+      ], primary)
+
+      m = primary.buffer.new_metadata(tag: 'test.dummy')
+      c = create_chunk(primary, m, @es)
+
+      path = d.instance.write(c)
+      assert_equal "#{TMP_DIR}/cool_test_dummy.0.gz", path
+    end
+
+    test 'basename includes time format' do
+      primary = create_primary(
+        config_element('buffer', 'time', { 'timekey_zone' => '+0900', 'timekey' => 1 })
+      )
+
+      d = create_driver(%[
+        directory #{TMP_DIR}/
+        basename cool_%Y%m%d%H
+        compress gzip
+      ], primary)
+
+      m = primary.buffer.new_metadata(timekey: event_time("2011-01-02 13:14:15 UTC"))
+      c = create_chunk(primary, m, @es)
+
+      path = d.instance.write(c)
+      assert_equal "#{TMP_DIR}/cool_2011010222.0.gz", path
+    end
+
+    test 'basename includes time format with timekey_use_utc option' do
+      primary = create_primary(
+        config_element('buffer', 'time', { 'timekey_zone' => '+0900', 'timekey' => 1, 'timekey_use_utc' => true })
+      )
+
+      d = create_driver(%[
+        directory #{TMP_DIR}/
+        basename cool_%Y%m%d%H
+        compress gzip
+      ], primary)
+
+      m = primary.buffer.new_metadata(timekey: event_time("2011-01-02 13:14:15 UTC"))
+      c = create_chunk(primary, m, @es)
+
+      path = d.instance.write(c)
+      assert_equal "#{TMP_DIR}/cool_2011010213.0.gz", path
+    end
+
+    test 'basename includes variable' do
+      primary = create_primary(config_element('buffer', 'test1'))
+
+      d = create_driver(%[
+        directory #{TMP_DIR}/
+        basename cool_${test1}
+        compress gzip
+      ], primary)
+
+      m = primary.buffer.new_metadata(variables: { "test1".to_sym => "dummy" })
+      c = create_chunk(primary, m, @es)
+
+      path = d.instance.write(c)
+      assert_equal "#{TMP_DIR}/cool_dummy.0.gz", path
+    end
+
+    test 'basename includes unnecessary variable' do
+      primary = create_primary(config_element('buffer', 'test1'))
+      c = Fluent::Test::Driver::Output.new(Fluent::Plugin::SecondaryFileOutput)
+      c.instance.acts_as_secondary(primary)
+
+      assert_raise_message(/test2/) do
+        c.configure %[
+          directory #{TMP_DIR}/
+          basename ${test1}_${test2}
+          compress gzip
+        ]
+      end
+    end
+
+    test 'basename includes tag, time format, and variables' do
+      primary = create_primary(
+        config_element('buffer', 'time,tag,test1', { 'timekey_zone' => '+0000', 'timekey' => 1 })
+      )
+
+      d = create_driver(%[
+        directory #{TMP_DIR}/
+        basename cool_%Y%m%d%H_${tag}_${test1}
+        compress gzip
+      ], primary)
+
+      m = primary.buffer.new_metadata(
+        timekey: event_time("2011-01-02 13:14:15 UTC"),
+        tag: 'test.tag',
+        variables: { "test1".to_sym => "dummy" }
+      )
+
+      c = create_chunk(primary, m, @es)
+
+      path = d.instance.write(c)
+      assert_equal "#{TMP_DIR}/cool_2011010213_test.tag_dummy.0.gz", path
+    end
+
+    test 'directory includes tag, time format, and variables' do
+      primary = create_primary(
+        config_element('buffer', 'time,tag,test1', { 'timekey_zone' => '+0000', 'timekey' => 1 })
+      )
+
+      d = create_driver(%[
+        directory #{TMP_DIR}/%Y%m%d%H/${tag}/${test1}
+        compress gzip
+      ], primary)
+
+      m = primary.buffer.new_metadata(
+        timekey: event_time("2011-01-02 13:14:15 UTC"),
+        tag: 'test.tag',
+        variables: { "test1".to_sym => "dummy" }
+      )
+      c = create_chunk(primary, m, @es)
+
+      path = d.instance.write(c)
+      assert_equal "#{TMP_DIR}/2011010213/test.tag/dummy/dump.bin.0.gz", path
+    end
+  end
+end