fluentd 1.6.3-x64-mingw32 → 1.7.0-x64-mingw32
Potentially problematic release: this version of fluentd might be problematic.
- checksums.yaml +4 -4
- data/.drone.yml +35 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +2 -0
- data/CHANGELOG.md +58 -0
- data/README.md +5 -1
- data/fluentd.gemspec +1 -1
- data/lib/fluent/clock.rb +4 -0
- data/lib/fluent/compat/output.rb +3 -3
- data/lib/fluent/compat/socket_util.rb +1 -1
- data/lib/fluent/config/element.rb +3 -3
- data/lib/fluent/config/literal_parser.rb +1 -1
- data/lib/fluent/config/section.rb +4 -1
- data/lib/fluent/error.rb +4 -0
- data/lib/fluent/event.rb +28 -24
- data/lib/fluent/event_router.rb +2 -1
- data/lib/fluent/log.rb +1 -1
- data/lib/fluent/msgpack_factory.rb +8 -0
- data/lib/fluent/plugin/bare_output.rb +4 -4
- data/lib/fluent/plugin/buf_file_single.rb +211 -0
- data/lib/fluent/plugin/buffer.rb +62 -63
- data/lib/fluent/plugin/buffer/chunk.rb +21 -3
- data/lib/fluent/plugin/buffer/file_chunk.rb +37 -12
- data/lib/fluent/plugin/buffer/file_single_chunk.rb +314 -0
- data/lib/fluent/plugin/buffer/memory_chunk.rb +2 -1
- data/lib/fluent/plugin/compressable.rb +10 -6
- data/lib/fluent/plugin/filter_grep.rb +2 -2
- data/lib/fluent/plugin/formatter_csv.rb +10 -6
- data/lib/fluent/plugin/in_syslog.rb +10 -3
- data/lib/fluent/plugin/in_tail.rb +7 -2
- data/lib/fluent/plugin/in_tcp.rb +34 -7
- data/lib/fluent/plugin/multi_output.rb +4 -4
- data/lib/fluent/plugin/out_exec_filter.rb +1 -0
- data/lib/fluent/plugin/out_file.rb +13 -3
- data/lib/fluent/plugin/out_forward.rb +126 -588
- data/lib/fluent/plugin/out_forward/ack_handler.rb +161 -0
- data/lib/fluent/plugin/out_forward/connection_manager.rb +113 -0
- data/lib/fluent/plugin/out_forward/error.rb +28 -0
- data/lib/fluent/plugin/out_forward/failure_detector.rb +84 -0
- data/lib/fluent/plugin/out_forward/handshake_protocol.rb +121 -0
- data/lib/fluent/plugin/out_forward/load_balancer.rb +111 -0
- data/lib/fluent/plugin/out_forward/socket_cache.rb +138 -0
- data/lib/fluent/plugin/out_http.rb +231 -0
- data/lib/fluent/plugin/output.rb +29 -35
- data/lib/fluent/plugin/parser.rb +77 -0
- data/lib/fluent/plugin/parser_csv.rb +75 -0
- data/lib/fluent/plugin_helper/server.rb +1 -1
- data/lib/fluent/plugin_helper/thread.rb +1 -0
- data/lib/fluent/root_agent.rb +1 -1
- data/lib/fluent/time.rb +4 -2
- data/lib/fluent/timezone.rb +21 -7
- data/lib/fluent/version.rb +1 -1
- data/test/command/test_fluentd.rb +1 -1
- data/test/command/test_plugin_generator.rb +18 -2
- data/test/config/test_configurable.rb +78 -40
- data/test/counter/test_store.rb +1 -1
- data/test/helper.rb +1 -0
- data/test/helpers/process_extenstion.rb +33 -0
- data/test/plugin/out_forward/test_ack_handler.rb +101 -0
- data/test/plugin/out_forward/test_connection_manager.rb +145 -0
- data/test/plugin/out_forward/test_handshake_protocol.rb +103 -0
- data/test/plugin/out_forward/test_load_balancer.rb +60 -0
- data/test/plugin/out_forward/test_socket_cache.rb +139 -0
- data/test/plugin/test_buf_file.rb +118 -2
- data/test/plugin/test_buf_file_single.rb +734 -0
- data/test/plugin/test_buffer.rb +4 -48
- data/test/plugin/test_buffer_file_chunk.rb +19 -1
- data/test/plugin/test_buffer_file_single_chunk.rb +620 -0
- data/test/plugin/test_formatter_csv.rb +16 -0
- data/test/plugin/test_in_syslog.rb +56 -6
- data/test/plugin/test_in_tail.rb +1 -1
- data/test/plugin/test_in_tcp.rb +25 -0
- data/test/plugin/test_out_forward.rb +75 -201
- data/test/plugin/test_out_http.rb +352 -0
- data/test/plugin/test_output_as_buffered.rb +27 -24
- data/test/plugin/test_parser.rb +40 -0
- data/test/plugin/test_parser_csv.rb +83 -0
- data/test/plugin_helper/test_record_accessor.rb +1 -1
- data/test/test_time_formatter.rb +140 -121
- metadata +33 -4
data/test/plugin/test_buf_file.rb

@@ -27,7 +27,7 @@ class FileBufferTest < Test::Unit::TestCase
     Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
   end
 
-  def write_metadata(path, chunk_id, metadata, size, ctime, mtime)
+  def write_metadata_old(path, chunk_id, metadata, size, ctime, mtime)
     metadata = {
       timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
       id: chunk_id,
@@ -40,6 +40,22 @@ class FileBufferTest < Test::Unit::TestCase
     end
   end
 
+  def write_metadata(path, chunk_id, metadata, size, ctime, mtime)
+    metadata = {
+      timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
+      id: chunk_id,
+      s: size,
+      c: ctime,
+      m: mtime,
+    }
+
+    data = metadata.to_msgpack
+    size = [data.size].pack('N')
+    File.open(path, 'wb') do |f|
+      f.write(Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER + size + data)
+    end
+  end
+
   sub_test_case 'non configured buffer plugin instance' do
     setup do
       Fluent::Test.setup
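The write_metadata helper added above pins down the on-disk layout of the new .meta files: the FileChunk::BUFFER_HEADER magic bytes, a 4-byte big-endian length (pack('N')), then the msgpack-encoded metadata hash. A minimal reader sketch for that layout (illustrative only, not part of this diff; read_metadata_sketch is a made-up name):

require 'msgpack'

# Parse a metadata blob written by write_metadata above.
# `header` is the magic string, e.g. Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER.
def read_metadata_sketch(path, header)
  bin = File.binread(path)
  raise 'unexpected file: magic header mismatch' unless bin.start_with?(header)
  body = bin.byteslice(header.bytesize, bin.bytesize - header.bytesize)
  len = body.byteslice(0, 4).unpack1('N')     # big-endian length written as [data.size].pack('N')
  MessagePack.unpack(body.byteslice(4, len))  # => hash with "timekey", "tag", "id", "s", "c", "m"
end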
@@ -683,7 +699,107 @@ class FileBufferTest < Test::Unit::TestCase
     end
   end
 
-  sub_test_case 'there are some existing file chunks with old format metadata file' do
+  sub_test_case 'there are some existing file chunks with old format metadta' do
+    setup do
+      @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
+      FileUtils.mkdir_p @bufdir unless File.exist?(@bufdir)
+
+      @c1id = Fluent::UniqueId.generate
+      p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
+      File.open(p1, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+      write_metadata_old(
+        p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
+        4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
+      )
+
+      @c2id = Fluent::UniqueId.generate
+      p2 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c2id)}.log")
+      File.open(p2, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+      write_metadata_old(
+        p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
+        3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
+      )
+
+      @c3id = Fluent::UniqueId.generate
+      p3 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c3id)}.log")
+      File.open(p3, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+      write_metadata_old(
+        p3 + '.meta', @c3id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
+        4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
+      )
+
+      @c4id = Fluent::UniqueId.generate
+      p4 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c4id)}.log")
+      File.open(p4, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+      write_metadata_old(
+        p4 + '.meta', @c4id, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
+        3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
+      )
+
+      @bufpath = File.join(@bufdir, 'etest.*.log')
+
+      Fluent::Test.setup
+      @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
+      @p = Fluent::Plugin::FileBuffer.new
+      @p.owner = @d
+      @p.configure(config_element('buffer', '', {'path' => @bufpath}))
+      @p.start
+    end
+
+    teardown do
+      if @p
+        @p.stop unless @p.stopped?
+        @p.before_shutdown unless @p.before_shutdown?
+        @p.shutdown unless @p.shutdown?
+        @p.after_shutdown unless @p.after_shutdown?
+        @p.close unless @p.closed?
+        @p.terminate unless @p.terminated?
+      end
+      if @bufdir
+        Dir.glob(File.join(@bufdir, '*')).each do |path|
+          next if ['.', '..'].include?(File.basename(path))
+          File.delete(path)
+        end
+      end
+    end
+
+    test '#resume returns staged/queued chunks with metadata' do
+      assert_equal 2, @p.stage.size
+      assert_equal 2, @p.queue.size
+
+      stage = @p.stage
+
+      m3 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
+      assert_equal @c3id, stage[m3].unique_id
+      assert_equal 4, stage[m3].size
+      assert_equal :staged, stage[m3].state
+
+      m4 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
+      assert_equal @c4id, stage[m4].unique_id
+      assert_equal 3, stage[m4].size
+      assert_equal :staged, stage[m4].state
+    end
+  end
+
+  sub_test_case 'there are some existing file chunks with old format metadata file' do
     setup do
       @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
 
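Both the fixtures above and the #resume assertions rely on the buffer's file-naming convention: the single letter before the hex chunk id records the chunk state, b for staged and q for queued. A throwaway illustration of that convention (hypothetical helper, not from the diff):

# "etest.b<hex>.log" / "fsb.testing.b<hex>.buf" resume as staged chunks,
# while the "q"-marked variants resume as queued.
def chunk_state_from_path(path)
  case File.basename(path)
  when /\.b[0-9a-f]+\./ then :staged
  when /\.q[0-9a-f]+\./ then :queued
  end
end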
data/test/plugin/test_buf_file_single.rb

@@ -0,0 +1,734 @@
+require_relative '../helper'
+require 'fluent/plugin/buf_file_single'
+require 'fluent/plugin/output'
+require 'fluent/unique_id'
+require 'fluent/system_config'
+require 'fluent/env'
+require 'fluent/test/driver/output'
+
+require 'msgpack'
+
+module FluentPluginFileSingleBufferTest
+  class DummyOutputPlugin < Fluent::Plugin::Output
+    Fluent::Plugin.register_output('buf_file_single_test', self)
+    config_section :buffer do
+      config_set_default :@type, 'file_single'
+    end
+    def multi_workers_ready?
+      true
+    end
+    def write(chunk)
+      # drop
+    end
+  end
+
+  class DummyOutputMPPlugin < Fluent::Plugin::Output
+    Fluent::Plugin.register_output('buf_file_single_mp_test', self)
+    config_section :buffer do
+      config_set_default :@type, 'file_single'
+    end
+    def formatted_to_msgpack_binary?
+      true
+    end
+    def multi_workers_ready?
+      true
+    end
+    def write(chunk)
+      # drop
+    end
+  end
+end
+
+class FileSingleBufferTest < Test::Unit::TestCase
+  def metadata(timekey: nil, tag: 'testing', variables: nil)
+    Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
+  end
+
+  PATH = File.expand_path('../../tmp/buffer_file_single_dir', __FILE__)
+  TAG_CONF = %[
+    <buffer tag>
+      @type file_single
+      path #{PATH}
+    </buffer>
+  ]
+  FIELD_CONF = %[
+    <buffer k>
+      @type file_single
+      path #{PATH}
+    </buffer>
+  ]
+
+  setup do
+    Fluent::Test.setup
+
+    @d = nil
+    @bufdir = PATH
+    FileUtils.rm_rf(@bufdir) rescue nil
+    FileUtils.mkdir_p(@bufdir)
+  end
+
+  teardown do
+    FileUtils.rm_rf(@bufdir) rescue nil
+  end
+
+  def create_driver(conf = TAG_CONF, klass = FluentPluginFileSingleBufferTest::DummyOutputPlugin)
+    Fluent::Test::Driver::Output.new(klass).configure(conf)
+  end
+
+  sub_test_case 'configuration' do
+    test 'path has "fsb" prefix and "buf" suffix by default' do
+      @d = create_driver
+      p = @d.instance.buffer
+      assert_equal File.join(@bufdir, 'fsb.*.buf'), p.path
+    end
+
+    data('text based chunk' => [FluentPluginFileSingleBufferTest::DummyOutputPlugin, :text],
+         'msgpack based chunk' => [FluentPluginFileSingleBufferTest::DummyOutputMPPlugin, :msgpack])
+    test 'detect chunk_format' do |param|
+      klass, expected = param
+      @d = create_driver(TAG_CONF, klass)
+      p = @d.instance.buffer
+      assert_equal expected, p.chunk_format
+    end
+
+    test '"prefix.*.suffix" path will be replaced with default' do
+      @d = create_driver(%[
+        <buffer tag>
+          @type file_single
+          path #{@bufdir}/foo.*.bar
+        </buffer>
+      ])
+      p = @d.instance.buffer
+      assert_equal File.join(@bufdir, 'fsb.*.buf'), p.path
+    end
+  end
+
+  sub_test_case 'buffer configurations and workers' do
+    setup do
+      @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
+    end
+
+    test 'enables multi worker configuration with unexisting directory path' do
+      FileUtils.rm_rf(@bufdir)
+      buf_conf = config_element('buffer', '', {'path' => @bufdir})
+      assert_nothing_raised do
+        Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
+          @d.configure(config_element('ROOT', '', {}, [buf_conf]))
+        end
+      end
+    end
+
+    test 'enables multi worker configuration with existing directory path' do
+      FileUtils.mkdir_p @bufdir
+      buf_conf = config_element('buffer', '', {'path' => @bufdir})
+      assert_nothing_raised do
+        Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
+          @d.configure(config_element('ROOT', '', {}, [buf_conf]))
+        end
+      end
+    end
+
+    test 'enables multi worker configuration with root dir' do
+      buf_conf = config_element('buffer', '')
+      assert_nothing_raised do
+        Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
+          @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
+        end
+      end
+    end
+  end
+
+  sub_test_case 'buffer plugin configured only with path' do
+    setup do
+      @bufpath = File.join(@bufdir, 'testbuf.*.buf')
+      FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
+
+      @d = create_driver
+      @p = @d.instance.buffer
+    end
+
+    teardown do
+      if @p
+        @p.stop unless @p.stopped?
+        @p.before_shutdown unless @p.before_shutdown?
+        @p.shutdown unless @p.shutdown?
+        @p.after_shutdown unless @p.after_shutdown?
+        @p.close unless @p.closed?
+        @p.terminate unless @p.terminated?
+      end
+    end
+
+    test 'this is persistent plugin' do
+      assert @p.persistent?
+    end
+
+    test '#start creates directory for buffer chunks' do
+      @d = create_driver
+      @p = @d.instance.buffer
+
+      FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
+      assert !File.exist?(@bufdir)
+
+      @p.start
+      assert File.exist?(@bufdir)
+      assert { File.stat(@bufdir).mode.to_s(8).end_with?('755') }
+    end
+
+    test '#start creates directory for buffer chunks with specified permission' do
+      omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
+
+      @d = create_driver(%[
+        <buffer tag>
+          @type file_single
+          path #{PATH}
+          dir_permission 700
+        </buffer>
+      ])
+      @p = @d.instance.buffer
+
+      FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
+      assert !File.exist?(@bufdir)
+
+      @p.start
+      assert File.exist?(@bufdir)
+      assert { File.stat(@bufdir).mode.to_s(8).end_with?('700') }
+    end
+
+    test '#start creates directory for buffer chunks with specified permission via system config' do
+      omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
+
+      sysconf = {'dir_permission' => '700'}
+      Fluent::SystemConfig.overwrite_system_config(sysconf) do
+        @d = create_driver
+        @p = @d.instance.buffer
+
+        FileUtils.rm_r @bufdir if File.exist?(@bufdir)
+        assert !File.exist?(@bufdir)
+
+        @p.start
+        assert File.exist?(@bufdir)
+        assert { File.stat(@bufdir).mode.to_s(8).end_with?('700') }
+      end
+    end
+
+    test '#generate_chunk generates blank file chunk on path with unique_id and tag' do
+      FileUtils.mkdir_p(@bufdir) unless File.exist?(@bufdir)
+
+      m1 = metadata()
+      c1 = @p.generate_chunk(m1)
+      assert c1.is_a? Fluent::Plugin::Buffer::FileSingleChunk
+      assert_equal m1, c1.metadata
+      assert c1.empty?
+      assert_equal :unstaged, c1.state
+      assert_equal Fluent::Plugin::Buffer::FileSingleChunk::FILE_PERMISSION, c1.permission
+      assert_equal File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(c1.unique_id)}.buf"), c1.path
+      assert{ File.stat(c1.path).mode.to_s(8).end_with?('644') }
+
+      c1.purge
+    end
+
+    test '#generate_chunk generates blank file chunk on path with unique_id and field key' do
+      FileUtils.mkdir_p(@bufdir) unless File.exist?(@bufdir)
+
+      @d = create_driver(FIELD_CONF)
+      @p = @d.instance.buffer
+
+      m1 = metadata(tag: nil, variables: {:k => 'foo_bar'})
+      c1 = @p.generate_chunk(m1)
+      assert c1.is_a? Fluent::Plugin::Buffer::FileSingleChunk
+      assert_equal m1, c1.metadata
+      assert c1.empty?
+      assert_equal :unstaged, c1.state
+      assert_equal Fluent::Plugin::Buffer::FileSingleChunk::FILE_PERMISSION, c1.permission
+      assert_equal File.join(@bufdir, "fsb.foo_bar.b#{Fluent::UniqueId.hex(c1.unique_id)}.buf"), c1.path
+
+      c1.purge
+    end
+
+    test '#generate_chunk generates blank file chunk with specified permission' do
+      omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
+
+      @d = create_driver(%[
+        <buffer tag>
+          @type file_single
+          path #{PATH}
+          file_permission 600
+        </buffer>
+      ])
+      @p = @d.instance.buffer
+
+      FileUtils.rm_r @bufdir if File.exist?(@bufdir)
+      assert !File.exist?(@bufdir)
+
+      @p.start
+
+      m = metadata()
+      c = @p.generate_chunk(m)
+      assert c.is_a? Fluent::Plugin::Buffer::FileSingleChunk
+      assert_equal m, c.metadata
+      assert c.empty?
+      assert_equal :unstaged, c.state
+      assert_equal 0600, c.permission
+      assert_equal File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(c.unique_id)}.buf"), c.path
+      assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
+
+      c.purge
+    end
+  end
+
+  sub_test_case 'configured with system root directory and plugin @id' do
+    setup do
+      @root_dir = File.expand_path('../../tmp/buffer_file_single_root', __FILE__)
+      FileUtils.rm_rf(@root_dir)
+
+      @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
+      @p = nil
+    end
+
+    teardown do
+      if @p
+        @p.stop unless @p.stopped?
+        @p.before_shutdown unless @p.before_shutdown?
+        @p.shutdown unless @p.shutdown?
+        @p.after_shutdown unless @p.after_shutdown?
+        @p.close unless @p.closed?
+        @p.terminate unless @p.terminated?
+      end
+    end
+
+    test '#start creates directory for buffer chunks' do
+      Fluent::SystemConfig.overwrite_system_config('root_dir' => @root_dir) do
+        @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [config_element('buffer', '', {})]))
+        @p = @d.buffer
+      end
+
+      expected_buffer_path = File.join(@root_dir, 'worker0', 'dummy_output_with_buf', 'buffer', "fsb.*.buf")
+      expected_buffer_dir = File.dirname(expected_buffer_path)
+      assert_equal expected_buffer_path, @d.buffer.path
+      assert_false Dir.exist?(expected_buffer_dir)
+
+      @p.start
+      assert Dir.exist?(expected_buffer_dir)
+    end
+  end
+
+  sub_test_case 'buffer plugin configuration errors' do
+    data('tag and key' => 'tag,key',
+         'multiple keys' => 'key1,key2')
+    test 'invalid chunk keys' do |param|
+      assert_raise Fluent::ConfigError do
+        @d = create_driver(%[
+          <buffer #{param}>
+            @type file_single
+            path #{PATH}
+            calc_num_records false
+          </buffer>
+        ])
+      end
+    end
+
+    test 'path is not specified' do
+      assert_raise Fluent::ConfigError do
+        @d = create_driver(%[
+          <buffer tag>
+            @type file_single
+          </buffer>
+        ])
+      end
+    end
+  end
+
+  sub_test_case 'there are no existing file chunks' do
+    setup do
+      FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
+
+      @d = create_driver
+      @p = @d.instance.buffer
+      @p.start
+    end
+    teardown do
+      if @p
+        @p.stop unless @p.stopped?
+        @p.before_shutdown unless @p.before_shutdown?
+        @p.shutdown unless @p.shutdown?
+        @p.after_shutdown unless @p.after_shutdown?
+        @p.close unless @p.closed?
+        @p.terminate unless @p.terminated?
+      end
+      if @bufdir
+        Dir.glob(File.join(@bufdir, '*')).each do |path|
+          next if ['.', '..'].include?(File.basename(path))
+          File.delete(path)
+        end
+      end
+    end
+
+    test '#resume returns empty buffer state' do
+      ary = @p.resume
+      assert_equal({}, ary[0])
+      assert_equal([], ary[1])
+    end
+  end
+
+  sub_test_case 'there are some existing file chunks' do
+    setup do
+      @c1id = Fluent::UniqueId.generate
+      p1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c1id)}.buf")
+      File.open(p1, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+      t = Time.now - 50000
+      File.utime(t, t, p1)
+
+      @c2id = Fluent::UniqueId.generate
+      p2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c2id)}.buf")
+      File.open(p2, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+      t = Time.now - 40000
+      File.utime(t, t, p2)
+
+      @c3id = Fluent::UniqueId.generate
+      p3 = File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(@c3id)}.buf")
+      File.open(p3, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+
+      @c4id = Fluent::UniqueId.generate
+      p4 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(@c4id)}.buf")
+      File.open(p4, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+    end
+
+    teardown do
+      if @p
+        @p.stop unless @p.stopped?
+        @p.before_shutdown unless @p.before_shutdown?
+        @p.shutdown unless @p.shutdown?
+        @p.after_shutdown unless @p.after_shutdown?
+        @p.close unless @p.closed?
+        @p.terminate unless @p.terminated?
+      end
+      if @bufdir
+        Dir.glob(File.join(@bufdir, '*')).each do |path|
+          next if ['.', '..'].include?(File.basename(path))
+          File.delete(path)
+        end
+      end
+    end
+
+    test '#resume returns staged/queued chunks with metadata' do
+      @d = create_driver
+      @p = @d.instance.buffer
+      @p.start
+
+      assert_equal 2, @p.stage.size
+      assert_equal 2, @p.queue.size
+
+      stage = @p.stage
+
+      m3 = metadata()
+      assert_equal @c3id, stage[m3].unique_id
+      assert_equal 4, stage[m3].size
+      assert_equal :staged, stage[m3].state
+
+      m4 = metadata(tag: 'foo')
+      assert_equal @c4id, stage[m4].unique_id
+      assert_equal 3, stage[m4].size
+      assert_equal :staged, stage[m4].state
+    end
+
+    test '#resume returns queued chunks ordered by last modified time (FIFO)' do
+      @d = create_driver
+      @p = @d.instance.buffer
+      @p.start
+
+      assert_equal 2, @p.stage.size
+      assert_equal 2, @p.queue.size
+
+      queue = @p.queue
+
+      assert{ queue[0].modified_at <= queue[1].modified_at }
+
+      assert_equal @c1id, queue[0].unique_id
+      assert_equal :queued, queue[0].state
+      assert_equal 'testing', queue[0].metadata.tag
+      assert_nil queue[0].metadata.variables
+      assert_equal 4, queue[0].size
+
+      assert_equal @c2id, queue[1].unique_id
+      assert_equal :queued, queue[1].state
+      assert_equal 'testing', queue[1].metadata.tag
+      assert_nil queue[1].metadata.variables
+      assert_equal 3, queue[1].size
+    end
+
+    test '#resume returns staged/queued chunks but skips size calculation by calc_num_records' do
+      @d = create_driver(%[
+        <buffer tag>
+          @type file_single
+          path #{PATH}
+          calc_num_records false
+        </buffer>
+      ])
+      @p = @d.instance.buffer
+      @p.start
+
+      assert_equal 2, @p.stage.size
+      assert_equal 2, @p.queue.size
+
+      stage = @p.stage
+
+      m3 = metadata()
+      assert_equal @c3id, stage[m3].unique_id
+      assert_equal 0, stage[m3].size
+      assert_equal :staged, stage[m3].state
+
+      m4 = metadata(tag: 'foo')
+      assert_equal @c4id, stage[m4].unique_id
+      assert_equal 0, stage[m4].size
+      assert_equal :staged, stage[m4].state
+    end
+  end
+
+  sub_test_case 'there are some existing msgpack file chunks' do
+    setup do
+      packer = Fluent::MessagePackFactory.packer
+      @c1id = Fluent::UniqueId.generate
+      p1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c1id)}.buf")
+      File.open(p1, 'wb') do |f|
+        packer.write(["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}])
+        f.write packer.full_pack
+      end
+      t = Time.now - 50000
+      File.utime(t, t, p1)
+
+      @c2id = Fluent::UniqueId.generate
+      p2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c2id)}.buf")
+      File.open(p2, 'wb') do |f|
+        packer.write(["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}])
+        f.write packer.full_pack
+      end
+      t = Time.now - 40000
+      File.utime(t, t, p2)
+
+      @c3id = Fluent::UniqueId.generate
+      p3 = File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(@c3id)}.buf")
+      File.open(p3, 'wb') do |f|
+        packer.write(["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}])
+        f.write packer.full_pack
+      end
+
+      @c4id = Fluent::UniqueId.generate
+      p4 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(@c4id)}.buf")
+      File.open(p4, 'wb') do |f|
+        packer.write(["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}])
+        packer.write(["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}])
+        f.write packer.full_pack
+      end
+    end
+
+    teardown do
+      if @p
+        @p.stop unless @p.stopped?
+        @p.before_shutdown unless @p.before_shutdown?
+        @p.shutdown unless @p.shutdown?
+        @p.after_shutdown unless @p.after_shutdown?
+        @p.close unless @p.closed?
+        @p.terminate unless @p.terminated?
+      end
+      if @bufdir
+        Dir.glob(File.join(@bufdir, '*')).each do |path|
+          next if ['.', '..'].include?(File.basename(path))
+          File.delete(path)
+        end
+      end
+    end
+
+    test '#resume returns staged/queued chunks with msgpack format' do
+      @d = create_driver(%[
+        <buffer tag>
+          @type file_single
+          path #{PATH}
+          chunk_format msgpack
+        </buffer>
+      ])
+      @p = @d.instance.buffer
+      @p.start
+
+      assert_equal 2, @p.stage.size
+      assert_equal 2, @p.queue.size
+
+      stage = @p.stage
+
+      m3 = metadata()
+      assert_equal @c3id, stage[m3].unique_id
+      assert_equal 4, stage[m3].size
+      assert_equal :staged, stage[m3].state
+
+      m4 = metadata(tag: 'foo')
+      assert_equal @c4id, stage[m4].unique_id
+      assert_equal 3, stage[m4].size
+      assert_equal :staged, stage[m4].state
+    end
+  end
+
+  sub_test_case 'there are some existing file chunks, both in specified path and per-worker directory under specified path, configured as multi workers' do
+    setup do
+      @worker0_dir = File.join(@bufdir, "worker0")
+      @worker1_dir = File.join(@bufdir, "worker1")
+      FileUtils.rm_rf(@bufdir)
+      FileUtils.mkdir_p(@worker0_dir)
+      FileUtils.mkdir_p(@worker1_dir)
+
+      @bufdir_chunk_1 = Fluent::UniqueId.generate
+      bc1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@bufdir_chunk_1)}.buf")
+      File.open(bc1, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+
+      @bufdir_chunk_2 = Fluent::UniqueId.generate
+      bc2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@bufdir_chunk_2)}.buf")
+      File.open(bc2, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+
+      @worker_dir_chunk_1 = Fluent::UniqueId.generate
+      wc0_1 = File.join(@worker0_dir, "fsb.testing.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.buf")
+      wc1_1 = File.join(@worker1_dir, "fsb.testing.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.buf")
+      [wc0_1, wc1_1].each do |chunk_path|
+        File.open(chunk_path, 'wb') do |f|
+          f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        end
+      end
+
+      @worker_dir_chunk_2 = Fluent::UniqueId.generate
+      wc0_2 = File.join(@worker0_dir, "fsb.testing.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.buf")
+      wc1_2 = File.join(@worker1_dir, "fsb.foo.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.buf")
+      [wc0_2, wc1_2].each do |chunk_path|
+        File.open(chunk_path, 'wb') do |f|
+          f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        end
+      end
+
+      @worker_dir_chunk_3 = Fluent::UniqueId.generate
+      wc0_3 = File.join(@worker0_dir, "fsb.bar.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.buf")
+      wc1_3 = File.join(@worker1_dir, "fsb.baz.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.buf")
+      [wc0_3, wc1_3].each do |chunk_path|
+        File.open(chunk_path, 'wb') do |f|
+          f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        end
+      end
+    end
+
+    teardown do
+      if @p
+        @p.stop unless @p.stopped?
+        @p.before_shutdown unless @p.before_shutdown?
+        @p.shutdown unless @p.shutdown?
+        @p.after_shutdown unless @p.after_shutdown?
+        @p.close unless @p.closed?
+        @p.terminate unless @p.terminated?
+      end
+    end
+
+    test 'worker(id=0) #resume returns staged/queued chunks with metadata, not only in worker dir, including the directory specified by path' do
+      ENV['SERVERENGINE_WORKER_ID'] = '0'
+
+      buf_conf = config_element('buffer', '', {'path' => @bufdir})
+      @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
+      with_worker_config(workers: 2, worker_id: 0) do
+        @d.configure(config_element('output', '', {}, [buf_conf]))
+      end
+
+      @d.start
+      @p = @d.buffer
+
+      assert_equal 2, @p.stage.size
+      assert_equal 3, @p.queue.size
+
+      stage = @p.stage
+
+      m1 = metadata(tag: 'testing')
+      assert_equal @worker_dir_chunk_2, stage[m1].unique_id
+      assert_equal 4, stage[m1].size
+      assert_equal :staged, stage[m1].state
+
+      m2 = metadata(tag: 'bar')
+      assert_equal @worker_dir_chunk_3, stage[m2].unique_id
+      assert_equal 3, stage[m2].size
+      assert_equal :staged, stage[m2].state
+
+      queue = @p.queue
+
+      assert_equal [@bufdir_chunk_1, @bufdir_chunk_2, @worker_dir_chunk_1].sort, queue.map(&:unique_id).sort
+      assert_equal [3, 4, 4], queue.map(&:size).sort
+      assert_equal [:queued, :queued, :queued], queue.map(&:state)
+    end
+
+    test 'worker(id=1) #resume returns staged/queued chunks with metadata, only in worker dir' do
+      buf_conf = config_element('buffer', '', {'path' => @bufdir})
+      @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
+      with_worker_config(workers: 2, worker_id: 1) do
+        @d.configure(config_element('output', '', {}, [buf_conf]))
+      end
+
+      @d.start
+      @p = @d.buffer
+
+      assert_equal 2, @p.stage.size
+      assert_equal 1, @p.queue.size
+
+      stage = @p.stage
+
+      m1 = metadata(tag: 'foo')
+      assert_equal @worker_dir_chunk_2, stage[m1].unique_id
+      assert_equal 4, stage[m1].size
+      assert_equal :staged, stage[m1].state
+
+      m2 = metadata(tag: 'baz')
+      assert_equal @worker_dir_chunk_3, stage[m2].unique_id
+      assert_equal 3, stage[m2].size
+      assert_equal :staged, stage[m2].state
+
+      queue = @p.queue
+
+      assert_equal @worker_dir_chunk_1, queue[0].unique_id
+      assert_equal 3, queue[0].size
+      assert_equal :queued, queue[0].state
+    end
+  end
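The tests above drive the new file_single buffer through the plugin API; in a deployment the same buffer is selected from configuration. A minimal sketch (the match pattern, path, and downstream host are placeholders, not taken from this diff):

<match test.**>
  @type forward
  <buffer tag>
    @type file_single
    path /var/log/fluent/buffer
  </buffer>
  <server>
    host 127.0.0.1
    port 24224
  </server>
</match>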