fluentd 0.12.43 → 0.14.0
Sign up to get free protection for your applications and to get access to all the features.
Potentially problematic release.
This version of fluentd might be problematic. Click here for more details.
- checksums.yaml +4 -4
- data/.github/ISSUE_TEMPLATE.md +6 -0
- data/.gitignore +2 -0
- data/.travis.yml +33 -21
- data/CONTRIBUTING.md +1 -0
- data/ChangeLog +1239 -0
- data/README.md +0 -25
- data/Rakefile +2 -1
- data/Vagrantfile +17 -0
- data/appveyor.yml +35 -0
- data/example/filter_stdout.conf +5 -5
- data/example/in_forward.conf +2 -2
- data/example/in_http.conf +2 -2
- data/example/in_out_forward.conf +17 -0
- data/example/in_syslog.conf +2 -2
- data/example/in_tail.conf +2 -2
- data/example/in_tcp.conf +2 -2
- data/example/in_udp.conf +2 -2
- data/example/out_copy.conf +4 -4
- data/example/out_file.conf +2 -2
- data/example/out_forward.conf +2 -2
- data/example/out_forward_buf_file.conf +23 -0
- data/example/v0_12_filter.conf +8 -8
- data/fluent.conf +29 -0
- data/fluentd.gemspec +18 -11
- data/lib/fluent/agent.rb +60 -58
- data/lib/fluent/command/cat.rb +1 -1
- data/lib/fluent/command/debug.rb +7 -5
- data/lib/fluent/command/fluentd.rb +97 -2
- data/lib/fluent/compat/call_super_mixin.rb +67 -0
- data/lib/fluent/compat/filter.rb +50 -0
- data/lib/fluent/compat/formatter.rb +109 -0
- data/lib/fluent/compat/input.rb +50 -0
- data/lib/fluent/compat/output.rb +617 -0
- data/lib/fluent/compat/output_chain.rb +60 -0
- data/lib/fluent/compat/parser.rb +163 -0
- data/lib/fluent/compat/propagate_default.rb +62 -0
- data/lib/fluent/config.rb +23 -20
- data/lib/fluent/config/configure_proxy.rb +119 -70
- data/lib/fluent/config/dsl.rb +5 -18
- data/lib/fluent/config/element.rb +72 -8
- data/lib/fluent/config/error.rb +0 -3
- data/lib/fluent/config/literal_parser.rb +0 -2
- data/lib/fluent/config/parser.rb +4 -4
- data/lib/fluent/config/section.rb +39 -28
- data/lib/fluent/config/types.rb +2 -13
- data/lib/fluent/config/v1_parser.rb +1 -3
- data/lib/fluent/configurable.rb +48 -16
- data/lib/fluent/daemon.rb +15 -0
- data/lib/fluent/engine.rb +26 -52
- data/lib/fluent/env.rb +6 -4
- data/lib/fluent/event.rb +58 -11
- data/lib/fluent/event_router.rb +5 -5
- data/lib/fluent/filter.rb +2 -50
- data/lib/fluent/formatter.rb +4 -293
- data/lib/fluent/input.rb +2 -32
- data/lib/fluent/label.rb +2 -2
- data/lib/fluent/load.rb +3 -2
- data/lib/fluent/log.rb +107 -38
- data/lib/fluent/match.rb +0 -36
- data/lib/fluent/mixin.rb +117 -7
- data/lib/fluent/msgpack_factory.rb +62 -0
- data/lib/fluent/output.rb +7 -612
- data/lib/fluent/output_chain.rb +23 -0
- data/lib/fluent/parser.rb +4 -800
- data/lib/fluent/plugin.rb +100 -121
- data/lib/fluent/plugin/bare_output.rb +63 -0
- data/lib/fluent/plugin/base.rb +121 -0
- data/lib/fluent/plugin/buf_file.rb +101 -182
- data/lib/fluent/plugin/buf_memory.rb +9 -92
- data/lib/fluent/plugin/buffer.rb +473 -0
- data/lib/fluent/plugin/buffer/chunk.rb +135 -0
- data/lib/fluent/plugin/buffer/file_chunk.rb +339 -0
- data/lib/fluent/plugin/buffer/memory_chunk.rb +100 -0
- data/lib/fluent/plugin/exec_util.rb +80 -75
- data/lib/fluent/plugin/file_util.rb +33 -28
- data/lib/fluent/plugin/file_wrapper.rb +120 -0
- data/lib/fluent/plugin/filter.rb +51 -0
- data/lib/fluent/plugin/filter_grep.rb +13 -40
- data/lib/fluent/plugin/filter_record_transformer.rb +22 -18
- data/lib/fluent/plugin/formatter.rb +93 -0
- data/lib/fluent/plugin/formatter_csv.rb +48 -0
- data/lib/fluent/plugin/formatter_hash.rb +32 -0
- data/lib/fluent/plugin/formatter_json.rb +47 -0
- data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
- data/lib/fluent/plugin/formatter_msgpack.rb +32 -0
- data/lib/fluent/plugin/formatter_out_file.rb +45 -0
- data/lib/fluent/plugin/formatter_single_value.rb +34 -0
- data/lib/fluent/plugin/formatter_stdout.rb +39 -0
- data/lib/fluent/plugin/in_debug_agent.rb +4 -0
- data/lib/fluent/plugin/in_dummy.rb +22 -18
- data/lib/fluent/plugin/in_exec.rb +18 -8
- data/lib/fluent/plugin/in_forward.rb +36 -79
- data/lib/fluent/plugin/in_gc_stat.rb +4 -0
- data/lib/fluent/plugin/in_http.rb +21 -18
- data/lib/fluent/plugin/in_monitor_agent.rb +15 -48
- data/lib/fluent/plugin/in_object_space.rb +6 -1
- data/lib/fluent/plugin/in_stream.rb +7 -3
- data/lib/fluent/plugin/in_syslog.rb +46 -95
- data/lib/fluent/plugin/in_tail.rb +58 -640
- data/lib/fluent/plugin/in_tcp.rb +8 -1
- data/lib/fluent/plugin/in_udp.rb +8 -18
- data/lib/fluent/plugin/input.rb +33 -0
- data/lib/fluent/plugin/multi_output.rb +95 -0
- data/lib/fluent/plugin/out_buffered_null.rb +59 -0
- data/lib/fluent/plugin/out_copy.rb +11 -7
- data/lib/fluent/plugin/out_exec.rb +15 -11
- data/lib/fluent/plugin/out_exec_filter.rb +18 -10
- data/lib/fluent/plugin/out_file.rb +34 -5
- data/lib/fluent/plugin/out_forward.rb +25 -19
- data/lib/fluent/plugin/out_null.rb +0 -14
- data/lib/fluent/plugin/out_roundrobin.rb +11 -7
- data/lib/fluent/plugin/out_stdout.rb +5 -7
- data/lib/fluent/plugin/out_stream.rb +3 -1
- data/lib/fluent/plugin/output.rb +979 -0
- data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
- data/lib/fluent/plugin/parser.rb +244 -0
- data/lib/fluent/plugin/parser_apache.rb +24 -0
- data/lib/fluent/plugin/parser_apache2.rb +84 -0
- data/lib/fluent/plugin/parser_apache_error.rb +21 -0
- data/lib/fluent/plugin/parser_csv.rb +31 -0
- data/lib/fluent/plugin/parser_json.rb +79 -0
- data/lib/fluent/plugin/parser_ltsv.rb +50 -0
- data/lib/fluent/plugin/parser_multiline.rb +102 -0
- data/lib/fluent/plugin/parser_nginx.rb +24 -0
- data/lib/fluent/plugin/parser_none.rb +36 -0
- data/lib/fluent/plugin/parser_syslog.rb +82 -0
- data/lib/fluent/plugin/parser_tsv.rb +37 -0
- data/lib/fluent/plugin/socket_util.rb +119 -117
- data/lib/fluent/plugin/storage.rb +84 -0
- data/lib/fluent/plugin/storage_local.rb +116 -0
- data/lib/fluent/plugin/string_util.rb +16 -13
- data/lib/fluent/plugin_helper.rb +39 -0
- data/lib/fluent/plugin_helper/child_process.rb +298 -0
- data/lib/fluent/plugin_helper/compat_parameters.rb +99 -0
- data/lib/fluent/plugin_helper/event_emitter.rb +80 -0
- data/lib/fluent/plugin_helper/event_loop.rb +118 -0
- data/lib/fluent/plugin_helper/retry_state.rb +177 -0
- data/lib/fluent/plugin_helper/storage.rb +308 -0
- data/lib/fluent/plugin_helper/thread.rb +147 -0
- data/lib/fluent/plugin_helper/timer.rb +85 -0
- data/lib/fluent/plugin_id.rb +63 -0
- data/lib/fluent/process.rb +21 -30
- data/lib/fluent/registry.rb +21 -9
- data/lib/fluent/root_agent.rb +115 -40
- data/lib/fluent/supervisor.rb +330 -320
- data/lib/fluent/system_config.rb +42 -18
- data/lib/fluent/test.rb +6 -1
- data/lib/fluent/test/base.rb +23 -3
- data/lib/fluent/test/driver/base.rb +247 -0
- data/lib/fluent/test/driver/event_feeder.rb +98 -0
- data/lib/fluent/test/driver/filter.rb +35 -0
- data/lib/fluent/test/driver/input.rb +31 -0
- data/lib/fluent/test/driver/output.rb +78 -0
- data/lib/fluent/test/driver/test_event_router.rb +45 -0
- data/lib/fluent/test/filter_test.rb +0 -1
- data/lib/fluent/test/formatter_test.rb +2 -1
- data/lib/fluent/test/input_test.rb +23 -17
- data/lib/fluent/test/output_test.rb +28 -39
- data/lib/fluent/test/parser_test.rb +1 -1
- data/lib/fluent/time.rb +104 -1
- data/lib/fluent/{status.rb → unique_id.rb} +15 -24
- data/lib/fluent/version.rb +1 -1
- data/lib/fluent/winsvc.rb +72 -0
- data/test/compat/test_calls_super.rb +164 -0
- data/test/config/test_config_parser.rb +83 -0
- data/test/config/test_configurable.rb +547 -274
- data/test/config/test_configure_proxy.rb +146 -29
- data/test/config/test_dsl.rb +3 -181
- data/test/config/test_element.rb +274 -0
- data/test/config/test_literal_parser.rb +1 -1
- data/test/config/test_section.rb +79 -7
- data/test/config/test_system_config.rb +21 -0
- data/test/config/test_types.rb +3 -26
- data/test/helper.rb +78 -8
- data/test/plugin/test_bare_output.rb +118 -0
- data/test/plugin/test_base.rb +75 -0
- data/test/plugin/test_buf_file.rb +420 -521
- data/test/plugin/test_buf_memory.rb +32 -194
- data/test/plugin/test_buffer.rb +981 -0
- data/test/plugin/test_buffer_chunk.rb +110 -0
- data/test/plugin/test_buffer_file_chunk.rb +770 -0
- data/test/plugin/test_buffer_memory_chunk.rb +265 -0
- data/test/plugin/test_filter.rb +255 -0
- data/test/plugin/test_filter_grep.rb +2 -73
- data/test/plugin/test_filter_record_transformer.rb +24 -68
- data/test/plugin/test_filter_stdout.rb +6 -6
- data/test/plugin/test_in_debug_agent.rb +2 -0
- data/test/plugin/test_in_dummy.rb +11 -17
- data/test/plugin/test_in_exec.rb +6 -25
- data/test/plugin/test_in_forward.rb +112 -151
- data/test/plugin/test_in_gc_stat.rb +2 -0
- data/test/plugin/test_in_http.rb +106 -157
- data/test/plugin/test_in_object_space.rb +21 -5
- data/test/plugin/test_in_stream.rb +14 -13
- data/test/plugin/test_in_syslog.rb +30 -275
- data/test/plugin/test_in_tail.rb +95 -282
- data/test/plugin/test_in_tcp.rb +14 -0
- data/test/plugin/test_in_udp.rb +21 -67
- data/test/plugin/test_input.rb +122 -0
- data/test/plugin/test_multi_output.rb +180 -0
- data/test/plugin/test_out_buffered_null.rb +79 -0
- data/test/plugin/test_out_copy.rb +15 -2
- data/test/plugin/test_out_exec.rb +75 -25
- data/test/plugin/test_out_exec_filter.rb +74 -8
- data/test/plugin/test_out_file.rb +61 -7
- data/test/plugin/test_out_forward.rb +92 -15
- data/test/plugin/test_out_roundrobin.rb +1 -0
- data/test/plugin/test_out_stdout.rb +22 -13
- data/test/plugin/test_out_stream.rb +18 -0
- data/test/plugin/test_output.rb +515 -0
- data/test/plugin/test_output_as_buffered.rb +1540 -0
- data/test/plugin/test_output_as_buffered_overflow.rb +247 -0
- data/test/plugin/test_output_as_buffered_retries.rb +808 -0
- data/test/plugin/test_output_as_buffered_secondary.rb +776 -0
- data/test/plugin/test_output_as_standard.rb +362 -0
- data/test/plugin/test_owned_by.rb +35 -0
- data/test/plugin/test_storage.rb +167 -0
- data/test/plugin/test_storage_local.rb +8 -0
- data/test/plugin_helper/test_child_process.rb +599 -0
- data/test/plugin_helper/test_compat_parameters.rb +175 -0
- data/test/plugin_helper/test_event_emitter.rb +51 -0
- data/test/plugin_helper/test_event_loop.rb +52 -0
- data/test/plugin_helper/test_retry_state.rb +399 -0
- data/test/plugin_helper/test_storage.rb +411 -0
- data/test/plugin_helper/test_thread.rb +164 -0
- data/test/plugin_helper/test_timer.rb +100 -0
- data/test/scripts/exec_script.rb +0 -6
- data/test/scripts/fluent/plugin/out_test.rb +3 -0
- data/test/test_config.rb +13 -4
- data/test/test_event.rb +24 -13
- data/test/test_event_router.rb +8 -7
- data/test/test_event_time.rb +187 -0
- data/test/test_formatter.rb +13 -51
- data/test/test_input.rb +1 -1
- data/test/test_log.rb +239 -16
- data/test/test_mixin.rb +1 -1
- data/test/test_output.rb +53 -66
- data/test/test_parser.rb +105 -323
- data/test/test_plugin_helper.rb +81 -0
- data/test/test_root_agent.rb +4 -52
- data/test/test_supervisor.rb +272 -0
- data/test/test_unique_id.rb +47 -0
- metadata +181 -55
- data/CHANGELOG.md +0 -710
- data/lib/fluent/buffer.rb +0 -365
- data/lib/fluent/plugin/filter_parser.rb +0 -107
- data/lib/fluent/plugin/in_status.rb +0 -76
- data/lib/fluent/test/helpers.rb +0 -86
- data/test/plugin/data/log/foo/bar2 +0 -0
- data/test/plugin/test_filter_parser.rb +0 -744
- data/test/plugin/test_in_status.rb +0 -38
- data/test/test_buffer.rb +0 -624
@@ -1,204 +1,42 @@
|
|
1
|
-
# -*- coding: utf-8 -*-
|
2
1
|
require_relative '../helper'
|
3
|
-
require 'fluent/test'
|
4
2
|
require 'fluent/plugin/buf_memory'
|
3
|
+
require 'fluent/plugin/output'
|
4
|
+
require 'flexmock/test_unit'
|
5
5
|
|
6
|
-
|
7
|
-
|
8
|
-
|
9
|
-
module FluentMemoryBufferTest
|
10
|
-
class MemoryBufferChunkTest < Test::Unit::TestCase
|
11
|
-
def test_init
|
12
|
-
chunk = Fluent::MemoryBufferChunk.new('key')
|
13
|
-
assert_equal 'key', chunk.key
|
14
|
-
assert_equal '', chunk.instance_eval{ @data }
|
15
|
-
assert_equal 'ASCII-8BIT', chunk.instance_eval{ @data }.encoding.to_s
|
16
|
-
assert chunk.unique_id # non nil
|
17
|
-
|
18
|
-
chunk2 = Fluent::MemoryBufferChunk.new('initdata', 'data')
|
19
|
-
assert_equal 'initdata', chunk2.key
|
20
|
-
assert_equal 'data', chunk2.instance_eval{ @data }
|
21
|
-
end
|
22
|
-
|
23
|
-
def test_buffer_chunk_interface
|
24
|
-
chunk = Fluent::BufferChunk.new('key')
|
25
|
-
|
26
|
-
assert chunk.respond_to?(:empty?)
|
27
|
-
assert chunk.respond_to?(:<<)
|
28
|
-
assert chunk.respond_to?(:size)
|
29
|
-
assert chunk.respond_to?(:close)
|
30
|
-
assert chunk.respond_to?(:purge)
|
31
|
-
assert chunk.respond_to?(:read)
|
32
|
-
assert chunk.respond_to?(:open)
|
33
|
-
assert chunk.respond_to?(:write_to)
|
34
|
-
assert chunk.respond_to?(:msgpack_each)
|
35
|
-
end
|
36
|
-
|
37
|
-
def test_empty?
|
38
|
-
chunk = Fluent::MemoryBufferChunk.new('key')
|
39
|
-
assert chunk.empty?
|
40
|
-
|
41
|
-
chunk.instance_eval{ @data = "non empty" }
|
42
|
-
assert !(chunk.empty?)
|
43
|
-
end
|
44
|
-
|
45
|
-
def test_append_data_and_size
|
46
|
-
chunk = Fluent::MemoryBufferChunk.new('key')
|
47
|
-
assert_equal '', chunk.instance_eval{ @data }
|
48
|
-
|
49
|
-
chunk << "foo bar baz\n".force_encoding('UTF-8')
|
50
|
-
assert_equal "foo bar baz\n", chunk.instance_eval{ @data }
|
51
|
-
assert_equal 'ASCII-8BIT', chunk.instance_eval{ @data }.encoding.to_s
|
52
|
-
|
53
|
-
assert_equal 12, chunk.size # bytesize
|
54
|
-
|
55
|
-
chunk << "日本語Japanese\n".force_encoding('UTF-8')
|
56
|
-
assert_equal "foo bar baz\n日本語Japanese\n".force_encoding('ASCII-8BIT'), chunk.instance_eval{ @data }
|
57
|
-
assert_equal 'ASCII-8BIT', chunk.instance_eval{ @data }.encoding.to_s
|
58
|
-
|
59
|
-
assert_equal 30, chunk.size # bytesize
|
60
|
-
end
|
61
|
-
|
62
|
-
def test_close_and_purge_does_nothing
|
63
|
-
chunk = Fluent::MemoryBufferChunk.new('key', 'data')
|
64
|
-
chunk.close
|
65
|
-
chunk.close
|
66
|
-
chunk.close
|
67
|
-
chunk.close
|
68
|
-
chunk.purge
|
69
|
-
chunk.purge
|
70
|
-
chunk.purge
|
71
|
-
chunk.purge
|
72
|
-
assert_equal 'data', chunk.instance_eval{ @data }
|
73
|
-
end
|
74
|
-
|
75
|
-
def test_read_just_returns_data
|
76
|
-
data = "data1\ndata2\n"
|
77
|
-
chunk = Fluent::MemoryBufferChunk.new('key', data)
|
78
|
-
assert_equal data, chunk.read
|
79
|
-
assert_equal data.object_id, chunk.read.object_id
|
80
|
-
end
|
81
|
-
|
82
|
-
def test_open
|
83
|
-
# StringIO.open(@data, &block)
|
84
|
-
chunk = Fluent::MemoryBufferChunk.new('key', 'foo bar baz')
|
85
|
-
chunk.open do |io|
|
86
|
-
assert 'foo bar baz', io.read
|
87
|
-
end
|
88
|
-
end
|
89
|
-
|
90
|
-
def test_write_to
|
91
|
-
chunk = Fluent::MemoryBufferChunk.new('key', 'foo bar baz')
|
92
|
-
dummy_dst = StringIO.new
|
93
|
-
chunk.write_to(dummy_dst)
|
94
|
-
assert_equal 'foo bar baz', dummy_dst.string
|
95
|
-
end
|
96
|
-
|
97
|
-
def test_msgpack_each
|
98
|
-
d0 = MessagePack.pack([[1, "foo"], [2, "bar"], [3, "baz"]])
|
99
|
-
d1 = MessagePack.pack({"key1" => "value1", "key2" => "value2"})
|
100
|
-
d2 = MessagePack.pack("string1")
|
101
|
-
d3 = MessagePack.pack(1)
|
102
|
-
d4 = MessagePack.pack(nil)
|
103
|
-
chunk = Fluent::MemoryBufferChunk.new('key', d0 + d1 + d2 + d3 + d4)
|
104
|
-
|
105
|
-
store = []
|
106
|
-
chunk.msgpack_each do |data|
|
107
|
-
store << data
|
108
|
-
end
|
109
|
-
|
110
|
-
assert_equal 5, store.size
|
111
|
-
assert_equal [[1, "foo"], [2, "bar"], [3, "baz"]], store[0]
|
112
|
-
assert_equal({"key1" => "value1", "key2" => "value2"}, store[1])
|
113
|
-
assert_equal "string1", store[2]
|
114
|
-
assert_equal 1, store[3]
|
115
|
-
assert_equal nil, store[4]
|
116
|
-
end
|
6
|
+
module FluentPluginMemoryBufferTest
|
7
|
+
class DummyOutputPlugin < Fluent::Plugin::Output
|
117
8
|
end
|
9
|
+
end
|
118
10
|
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
|
125
|
-
|
126
|
-
end
|
127
|
-
|
128
|
-
class DummyOutput
|
129
|
-
attr_accessor :written
|
130
|
-
|
131
|
-
def write(chunk)
|
132
|
-
@written ||= []
|
133
|
-
@written.push chunk
|
134
|
-
"return value"
|
135
|
-
end
|
136
|
-
end
|
137
|
-
|
138
|
-
def test_before_shutdown
|
139
|
-
buf = Fluent::MemoryBuffer.new
|
140
|
-
buf.start
|
141
|
-
|
142
|
-
# before_shutdown flushes all chunks in @map and @queue
|
143
|
-
|
144
|
-
c1 = [ buf.new_chunk('k0'), buf.new_chunk('k1'), buf.new_chunk('k2'), buf.new_chunk('k3') ]
|
145
|
-
c2 = [ buf.new_chunk('q0'), buf.new_chunk('q1') ]
|
146
|
-
|
147
|
-
buf.instance_eval do
|
148
|
-
@map = {
|
149
|
-
'k0' => c1[0], 'k1' => c1[1], 'k2' => c1[2], 'k3' => c1[3],
|
150
|
-
'q0' => c2[0], 'q1' => c2[1]
|
151
|
-
}
|
152
|
-
end
|
153
|
-
c1[0] << "data1\ndata2\n"
|
154
|
-
c1[1] << "data1\ndata2\n"
|
155
|
-
c1[2] << "data1\ndata2\n"
|
156
|
-
# k3 chunk is empty!
|
157
|
-
|
158
|
-
c2[0] << "data1\ndata2\n"
|
159
|
-
c2[1] << "data1\ndata2\n"
|
160
|
-
buf.push('q0')
|
161
|
-
buf.push('q1')
|
162
|
-
|
163
|
-
buf.instance_eval do
|
164
|
-
@enqueue_hook_times = 0
|
165
|
-
def enqueue(chunk)
|
166
|
-
@enqueue_hook_times += 1
|
167
|
-
end
|
168
|
-
end
|
169
|
-
assert_equal 0, buf.instance_eval{ @enqueue_hook_times }
|
170
|
-
|
171
|
-
out = DummyOutput.new
|
172
|
-
assert_equal nil, out.written
|
173
|
-
|
174
|
-
buf.before_shutdown(out)
|
175
|
-
|
176
|
-
assert_equal 3, buf.instance_eval{ @enqueue_hook_times } # k0, k1, k2
|
177
|
-
assert_equal 5, out.written.size
|
178
|
-
assert_equal [c2[0], c2[1], c1[0], c1[1], c1[2]], out.written
|
179
|
-
end
|
11
|
+
class MemoryBufferTest < Test::Unit::TestCase
|
12
|
+
setup do
|
13
|
+
Fluent::Test.setup
|
14
|
+
@d = FluentPluginMemoryBufferTest::DummyOutputPlugin.new
|
15
|
+
@p = Fluent::Plugin::MemoryBuffer.new
|
16
|
+
@p.owner = @d
|
17
|
+
end
|
180
18
|
|
181
|
-
|
182
|
-
|
183
|
-
|
184
|
-
assert_equal Fluent::MemoryBufferChunk, chunk.class
|
185
|
-
assert_equal 'key', chunk.key
|
186
|
-
assert chunk.empty?
|
187
|
-
end
|
19
|
+
test 'this is non persistent plugin' do
|
20
|
+
assert !@p.persistent?
|
21
|
+
end
|
188
22
|
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
|
194
|
-
end
|
23
|
+
test '#resume always returns empty stage and queue' do
|
24
|
+
ary = @p.resume
|
25
|
+
assert_equal({}, ary[0])
|
26
|
+
assert_equal([], ary[1])
|
27
|
+
end
|
195
28
|
|
196
|
-
|
197
|
-
|
198
|
-
|
199
|
-
|
200
|
-
|
201
|
-
|
202
|
-
|
29
|
+
test '#generate_chunk returns memory chunk instance' do
|
30
|
+
m1 = Fluent::Plugin::Buffer::Metadata.new(nil, nil, nil)
|
31
|
+
c1 = @p.generate_chunk(m1)
|
32
|
+
assert c1.is_a? Fluent::Plugin::Buffer::MemoryChunk
|
33
|
+
assert_equal m1, c1.metadata
|
34
|
+
|
35
|
+
require 'time'
|
36
|
+
t2 = Time.parse('2016-04-08 19:55:00 +0900').to_i
|
37
|
+
m2 = Fluent::Plugin::Buffer::Metadata.new(t2, 'test.tag', {k1: 'v1', k2: 0})
|
38
|
+
c2 = @p.generate_chunk(m2)
|
39
|
+
assert c2.is_a? Fluent::Plugin::Buffer::MemoryChunk
|
40
|
+
assert_equal m2, c2.metadata
|
203
41
|
end
|
204
42
|
end
|
@@ -0,0 +1,981 @@
|
|
1
|
+
require_relative '../helper'
|
2
|
+
require 'fluent/plugin/buffer'
|
3
|
+
require 'fluent/plugin/buffer/memory_chunk'
|
4
|
+
require 'flexmock/test_unit'
|
5
|
+
|
6
|
+
require 'fluent/log'
|
7
|
+
require 'fluent/plugin_id'
|
8
|
+
|
9
|
+
require 'time'
|
10
|
+
|
11
|
+
module FluentPluginBufferTest
|
12
|
+
class DummyOutputPlugin < Fluent::Plugin::Base
|
13
|
+
include Fluent::PluginId
|
14
|
+
include Fluent::PluginLoggerMixin
|
15
|
+
end
|
16
|
+
class DummyMemoryChunkError < StandardError; end
|
17
|
+
class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
|
18
|
+
attr_reader :append_count, :rollbacked, :closed, :purged
|
19
|
+
attr_accessor :failing
|
20
|
+
def initialize(metadata)
|
21
|
+
super
|
22
|
+
@append_count = 0
|
23
|
+
@rollbacked = false
|
24
|
+
@closed = false
|
25
|
+
@purged = false
|
26
|
+
@failing = false
|
27
|
+
end
|
28
|
+
def append(data)
|
29
|
+
@append_count += 1
|
30
|
+
raise DummyMemoryChunkError if @failing
|
31
|
+
super
|
32
|
+
end
|
33
|
+
def concat(data, size)
|
34
|
+
@append_count += 1
|
35
|
+
raise DummyMemoryChunkError if @failing
|
36
|
+
super
|
37
|
+
end
|
38
|
+
def rollback
|
39
|
+
super
|
40
|
+
@rollbacked = true
|
41
|
+
end
|
42
|
+
def close
|
43
|
+
super
|
44
|
+
@closed = true
|
45
|
+
end
|
46
|
+
def purge
|
47
|
+
super
|
48
|
+
@purged = true
|
49
|
+
end
|
50
|
+
end
|
51
|
+
class DummyPlugin < Fluent::Plugin::Buffer
|
52
|
+
def create_metadata(timekey=nil, tag=nil, variables=nil)
|
53
|
+
Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
|
54
|
+
end
|
55
|
+
def create_chunk(metadata, data)
|
56
|
+
c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
|
57
|
+
c.append(data)
|
58
|
+
c.commit
|
59
|
+
c
|
60
|
+
end
|
61
|
+
def resume
|
62
|
+
dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
|
63
|
+
dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
|
64
|
+
dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
|
65
|
+
dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
|
66
|
+
staged = {
|
67
|
+
dm2 => create_chunk(dm2, ["b" * 100]),
|
68
|
+
dm3 => create_chunk(dm3, ["c" * 100]),
|
69
|
+
}
|
70
|
+
queued = [
|
71
|
+
create_chunk(dm0, ["0" * 100]),
|
72
|
+
create_chunk(dm1, ["a" * 100]),
|
73
|
+
create_chunk(dm1, ["a" * 3]),
|
74
|
+
]
|
75
|
+
return staged, queued
|
76
|
+
end
|
77
|
+
def generate_chunk(metadata)
|
78
|
+
DummyMemoryChunk.new(metadata)
|
79
|
+
end
|
80
|
+
end
|
81
|
+
end
|
82
|
+
|
83
|
+
class BufferTest < Test::Unit::TestCase
|
84
|
+
def create_buffer(hash)
|
85
|
+
buffer_conf = config_element('buffer', '', hash, [])
|
86
|
+
owner = FluentPluginBufferTest::DummyOutputPlugin.new
|
87
|
+
owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
|
88
|
+
p = FluentPluginBufferTest::DummyPlugin.new
|
89
|
+
p.owner = owner
|
90
|
+
p.configure(buffer_conf)
|
91
|
+
p
|
92
|
+
end
|
93
|
+
|
94
|
+
def create_metadata(timekey=nil, tag=nil, variables=nil)
|
95
|
+
Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
|
96
|
+
end
|
97
|
+
|
98
|
+
def create_chunk(metadata, data)
|
99
|
+
c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
|
100
|
+
c.append(data)
|
101
|
+
c.commit
|
102
|
+
c
|
103
|
+
end
|
104
|
+
|
105
|
+
setup do
|
106
|
+
Fluent::Test.setup
|
107
|
+
end
|
108
|
+
|
109
|
+
sub_test_case 'using base buffer class' do
|
110
|
+
setup do
|
111
|
+
buffer_conf = config_element('buffer', '', {}, [])
|
112
|
+
owner = FluentPluginBufferTest::DummyOutputPlugin.new
|
113
|
+
owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
|
114
|
+
p = Fluent::Plugin::Buffer.new
|
115
|
+
p.owner = owner
|
116
|
+
p.configure(buffer_conf)
|
117
|
+
@p = p
|
118
|
+
end
|
119
|
+
|
120
|
+
test 'default persistency is false' do
|
121
|
+
assert !@p.persistent?
|
122
|
+
end
|
123
|
+
|
124
|
+
test 'chunk bytes limit is 8MB, and total bytes limit is 512MB' do
|
125
|
+
assert_equal 8*1024*1024, @p.chunk_limit_size
|
126
|
+
assert_equal 512*1024*1024, @p.total_limit_size
|
127
|
+
end
|
128
|
+
|
129
|
+
test 'chunk records limit is ignored in default' do
|
130
|
+
assert_nil @p.chunk_records_limit
|
131
|
+
end
|
132
|
+
|
133
|
+
test '#storable? checks total size of staged and enqueued(includes dequeued chunks) against total_limit_size' do
|
134
|
+
assert_equal 512*1024*1024, @p.total_limit_size
|
135
|
+
assert_equal 0, @p.stage_size
|
136
|
+
assert_equal 0, @p.queue_size
|
137
|
+
assert @p.storable?
|
138
|
+
|
139
|
+
@p.stage_size = 256 * 1024 * 1024
|
140
|
+
@p.queue_size = 256 * 1024 * 1024 - 1
|
141
|
+
assert @p.storable?
|
142
|
+
|
143
|
+
@p.queue_size = 256 * 1024 * 1024
|
144
|
+
assert !@p.storable?
|
145
|
+
end
|
146
|
+
|
147
|
+
test '#resume must be implemented by subclass' do
|
148
|
+
assert_raise NotImplementedError do
|
149
|
+
@p.resume
|
150
|
+
end
|
151
|
+
end
|
152
|
+
|
153
|
+
test '#generate_chunk must be implemented by subclass' do
|
154
|
+
assert_raise NotImplementedError do
|
155
|
+
@p.generate_chunk(Object.new)
|
156
|
+
end
|
157
|
+
end
|
158
|
+
end
|
159
|
+
|
160
|
+
sub_test_case 'with default configuration and dummy implementation' do
|
161
|
+
setup do
|
162
|
+
@p = create_buffer({})
|
163
|
+
@dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
|
164
|
+
@dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
|
165
|
+
@dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
|
166
|
+
@dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
|
167
|
+
@p.start
|
168
|
+
end
|
169
|
+
|
170
|
+
test '#start resumes buffer states and update queued numbers per metadata' do
|
171
|
+
plugin = create_buffer({})
|
172
|
+
|
173
|
+
assert_equal({}, plugin.stage)
|
174
|
+
assert_equal([], plugin.queue)
|
175
|
+
assert_equal({}, plugin.dequeued)
|
176
|
+
assert_equal({}, plugin.queued_num)
|
177
|
+
assert_equal([], plugin.metadata_list)
|
178
|
+
|
179
|
+
assert_equal 0, plugin.stage_size
|
180
|
+
assert_equal 0, plugin.queue_size
|
181
|
+
|
182
|
+
# @p is started plugin
|
183
|
+
|
184
|
+
assert_equal [@dm2,@dm3], @p.stage.keys
|
185
|
+
assert_equal "b" * 100, @p.stage[@dm2].read
|
186
|
+
assert_equal "c" * 100, @p.stage[@dm3].read
|
187
|
+
|
188
|
+
assert_equal 200, @p.stage_size
|
189
|
+
|
190
|
+
assert_equal 3, @p.queue.size
|
191
|
+
assert_equal "0" * 100, @p.queue[0].read
|
192
|
+
assert_equal "a" * 100, @p.queue[1].read
|
193
|
+
assert_equal "a" * 3, @p.queue[2].read
|
194
|
+
|
195
|
+
assert_equal 203, @p.queue_size
|
196
|
+
|
197
|
+
# staged, queued
|
198
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
199
|
+
assert_equal 1, @p.queued_num[@dm0]
|
200
|
+
assert_equal 2, @p.queued_num[@dm1]
|
201
|
+
end
|
202
|
+
|
203
|
+
test '#close closes all chunks in in dequeued, enqueued and staged' do
|
204
|
+
dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
|
205
|
+
cx = create_chunk(dmx, ["x" * 1024])
|
206
|
+
@p.dequeued[cx.unique_id] = cx
|
207
|
+
|
208
|
+
staged_chunks = @p.stage.values.dup
|
209
|
+
queued_chunks = @p.queue.dup
|
210
|
+
|
211
|
+
@p.close
|
212
|
+
|
213
|
+
assert cx.closed
|
214
|
+
assert{ staged_chunks.all?{|c| c.closed } }
|
215
|
+
assert{ queued_chunks.all?{|c| c.closed } }
|
216
|
+
end
|
217
|
+
|
218
|
+
test '#terminate initializes all internal states' do
|
219
|
+
dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
|
220
|
+
cx = create_chunk(dmx, ["x" * 1024])
|
221
|
+
@p.dequeued[cx.unique_id] = cx
|
222
|
+
|
223
|
+
@p.close
|
224
|
+
|
225
|
+
@p.terminate
|
226
|
+
|
227
|
+
assert_nil @p.stage
|
228
|
+
assert_nil @p.queue
|
229
|
+
assert_nil @p.dequeued
|
230
|
+
assert_nil @p.queued_num
|
231
|
+
assert_nil @p.instance_eval{ @metadata_list } # #metadata_list does #dup for @metadata_list
|
232
|
+
assert_equal 0, @p.stage_size
|
233
|
+
assert_equal 0, @p.queue_size
|
234
|
+
end
|
235
|
+
|
236
|
+
test '#metadata_list returns list of metadata on stage or in queue' do
|
237
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
238
|
+
end
|
239
|
+
|
240
|
+
test '#new_metadata creates metadata instance without inserting metadata_list' do
|
241
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
242
|
+
_m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
243
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
244
|
+
end
|
245
|
+
|
246
|
+
test '#add_metadata adds unknown metadata into list, or return known metadata if already exists' do
|
247
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
248
|
+
|
249
|
+
m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
250
|
+
_mx = @p.add_metadata(m)
|
251
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
|
252
|
+
assert_equal m.object_id, m.object_id
|
253
|
+
|
254
|
+
my = @p.add_metadata(@dm1)
|
255
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
|
256
|
+
assert_equal @dm1, my
|
257
|
+
assert{ @dm1.object_id != my.object_id } # 'my' is an object created in #resume
|
258
|
+
end
|
259
|
+
|
260
|
+
test '#metadata is utility method to create-add-and-return metadata' do
|
261
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
262
|
+
|
263
|
+
m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
264
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
|
265
|
+
m2 = @p.metadata(timekey: @dm3.timekey)
|
266
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
|
267
|
+
assert_equal @dm3, m2
|
268
|
+
end
|
269
|
+
|
270
|
+
test '#queued_records returns total number of size in all chunks in queue' do
|
271
|
+
assert_equal 3, @p.queue.size
|
272
|
+
|
273
|
+
r0 = @p.queue[0].size
|
274
|
+
assert_equal 1, r0
|
275
|
+
r1 = @p.queue[1].size
|
276
|
+
assert_equal 1, r1
|
277
|
+
r2 = @p.queue[2].size
|
278
|
+
assert_equal 1, r2
|
279
|
+
|
280
|
+
assert_equal (r0+r1+r2), @p.queued_records
|
281
|
+
end
|
282
|
+
|
283
|
+
test '#queued? returns queue has any chunks or not without arguments' do
|
284
|
+
assert @p.queued?
|
285
|
+
|
286
|
+
@p.queue.reject!{|_c| true }
|
287
|
+
assert !@p.queued?
|
288
|
+
end
|
289
|
+
|
290
|
+
test '#queued? returns queue has chunks for specified metadata with an argument' do
|
291
|
+
assert @p.queued?(@dm0)
|
292
|
+
assert @p.queued?(@dm1)
|
293
|
+
assert !@p.queued?(@dm2)
|
294
|
+
end
|
295
|
+
|
296
|
+
test '#enqueue_chunk enqueues a chunk on stage with specified metadata' do
|
297
|
+
assert_equal 2, @p.stage.size
|
298
|
+
assert_equal [@dm2,@dm3], @p.stage.keys
|
299
|
+
assert_equal 3, @p.queue.size
|
300
|
+
assert_nil @p.queued_num[@dm2]
|
301
|
+
|
302
|
+
assert_equal 200, @p.stage_size
|
303
|
+
assert_equal 203, @p.queue_size
|
304
|
+
|
305
|
+
@p.enqueue_chunk(@dm2)
|
306
|
+
|
307
|
+
assert_equal [@dm3], @p.stage.keys
|
308
|
+
assert_equal @dm2, @p.queue.last.metadata
|
309
|
+
assert_equal 1, @p.queued_num[@dm2]
|
310
|
+
assert_equal 100, @p.stage_size
|
311
|
+
assert_equal 303, @p.queue_size
|
312
|
+
end
|
313
|
+
|
314
|
+
test '#enqueue_chunk ignores empty chunks' do
|
315
|
+
assert_equal 3, @p.queue.size
|
316
|
+
|
317
|
+
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
318
|
+
c = create_chunk(m, [''])
|
319
|
+
@p.stage[m] = c
|
320
|
+
assert @p.stage[m].empty?
|
321
|
+
assert !c.closed
|
322
|
+
|
323
|
+
@p.enqueue_chunk(m)
|
324
|
+
|
325
|
+
assert_nil @p.stage[m]
|
326
|
+
assert_equal 3, @p.queue.size
|
327
|
+
assert_nil @p.queued_num[m]
|
328
|
+
assert c.closed
|
329
|
+
end
|
330
|
+
|
331
|
+
test '#enqueue_chunk calls #enqueued! if chunk responds to it' do
|
332
|
+
assert_equal 3, @p.queue.size
|
333
|
+
m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
334
|
+
c = create_chunk(m, ['c' * 256])
|
335
|
+
callback_called = false
|
336
|
+
(class << c; self; end).module_eval do
|
337
|
+
define_method(:enqueued!){ callback_called = true }
|
338
|
+
end
|
339
|
+
|
340
|
+
@p.stage[m] = c
|
341
|
+
@p.enqueue_chunk(m)
|
342
|
+
|
343
|
+
assert_equal c, @p.queue.last
|
344
|
+
assert callback_called
|
345
|
+
end
|
346
|
+
|
347
|
+
test '#enqueue_all enqueues chunks on stage which given block returns true with' do
|
348
|
+
m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
349
|
+
c1 = create_chunk(m1, ['c' * 256])
|
350
|
+
@p.stage[m1] = c1
|
351
|
+
m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
|
352
|
+
c2 = create_chunk(m2, ['c' * 256])
|
353
|
+
@p.stage[m2] = c2
|
354
|
+
|
355
|
+
assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
|
356
|
+
assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
|
357
|
+
|
358
|
+
@p.enqueue_all{ |m, c| m.timekey < Time.parse('2016-04-11 16:41:00 +0000').to_i }
|
359
|
+
|
360
|
+
assert_equal [m2], @p.stage.keys
|
361
|
+
assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1], @p.queue.map(&:metadata)
|
362
|
+
end
|
363
|
+
|
364
|
+
# #enqueue_all without a block drains the whole stage into the queue,
# preserving the staged order after the pre-existing queued chunks.
test '#enqueue_all enqueues all chunks on stage without block' do
  meta_a = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
  @p.stage[meta_a] = create_chunk(meta_a, ['c' * 256])
  meta_b = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
  @p.stage[meta_b] = create_chunk(meta_b, ['c' * 256])

  assert_equal [@dm2, @dm3, meta_a, meta_b], @p.stage.keys
  assert_equal [@dm0, @dm1, @dm1], @p.queue.map(&:metadata)

  @p.enqueue_all

  assert_equal [], @p.stage.keys
  assert_equal [@dm0, @dm1, @dm1, @dm2, @dm3, meta_a, meta_b], @p.queue.map(&:metadata)
end
|
380
|
+
|
381
|
+
# #dequeue_chunk pops chunks from the head of the queue one at a time, records each
# popped chunk in @p.dequeued keyed by its unique_id, and returns nil once the queue
# is empty. Fixture queue order is [@dm0, @dm1, @dm1] (set up by the overridden #resume).
test '#dequeue_chunk dequeues a chunk from queue if a chunk exists' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)

  m1 = @p.dequeue_chunk
  assert_equal @dm0, m1.metadata
  assert_equal @dm0, @p.dequeued[m1.unique_id].metadata

  m2 = @p.dequeue_chunk
  assert_equal @dm1, m2.metadata
  assert_equal @dm1, @p.dequeued[m2.unique_id].metadata

  m3 = @p.dequeue_chunk
  assert_equal @dm1, m3.metadata
  assert_equal @dm1, @p.dequeued[m3.unique_id].metadata

  # Queue exhausted: a fourth dequeue yields nil.
  m4 = @p.dequeue_chunk
  assert_nil m4
end
|
400
|
+
|
401
|
+
# #takeback_chunk(unique_id) returns a previously dequeued chunk to the HEAD of the
# queue (not the tail), removes it from @p.dequeued, and returns true on success.
test '#takeback_chunk resumes a chunk from dequeued to queued at the head of queue, and returns true' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)

  m1 = @p.dequeue_chunk
  assert_equal @dm0, m1.metadata
  assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
  assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({m1.unique_id => m1}, @p.dequeued)

  assert @p.takeback_chunk(m1.unique_id)

  # The chunk is back at the head, restoring the original queue order.
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)
end
|
416
|
+
|
417
|
+
# #purge_chunk(unique_id) drops a dequeued chunk for good: it is removed from
# @p.dequeued and its #purged flag is set. Metadata still referenced by other
# chunks (here @dm0/@dm1 remain in the queue) stays in metadata_list.
test '#purge_chunk removes a chunk specified by argument id from dequeued chunks' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)
  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

  m0 = @p.dequeue_chunk
  m1 = @p.dequeue_chunk

  # Return the first chunk to the queue; only m1 remains dequeued.
  assert @p.takeback_chunk(m0.unique_id)

  assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
  assert_equal({m1.unique_id => m1}, @p.dequeued)

  assert !m1.purged

  @p.purge_chunk(m1.unique_id)
  assert m1.purged

  # metadata_list is unchanged because @dm1 chunks still exist in the queue.
  assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)
  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
end
|
439
|
+
|
440
|
+
# When the purged chunk was the LAST holder of its metadata (no chunk on stage or
# in queue uses it any more), #purge_chunk also drops that metadata from
# metadata_list — here @dm0 disappears after its only chunk is purged.
test '#purge_chunk removes an argument metadata from metadata_list if no chunks exist on stage or in queue' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)
  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

  m0 = @p.dequeue_chunk

  assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({m0.unique_id => m0}, @p.dequeued)

  assert !m0.purged

  @p.purge_chunk(m0.unique_id)
  assert m0.purged

  # @dm0 had no other chunks, so it is gone from metadata_list.
  assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)
  assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
end
|
459
|
+
|
460
|
+
# Once a chunk has been purged, its unique_id is gone from @p.dequeued, so
# #takeback_chunk must return false and leave queue/dequeued/metadata_list untouched.
test '#takeback_chunk returns false if specified chunk_id is already purged' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)
  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

  m0 = @p.dequeue_chunk

  assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({m0.unique_id => m0}, @p.dequeued)

  assert !m0.purged

  @p.purge_chunk(m0.unique_id)
  assert m0.purged

  assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)
  assert_equal [@dm2,@dm3,@dm1], @p.metadata_list

  # Taking back a purged chunk is a no-op that reports failure.
  assert !@p.takeback_chunk(m0.unique_id)

  assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal({}, @p.dequeued)
  assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
end
|
485
|
+
|
486
|
+
# #clear_queue! purges every queued chunk (each gets #purged set) and resets
# queue_size to 0, while staged chunks and the dequeued map are left alone.
test '#clear_queue! removes all chunks in queue, but leaves staged chunks' do
  qchunks = @p.queue.dup

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal 2, @p.stage.size
  assert_equal({}, @p.dequeued)

  @p.clear_queue!

  assert_equal [], @p.queue
  assert_equal 0, @p.queue_size
  assert_equal 2, @p.stage.size
  assert_equal({}, @p.dequeued)

  # Every chunk that was in the queue must have been purged, not merely dropped.
  assert{ qchunks.all?{ |c| c.purged } }
end
|
502
|
+
|
503
|
+
# Writing an empty array for a metadata key is a no-op: neither the stage nor
# the queue may change.
test '#write returns immediately if argument data is empty array' do
  queue_before = [@dm0, @dm1, @dm1]
  stage_before = [@dm2, @dm3]
  assert_equal queue_before, @p.queue.map(&:metadata)
  assert_equal stage_before, @p.stage.keys

  meta = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  @p.write({meta => []})

  # Nothing was staged or queued for the new metadata.
  assert_equal queue_before, @p.queue.map(&:metadata)
  assert_equal stage_before, @p.stage.keys
end
|
514
|
+
|
515
|
+
# Inflating stage_size + queue_size past total_limit_size makes the buffer
# non-storable, so any subsequent #write must raise BufferOverflowError.
test '#write raises BufferOverflowError if buffer is not storable' do
  @p.stage_size = 256 * 1024 * 1024
  @p.queue_size = 256 * 1024 * 1024

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
    @p.write({m => ["x" * 256]})
  end
end
|
525
|
+
|
526
|
+
# Writing with metadata that already has a staged chunk appends to that chunk
# (append_count goes 1 -> 2) and grows stage_size by exactly the appended bytes.
test '#write stores data into an existing chunk with metadata specified' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  dm3data = @p.stage[@dm3].read.dup
  prev_stage_size = @p.stage_size

  assert_equal 1, @p.stage[@dm3].append_count

  @p.write({@dm3 => ["x" * 256, "y" * 256, "z" * 256]})

  assert_equal 2, @p.stage[@dm3].append_count
  # New bytes are appended after the chunk's pre-existing content.
  assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
  assert_equal (prev_stage_size + 768), @p.stage_size

  # Queue and stage membership are unchanged.
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys
end
|
544
|
+
|
545
|
+
# Writing with a brand-new metadata creates a fresh staged chunk holding exactly
# the written bytes, and stage_size grows by that amount.
test '#write creates new chunk and store data into it if there are no chunks for specified metadata' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  prev_stage_size = @p.stage_size

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  @p.write({m => ["x" * 256, "y" * 256, "z" * 256]})

  assert_equal 1, @p.stage[m].append_count
  assert_equal ("x" * 256 + "y" * 256 + "z" * 256), @p.stage[m].read
  assert_equal (prev_stage_size + 768), @p.stage_size

  # The new metadata is appended to the stage; queue is untouched.
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys
end
|
562
|
+
|
563
|
+
# When a staged chunk cannot absorb a new write (it is beyond chunk_full_threshold
# of chunk_limit_size), #write enqueues the full chunk and starts a new staged one.
test '#write tries to enqueue and store data into a new chunk if existing chunk is full' do
  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
  assert_equal 0.95, @p.chunk_full_threshold

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  # 7.5MB total: close to (but under) the 8MB chunk limit, and over the
  # 0.95 fullness threshold, so the chunk counts as "full" for the next write.
  row = "x" * 1024 * 1024
  small_row = "x" * 1024 * 512
  @p.write({m => [row] * 7 + [small_row]})

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys
  assert_equal 1, @p.stage[m].append_count

  @p.write({m => [row]})

  # The full chunk moved to the queue; a new 1MB staged chunk took its place.
  assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys
  assert_equal 1, @p.stage[m].append_count
  assert_equal 1024*1024, @p.stage[m].bytesize
  assert_equal 3, @p.queue.last.append_count # 1 -> write (2) -> write_step_by_step (3)
  # The failed append on the full chunk was rolled back before enqueueing.
  assert @p.queue.last.rollbacked
end
|
589
|
+
|
590
|
+
# If a chunk's #commit raises during #write, the buffer must roll the chunk back
# so its content is exactly what it held before the failed write.
test '#write rollbacks if commit raises errors' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  row = "x" * 1024
  @p.write({m => [row] * 8})

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys

  target_chunk = @p.stage[m]

  assert_equal 1, target_chunk.append_count
  assert !target_chunk.rollbacked

  # Sabotage only this chunk: its next #commit raises.
  (class << target_chunk; self; end).module_eval do
    define_method(:commit){ raise "yay" }
  end

  # NOTE(review): test-unit's assert_raise normally takes an exception class or
  # instance; passing a bare String here is unusual — confirm it matches the
  # RuntimeError("yay") as intended.
  assert_raise "yay" do
    @p.write({m => [row]})
  end

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys

  # The append happened (count is 2) but was rolled back: content is still row * 8.
  assert_equal 2, target_chunk.append_count
  assert target_chunk.rollbacked
  assert_equal row * 8, target_chunk.read
end
|
622
|
+
|
623
|
+
# Bulk writes with an empty hash, or with an empty-string payload, must be
# complete no-ops on both the queue and the stage.
test '#write w/ bulk returns immediately if argument data is nil or empty string' do
  queue_before = [@dm0, @dm1, @dm1]
  stage_before = [@dm2, @dm3]
  assert_equal queue_before, @p.queue.map(&:metadata)
  assert_equal stage_before, @p.stage.keys

  meta = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  @p.write({}, bulk: true)
  @p.write({meta => ['', 0]}, bulk: true)

  assert_equal queue_before, @p.queue.map(&:metadata)
  assert_equal stage_before, @p.stage.keys
end
|
635
|
+
|
636
|
+
# Bulk-mode counterpart of the overflow test: with the buffer artificially over
# its total limit, a bulk #write must raise BufferOverflowError.
test '#write w/ bulk raises BufferOverflowError if buffer is not storable' do
  @p.stage_size = 256 * 1024 * 1024
  @p.queue_size = 256 * 1024 * 1024

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
    # Bulk payloads are [serialized_string, record_count] pairs.
    @p.write({m => ["x" * 256, 1]}, bulk: true)
  end
end
|
646
|
+
|
647
|
+
# A bulk write onto metadata with an existing staged chunk appends the serialized
# payload to that chunk, exactly like the non-bulk variant.
test '#write w/ bulk stores data into an existing chunk with metadata specified' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  dm3data = @p.stage[@dm3].read.dup
  prev_stage_size = @p.stage_size

  assert_equal 1, @p.stage[@dm3].append_count

  # Bulk payloads are [serialized_string, record_count] pairs.
  @p.write({@dm3 => [("x"*256 + "y"*256 + "z"*256), 3]}, bulk: true)

  assert_equal 2, @p.stage[@dm3].append_count
  assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
  assert_equal (prev_stage_size + 768), @p.stage_size

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys
end
|
665
|
+
|
666
|
+
# A bulk write for a brand-new metadata creates one staged chunk holding the
# whole 7.5MB payload in a single append.
test '#write w/ bulk creates new chunk and store data into it if there are not chunks for specified metadata' do
  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  row = "x" * 1024 * 1024
  row_half = "x" * 1024 * 512
  # 7.5MB fits inside the 8MB chunk limit, so no enqueue happens.
  @p.write({m => [row*7 + row_half, 8]}, bulk: true)

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys
  assert_equal 1, @p.stage[m].append_count
end
|
682
|
+
|
683
|
+
# When the staged chunk lacks room for an incoming bulk payload, #write rolls the
# failed append back, enqueues the full chunk, and stores the payload in a new
# staged chunk.
test '#write w/ bulk tries to enqueue and store data into a new chunk if existing chunk does not have space for bulk' do
  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  row = "x" * 1024 * 1024
  row_half = "x" * 1024 * 512
  # Pre-fill a staged chunk to 7.5MB of the 8MB limit.
  @p.write({m => [row*7 + row_half, 8]}, bulk: true)

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys
  assert_equal 1, @p.stage[m].append_count

  # Another 1MB cannot fit: the old chunk is enqueued, a new one staged.
  @p.write({m => [row, 1]}, bulk: true)

  assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys
  assert_equal 1, @p.stage[m].append_count
  assert_equal 1024*1024, @p.stage[m].bytesize
  assert_equal 2, @p.queue.last.append_count # 1 -> write (2) -> rollback&enqueue
  assert @p.queue.last.rollbacked
end
|
708
|
+
|
709
|
+
# A bulk payload that exactly fills a chunk (8 x 1MB == chunk_limit_size) is
# enqueued immediately after the write instead of being left on stage.
test '#write w/ bulk enqueues chunk if it is already full after adding bulk data' do
  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  row = "x" * 1024 * 1024
  @p.write({m => [row * 8, 8]}, bulk: true)

  # The chunk went straight to the queue; nothing for m stays staged.
  assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys
  assert_equal 1, @p.queue.last.append_count
end
|
724
|
+
|
725
|
+
# Bulk-mode counterpart of the commit-failure test: when #commit raises, the bulk
# append must be rolled back so the chunk still holds its pre-write content.
test '#write w/ bulk rollbacks if commit raises errors' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

  row = "x" * 1024
  row_half = "x" * 512
  @p.write({m => [row * 7 + row_half, 8]}, bulk: true)

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys

  target_chunk = @p.stage[m]

  assert_equal 1, target_chunk.append_count
  assert !target_chunk.rollbacked

  # Sabotage only this chunk: its next #commit raises.
  (class << target_chunk; self; end).module_eval do
    define_method(:commit){ raise "yay" }
  end

  # NOTE(review): test-unit's assert_raise normally takes an exception class or
  # instance; a bare String argument is unusual — confirm intended matching.
  assert_raise "yay" do
    @p.write({m => [row, 1]}, bulk: true)
  end

  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3,m], @p.stage.keys

  # Appended (count is 2) but rolled back: content is the original 7.5KB.
  assert_equal 2, target_chunk.append_count
  assert target_chunk.rollbacked
  assert_equal row * 7 + row_half, target_chunk.read
end
|
758
|
+
|
759
|
+
# A single #write call may carry several metadata => rows pairs; each pair lands
# on its own staged chunk.
test '#write writes many metadata and data pairs at once' do
  assert_equal [@dm0, @dm1, @dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2, @dm3], @p.stage.keys

  payload = "x" * 1024
  @p.write({ @dm0 => [payload] * 3, @dm1 => [payload] * 2 }, bulk: false)

  # Both new metadata now have staged chunks, after the pre-existing ones.
  assert_equal [@dm2, @dm3, @dm0, @dm1], @p.stage.keys
end
|
768
|
+
|
769
|
+
# Multi-pair #write must be all-or-nothing: if appending to ANY chunk fails,
# every chunk touched by the call is rolled back and none is committed.
test '#write does not commit on any chunks if any append operation on chunk fails' do
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
  assert_equal [@dm2,@dm3], @p.stage.keys

  row = "x" * 1024
  @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] }, bulk: false)

  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys

  # Snapshot record counts and rollback flags before the failing write.
  dm2_size = @p.stage[@dm2].size
  assert !@p.stage[@dm2].rollbacked
  dm3_size = @p.stage[@dm3].size
  assert !@p.stage[@dm3].rollbacked

  assert{ @p.stage[@dm0].size == 3 }
  assert !@p.stage[@dm0].rollbacked
  assert{ @p.stage[@dm1].size == 2 }
  assert !@p.stage[@dm1].rollbacked

  # Make the @dm1 chunk's next append raise (test-double hook on the dummy chunk).
  @p.stage[@dm1].failing = true

  assert_raise(FluentPluginBufferTest::DummyMemoryChunkError) do
    @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] }, bulk: false)
  end

  # All four chunks were rolled back to their pre-write sizes.
  assert{ @p.stage[@dm2].size == dm2_size }
  assert @p.stage[@dm2].rollbacked
  assert{ @p.stage[@dm3].size == dm3_size }
  assert @p.stage[@dm3].rollbacked

  assert{ @p.stage[@dm0].size == 3 }
  assert @p.stage[@dm0].rollbacked
  assert{ @p.stage[@dm1].size == 2 }
  assert @p.stage[@dm1].rollbacked
end
|
804
|
+
end
|
805
|
+
|
806
|
+
# Buffer with small limits (1KB chunks, 10KB total) and a fixed resume fixture:
# stage = {dm2: 7x128B, dm3: 5x128B}, queue = 5 dm0 chunks + 4 dm1 chunks.
# These exact sizes drive the arithmetic in every test below.
sub_test_case 'with configuration for test with lower limits' do
  setup do
    @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240})
    # Local aliases (dm0..dm3) are captured by the resume closure below.
    @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
    @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
    @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
    # Override #resume on this instance only, so #start loads the fixture above.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        staged = {
          dm2 => create_chunk(dm2, ["b" * 128] * 7),
          dm3 => create_chunk(dm3, ["c" * 128] * 5),
        }
        queued = [
          create_chunk(dm0, ["0" * 128] * 8),
          create_chunk(dm0, ["0" * 128] * 8),
          create_chunk(dm0, ["0" * 128] * 8),
          create_chunk(dm0, ["0" * 128] * 8),
          create_chunk(dm0, ["0" * 128] * 8),
          create_chunk(dm1, ["a" * 128] * 8),
          create_chunk(dm1, ["a" * 128] * 8),
          create_chunk(dm1, ["a" * 128] * 8), # 8
          create_chunk(dm1, ["a" * 128] * 3),
        ]
        return staged, queued
      }
    end
    @p.start
  end

  # Fixture holds 9728 bytes; one more 128B row hits total_limit_size (10240)
  # exactly, at which point #storable? flips to false.
  test '#storable? returns false when too many data exist' do
    assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0,@dm1,@dm1,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    assert_equal 128*8*8+128*3, @p.queue_size
    assert_equal 128*7+128*5, @p.stage_size

    assert @p.storable?

    dm3 = @p.metadata(timekey: @dm3.timekey)
    @p.write({dm3 => ["c" * 128]})

    assert_equal 10240, (@p.stage_size + @p.queue_size)
    assert !@p.storable?
  end

  # chunk_limit_size is 1024: 8x128 is at the limit (not over); one more byte is over.
  test '#chunk_size_over? returns true if chunk size is bigger than limit' do
    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    c1 = create_chunk(m, ["a" * 128] * 8)
    assert !@p.chunk_size_over?(c1)

    c2 = create_chunk(m, ["a" * 128] * 9)
    assert @p.chunk_size_over?(c2)

    c3 = create_chunk(m, ["a" * 128] * 8 + ["a"])
    assert @p.chunk_size_over?(c3)
  end

  # "Full" means >= chunk_full_threshold (0.95) of the limit: 7x128 (896B < 972.8)
  # is not full, 8x128 (1024B) is; 832B is well under.
  test '#chunk_size_full? returns true if chunk size is enough big against limit' do
    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    c1 = create_chunk(m, ["a" * 128] * 7)
    assert !@p.chunk_size_full?(c1)

    c2 = create_chunk(m, ["a" * 128] * 8)
    assert @p.chunk_size_full?(c2)

    assert_equal 0.95, @p.chunk_full_threshold
    c3 = create_chunk(m, ["a" * 128] * 6 + ["a" * 64])
    assert !@p.chunk_size_full?(c3)
  end

  # 9x128 = 1152 bytes can never fit a 1024-byte chunk, so #write must raise.
  test '#write raises BufferChunkOverflowError if incoming data is bigger than chunk bytes limit' do
    assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0,@dm1,@dm1,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
      @p.write({m => ["a" * 128] * 9})
    end
  end
end
|
890
|
+
|
891
|
+
# Buffer configured with chunk_records_limit = 6: chunk over/full checks must
# trip on record COUNT even when the byte size is well under chunk_limit_size.
sub_test_case 'with configuration includes chunk_records_limit' do
  setup do
    @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "chunk_records_limit" => 6})
    # Local aliases (dm0..dm3) are captured by the resume closure below.
    @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
    @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
    @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
    # Override #resume on this instance only, so #start loads a fixed fixture.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        staged = {
          dm2 => create_chunk(dm2, ["b" * 128] * 1),
          dm3 => create_chunk(dm3, ["c" * 128] * 2),
        }
        queued = [
          create_chunk(dm0, ["0" * 128] * 6),
          create_chunk(dm1, ["a" * 128] * 6),
          create_chunk(dm1, ["a" * 128] * 6),
          create_chunk(dm1, ["a" * 128] * 3),
        ]
        return staged, queued
      }
    end
    @p.start
  end

  # 6 records is at the limit (not over); a 7th record makes the chunk over-sized
  # regardless of its tiny byte size.
  test '#chunk_size_over? returns true if too many records exists in a chunk even if its bytes is less than limit' do
    assert_equal 6, @p.chunk_records_limit

    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    c1 = create_chunk(m, ["a" * 128] * 6)
    assert_equal 6, c1.size
    assert !@p.chunk_size_over?(c1)

    c2 = create_chunk(m, ["a" * 128] * 7)
    assert @p.chunk_size_over?(c2)

    c3 = create_chunk(m, ["a" * 128] * 6 + ["a"])
    assert @p.chunk_size_over?(c3)
  end

  # 5 records is below the limit; reaching 6 records marks the chunk "full"
  # even though its bytes are far under chunk_limit_size.
  test '#chunk_size_full? returns true if enough many records exists in a chunk even if its bytes is less than limit' do
    assert_equal 6, @p.chunk_records_limit

    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    c1 = create_chunk(m, ["a" * 128] * 5)
    assert_equal 5, c1.size
    assert !@p.chunk_size_full?(c1)

    c2 = create_chunk(m, ["a" * 128] * 6)
    assert @p.chunk_size_full?(c2)

    c3 = create_chunk(m, ["a" * 128] * 5 + ["a"])
    assert @p.chunk_size_full?(c3)
  end
end
|
948
|
+
|
949
|
+
# Buffer configured with queue_length_limit = 5: #configure should derive
# total_limit_size from chunk_limit_size * queue_length_limit, overriding the
# explicitly supplied total_limit_size (10240 -> 5120).
sub_test_case 'with configuration includes queue_length_limit' do
  setup do
    @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "queue_length_limit" => 5})
    # Local aliases (dm0..dm3) are captured by the resume closure below.
    @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
    @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
    @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
    # Override #resume on this instance only, so #start loads a fixed fixture.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        staged = {
          dm2 => create_chunk(dm2, ["b" * 128] * 1),
          dm3 => create_chunk(dm3, ["c" * 128] * 2),
        }
        queued = [
          create_chunk(dm0, ["0" * 128] * 6),
          create_chunk(dm1, ["a" * 128] * 6),
          create_chunk(dm1, ["a" * 128] * 6),
          create_chunk(dm1, ["a" * 128] * 3),
        ]
        return staged, queued
      }
    end
    @p.start
  end

  test '#configure will overwrite standard configuration if queue_length_limit' do
    assert_equal 1024, @p.chunk_limit_size
    assert_equal 5, @p.queue_length_limit
    # total_limit_size is recomputed as chunk_limit_size * queue_length_limit.
    assert_equal (1024*5), @p.total_limit_size
  end
end
|
980
|
+
|
981
|
+
end
|