fluentd 0.14.4-x86-mingw32
Potentially problematic release: this version of fluentd might be problematic.
- checksums.yaml +7 -0
- data/.github/ISSUE_TEMPLATE.md +6 -0
- data/.gitignore +26 -0
- data/.travis.yml +45 -0
- data/AUTHORS +2 -0
- data/CONTRIBUTING.md +35 -0
- data/COPYING +14 -0
- data/ChangeLog +276 -0
- data/Gemfile +9 -0
- data/README.md +51 -0
- data/Rakefile +53 -0
- data/Vagrantfile +17 -0
- data/appveyor.yml +41 -0
- data/bin/fluent-debug +5 -0
- data/example/copy_roundrobin.conf +39 -0
- data/example/filter_stdout.conf +22 -0
- data/example/in_forward.conf +11 -0
- data/example/in_http.conf +14 -0
- data/example/in_out_forward.conf +17 -0
- data/example/in_syslog.conf +15 -0
- data/example/in_tail.conf +14 -0
- data/example/in_tcp.conf +13 -0
- data/example/in_udp.conf +13 -0
- data/example/multi_filters.conf +61 -0
- data/example/out_buffered_null.conf +32 -0
- data/example/out_copy.conf +20 -0
- data/example/out_file.conf +13 -0
- data/example/out_forward.conf +35 -0
- data/example/out_forward_buf_file.conf +23 -0
- data/example/v0_12_filter.conf +78 -0
- data/example/v1_literal_example.conf +36 -0
- data/fluent.conf +139 -0
- data/fluentd.gemspec +51 -0
- data/lib/fluent/agent.rb +194 -0
- data/lib/fluent/command/bundler_injection.rb +45 -0
- data/lib/fluent/command/cat.rb +319 -0
- data/lib/fluent/command/debug.rb +102 -0
- data/lib/fluent/command/fluentd.rb +273 -0
- data/lib/fluent/compat/call_super_mixin.rb +67 -0
- data/lib/fluent/compat/exec_util.rb +129 -0
- data/lib/fluent/compat/file_util.rb +54 -0
- data/lib/fluent/compat/filter.rb +68 -0
- data/lib/fluent/compat/formatter.rb +111 -0
- data/lib/fluent/compat/formatter_utils.rb +85 -0
- data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
- data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
- data/lib/fluent/compat/input.rb +49 -0
- data/lib/fluent/compat/output.rb +677 -0
- data/lib/fluent/compat/output_chain.rb +60 -0
- data/lib/fluent/compat/parser.rb +180 -0
- data/lib/fluent/compat/parser_utils.rb +40 -0
- data/lib/fluent/compat/propagate_default.rb +62 -0
- data/lib/fluent/compat/record_filter_mixin.rb +34 -0
- data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
- data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
- data/lib/fluent/compat/socket_util.rb +165 -0
- data/lib/fluent/compat/string_util.rb +34 -0
- data/lib/fluent/compat/structured_format_mixin.rb +26 -0
- data/lib/fluent/compat/type_converter.rb +90 -0
- data/lib/fluent/config.rb +56 -0
- data/lib/fluent/config/basic_parser.rb +123 -0
- data/lib/fluent/config/configure_proxy.rb +366 -0
- data/lib/fluent/config/dsl.rb +149 -0
- data/lib/fluent/config/element.rb +218 -0
- data/lib/fluent/config/error.rb +26 -0
- data/lib/fluent/config/literal_parser.rb +251 -0
- data/lib/fluent/config/parser.rb +107 -0
- data/lib/fluent/config/section.rb +212 -0
- data/lib/fluent/config/types.rb +136 -0
- data/lib/fluent/config/v1_parser.rb +190 -0
- data/lib/fluent/configurable.rb +176 -0
- data/lib/fluent/daemon.rb +15 -0
- data/lib/fluent/engine.rb +220 -0
- data/lib/fluent/env.rb +27 -0
- data/lib/fluent/event.rb +287 -0
- data/lib/fluent/event_router.rb +259 -0
- data/lib/fluent/filter.rb +21 -0
- data/lib/fluent/formatter.rb +23 -0
- data/lib/fluent/input.rb +21 -0
- data/lib/fluent/label.rb +38 -0
- data/lib/fluent/load.rb +36 -0
- data/lib/fluent/log.rb +445 -0
- data/lib/fluent/match.rb +141 -0
- data/lib/fluent/mixin.rb +31 -0
- data/lib/fluent/msgpack_factory.rb +62 -0
- data/lib/fluent/output.rb +26 -0
- data/lib/fluent/output_chain.rb +23 -0
- data/lib/fluent/parser.rb +23 -0
- data/lib/fluent/plugin.rb +161 -0
- data/lib/fluent/plugin/bare_output.rb +63 -0
- data/lib/fluent/plugin/base.rb +130 -0
- data/lib/fluent/plugin/buf_file.rb +154 -0
- data/lib/fluent/plugin/buf_memory.rb +34 -0
- data/lib/fluent/plugin/buffer.rb +603 -0
- data/lib/fluent/plugin/buffer/chunk.rb +160 -0
- data/lib/fluent/plugin/buffer/file_chunk.rb +323 -0
- data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
- data/lib/fluent/plugin/exec_util.rb +22 -0
- data/lib/fluent/plugin/file_util.rb +22 -0
- data/lib/fluent/plugin/file_wrapper.rb +120 -0
- data/lib/fluent/plugin/filter.rb +93 -0
- data/lib/fluent/plugin/filter_grep.rb +75 -0
- data/lib/fluent/plugin/filter_record_transformer.rb +342 -0
- data/lib/fluent/plugin/filter_stdout.rb +53 -0
- data/lib/fluent/plugin/formatter.rb +45 -0
- data/lib/fluent/plugin/formatter_csv.rb +47 -0
- data/lib/fluent/plugin/formatter_hash.rb +29 -0
- data/lib/fluent/plugin/formatter_json.rb +44 -0
- data/lib/fluent/plugin/formatter_ltsv.rb +41 -0
- data/lib/fluent/plugin/formatter_msgpack.rb +29 -0
- data/lib/fluent/plugin/formatter_out_file.rb +78 -0
- data/lib/fluent/plugin/formatter_single_value.rb +34 -0
- data/lib/fluent/plugin/formatter_stdout.rb +74 -0
- data/lib/fluent/plugin/in_debug_agent.rb +64 -0
- data/lib/fluent/plugin/in_dummy.rb +135 -0
- data/lib/fluent/plugin/in_exec.rb +149 -0
- data/lib/fluent/plugin/in_forward.rb +366 -0
- data/lib/fluent/plugin/in_gc_stat.rb +52 -0
- data/lib/fluent/plugin/in_http.rb +422 -0
- data/lib/fluent/plugin/in_monitor_agent.rb +401 -0
- data/lib/fluent/plugin/in_object_space.rb +90 -0
- data/lib/fluent/plugin/in_syslog.rb +204 -0
- data/lib/fluent/plugin/in_tail.rb +838 -0
- data/lib/fluent/plugin/in_tcp.rb +41 -0
- data/lib/fluent/plugin/in_udp.rb +37 -0
- data/lib/fluent/plugin/in_unix.rb +201 -0
- data/lib/fluent/plugin/input.rb +33 -0
- data/lib/fluent/plugin/multi_output.rb +95 -0
- data/lib/fluent/plugin/out_buffered_null.rb +59 -0
- data/lib/fluent/plugin/out_buffered_stdout.rb +70 -0
- data/lib/fluent/plugin/out_copy.rb +42 -0
- data/lib/fluent/plugin/out_exec.rb +114 -0
- data/lib/fluent/plugin/out_exec_filter.rb +393 -0
- data/lib/fluent/plugin/out_file.rb +167 -0
- data/lib/fluent/plugin/out_forward.rb +646 -0
- data/lib/fluent/plugin/out_null.rb +27 -0
- data/lib/fluent/plugin/out_relabel.rb +28 -0
- data/lib/fluent/plugin/out_roundrobin.rb +80 -0
- data/lib/fluent/plugin/out_stdout.rb +48 -0
- data/lib/fluent/plugin/out_stream.rb +130 -0
- data/lib/fluent/plugin/output.rb +1020 -0
- data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
- data/lib/fluent/plugin/parser.rb +175 -0
- data/lib/fluent/plugin/parser_apache.rb +28 -0
- data/lib/fluent/plugin/parser_apache2.rb +84 -0
- data/lib/fluent/plugin/parser_apache_error.rb +26 -0
- data/lib/fluent/plugin/parser_csv.rb +33 -0
- data/lib/fluent/plugin/parser_json.rb +79 -0
- data/lib/fluent/plugin/parser_ltsv.rb +50 -0
- data/lib/fluent/plugin/parser_multiline.rb +104 -0
- data/lib/fluent/plugin/parser_nginx.rb +28 -0
- data/lib/fluent/plugin/parser_none.rb +36 -0
- data/lib/fluent/plugin/parser_regexp.rb +73 -0
- data/lib/fluent/plugin/parser_syslog.rb +82 -0
- data/lib/fluent/plugin/parser_tsv.rb +37 -0
- data/lib/fluent/plugin/socket_util.rb +22 -0
- data/lib/fluent/plugin/storage.rb +84 -0
- data/lib/fluent/plugin/storage_local.rb +132 -0
- data/lib/fluent/plugin/string_util.rb +22 -0
- data/lib/fluent/plugin_helper.rb +42 -0
- data/lib/fluent/plugin_helper/child_process.rb +298 -0
- data/lib/fluent/plugin_helper/compat_parameters.rb +224 -0
- data/lib/fluent/plugin_helper/event_emitter.rb +80 -0
- data/lib/fluent/plugin_helper/event_loop.rb +118 -0
- data/lib/fluent/plugin_helper/formatter.rb +149 -0
- data/lib/fluent/plugin_helper/inject.rb +125 -0
- data/lib/fluent/plugin_helper/parser.rb +147 -0
- data/lib/fluent/plugin_helper/retry_state.rb +177 -0
- data/lib/fluent/plugin_helper/storage.rb +331 -0
- data/lib/fluent/plugin_helper/thread.rb +147 -0
- data/lib/fluent/plugin_helper/timer.rb +90 -0
- data/lib/fluent/plugin_id.rb +63 -0
- data/lib/fluent/process.rb +504 -0
- data/lib/fluent/registry.rb +99 -0
- data/lib/fluent/root_agent.rb +314 -0
- data/lib/fluent/rpc.rb +94 -0
- data/lib/fluent/supervisor.rb +680 -0
- data/lib/fluent/system_config.rb +122 -0
- data/lib/fluent/test.rb +56 -0
- data/lib/fluent/test/base.rb +85 -0
- data/lib/fluent/test/driver/base.rb +179 -0
- data/lib/fluent/test/driver/base_owned.rb +70 -0
- data/lib/fluent/test/driver/base_owner.rb +125 -0
- data/lib/fluent/test/driver/event_feeder.rb +98 -0
- data/lib/fluent/test/driver/filter.rb +57 -0
- data/lib/fluent/test/driver/formatter.rb +30 -0
- data/lib/fluent/test/driver/input.rb +31 -0
- data/lib/fluent/test/driver/multi_output.rb +52 -0
- data/lib/fluent/test/driver/output.rb +76 -0
- data/lib/fluent/test/driver/parser.rb +30 -0
- data/lib/fluent/test/driver/test_event_router.rb +45 -0
- data/lib/fluent/test/filter_test.rb +77 -0
- data/lib/fluent/test/formatter_test.rb +65 -0
- data/lib/fluent/test/helpers.rb +79 -0
- data/lib/fluent/test/input_test.rb +172 -0
- data/lib/fluent/test/log.rb +73 -0
- data/lib/fluent/test/output_test.rb +156 -0
- data/lib/fluent/test/parser_test.rb +70 -0
- data/lib/fluent/time.rb +175 -0
- data/lib/fluent/timezone.rb +133 -0
- data/lib/fluent/unique_id.rb +39 -0
- data/lib/fluent/version.rb +21 -0
- data/lib/fluent/winsvc.rb +71 -0
- data/test/compat/test_calls_super.rb +166 -0
- data/test/compat/test_parser.rb +82 -0
- data/test/config/assertions.rb +42 -0
- data/test/config/test_config_parser.rb +507 -0
- data/test/config/test_configurable.rb +1194 -0
- data/test/config/test_configure_proxy.rb +386 -0
- data/test/config/test_dsl.rb +415 -0
- data/test/config/test_element.rb +403 -0
- data/test/config/test_literal_parser.rb +297 -0
- data/test/config/test_section.rb +184 -0
- data/test/config/test_system_config.rb +120 -0
- data/test/config/test_types.rb +171 -0
- data/test/helper.rb +119 -0
- data/test/plugin/data/2010/01/20100102-030405.log +0 -0
- data/test/plugin/data/2010/01/20100102-030406.log +0 -0
- data/test/plugin/data/2010/01/20100102.log +0 -0
- data/test/plugin/data/log/bar +0 -0
- data/test/plugin/data/log/foo/bar.log +0 -0
- data/test/plugin/data/log/test.log +0 -0
- data/test/plugin/test_bare_output.rb +118 -0
- data/test/plugin/test_base.rb +75 -0
- data/test/plugin/test_buf_file.rb +571 -0
- data/test/plugin/test_buf_memory.rb +42 -0
- data/test/plugin/test_buffer.rb +1200 -0
- data/test/plugin/test_buffer_chunk.rb +168 -0
- data/test/plugin/test_buffer_file_chunk.rb +771 -0
- data/test/plugin/test_buffer_memory_chunk.rb +265 -0
- data/test/plugin/test_file_util.rb +96 -0
- data/test/plugin/test_filter.rb +353 -0
- data/test/plugin/test_filter_grep.rb +119 -0
- data/test/plugin/test_filter_record_transformer.rb +600 -0
- data/test/plugin/test_filter_stdout.rb +211 -0
- data/test/plugin/test_formatter_csv.rb +94 -0
- data/test/plugin/test_formatter_json.rb +30 -0
- data/test/plugin/test_formatter_ltsv.rb +52 -0
- data/test/plugin/test_formatter_msgpack.rb +28 -0
- data/test/plugin/test_formatter_out_file.rb +95 -0
- data/test/plugin/test_formatter_single_value.rb +38 -0
- data/test/plugin/test_in_debug_agent.rb +28 -0
- data/test/plugin/test_in_dummy.rb +188 -0
- data/test/plugin/test_in_exec.rb +133 -0
- data/test/plugin/test_in_forward.rb +635 -0
- data/test/plugin/test_in_gc_stat.rb +39 -0
- data/test/plugin/test_in_http.rb +442 -0
- data/test/plugin/test_in_monitor_agent.rb +329 -0
- data/test/plugin/test_in_object_space.rb +64 -0
- data/test/plugin/test_in_syslog.rb +205 -0
- data/test/plugin/test_in_tail.rb +1001 -0
- data/test/plugin/test_in_tcp.rb +102 -0
- data/test/plugin/test_in_udp.rb +121 -0
- data/test/plugin/test_in_unix.rb +126 -0
- data/test/plugin/test_input.rb +122 -0
- data/test/plugin/test_multi_output.rb +180 -0
- data/test/plugin/test_out_buffered_null.rb +79 -0
- data/test/plugin/test_out_buffered_stdout.rb +122 -0
- data/test/plugin/test_out_copy.rb +160 -0
- data/test/plugin/test_out_exec.rb +155 -0
- data/test/plugin/test_out_exec_filter.rb +262 -0
- data/test/plugin/test_out_file.rb +383 -0
- data/test/plugin/test_out_forward.rb +590 -0
- data/test/plugin/test_out_null.rb +29 -0
- data/test/plugin/test_out_relabel.rb +28 -0
- data/test/plugin/test_out_roundrobin.rb +146 -0
- data/test/plugin/test_out_stdout.rb +92 -0
- data/test/plugin/test_out_stream.rb +93 -0
- data/test/plugin/test_output.rb +568 -0
- data/test/plugin/test_output_as_buffered.rb +1604 -0
- data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
- data/test/plugin/test_output_as_buffered_retries.rb +839 -0
- data/test/plugin/test_output_as_buffered_secondary.rb +817 -0
- data/test/plugin/test_output_as_standard.rb +374 -0
- data/test/plugin/test_owned_by.rb +35 -0
- data/test/plugin/test_parser_apache.rb +42 -0
- data/test/plugin/test_parser_apache2.rb +38 -0
- data/test/plugin/test_parser_apache_error.rb +45 -0
- data/test/plugin/test_parser_base.rb +32 -0
- data/test/plugin/test_parser_csv.rb +104 -0
- data/test/plugin/test_parser_json.rb +107 -0
- data/test/plugin/test_parser_labeled_tsv.rb +129 -0
- data/test/plugin/test_parser_multiline.rb +100 -0
- data/test/plugin/test_parser_nginx.rb +48 -0
- data/test/plugin/test_parser_none.rb +53 -0
- data/test/plugin/test_parser_regexp.rb +277 -0
- data/test/plugin/test_parser_syslog.rb +66 -0
- data/test/plugin/test_parser_time.rb +46 -0
- data/test/plugin/test_parser_tsv.rb +121 -0
- data/test/plugin/test_storage.rb +167 -0
- data/test/plugin/test_storage_local.rb +8 -0
- data/test/plugin/test_string_util.rb +26 -0
- data/test/plugin_helper/test_child_process.rb +608 -0
- data/test/plugin_helper/test_compat_parameters.rb +242 -0
- data/test/plugin_helper/test_event_emitter.rb +51 -0
- data/test/plugin_helper/test_event_loop.rb +52 -0
- data/test/plugin_helper/test_formatter.rb +252 -0
- data/test/plugin_helper/test_inject.rb +487 -0
- data/test/plugin_helper/test_parser.rb +263 -0
- data/test/plugin_helper/test_retry_state.rb +399 -0
- data/test/plugin_helper/test_storage.rb +521 -0
- data/test/plugin_helper/test_thread.rb +164 -0
- data/test/plugin_helper/test_timer.rb +131 -0
- data/test/scripts/exec_script.rb +32 -0
- data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
- data/test/scripts/fluent/plugin/out_test.rb +81 -0
- data/test/scripts/fluent/plugin/out_test2.rb +80 -0
- data/test/scripts/fluent/plugin/parser_known.rb +4 -0
- data/test/test_config.rb +179 -0
- data/test/test_configdsl.rb +148 -0
- data/test/test_event.rb +329 -0
- data/test/test_event_router.rb +331 -0
- data/test/test_event_time.rb +184 -0
- data/test/test_filter.rb +121 -0
- data/test/test_formatter.rb +319 -0
- data/test/test_input.rb +31 -0
- data/test/test_log.rb +572 -0
- data/test/test_match.rb +137 -0
- data/test/test_mixin.rb +351 -0
- data/test/test_output.rb +214 -0
- data/test/test_plugin_classes.rb +136 -0
- data/test/test_plugin_helper.rb +81 -0
- data/test/test_process.rb +48 -0
- data/test/test_root_agent.rb +278 -0
- data/test/test_supervisor.rb +339 -0
- data/test/test_time_formatter.rb +186 -0
- data/test/test_unique_id.rb +47 -0
- metadata +823 -0
data/test/plugin/test_buf_memory.rb
@@ -0,0 +1,42 @@
require_relative '../helper'
require 'fluent/plugin/buf_memory'
require 'fluent/plugin/output'
require 'flexmock/test_unit'

module FluentPluginMemoryBufferTest
  class DummyOutputPlugin < Fluent::Plugin::Output
  end
end

class MemoryBufferTest < Test::Unit::TestCase
  setup do
    Fluent::Test.setup
    @d = FluentPluginMemoryBufferTest::DummyOutputPlugin.new
    @p = Fluent::Plugin::MemoryBuffer.new
    @p.owner = @d
  end

  test 'this is non persistent plugin' do
    assert !@p.persistent?
  end

  test '#resume always returns empty stage and queue' do
    ary = @p.resume
    assert_equal({}, ary[0])
    assert_equal([], ary[1])
  end

  test '#generate_chunk returns memory chunk instance' do
    m1 = Fluent::Plugin::Buffer::Metadata.new(nil, nil, nil)
    c1 = @p.generate_chunk(m1)
    assert c1.is_a? Fluent::Plugin::Buffer::MemoryChunk
    assert_equal m1, c1.metadata

    require 'time'
    t2 = Time.parse('2016-04-08 19:55:00 +0900').to_i
    m2 = Fluent::Plugin::Buffer::Metadata.new(t2, 'test.tag', {k1: 'v1', k2: 0})
    c2 = @p.generate_chunk(m2)
    assert c2.is_a? Fluent::Plugin::Buffer::MemoryChunk
    assert_equal m2, c2.metadata
  end
end
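The 42 lines above pin down the MemoryBuffer contract: the plugin is not persistent, #resume always yields an empty stage and queue, and #generate_chunk returns MemoryChunk instances carrying the metadata passed in. A minimal sketch of driving that API by hand, assuming only the v0.14 classes exercised by the test (SketchOutput is a hypothetical stand-in, not part of the gem, mirroring DummyOutputPlugin above):

require 'fluent/test'
require 'fluent/plugin/buf_memory'
require 'fluent/plugin/output'

# Bare output plugin that owns the buffer, like DummyOutputPlugin above.
class SketchOutput < Fluent::Plugin::Output; end

Fluent::Test.setup
buffer = Fluent::Plugin::MemoryBuffer.new
buffer.owner = SketchOutput.new

stage, queue = buffer.resume
# stage == {} and queue == [] : a memory buffer never restores prior state

meta  = Fluent::Plugin::Buffer::Metadata.new(nil, 'test.tag', nil)
chunk = buffer.generate_chunk(meta)
chunk.is_a?(Fluent::Plugin::Buffer::MemoryChunk) # => true
chunk.metadata == meta                           # => true

In the gem the owner wiring is done for you by the output plugin's buffer section; the test drives it by hand to keep the surface under test small.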
data/test/plugin/test_buffer.rb
@@ -0,0 +1,1200 @@
require_relative '../helper'
require 'fluent/plugin/buffer'
require 'fluent/plugin/buffer/memory_chunk'
require 'fluent/event'
require 'flexmock/test_unit'

require 'fluent/log'
require 'fluent/plugin_id'

require 'time'

module FluentPluginBufferTest
  class DummyOutputPlugin < Fluent::Plugin::Base
    include Fluent::PluginId
    include Fluent::PluginLoggerMixin
  end
  class DummyMemoryChunkError < StandardError; end
  class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
    attr_reader :append_count, :rollbacked, :closed, :purged
    attr_accessor :failing
    def initialize(metadata)
      super
      @append_count = 0
      @rollbacked = false
      @closed = false
      @purged = false
      @failing = false
    end
    def concat(data, size)
      @append_count += 1
      raise DummyMemoryChunkError if @failing
      super
    end
    def rollback
      super
      @rollbacked = true
    end
    def close
      super
      @closed = true
    end
    def purge
      super
      @purged = true
    end
  end
  class DummyPlugin < Fluent::Plugin::Buffer
    def create_metadata(timekey=nil, tag=nil, variables=nil)
      Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
    end
    def create_chunk(metadata, data)
      c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
      c.append(data)
      c.commit
      c
    end
    def create_chunk_es(metadata, es)
      c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
      c.concat(es.to_msgpack_stream, es.size)
      c.commit
      c
    end
    def resume
      dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
      dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
      dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
      staged = {
        dm2 => create_chunk(dm2, ["b" * 100]).staged!,
        dm3 => create_chunk(dm3, ["c" * 100]).staged!,
      }
      queued = [
        create_chunk(dm0, ["0" * 100]).enqueued!,
        create_chunk(dm1, ["a" * 100]).enqueued!,
        create_chunk(dm1, ["a" * 3]).enqueued!,
      ]
      return staged, queued
    end
    def generate_chunk(metadata)
      DummyMemoryChunk.new(metadata)
    end
  end
end
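# Test scaffolding, as exercised below: DummyMemoryChunk instruments the
# chunk lifecycle (it counts #concat calls and records whether #rollback,
# #close and #purge ran), and its `failing` flag makes #concat raise
# DummyMemoryChunkError to simulate a broken append. DummyPlugin gives the
# abstract buffer a deterministic #resume: two staged chunks (dm2, dm3) and
# three queued chunks (dm0, dm1, dm1) that the assertions below depend on.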
class BufferTest < Test::Unit::TestCase
  def create_buffer(hash)
    buffer_conf = config_element('buffer', '', hash, [])
    owner = FluentPluginBufferTest::DummyOutputPlugin.new
    owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
    p = FluentPluginBufferTest::DummyPlugin.new
    p.owner = owner
    p.configure(buffer_conf)
    p
  end

  def create_metadata(timekey=nil, tag=nil, variables=nil)
    Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
  end

  def create_chunk(metadata, data)
    c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
    c.append(data)
    c.commit
    c
  end

  def create_chunk_es(metadata, es)
    c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
    c.concat(es.to_msgpack_stream, es.size)
    c.commit
    c
  end

  setup do
    Fluent::Test.setup
  end

  sub_test_case 'using base buffer class' do
    setup do
      buffer_conf = config_element('buffer', '', {}, [])
      owner = FluentPluginBufferTest::DummyOutputPlugin.new
      owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
      p = Fluent::Plugin::Buffer.new
      p.owner = owner
      p.configure(buffer_conf)
      @p = p
    end

    test 'default persistency is false' do
      assert !@p.persistent?
    end

    test 'chunk bytes limit is 8MB, and total bytes limit is 512MB' do
      assert_equal 8*1024*1024, @p.chunk_limit_size
      assert_equal 512*1024*1024, @p.total_limit_size
    end

    test 'chunk records limit is ignored in default' do
      assert_nil @p.chunk_records_limit
    end

    test '#storable? checks total size of staged and enqueued (includes dequeued chunks) against total_limit_size' do
      assert_equal 512*1024*1024, @p.total_limit_size
      assert_equal 0, @p.stage_size
      assert_equal 0, @p.queue_size
      assert @p.storable?

      @p.stage_size = 256 * 1024 * 1024
      @p.queue_size = 256 * 1024 * 1024 - 1
      assert @p.storable?

      @p.queue_size = 256 * 1024 * 1024
      assert !@p.storable?
    end

    test '#resume must be implemented by subclass' do
      assert_raise NotImplementedError do
        @p.resume
      end
    end

    test '#generate_chunk must be implemented by subclass' do
      assert_raise NotImplementedError do
        @p.generate_chunk(Object.new)
      end
    end
  end
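  # Note on the defaults asserted above: chunk_limit_size is 8 MiB and
  # total_limit_size is 512 MiB out of the box. The sub_test_cases below
  # shrink them via the config hash passed to create_buffer, e.g.
  # create_buffer({"chunk_limit_size" => 1_280_000}), which mirrors the
  # chunk_limit_size/total_limit_size parameters a deployment would set in
  # its <buffer> section.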
  sub_test_case 'with default configuration and dummy implementation' do
    setup do
      @p = create_buffer({})
      @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      @dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
      @dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
      @dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
      @p.start
    end

    test '#start resumes buffer states and updates queued numbers per metadata' do
      plugin = create_buffer({})

      assert_equal({}, plugin.stage)
      assert_equal([], plugin.queue)
      assert_equal({}, plugin.dequeued)
      assert_equal({}, plugin.queued_num)
      assert_equal([], plugin.metadata_list)

      assert_equal 0, plugin.stage_size
      assert_equal 0, plugin.queue_size

      # @p is the started plugin

      assert_equal [@dm2,@dm3], @p.stage.keys
      assert_equal "b" * 100, @p.stage[@dm2].read
      assert_equal "c" * 100, @p.stage[@dm3].read

      assert_equal 200, @p.stage_size

      assert_equal 3, @p.queue.size
      assert_equal "0" * 100, @p.queue[0].read
      assert_equal "a" * 100, @p.queue[1].read
      assert_equal "a" * 3, @p.queue[2].read

      assert_equal 203, @p.queue_size

      # staged, queued
      assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
      assert_equal 1, @p.queued_num[@dm0]
      assert_equal 2, @p.queued_num[@dm1]
    end
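    # #start pulls state back in through #resume, which returns a pair:
    # a Hash of staged chunks keyed by metadata plus an Array of queued
    # chunks in flush order. The byte counts asserted above (stage_size 200,
    # queue_size 203) follow directly from the fixture chunks built in
    # DummyPlugin#resume: 100 + 100 staged, 100 + 100 + 3 queued.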
212
|
+
test '#close closes all chunks in in dequeued, enqueued and staged' do
|
213
|
+
dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
|
214
|
+
cx = create_chunk(dmx, ["x" * 1024])
|
215
|
+
@p.dequeued[cx.unique_id] = cx
|
216
|
+
|
217
|
+
staged_chunks = @p.stage.values.dup
|
218
|
+
queued_chunks = @p.queue.dup
|
219
|
+
|
220
|
+
@p.close
|
221
|
+
|
222
|
+
assert cx.closed
|
223
|
+
assert{ staged_chunks.all?{|c| c.closed } }
|
224
|
+
assert{ queued_chunks.all?{|c| c.closed } }
|
225
|
+
end
|
226
|
+
|
227
|
+
test '#terminate initializes all internal states' do
|
228
|
+
dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
|
229
|
+
cx = create_chunk(dmx, ["x" * 1024])
|
230
|
+
@p.dequeued[cx.unique_id] = cx
|
231
|
+
|
232
|
+
@p.close
|
233
|
+
|
234
|
+
@p.terminate
|
235
|
+
|
236
|
+
assert_nil @p.stage
|
237
|
+
assert_nil @p.queue
|
238
|
+
assert_nil @p.dequeued
|
239
|
+
assert_nil @p.queued_num
|
240
|
+
assert_nil @p.instance_eval{ @metadata_list } # #metadata_list does #dup for @metadata_list
|
241
|
+
assert_equal 0, @p.stage_size
|
242
|
+
assert_equal 0, @p.queue_size
|
243
|
+
end
|
244
|
+
|
245
|
+
test '#metadata_list returns list of metadata on stage or in queue' do
|
246
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
247
|
+
end
|
248
|
+
|
249
|
+
test '#new_metadata creates metadata instance without inserting metadata_list' do
|
250
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
251
|
+
_m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
252
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
253
|
+
end
|
254
|
+
|
255
|
+
test '#add_metadata adds unknown metadata into list, or return known metadata if already exists' do
|
256
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
257
|
+
|
258
|
+
m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
259
|
+
_mx = @p.add_metadata(m)
|
260
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
|
261
|
+
assert_equal m.object_id, m.object_id
|
262
|
+
|
263
|
+
my = @p.add_metadata(@dm1)
|
264
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
|
265
|
+
assert_equal @dm1, my
|
266
|
+
assert{ @dm1.object_id != my.object_id } # 'my' is an object created in #resume
|
267
|
+
end
|
268
|
+
|
269
|
+
test '#metadata is utility method to create-add-and-return metadata' do
|
270
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
|
271
|
+
|
272
|
+
m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
|
273
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
|
274
|
+
m2 = @p.metadata(timekey: @dm3.timekey)
|
275
|
+
assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
|
276
|
+
assert_equal @dm3, m2
|
277
|
+
end
|
278
|
+
|
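    # Metadata is deduplicated: #add_metadata (and #metadata, which wraps
    # new/add) returns the canonical instance the buffer already holds when
    # an equal one exists, so stage and queue lookups keyed by metadata stay
    # consistent no matter who constructed the key.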
    test '#queued_records returns the total size of all chunks in the queue' do
      assert_equal 3, @p.queue.size

      r0 = @p.queue[0].size
      assert_equal 1, r0
      r1 = @p.queue[1].size
      assert_equal 1, r1
      r2 = @p.queue[2].size
      assert_equal 1, r2

      assert_equal (r0+r1+r2), @p.queued_records
    end

    test '#queued? without arguments returns whether the queue has any chunks' do
      assert @p.queued?

      @p.queue.reject!{|_c| true }
      assert !@p.queued?
    end

    test '#queued? with an argument returns whether the queue has chunks for the specified metadata' do
      assert @p.queued?(@dm0)
      assert @p.queued?(@dm1)
      assert !@p.queued?(@dm2)
    end

    test '#enqueue_chunk enqueues a chunk on stage with specified metadata' do
      assert_equal 2, @p.stage.size
      assert_equal [@dm2,@dm3], @p.stage.keys
      assert_equal 3, @p.queue.size
      assert_nil @p.queued_num[@dm2]

      assert_equal 200, @p.stage_size
      assert_equal 203, @p.queue_size

      @p.enqueue_chunk(@dm2)

      assert_equal [@dm3], @p.stage.keys
      assert_equal @dm2, @p.queue.last.metadata
      assert_equal 1, @p.queued_num[@dm2]
      assert_equal 100, @p.stage_size
      assert_equal 303, @p.queue_size
    end

    test '#enqueue_chunk ignores empty chunks' do
      assert_equal 3, @p.queue.size

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
      c = create_chunk(m, [''])
      @p.stage[m] = c
      assert @p.stage[m].empty?
      assert !c.closed

      @p.enqueue_chunk(m)

      assert_nil @p.stage[m]
      assert_equal 3, @p.queue.size
      assert_nil @p.queued_num[m]
      assert c.closed
    end

    test '#enqueue_chunk calls #enqueued! if chunk responds to it' do
      assert_equal 3, @p.queue.size
      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
      c = create_chunk(m, ['c' * 256])
      callback_called = false
      (class << c; self; end).module_eval do
        define_method(:enqueued!){ callback_called = true }
      end

      @p.stage[m] = c
      @p.enqueue_chunk(m)

      assert_equal c, @p.queue.last
      assert callback_called
    end

    test '#enqueue_all enqueues chunks on stage for which the given block returns true' do
      m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
      c1 = create_chunk(m1, ['c' * 256])
      @p.stage[m1] = c1
      m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
      c2 = create_chunk(m2, ['c' * 256])
      @p.stage[m2] = c2

      assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)

      @p.enqueue_all{ |m, c| m.timekey < Time.parse('2016-04-11 16:41:00 +0000').to_i }

      assert_equal [m2], @p.stage.keys
      assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1], @p.queue.map(&:metadata)
    end

    test '#enqueue_all enqueues all chunks on stage without block' do
      m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
      c1 = create_chunk(m1, ['c' * 256])
      @p.stage[m1] = c1
      m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
      c2 = create_chunk(m2, ['c' * 256])
      @p.stage[m2] = c2

      assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)

      @p.enqueue_all

      assert_equal [], @p.stage.keys
      assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1,m2], @p.queue.map(&:metadata)
    end
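    # Enqueueing moves a chunk from the stage Hash into the queue Array and
    # bumps queued_num for its metadata; empty chunks are closed and dropped
    # instead, and a chunk's #enqueued! hook is invoked when it defines one,
    # as the singleton-method test above demonstrates.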
    test '#dequeue_chunk dequeues a chunk from queue if a chunk exists' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)

      m1 = @p.dequeue_chunk
      assert_equal @dm0, m1.metadata
      assert_equal @dm0, @p.dequeued[m1.unique_id].metadata

      m2 = @p.dequeue_chunk
      assert_equal @dm1, m2.metadata
      assert_equal @dm1, @p.dequeued[m2.unique_id].metadata

      m3 = @p.dequeue_chunk
      assert_equal @dm1, m3.metadata
      assert_equal @dm1, @p.dequeued[m3.unique_id].metadata

      m4 = @p.dequeue_chunk
      assert_nil m4
    end

    test '#takeback_chunk moves a chunk from dequeued back to the head of the queue, and returns true' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)

      m1 = @p.dequeue_chunk
      assert_equal @dm0, m1.metadata
      assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({m1.unique_id => m1}, @p.dequeued)

      assert @p.takeback_chunk(m1.unique_id)

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
    end

    test '#purge_chunk removes a chunk specified by argument id from dequeued chunks' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
      assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

      m0 = @p.dequeue_chunk
      m1 = @p.dequeue_chunk

      assert @p.takeback_chunk(m0.unique_id)

      assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
      assert_equal({m1.unique_id => m1}, @p.dequeued)

      assert !m1.purged

      @p.purge_chunk(m1.unique_id)
      assert m1.purged

      assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
      assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
    end

    test '#purge_chunk removes an argument metadata from metadata_list if no chunks exist on stage or in queue' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
      assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

      m0 = @p.dequeue_chunk

      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({m0.unique_id => m0}, @p.dequeued)

      assert !m0.purged

      @p.purge_chunk(m0.unique_id)
      assert m0.purged

      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
      assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
    end

    test '#takeback_chunk returns false if the specified chunk_id is already purged' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
      assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

      m0 = @p.dequeue_chunk

      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({m0.unique_id => m0}, @p.dequeued)

      assert !m0.purged

      @p.purge_chunk(m0.unique_id)
      assert m0.purged

      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
      assert_equal [@dm2,@dm3,@dm1], @p.metadata_list

      assert !@p.takeback_chunk(m0.unique_id)

      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
      assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
    end

    test '#clear_queue! removes all chunks in queue, but leaves staged chunks' do
      qchunks = @p.queue.dup

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal 2, @p.stage.size
      assert_equal({}, @p.dequeued)

      @p.clear_queue!

      assert_equal [], @p.queue
      assert_equal 0, @p.queue_size
      assert_equal 2, @p.stage.size
      assert_equal({}, @p.dequeued)

      assert{ qchunks.all?{ |c| c.purged } }
    end
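    # Flush lifecycle in one picture: #dequeue_chunk parks the chunk in the
    # dequeued Hash (keyed by unique_id) while it is being written out;
    # #takeback_chunk returns it to the head of the queue (for retry), and
    # #purge_chunk discards it once written, also dropping metadata that no
    # longer has chunks anywhere.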
    test '#write returns immediately if argument data is an empty array' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      @p.write({m => []})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys
    end

    test '#write returns immediately if argument data is an empty event stream' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      @p.write({m => Fluent::ArrayEventStream.new([])})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys
    end

    test '#write raises BufferOverflowError if buffer is not storable' do
      @p.stage_size = 256 * 1024 * 1024
      @p.queue_size = 256 * 1024 * 1024

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
        @p.write({m => ["x" * 256]})
      end
    end

    test '#write stores data into an existing chunk with metadata specified' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      dm3data = @p.stage[@dm3].read.dup
      prev_stage_size = @p.stage_size

      assert_equal 1, @p.stage[@dm3].append_count

      @p.write({@dm3 => ["x" * 256, "y" * 256, "z" * 256]})

      assert_equal 2, @p.stage[@dm3].append_count
      assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
      assert_equal (prev_stage_size + 768), @p.stage_size

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys
    end

    test '#write creates a new chunk and stores data into it if there are no chunks for specified metadata' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      prev_stage_size = @p.stage_size

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      @p.write({m => ["x" * 256, "y" * 256, "z" * 256]})

      assert_equal 1, @p.stage[m].append_count
      assert_equal ("x" * 256 + "y" * 256 + "z" * 256), @p.stage[m].read
      assert_equal (prev_stage_size + 768), @p.stage_size

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys
    end

    test '#write tries to enqueue and store data into a new chunk if existing chunk is full' do
      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
      assert_equal 0.95, @p.chunk_full_threshold

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      row = "x" * 1024 * 1024
      small_row = "x" * 1024 * 512
      @p.write({m => [row] * 7 + [small_row]})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys
      assert_equal 1, @p.stage[m].append_count

      @p.write({m => [row]})

      assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys
      assert_equal 1, @p.stage[m].append_count
      assert_equal 1024*1024, @p.stage[m].bytesize
      assert_equal 3, @p.queue.last.append_count # 1 -> write (2) -> write_step_by_step (3)
      assert @p.queue.last.rollbacked
    end

    test '#write rolls back if commit raises errors' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      row = "x" * 1024
      @p.write({m => [row] * 8})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      target_chunk = @p.stage[m]

      assert_equal 1, target_chunk.append_count
      assert !target_chunk.rollbacked

      (class << target_chunk; self; end).module_eval do
        define_method(:commit){ raise "yay" }
      end

      assert_raise "yay" do
        @p.write({m => [row]})
      end

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      assert_equal 2, target_chunk.append_count
      assert target_chunk.rollbacked
      assert_equal row * 8, target_chunk.read
    end

    test '#write w/ format raises BufferOverflowError if buffer is not storable' do
      @p.stage_size = 256 * 1024 * 1024
      @p.queue_size = 256 * 1024 * 1024

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "xxxxxxxxxxxxxx"} ] ])

      assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
        @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
      end
    end

    test '#write w/ format stores data into an existing chunk with metadata specified' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      dm3data = @p.stage[@dm3].read.dup
      prev_stage_size = @p.stage_size

      assert_equal 1, @p.stage[@dm3].append_count

      es = Fluent::ArrayEventStream.new(
        [
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 128}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "y" * 128}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "z" * 128}],
        ]
      )

      @p.write({@dm3 => es}, format: ->(e){e.to_msgpack_stream})

      assert_equal 2, @p.stage[@dm3].append_count
      assert_equal (dm3data + es.to_msgpack_stream), @p.stage[@dm3].read
      assert_equal (prev_stage_size + es.to_msgpack_stream.bytesize), @p.stage_size

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys
    end

    test '#write w/ format creates a new chunk and stores data into it if there are no chunks for specified metadata' do
      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      es = Fluent::ArrayEventStream.new(
        [
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
        ]
      )
      @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys
      assert_equal 1, @p.stage[m].append_count
    end

    test '#write w/ format tries to enqueue and store data into a new chunk if existing chunk does not have enough space' do
      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      es = Fluent::ArrayEventStream.new(
        [
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
        ]
      )
      @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys
      assert_equal 1, @p.stage[m].append_count

      es2 = Fluent::OneEventStream.new(event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 1024})
      @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})

      assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys
      assert_equal 1, @p.stage[m].append_count
      assert_equal es2.to_msgpack_stream.bytesize, @p.stage[m].bytesize
      assert_equal 2, @p.queue.last.append_count # 1 -> write (2) -> rollback&enqueue
      assert @p.queue.last.rollbacked
    end

    test '#write w/ format enqueues chunk if it is already full after adding data' do
      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
      es = Fluent::ArrayEventStream.new(
        [
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}], # 1024 * 1024 bytes as msgpack stream
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
        ]
      )
      @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

      assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys
      assert_equal 1, @p.queue.last.append_count
    end

    test '#write w/ format rolls back if commit raises errors' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      es = Fluent::ArrayEventStream.new(
        [
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
        ]
      )
      @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      target_chunk = @p.stage[m]

      assert_equal 1, target_chunk.append_count
      assert !target_chunk.rollbacked

      (class << target_chunk; self; end).module_eval do
        define_method(:commit){ raise "yay" }
      end

      es2 = Fluent::ArrayEventStream.new(
        [
          [event_time('2016-04-11 16:40:04 +0000'), {"message" => "z" * 1024 * 128}],
        ]
      )
      assert_raise "yay" do
        @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
      end

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      assert_equal 2, target_chunk.append_count
      assert target_chunk.rollbacked
      assert_equal es.to_msgpack_stream, target_chunk.read
    end

    test '#write writes many metadata and data pairs at once' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      row = "x" * 1024
      @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })

      assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
    end

    test '#write does not commit on any chunks if any append operation on chunk fails' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      row = "x" * 1024
      @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })

      assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys

      dm2_size = @p.stage[@dm2].size
      assert !@p.stage[@dm2].rollbacked
      dm3_size = @p.stage[@dm3].size
      assert !@p.stage[@dm3].rollbacked

      assert{ @p.stage[@dm0].size == 3 }
      assert !@p.stage[@dm0].rollbacked
      assert{ @p.stage[@dm1].size == 2 }
      assert !@p.stage[@dm1].rollbacked

      @p.stage[@dm1].failing = true

      assert_raise(FluentPluginBufferTest::DummyMemoryChunkError) do
        @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] })
      end

      assert{ @p.stage[@dm2].size == dm2_size }
      assert @p.stage[@dm2].rollbacked
      assert{ @p.stage[@dm3].size == dm3_size }
      assert @p.stage[@dm3].rollbacked

      assert{ @p.stage[@dm0].size == 3 }
      assert @p.stage[@dm0].rollbacked
      assert{ @p.stage[@dm1].size == 2 }
      assert @p.stage[@dm1].rollbacked
    end
  end
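  # Taken together, the #write tests above pin down the contract: data is
  # appended to the staged chunk for its metadata (created on demand); a
  # chunk that cannot take the whole payload is rolled back to its last
  # committed state and enqueued while a fresh chunk takes the remainder;
  # and a failure while appending or committing rolls back every chunk
  # touched by that call, so partial writes are never committed.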
  sub_test_case 'standard format with configuration for test with lower chunk limit size' do
    setup do
      @p = create_buffer({"chunk_limit_size" => 1_280_000})
      @format = ->(e){e.to_msgpack_stream}
      @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      # 1 record is 128 bytes in msgpack stream
      @es0 = es0 = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:01 +0000'), {"message" => "x" * (128 - 22)}] ] * 5000)
      (class << @p; self; end).module_eval do
        define_method(:resume) {
          staged = {
            dm0 => create_chunk_es(dm0, es0).staged!,
          }
          queued = []
          return staged, queued
        }
      end
      @p.start
    end

    test '#write appends event stream into staged chunk' do
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      assert_equal 1_280_000, @p.chunk_limit_size

      es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 1000)
      @p.write({@dm0 => es}, format: @format)

      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      assert_equal (@es0.to_msgpack_stream + es.to_msgpack_stream), @p.stage[@dm0].read
    end

    test '#write writes event stream into a new chunk, enqueueing the existing chunk, if the event stream is larger than the available space of the existing chunk' do
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      assert_equal 1_280_000, @p.chunk_limit_size

      es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 8000)
      @p.write({@dm0 => es}, format: @format)

      assert_equal [@dm0], @p.stage.keys
      assert_equal [@dm0], @p.queue.map(&:metadata)

      assert_equal (es.to_msgpack_stream), @p.stage[@dm0].read
    end

    test '#write writes event stream into many chunks, excluding the staged chunk, if the event stream is larger than chunk limit size' do
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      assert_equal 1_280_000, @p.chunk_limit_size

      es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 45000)
      @p.write({@dm0 => es}, format: @format)

      assert_equal [@dm0], @p.stage.keys
      assert_equal 5400, @p.stage[@dm0].size
      assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0], @p.queue.map(&:metadata)
      assert_equal [5000, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
      # 9900 * 4 + 5400 == 45000
    end

    test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      assert_equal 1_280_000, @p.chunk_limit_size

      es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}] ])
      assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
        @p.write({@dm0 => es}, format: @format)
      end
    end
  end
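  # The split arithmetic above is self-checking. Each record serializes to
  # 128 bytes, so a 1_280_000-byte chunk tops out at 10_000 records; the
  # oversized stream appears to be split into 450-record pieces (the
  # "45000 / 100 => 450" noted inline), and a chunk is cut once the next
  # piece would not fit, yielding four 9_900-record chunks plus 5_400 left
  # on stage. With the pre-staged 5_000-record chunk enqueued untouched,
  # 5_000 + 9_900 * 4 + 5_400 accounts for all 50_000 records.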
948
|
+
sub_test_case 'custom format with configuration for test with lower chunk limit size' do
|
949
|
+
setup do
|
950
|
+
@p = create_buffer({"chunk_limit_size" => 1_280_000})
|
951
|
+
@dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
|
952
|
+
@row = "x" * 128
|
953
|
+
@data0 = data0 = [@row] * 5000
|
954
|
+
(class << @p; self; end).module_eval do
|
955
|
+
define_method(:resume) {
|
956
|
+
staged = {
|
957
|
+
dm0 => create_chunk(dm0, data0).staged!,
|
958
|
+
}
|
959
|
+
queued = []
|
960
|
+
return staged, queued
|
961
|
+
}
|
962
|
+
end
|
963
|
+
@p.start
|
964
|
+
end
|
965
|
+
|
966
|
+
test '#write appends event stream into staged chunk' do
|
967
|
+
assert_equal [@dm0], @p.stage.keys
|
968
|
+
assert_equal [], @p.queue.map(&:metadata)
|
969
|
+
|
970
|
+
assert_equal 1_280_000, @p.chunk_limit_size
|
971
|
+
|
972
|
+
data = [@row] * 1000
|
973
|
+
@p.write({@dm0 => data})
|
974
|
+
|
975
|
+
assert_equal [@dm0], @p.stage.keys
|
976
|
+
assert_equal [], @p.queue.map(&:metadata)
|
977
|
+
|
978
|
+
assert_equal (@row * 6000), @p.stage[@dm0].read
|
979
|
+
end
|
980
|
+
|
981
|
+
test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
|
982
|
+
assert_equal [@dm0], @p.stage.keys
|
983
|
+
assert_equal [], @p.queue.map(&:metadata)
|
984
|
+
|
985
|
+
staged_chunk_object_id = @p.stage[@dm0].object_id
|
986
|
+
|
987
|
+
assert_equal 1_280_000, @p.chunk_limit_size
|
988
|
+
|
989
|
+
data = [@row] * 8000
|
990
|
+
@p.write({@dm0 => data})
|
991
|
+
|
992
|
+
assert_equal [@dm0], @p.queue.map(&:metadata)
|
993
|
+
assert_equal [staged_chunk_object_id], @p.queue.map(&:object_id)
|
994
|
+
assert_equal [@dm0], @p.stage.keys
|
995
|
+
|
996
|
+
assert_equal [9800], @p.queue.map(&:size)
|
997
|
+
assert_equal 3200, @p.stage[@dm0].size
|
998
|
+
# 9800 + 3200 == 5000 + 8000
|
999
|
+
end
|
1000
|
+
|
1001
|
+
    test '#write writes event stream into many chunks including staged chunk if event stream is larger than chunk limit size' do
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      staged_chunk_object_id = @p.stage[@dm0].object_id

      assert_equal 1_280_000, @p.chunk_limit_size

      assert_equal 5000, @p.stage[@dm0].size

      data = [@row] * 45000
      @p.write({@dm0 => data})

      assert_equal staged_chunk_object_id, @p.queue.first.object_id

      assert_equal [@dm0], @p.stage.keys
      assert_equal 900, @p.stage[@dm0].size
      assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0], @p.queue.map(&:metadata)
      assert_equal [9500, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
      # 900 + 9500 + 9900 * 4 == 5000 + 45000
    end

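    # [Editorial note] Same mechanism as above with the 45_000-row stream
    # split into 100 pieces of 450 rows: the staged chunk grows from 5_000 to
    # 9_500 rows (exactly the 0.95 full threshold) and is enqueued first, four
    # fresh chunks take 9_900 rows each, and 900 rows remain staged:
    #   900 + 9_500 + 4 * 9_900 == 5_000 + 45_000
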
    test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      assert_equal 1_280_000, @p.chunk_limit_size

      es = ["x" * 1_280_000 + "x" * 300]
      assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
        @p.write({@dm0 => es})
      end
    end
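
    # [Editorial note] Unlike the event-stream variant earlier, this write
    # passes a raw array of pre-formatted rows; the single 1_280_300-byte row
    # exceeds chunk_limit_size (1_280_000) on its own, so no chunk can ever
    # accept it and BufferChunkOverflowError is raised.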
  end

  sub_test_case 'with configuration for test with lower limits' do
    setup do
      @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240})
      @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
      @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
      @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
      (class << @p; self; end).module_eval do
        define_method(:resume) {
          staged = {
            dm2 => create_chunk(dm2, ["b" * 128] * 7).staged!,
            dm3 => create_chunk(dm3, ["c" * 128] * 5).staged!,
          }
          queued = [
            create_chunk(dm0, ["0" * 128] * 8).enqueued!,
            create_chunk(dm0, ["0" * 128] * 8).enqueued!,
            create_chunk(dm0, ["0" * 128] * 8).enqueued!,
            create_chunk(dm0, ["0" * 128] * 8).enqueued!,
            create_chunk(dm0, ["0" * 128] * 8).enqueued!,
            create_chunk(dm1, ["a" * 128] * 8).enqueued!,
            create_chunk(dm1, ["a" * 128] * 8).enqueued!,
            create_chunk(dm1, ["a" * 128] * 8).enqueued!, # 8th queued chunk
            create_chunk(dm1, ["a" * 128] * 3).enqueued!,
          ]
          return staged, queued
        }
      end
      @p.start
    end

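    # [Editorial note] Sizes seeded by the stub above: queue = 8 chunks of
    # 8 rows * 128 bytes plus 1 chunk of 3 rows * 128 bytes = 8_576 bytes;
    # stage = (7 + 5) rows * 128 bytes = 1_536 bytes. Combined, 10_112 of the
    # 10_240-byte total_limit_size, leaving room for exactly one more
    # 128-byte row.
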
    test '#storable? returns false when too much data exists' do
      assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0,@dm1,@dm1,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      assert_equal 128*8*8+128*3, @p.queue_size
      assert_equal 128*7+128*5, @p.stage_size

      assert @p.storable?

      dm3 = @p.metadata(timekey: @dm3.timekey)
      @p.write({dm3 => ["c" * 128]})

      assert_equal 10240, (@p.stage_size + @p.queue_size)
      assert !@p.storable?
    end

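    # [Editorial note] Per the assertions above, writing one more 128-byte row
    # brings stage_size + queue_size to exactly total_limit_size (10_240), at
    # which point #storable? flips to false: reaching the limit, not only
    # exceeding it, makes the buffer unstorable.
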
    test '#chunk_size_over? returns true if chunk size is bigger than limit' do
      m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

      c1 = create_chunk(m, ["a" * 128] * 8)
      assert !@p.chunk_size_over?(c1)

      c2 = create_chunk(m, ["a" * 128] * 9)
      assert @p.chunk_size_over?(c2)

      c3 = create_chunk(m, ["a" * 128] * 8 + ["a"])
      assert @p.chunk_size_over?(c3)
    end

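    # [Editorial note] chunk_limit_size is 1_024 == 8 * 128, so eight 128-byte
    # rows fill a chunk exactly without being "over"; a ninth row, or even a
    # single extra byte (c3), pushes it over the limit.
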
    test '#chunk_size_full? returns true if chunk size is close enough to the limit' do
      m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

      c1 = create_chunk(m, ["a" * 128] * 7)
      assert !@p.chunk_size_full?(c1)

      c2 = create_chunk(m, ["a" * 128] * 8)
      assert @p.chunk_size_full?(c2)

      assert_equal 0.95, @p.chunk_full_threshold
      c3 = create_chunk(m, ["a" * 128] * 6 + ["a" * 64])
      assert !@p.chunk_size_full?(c3)
    end
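
    # [Editorial note] With chunk_full_threshold 0.95, a chunk counts as full
    # from 0.95 * 1_024 = 972.8 bytes: 7 * 128 = 896 bytes and
    # 6 * 128 + 64 = 832 bytes are not full, while 8 * 128 = 1_024 bytes is.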
  end

  sub_test_case 'with configuration includes chunk_records_limit' do
    setup do
      @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "chunk_records_limit" => 6})
      @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
      @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
      @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
      (class << @p; self; end).module_eval do
        define_method(:resume) {
          staged = {
            dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
            dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
          }
          queued = [
            create_chunk(dm0, ["0" * 128] * 6).enqueued!,
            create_chunk(dm1, ["a" * 128] * 6).enqueued!,
            create_chunk(dm1, ["a" * 128] * 6).enqueued!,
            create_chunk(dm1, ["a" * 128] * 3).enqueued!,
          ]
          return staged, queued
        }
      end
      @p.start
    end

    test '#chunk_size_over? returns true if a chunk has too many records even if its byte size is under the limit' do
      assert_equal 6, @p.chunk_records_limit

      m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

      c1 = create_chunk(m, ["a" * 128] * 6)
      assert_equal 6, c1.size
      assert !@p.chunk_size_over?(c1)

      c2 = create_chunk(m, ["a" * 128] * 7)
      assert @p.chunk_size_over?(c2)

      c3 = create_chunk(m, ["a" * 128] * 6 + ["a"])
      assert @p.chunk_size_over?(c3)
    end

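    # [Editorial note] Six 128-byte rows total 768 bytes, well under the
    # 1_024-byte chunk_limit_size, yet a seventh record (of any size, even a
    # single byte as in c3) makes the chunk "over": chunk_records_limit caps
    # the record count independently of the byte size.
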
    test '#chunk_size_full? returns true if a chunk has enough records even if its byte size is under the limit' do
      assert_equal 6, @p.chunk_records_limit

      m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

      c1 = create_chunk(m, ["a" * 128] * 5)
      assert_equal 5, c1.size
      assert !@p.chunk_size_full?(c1)

      c2 = create_chunk(m, ["a" * 128] * 6)
      assert @p.chunk_size_full?(c2)

      c3 = create_chunk(m, ["a" * 128] * 5 + ["a"])
      assert @p.chunk_size_full?(c3)
    end
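
    # [Editorial note] With chunk_records_limit 6, fullness also appears to be
    # judged on record count: 5 records (5/6 ~= 0.83, below the 0.95
    # threshold) is not full, while 6 records is, regardless of how small the
    # sixth record is (c3).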
  end

  sub_test_case 'with configuration includes queue_length_limit' do
    setup do
      @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "queue_length_limit" => 5})
      @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
      @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
      @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
      (class << @p; self; end).module_eval do
        define_method(:resume) {
          staged = {
            dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
            dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
          }
          queued = [
            create_chunk(dm0, ["0" * 128] * 6).enqueued!,
            create_chunk(dm1, ["a" * 128] * 6).enqueued!,
            create_chunk(dm1, ["a" * 128] * 6).enqueued!,
            create_chunk(dm1, ["a" * 128] * 3).enqueued!,
          ]
          return staged, queued
        }
      end
      @p.start
    end

    test '#configure will overwrite standard configuration if queue_length_limit is specified' do
      assert_equal 1024, @p.chunk_limit_size
      assert_equal 5, @p.queue_length_limit
      assert_equal (1024*5), @p.total_limit_size
    end
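
    # [Editorial note] Per the assertions above, configuring queue_length_limit
    # (a cap on the number of queued chunks) makes #configure recompute
    # total_limit_size as chunk_limit_size * queue_length_limit
    # (1_024 * 5 = 5_120), overriding the configured 10_240.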
  end

end