fluentd222 1.16.2-x86_64-linux
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
 - data/.deepsource.toml +13 -0
 - data/.github/ISSUE_TEMPLATE/bug_report.yaml +71 -0
 - data/.github/ISSUE_TEMPLATE/config.yml +5 -0
 - data/.github/ISSUE_TEMPLATE/feature_request.yaml +39 -0
 - data/.github/ISSUE_TEMPLATE.md +17 -0
 - data/.github/PULL_REQUEST_TEMPLATE.md +14 -0
 - data/.github/workflows/stale-actions.yml +24 -0
 - data/.github/workflows/test-ruby-head.yaml +31 -0
 - data/.github/workflows/test.yaml +32 -0
 - data/.gitignore +30 -0
 - data/ADOPTERS.md +5 -0
 - data/AUTHORS +2 -0
 - data/CHANGELOG.md +2720 -0
 - data/CONTRIBUTING.md +45 -0
 - data/GOVERNANCE.md +55 -0
 - data/Gemfile +9 -0
 - data/GithubWorkflow.md +78 -0
 - data/LICENSE +202 -0
 - data/MAINTAINERS.md +13 -0
 - data/README.md +75 -0
 - data/Rakefile +79 -0
 - data/SECURITY.md +14 -0
 - data/bin/fluent-binlog-reader +7 -0
 - data/bin/fluent-ca-generate +6 -0
 - data/bin/fluent-cap-ctl +7 -0
 - data/bin/fluent-cat +5 -0
 - data/bin/fluent-ctl +7 -0
 - data/bin/fluent-debug +5 -0
 - data/bin/fluent-gem +9 -0
 - data/bin/fluent-plugin-config-format +5 -0
 - data/bin/fluent-plugin-generate +5 -0
 - data/bin/fluentd +15 -0
 - data/code-of-conduct.md +3 -0
 - data/docs/SECURITY_AUDIT.pdf +0 -0
 - data/example/copy_roundrobin.conf +39 -0
 - data/example/counter.conf +18 -0
 - data/example/filter_stdout.conf +22 -0
 - data/example/in_forward.conf +14 -0
 - data/example/in_forward_client.conf +37 -0
 - data/example/in_forward_shared_key.conf +15 -0
 - data/example/in_forward_tls.conf +14 -0
 - data/example/in_forward_users.conf +24 -0
 - data/example/in_forward_workers.conf +21 -0
 - data/example/in_http.conf +16 -0
 - data/example/in_out_forward.conf +17 -0
 - data/example/in_sample_blocks.conf +17 -0
 - data/example/in_sample_with_compression.conf +23 -0
 - data/example/in_syslog.conf +15 -0
 - data/example/in_tail.conf +14 -0
 - data/example/in_tcp.conf +13 -0
 - data/example/in_udp.conf +13 -0
 - data/example/logevents.conf +25 -0
 - data/example/multi_filters.conf +61 -0
 - data/example/out_copy.conf +20 -0
 - data/example/out_exec_filter.conf +42 -0
 - data/example/out_file.conf +13 -0
 - data/example/out_forward.conf +35 -0
 - data/example/out_forward_buf_file.conf +23 -0
 - data/example/out_forward_client.conf +109 -0
 - data/example/out_forward_heartbeat_none.conf +16 -0
 - data/example/out_forward_sd.conf +17 -0
 - data/example/out_forward_shared_key.conf +36 -0
 - data/example/out_forward_tls.conf +18 -0
 - data/example/out_forward_users.conf +65 -0
 - data/example/out_null.conf +36 -0
 - data/example/sd.yaml +8 -0
 - data/example/secondary_file.conf +42 -0
 - data/example/suppress_config_dump.conf +7 -0
 - data/example/v0_12_filter.conf +78 -0
 - data/example/v1_literal_example.conf +36 -0
 - data/example/worker_section.conf +36 -0
 - data/fluent.conf +139 -0
 - data/fluentd.gemspec +54 -0
 - data/lib/fluent/agent.rb +168 -0
 - data/lib/fluent/capability.rb +87 -0
 - data/lib/fluent/clock.rb +66 -0
 - data/lib/fluent/command/binlog_reader.rb +244 -0
 - data/lib/fluent/command/bundler_injection.rb +45 -0
 - data/lib/fluent/command/ca_generate.rb +184 -0
 - data/lib/fluent/command/cap_ctl.rb +174 -0
 - data/lib/fluent/command/cat.rb +365 -0
 - data/lib/fluent/command/ctl.rb +180 -0
 - data/lib/fluent/command/debug.rb +103 -0
 - data/lib/fluent/command/fluentd.rb +374 -0
 - data/lib/fluent/command/plugin_config_formatter.rb +308 -0
 - data/lib/fluent/command/plugin_generator.rb +365 -0
 - data/lib/fluent/compat/call_super_mixin.rb +76 -0
 - data/lib/fluent/compat/detach_process_mixin.rb +33 -0
 - data/lib/fluent/compat/exec_util.rb +129 -0
 - data/lib/fluent/compat/file_util.rb +54 -0
 - data/lib/fluent/compat/filter.rb +68 -0
 - data/lib/fluent/compat/formatter.rb +111 -0
 - data/lib/fluent/compat/formatter_utils.rb +85 -0
 - data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
 - data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
 - data/lib/fluent/compat/input.rb +49 -0
 - data/lib/fluent/compat/output.rb +721 -0
 - data/lib/fluent/compat/output_chain.rb +60 -0
 - data/lib/fluent/compat/parser.rb +310 -0
 - data/lib/fluent/compat/parser_utils.rb +40 -0
 - data/lib/fluent/compat/propagate_default.rb +62 -0
 - data/lib/fluent/compat/record_filter_mixin.rb +34 -0
 - data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
 - data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
 - data/lib/fluent/compat/socket_util.rb +165 -0
 - data/lib/fluent/compat/string_util.rb +34 -0
 - data/lib/fluent/compat/structured_format_mixin.rb +26 -0
 - data/lib/fluent/compat/type_converter.rb +90 -0
 - data/lib/fluent/config/basic_parser.rb +123 -0
 - data/lib/fluent/config/configure_proxy.rb +424 -0
 - data/lib/fluent/config/dsl.rb +152 -0
 - data/lib/fluent/config/element.rb +265 -0
 - data/lib/fluent/config/error.rb +44 -0
 - data/lib/fluent/config/literal_parser.rb +286 -0
 - data/lib/fluent/config/parser.rb +107 -0
 - data/lib/fluent/config/section.rb +272 -0
 - data/lib/fluent/config/types.rb +249 -0
 - data/lib/fluent/config/v1_parser.rb +192 -0
 - data/lib/fluent/config/yaml_parser/fluent_value.rb +47 -0
 - data/lib/fluent/config/yaml_parser/loader.rb +108 -0
 - data/lib/fluent/config/yaml_parser/parser.rb +166 -0
 - data/lib/fluent/config/yaml_parser/section_builder.rb +107 -0
 - data/lib/fluent/config/yaml_parser.rb +56 -0
 - data/lib/fluent/config.rb +89 -0
 - data/lib/fluent/configurable.rb +201 -0
 - data/lib/fluent/counter/base_socket.rb +44 -0
 - data/lib/fluent/counter/client.rb +297 -0
 - data/lib/fluent/counter/error.rb +86 -0
 - data/lib/fluent/counter/mutex_hash.rb +163 -0
 - data/lib/fluent/counter/server.rb +273 -0
 - data/lib/fluent/counter/store.rb +205 -0
 - data/lib/fluent/counter/validator.rb +145 -0
 - data/lib/fluent/counter.rb +23 -0
 - data/lib/fluent/daemon.rb +13 -0
 - data/lib/fluent/daemonizer.rb +88 -0
 - data/lib/fluent/engine.rb +253 -0
 - data/lib/fluent/env.rb +40 -0
 - data/lib/fluent/error.rb +37 -0
 - data/lib/fluent/event.rb +330 -0
 - data/lib/fluent/event_router.rb +315 -0
 - data/lib/fluent/ext_monitor_require.rb +28 -0
 - data/lib/fluent/file_wrapper.rb +137 -0
 - data/lib/fluent/filter.rb +21 -0
 - data/lib/fluent/fluent_log_event_router.rb +139 -0
 - data/lib/fluent/formatter.rb +23 -0
 - data/lib/fluent/input.rb +21 -0
 - data/lib/fluent/label.rb +46 -0
 - data/lib/fluent/load.rb +34 -0
 - data/lib/fluent/log/console_adapter.rb +66 -0
 - data/lib/fluent/log.rb +752 -0
 - data/lib/fluent/match.rb +187 -0
 - data/lib/fluent/mixin.rb +31 -0
 - data/lib/fluent/msgpack_factory.rb +111 -0
 - data/lib/fluent/oj_options.rb +61 -0
 - data/lib/fluent/output.rb +29 -0
 - data/lib/fluent/output_chain.rb +23 -0
 - data/lib/fluent/parser.rb +23 -0
 - data/lib/fluent/plugin/bare_output.rb +104 -0
 - data/lib/fluent/plugin/base.rb +214 -0
 - data/lib/fluent/plugin/buf_file.rb +242 -0
 - data/lib/fluent/plugin/buf_file_single.rb +254 -0
 - data/lib/fluent/plugin/buf_memory.rb +34 -0
 - data/lib/fluent/plugin/buffer/chunk.rb +240 -0
 - data/lib/fluent/plugin/buffer/file_chunk.rb +413 -0
 - data/lib/fluent/plugin/buffer/file_single_chunk.rb +310 -0
 - data/lib/fluent/plugin/buffer/memory_chunk.rb +91 -0
 - data/lib/fluent/plugin/buffer.rb +941 -0
 - data/lib/fluent/plugin/compressable.rb +96 -0
 - data/lib/fluent/plugin/exec_util.rb +22 -0
 - data/lib/fluent/plugin/file_util.rb +22 -0
 - data/lib/fluent/plugin/filter.rb +127 -0
 - data/lib/fluent/plugin/filter_grep.rb +189 -0
 - data/lib/fluent/plugin/filter_parser.rb +130 -0
 - data/lib/fluent/plugin/filter_record_transformer.rb +324 -0
 - data/lib/fluent/plugin/filter_stdout.rb +53 -0
 - data/lib/fluent/plugin/formatter.rb +75 -0
 - data/lib/fluent/plugin/formatter_csv.rb +78 -0
 - data/lib/fluent/plugin/formatter_hash.rb +35 -0
 - data/lib/fluent/plugin/formatter_json.rb +59 -0
 - data/lib/fluent/plugin/formatter_ltsv.rb +44 -0
 - data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
 - data/lib/fluent/plugin/formatter_out_file.rb +53 -0
 - data/lib/fluent/plugin/formatter_single_value.rb +36 -0
 - data/lib/fluent/plugin/formatter_stdout.rb +76 -0
 - data/lib/fluent/plugin/formatter_tsv.rb +40 -0
 - data/lib/fluent/plugin/in_debug_agent.rb +71 -0
 - data/lib/fluent/plugin/in_dummy.rb +18 -0
 - data/lib/fluent/plugin/in_exec.rb +110 -0
 - data/lib/fluent/plugin/in_forward.rb +473 -0
 - data/lib/fluent/plugin/in_gc_stat.rb +72 -0
 - data/lib/fluent/plugin/in_http.rb +677 -0
 - data/lib/fluent/plugin/in_monitor_agent.rb +412 -0
 - data/lib/fluent/plugin/in_object_space.rb +93 -0
 - data/lib/fluent/plugin/in_sample.rb +141 -0
 - data/lib/fluent/plugin/in_syslog.rb +276 -0
 - data/lib/fluent/plugin/in_tail/group_watch.rb +204 -0
 - data/lib/fluent/plugin/in_tail/position_file.rb +269 -0
 - data/lib/fluent/plugin/in_tail.rb +1299 -0
 - data/lib/fluent/plugin/in_tcp.rb +226 -0
 - data/lib/fluent/plugin/in_udp.rb +92 -0
 - data/lib/fluent/plugin/in_unix.rb +195 -0
 - data/lib/fluent/plugin/input.rb +75 -0
 - data/lib/fluent/plugin/metrics.rb +119 -0
 - data/lib/fluent/plugin/metrics_local.rb +96 -0
 - data/lib/fluent/plugin/multi_output.rb +195 -0
 - data/lib/fluent/plugin/out_copy.rb +120 -0
 - data/lib/fluent/plugin/out_exec.rb +105 -0
 - data/lib/fluent/plugin/out_exec_filter.rb +319 -0
 - data/lib/fluent/plugin/out_file.rb +340 -0
 - data/lib/fluent/plugin/out_forward/ack_handler.rb +176 -0
 - data/lib/fluent/plugin/out_forward/connection_manager.rb +113 -0
 - data/lib/fluent/plugin/out_forward/error.rb +28 -0
 - data/lib/fluent/plugin/out_forward/failure_detector.rb +84 -0
 - data/lib/fluent/plugin/out_forward/handshake_protocol.rb +125 -0
 - data/lib/fluent/plugin/out_forward/load_balancer.rb +114 -0
 - data/lib/fluent/plugin/out_forward/socket_cache.rb +142 -0
 - data/lib/fluent/plugin/out_forward.rb +826 -0
 - data/lib/fluent/plugin/out_http.rb +275 -0
 - data/lib/fluent/plugin/out_null.rb +74 -0
 - data/lib/fluent/plugin/out_relabel.rb +32 -0
 - data/lib/fluent/plugin/out_roundrobin.rb +84 -0
 - data/lib/fluent/plugin/out_secondary_file.rb +148 -0
 - data/lib/fluent/plugin/out_stdout.rb +74 -0
 - data/lib/fluent/plugin/out_stream.rb +130 -0
 - data/lib/fluent/plugin/output.rb +1603 -0
 - data/lib/fluent/plugin/owned_by_mixin.rb +41 -0
 - data/lib/fluent/plugin/parser.rb +274 -0
 - data/lib/fluent/plugin/parser_apache.rb +28 -0
 - data/lib/fluent/plugin/parser_apache2.rb +88 -0
 - data/lib/fluent/plugin/parser_apache_error.rb +26 -0
 - data/lib/fluent/plugin/parser_csv.rb +114 -0
 - data/lib/fluent/plugin/parser_json.rb +96 -0
 - data/lib/fluent/plugin/parser_ltsv.rb +51 -0
 - data/lib/fluent/plugin/parser_msgpack.rb +50 -0
 - data/lib/fluent/plugin/parser_multiline.rb +152 -0
 - data/lib/fluent/plugin/parser_nginx.rb +28 -0
 - data/lib/fluent/plugin/parser_none.rb +36 -0
 - data/lib/fluent/plugin/parser_regexp.rb +68 -0
 - data/lib/fluent/plugin/parser_syslog.rb +496 -0
 - data/lib/fluent/plugin/parser_tsv.rb +42 -0
 - data/lib/fluent/plugin/sd_file.rb +156 -0
 - data/lib/fluent/plugin/sd_srv.rb +135 -0
 - data/lib/fluent/plugin/sd_static.rb +58 -0
 - data/lib/fluent/plugin/service_discovery.rb +65 -0
 - data/lib/fluent/plugin/socket_util.rb +22 -0
 - data/lib/fluent/plugin/storage.rb +84 -0
 - data/lib/fluent/plugin/storage_local.rb +162 -0
 - data/lib/fluent/plugin/string_util.rb +22 -0
 - data/lib/fluent/plugin.rb +206 -0
 - data/lib/fluent/plugin_helper/cert_option.rb +191 -0
 - data/lib/fluent/plugin_helper/child_process.rb +369 -0
 - data/lib/fluent/plugin_helper/compat_parameters.rb +343 -0
 - data/lib/fluent/plugin_helper/counter.rb +51 -0
 - data/lib/fluent/plugin_helper/event_emitter.rb +100 -0
 - data/lib/fluent/plugin_helper/event_loop.rb +170 -0
 - data/lib/fluent/plugin_helper/extract.rb +104 -0
 - data/lib/fluent/plugin_helper/formatter.rb +147 -0
 - data/lib/fluent/plugin_helper/http_server/app.rb +79 -0
 - data/lib/fluent/plugin_helper/http_server/compat/server.rb +92 -0
 - data/lib/fluent/plugin_helper/http_server/compat/ssl_context_extractor.rb +52 -0
 - data/lib/fluent/plugin_helper/http_server/compat/webrick_handler.rb +58 -0
 - data/lib/fluent/plugin_helper/http_server/methods.rb +35 -0
 - data/lib/fluent/plugin_helper/http_server/request.rb +42 -0
 - data/lib/fluent/plugin_helper/http_server/router.rb +54 -0
 - data/lib/fluent/plugin_helper/http_server/server.rb +94 -0
 - data/lib/fluent/plugin_helper/http_server/ssl_context_builder.rb +41 -0
 - data/lib/fluent/plugin_helper/http_server.rb +135 -0
 - data/lib/fluent/plugin_helper/inject.rb +154 -0
 - data/lib/fluent/plugin_helper/metrics.rb +129 -0
 - data/lib/fluent/plugin_helper/parser.rb +147 -0
 - data/lib/fluent/plugin_helper/record_accessor.rb +207 -0
 - data/lib/fluent/plugin_helper/retry_state.rb +219 -0
 - data/lib/fluent/plugin_helper/server.rb +828 -0
 - data/lib/fluent/plugin_helper/service_discovery/manager.rb +146 -0
 - data/lib/fluent/plugin_helper/service_discovery/round_robin_balancer.rb +43 -0
 - data/lib/fluent/plugin_helper/service_discovery.rb +125 -0
 - data/lib/fluent/plugin_helper/socket.rb +288 -0
 - data/lib/fluent/plugin_helper/socket_option.rb +98 -0
 - data/lib/fluent/plugin_helper/storage.rb +349 -0
 - data/lib/fluent/plugin_helper/thread.rb +180 -0
 - data/lib/fluent/plugin_helper/timer.rb +92 -0
 - data/lib/fluent/plugin_helper.rb +75 -0
 - data/lib/fluent/plugin_id.rb +93 -0
 - data/lib/fluent/process.rb +22 -0
 - data/lib/fluent/registry.rb +117 -0
 - data/lib/fluent/root_agent.rb +372 -0
 - data/lib/fluent/rpc.rb +95 -0
 - data/lib/fluent/static_config_analysis.rb +194 -0
 - data/lib/fluent/supervisor.rb +1076 -0
 - data/lib/fluent/system_config.rb +189 -0
 - data/lib/fluent/test/base.rb +78 -0
 - data/lib/fluent/test/driver/base.rb +231 -0
 - data/lib/fluent/test/driver/base_owned.rb +83 -0
 - data/lib/fluent/test/driver/base_owner.rb +135 -0
 - data/lib/fluent/test/driver/event_feeder.rb +98 -0
 - data/lib/fluent/test/driver/filter.rb +61 -0
 - data/lib/fluent/test/driver/formatter.rb +30 -0
 - data/lib/fluent/test/driver/input.rb +31 -0
 - data/lib/fluent/test/driver/multi_output.rb +53 -0
 - data/lib/fluent/test/driver/output.rb +102 -0
 - data/lib/fluent/test/driver/parser.rb +30 -0
 - data/lib/fluent/test/driver/storage.rb +30 -0
 - data/lib/fluent/test/driver/test_event_router.rb +45 -0
 - data/lib/fluent/test/filter_test.rb +77 -0
 - data/lib/fluent/test/formatter_test.rb +65 -0
 - data/lib/fluent/test/helpers.rb +134 -0
 - data/lib/fluent/test/input_test.rb +174 -0
 - data/lib/fluent/test/log.rb +79 -0
 - data/lib/fluent/test/output_test.rb +156 -0
 - data/lib/fluent/test/parser_test.rb +70 -0
 - data/lib/fluent/test/startup_shutdown.rb +44 -0
 - data/lib/fluent/test.rb +58 -0
 - data/lib/fluent/time.rb +512 -0
 - data/lib/fluent/timezone.rb +171 -0
 - data/lib/fluent/tls.rb +81 -0
 - data/lib/fluent/unique_id.rb +39 -0
 - data/lib/fluent/variable_store.rb +40 -0
 - data/lib/fluent/version.rb +21 -0
 - data/lib/fluent/win32api.rb +38 -0
 - data/lib/fluent/winsvc.rb +100 -0
 - data/templates/new_gem/Gemfile +3 -0
 - data/templates/new_gem/README.md.erb +43 -0
 - data/templates/new_gem/Rakefile +13 -0
 - data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
 - data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
 - data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
 - data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
 - data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
 - data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
 - data/templates/new_gem/lib/fluent/plugin/storage.rb.erb +40 -0
 - data/templates/new_gem/test/helper.rb.erb +7 -0
 - data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
 - data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
 - data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
 - data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
 - data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
 - data/templates/new_gem/test/plugin/test_storage.rb.erb +18 -0
 - data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
 - data/templates/plugin_config_formatter/param.md-table.erb +10 -0
 - data/templates/plugin_config_formatter/param.md.erb +34 -0
 - data/templates/plugin_config_formatter/section.md.erb +12 -0
 - data/test/command/test_binlog_reader.rb +362 -0
 - data/test/command/test_ca_generate.rb +70 -0
 - data/test/command/test_cap_ctl.rb +100 -0
 - data/test/command/test_cat.rb +128 -0
 - data/test/command/test_ctl.rb +56 -0
 - data/test/command/test_fluentd.rb +1291 -0
 - data/test/command/test_plugin_config_formatter.rb +397 -0
 - data/test/command/test_plugin_generator.rb +109 -0
 - data/test/compat/test_calls_super.rb +166 -0
 - data/test/compat/test_parser.rb +92 -0
 - data/test/config/assertions.rb +42 -0
 - data/test/config/test_config_parser.rb +551 -0
 - data/test/config/test_configurable.rb +1784 -0
 - data/test/config/test_configure_proxy.rb +604 -0
 - data/test/config/test_dsl.rb +415 -0
 - data/test/config/test_element.rb +518 -0
 - data/test/config/test_literal_parser.rb +309 -0
 - data/test/config/test_plugin_configuration.rb +56 -0
 - data/test/config/test_section.rb +191 -0
 - data/test/config/test_system_config.rb +195 -0
 - data/test/config/test_types.rb +408 -0
 - data/test/counter/test_client.rb +563 -0
 - data/test/counter/test_error.rb +44 -0
 - data/test/counter/test_mutex_hash.rb +179 -0
 - data/test/counter/test_server.rb +589 -0
 - data/test/counter/test_store.rb +258 -0
 - data/test/counter/test_validator.rb +137 -0
 - data/test/helper.rb +155 -0
 - data/test/helpers/fuzzy_assert.rb +89 -0
 - data/test/helpers/process_extenstion.rb +33 -0
 - data/test/log/test_console_adapter.rb +110 -0
 - data/test/plugin/data/2010/01/20100102-030405.log +0 -0
 - data/test/plugin/data/2010/01/20100102-030406.log +0 -0
 - data/test/plugin/data/2010/01/20100102.log +0 -0
 - data/test/plugin/data/log/bar +0 -0
 - data/test/plugin/data/log/foo/bar.log +0 -0
 - data/test/plugin/data/log/foo/bar2 +0 -0
 - data/test/plugin/data/log/test.log +0 -0
 - data/test/plugin/data/sd_file/config +11 -0
 - data/test/plugin/data/sd_file/config.json +17 -0
 - data/test/plugin/data/sd_file/config.yaml +11 -0
 - data/test/plugin/data/sd_file/config.yml +11 -0
 - data/test/plugin/data/sd_file/invalid_config.yml +7 -0
 - data/test/plugin/in_tail/test_fifo.rb +121 -0
 - data/test/plugin/in_tail/test_io_handler.rb +150 -0
 - data/test/plugin/in_tail/test_position_file.rb +346 -0
 - data/test/plugin/out_forward/test_ack_handler.rb +140 -0
 - data/test/plugin/out_forward/test_connection_manager.rb +145 -0
 - data/test/plugin/out_forward/test_handshake_protocol.rb +112 -0
 - data/test/plugin/out_forward/test_load_balancer.rb +106 -0
 - data/test/plugin/out_forward/test_socket_cache.rb +174 -0
 - data/test/plugin/test_bare_output.rb +131 -0
 - data/test/plugin/test_base.rb +247 -0
 - data/test/plugin/test_buf_file.rb +1314 -0
 - data/test/plugin/test_buf_file_single.rb +898 -0
 - data/test/plugin/test_buf_memory.rb +42 -0
 - data/test/plugin/test_buffer.rb +1434 -0
 - data/test/plugin/test_buffer_chunk.rb +209 -0
 - data/test/plugin/test_buffer_file_chunk.rb +871 -0
 - data/test/plugin/test_buffer_file_single_chunk.rb +611 -0
 - data/test/plugin/test_buffer_memory_chunk.rb +339 -0
 - data/test/plugin/test_compressable.rb +87 -0
 - data/test/plugin/test_file_util.rb +96 -0
 - data/test/plugin/test_filter.rb +368 -0
 - data/test/plugin/test_filter_grep.rb +697 -0
 - data/test/plugin/test_filter_parser.rb +731 -0
 - data/test/plugin/test_filter_record_transformer.rb +577 -0
 - data/test/plugin/test_filter_stdout.rb +207 -0
 - data/test/plugin/test_formatter_csv.rb +136 -0
 - data/test/plugin/test_formatter_hash.rb +38 -0
 - data/test/plugin/test_formatter_json.rb +61 -0
 - data/test/plugin/test_formatter_ltsv.rb +70 -0
 - data/test/plugin/test_formatter_msgpack.rb +28 -0
 - data/test/plugin/test_formatter_out_file.rb +116 -0
 - data/test/plugin/test_formatter_single_value.rb +44 -0
 - data/test/plugin/test_formatter_tsv.rb +76 -0
 - data/test/plugin/test_in_debug_agent.rb +49 -0
 - data/test/plugin/test_in_exec.rb +261 -0
 - data/test/plugin/test_in_forward.rb +1178 -0
 - data/test/plugin/test_in_gc_stat.rb +62 -0
 - data/test/plugin/test_in_http.rb +1102 -0
 - data/test/plugin/test_in_monitor_agent.rb +922 -0
 - data/test/plugin/test_in_object_space.rb +66 -0
 - data/test/plugin/test_in_sample.rb +190 -0
 - data/test/plugin/test_in_syslog.rb +505 -0
 - data/test/plugin/test_in_tail.rb +3125 -0
 - data/test/plugin/test_in_tcp.rb +328 -0
 - data/test/plugin/test_in_udp.rb +296 -0
 - data/test/plugin/test_in_unix.rb +181 -0
 - data/test/plugin/test_input.rb +137 -0
 - data/test/plugin/test_metadata.rb +89 -0
 - data/test/plugin/test_metrics.rb +294 -0
 - data/test/plugin/test_metrics_local.rb +96 -0
 - data/test/plugin/test_multi_output.rb +204 -0
 - data/test/plugin/test_out_copy.rb +308 -0
 - data/test/plugin/test_out_exec.rb +312 -0
 - data/test/plugin/test_out_exec_filter.rb +606 -0
 - data/test/plugin/test_out_file.rb +1038 -0
 - data/test/plugin/test_out_forward.rb +1361 -0
 - data/test/plugin/test_out_http.rb +429 -0
 - data/test/plugin/test_out_null.rb +105 -0
 - data/test/plugin/test_out_relabel.rb +28 -0
 - data/test/plugin/test_out_roundrobin.rb +146 -0
 - data/test/plugin/test_out_secondary_file.rb +458 -0
 - data/test/plugin/test_out_stdout.rb +205 -0
 - data/test/plugin/test_out_stream.rb +103 -0
 - data/test/plugin/test_output.rb +1334 -0
 - data/test/plugin/test_output_as_buffered.rb +2024 -0
 - data/test/plugin/test_output_as_buffered_backup.rb +363 -0
 - data/test/plugin/test_output_as_buffered_compress.rb +179 -0
 - data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
 - data/test/plugin/test_output_as_buffered_retries.rb +966 -0
 - data/test/plugin/test_output_as_buffered_secondary.rb +882 -0
 - data/test/plugin/test_output_as_standard.rb +374 -0
 - data/test/plugin/test_owned_by.rb +34 -0
 - data/test/plugin/test_parser.rb +399 -0
 - data/test/plugin/test_parser_apache.rb +42 -0
 - data/test/plugin/test_parser_apache2.rb +47 -0
 - data/test/plugin/test_parser_apache_error.rb +45 -0
 - data/test/plugin/test_parser_csv.rb +200 -0
 - data/test/plugin/test_parser_json.rb +138 -0
 - data/test/plugin/test_parser_labeled_tsv.rb +160 -0
 - data/test/plugin/test_parser_multiline.rb +111 -0
 - data/test/plugin/test_parser_nginx.rb +88 -0
 - data/test/plugin/test_parser_none.rb +52 -0
 - data/test/plugin/test_parser_regexp.rb +284 -0
 - data/test/plugin/test_parser_syslog.rb +650 -0
 - data/test/plugin/test_parser_tsv.rb +122 -0
 - data/test/plugin/test_sd_file.rb +228 -0
 - data/test/plugin/test_sd_srv.rb +230 -0
 - data/test/plugin/test_storage.rb +166 -0
 - data/test/plugin/test_storage_local.rb +335 -0
 - data/test/plugin/test_string_util.rb +26 -0
 - data/test/plugin_helper/data/cert/cert-key.pem +27 -0
 - data/test/plugin_helper/data/cert/cert-with-CRLF.pem +19 -0
 - data/test/plugin_helper/data/cert/cert-with-no-newline.pem +19 -0
 - data/test/plugin_helper/data/cert/cert.pem +19 -0
 - data/test/plugin_helper/data/cert/cert_chains/ca-cert-key.pem +27 -0
 - data/test/plugin_helper/data/cert/cert_chains/ca-cert.pem +20 -0
 - data/test/plugin_helper/data/cert/cert_chains/cert-key.pem +27 -0
 - data/test/plugin_helper/data/cert/cert_chains/cert.pem +40 -0
 - data/test/plugin_helper/data/cert/empty.pem +0 -0
 - data/test/plugin_helper/data/cert/generate_cert.rb +125 -0
 - data/test/plugin_helper/data/cert/with_ca/ca-cert-key-pass.pem +30 -0
 - data/test/plugin_helper/data/cert/with_ca/ca-cert-key.pem +27 -0
 - data/test/plugin_helper/data/cert/with_ca/ca-cert-pass.pem +20 -0
 - data/test/plugin_helper/data/cert/with_ca/ca-cert.pem +20 -0
 - data/test/plugin_helper/data/cert/with_ca/cert-key-pass.pem +30 -0
 - data/test/plugin_helper/data/cert/with_ca/cert-key.pem +27 -0
 - data/test/plugin_helper/data/cert/with_ca/cert-pass.pem +21 -0
 - data/test/plugin_helper/data/cert/with_ca/cert.pem +21 -0
 - data/test/plugin_helper/data/cert/without_ca/cert-key-pass.pem +30 -0
 - data/test/plugin_helper/data/cert/without_ca/cert-key.pem +27 -0
 - data/test/plugin_helper/data/cert/without_ca/cert-pass.pem +20 -0
 - data/test/plugin_helper/data/cert/without_ca/cert.pem +20 -0
 - data/test/plugin_helper/http_server/test_app.rb +65 -0
 - data/test/plugin_helper/http_server/test_route.rb +32 -0
 - data/test/plugin_helper/service_discovery/test_manager.rb +93 -0
 - data/test/plugin_helper/service_discovery/test_round_robin_balancer.rb +21 -0
 - data/test/plugin_helper/test_cert_option.rb +25 -0
 - data/test/plugin_helper/test_child_process.rb +862 -0
 - data/test/plugin_helper/test_compat_parameters.rb +358 -0
 - data/test/plugin_helper/test_event_emitter.rb +80 -0
 - data/test/plugin_helper/test_event_loop.rb +52 -0
 - data/test/plugin_helper/test_extract.rb +194 -0
 - data/test/plugin_helper/test_formatter.rb +255 -0
 - data/test/plugin_helper/test_http_server_helper.rb +372 -0
 - data/test/plugin_helper/test_inject.rb +561 -0
 - data/test/plugin_helper/test_metrics.rb +137 -0
 - data/test/plugin_helper/test_parser.rb +264 -0
 - data/test/plugin_helper/test_record_accessor.rb +238 -0
 - data/test/plugin_helper/test_retry_state.rb +1006 -0
 - data/test/plugin_helper/test_server.rb +1895 -0
 - data/test/plugin_helper/test_service_discovery.rb +165 -0
 - data/test/plugin_helper/test_socket.rb +146 -0
 - data/test/plugin_helper/test_storage.rb +542 -0
 - data/test/plugin_helper/test_thread.rb +164 -0
 - data/test/plugin_helper/test_timer.rb +130 -0
 - data/test/scripts/exec_script.rb +32 -0
 - data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
 - data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
 - data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
 - data/test/scripts/fluent/plugin/out_test.rb +81 -0
 - data/test/scripts/fluent/plugin/out_test2.rb +80 -0
 - data/test/scripts/fluent/plugin/parser_known.rb +4 -0
 - data/test/test_capability.rb +74 -0
 - data/test/test_clock.rb +164 -0
 - data/test/test_config.rb +369 -0
 - data/test/test_configdsl.rb +148 -0
 - data/test/test_daemonizer.rb +91 -0
 - data/test/test_engine.rb +203 -0
 - data/test/test_event.rb +531 -0
 - data/test/test_event_router.rb +348 -0
 - data/test/test_event_time.rb +199 -0
 - data/test/test_file_wrapper.rb +53 -0
 - data/test/test_filter.rb +121 -0
 - data/test/test_fluent_log_event_router.rb +99 -0
 - data/test/test_formatter.rb +369 -0
 - data/test/test_input.rb +31 -0
 - data/test/test_log.rb +1076 -0
 - data/test/test_match.rb +148 -0
 - data/test/test_mixin.rb +351 -0
 - data/test/test_msgpack_factory.rb +50 -0
 - data/test/test_oj_options.rb +55 -0
 - data/test/test_output.rb +278 -0
 - data/test/test_plugin.rb +251 -0
 - data/test/test_plugin_classes.rb +370 -0
 - data/test/test_plugin_helper.rb +81 -0
 - data/test/test_plugin_id.rb +119 -0
 - data/test/test_process.rb +14 -0
 - data/test/test_root_agent.rb +951 -0
 - data/test/test_static_config_analysis.rb +177 -0
 - data/test/test_supervisor.rb +821 -0
 - data/test/test_test_drivers.rb +136 -0
 - data/test/test_time_formatter.rb +301 -0
 - data/test/test_time_parser.rb +362 -0
 - data/test/test_tls.rb +65 -0
 - data/test/test_unique_id.rb +47 -0
 - data/test/test_variable_store.rb +65 -0
 - metadata +1183 -0
 
| 
         @@ -0,0 +1,1434 @@ 
     | 
|
| 
      
 1 
     | 
    
         
            +
            require_relative '../helper'
         
     | 
| 
      
 2 
     | 
    
         
            +
            require 'fluent/plugin/buffer'
         
     | 
| 
      
 3 
     | 
    
         
            +
            require 'fluent/plugin/buffer/memory_chunk'
         
     | 
| 
      
 4 
     | 
    
         
            +
            require 'fluent/plugin/compressable'
         
     | 
| 
      
 5 
     | 
    
         
            +
            require 'fluent/plugin/buffer/chunk'
         
     | 
| 
      
 6 
     | 
    
         
            +
            require 'fluent/event'
         
     | 
| 
      
 7 
     | 
    
         
            +
            require 'flexmock/test_unit'
         
     | 
| 
      
 8 
     | 
    
         
            +
             
     | 
| 
      
 9 
     | 
    
         
            +
            require 'fluent/log'
         
     | 
| 
      
 10 
     | 
    
         
            +
            require 'fluent/plugin_id'
         
     | 
| 
      
 11 
     | 
    
         
            +
             
     | 
| 
      
 12 
     | 
    
         
            +
            require 'time'
         
     | 
| 
      
 13 
     | 
    
         
            +
             
     | 
| 
      
 14 
     | 
    
         
            +
            module FluentPluginBufferTest
         
     | 
| 
      
 15 
     | 
    
         
            +
              class DummyOutputPlugin < Fluent::Plugin::Base
         
     | 
| 
      
 16 
     | 
    
         
            +
                include Fluent::PluginId
         
     | 
| 
      
 17 
     | 
    
         
            +
                include Fluent::PluginLoggerMixin
         
     | 
| 
      
 18 
     | 
    
         
            +
              end
         
     | 
| 
      
 19 
     | 
    
         
            +
              class DummyMemoryChunkError < StandardError; end
         
     | 
| 
      
 20 
     | 
    
         
            +
              class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
         
     | 
| 
      
 21 
     | 
    
         
            +
                attr_reader :append_count, :rollbacked, :closed, :purged, :chunk
         
     | 
| 
      
 22 
     | 
    
         
            +
                attr_accessor :failing
         
     | 
| 
      
 23 
     | 
    
         
            +
                def initialize(metadata, compress: :text)
         
     | 
| 
      
 24 
     | 
    
         
            +
                  super
         
     | 
| 
      
 25 
     | 
    
         
            +
                  @append_count = 0
         
     | 
| 
      
 26 
     | 
    
         
            +
                  @rollbacked = false
         
     | 
| 
      
 27 
     | 
    
         
            +
                  @closed = false
         
     | 
| 
      
 28 
     | 
    
         
            +
                  @purged = false
         
     | 
| 
      
 29 
     | 
    
         
            +
                  @failing = false
         
     | 
| 
      
 30 
     | 
    
         
            +
                end
         
     | 
| 
      
 31 
     | 
    
         
            +
                def concat(data, size)
         
     | 
| 
      
 32 
     | 
    
         
            +
                  @append_count += 1
         
     | 
| 
      
 33 
     | 
    
         
            +
                  raise DummyMemoryChunkError if @failing
         
     | 
| 
      
 34 
     | 
    
         
            +
                  super
         
     | 
| 
      
 35 
     | 
    
         
            +
                end
         
     | 
| 
      
 36 
     | 
    
         
            +
                def rollback
         
     | 
| 
      
 37 
     | 
    
         
            +
                  super
         
     | 
| 
      
 38 
     | 
    
         
            +
                  @rollbacked = true
         
     | 
| 
      
 39 
     | 
    
         
            +
                end
         
     | 
| 
      
 40 
     | 
    
         
            +
    # Delegate to the real close, then record that it happened.
    def close
      super
      @closed = true
    end
         
     | 
| 
      
 44 
     | 
    
         
            +
    # Delegate to the real purge, then record that it happened.
    def purge
      super
      @purged = true
    end
         
     | 
| 
      
 48 
     | 
    
         
            +
              end
         
     | 
| 
      
 49 
     | 
    
         
            +
              class DummyPlugin < Fluent::Plugin::Buffer
         
     | 
| 
      
 50 
     | 
    
         
            +
                def create_metadata(timekey=nil, tag=nil, variables=nil)
         
     | 
| 
      
 51 
     | 
    
         
            +
                  Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
         
     | 
| 
      
 52 
     | 
    
         
            +
                end
         
     | 
| 
      
 53 
     | 
    
         
            +
                def create_chunk(metadata, data)
         
     | 
| 
      
 54 
     | 
    
         
            +
                  c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
         
     | 
| 
      
 55 
     | 
    
         
            +
                  c.append(data)
         
     | 
| 
      
 56 
     | 
    
         
            +
                  c.commit
         
     | 
| 
      
 57 
     | 
    
         
            +
                  c
         
     | 
| 
      
 58 
     | 
    
         
            +
                end
         
     | 
| 
      
 59 
     | 
    
         
            +
                def create_chunk_es(metadata, es)
         
     | 
| 
      
 60 
     | 
    
         
            +
                  c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
         
     | 
| 
      
 61 
     | 
    
         
            +
                  c.concat(es.to_msgpack_stream, es.size)
         
     | 
| 
      
 62 
     | 
    
         
            +
                  c.commit
         
     | 
| 
      
 63 
     | 
    
         
            +
                  c
         
     | 
| 
      
 64 
     | 
    
         
            +
                end
         
     | 
| 
      
 65 
     | 
    
         
            +
                def resume
         
     | 
| 
      
 66 
     | 
    
         
            +
                  dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 67 
     | 
    
         
            +
                  dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 68 
     | 
    
         
            +
                  dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 69 
     | 
    
         
            +
                  dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 70 
     | 
    
         
            +
                  staged = {
         
     | 
| 
      
 71 
     | 
    
         
            +
                    dm2 => create_chunk(dm2, ["b" * 100]).staged!,
         
     | 
| 
      
 72 
     | 
    
         
            +
                    dm3 => create_chunk(dm3, ["c" * 100]).staged!,
         
     | 
| 
      
 73 
     | 
    
         
            +
                  }
         
     | 
| 
      
 74 
     | 
    
         
            +
                  queued = [
         
     | 
| 
      
 75 
     | 
    
         
            +
                    create_chunk(dm0, ["0" * 100]).enqueued!,
         
     | 
| 
      
 76 
     | 
    
         
            +
                    create_chunk(dm1, ["a" * 100]).enqueued!,
         
     | 
| 
      
 77 
     | 
    
         
            +
                    create_chunk(dm1, ["a" * 3]).enqueued!,
         
     | 
| 
      
 78 
     | 
    
         
            +
                  ]
         
     | 
| 
      
 79 
     | 
    
         
            +
                  return staged, queued
         
     | 
| 
      
 80 
     | 
    
         
            +
                end
         
     | 
| 
      
 81 
     | 
    
         
            +
                def generate_chunk(metadata)
         
     | 
| 
      
 82 
     | 
    
         
            +
                  DummyMemoryChunk.new(metadata, compress: @compress)
         
     | 
| 
      
 83 
     | 
    
         
            +
                end
         
     | 
| 
      
 84 
     | 
    
         
            +
              end
         
     | 
| 
      
 85 
     | 
    
         
            +
            end
         
     | 
| 
      
 86 
     | 
    
         
            +
             
     | 
| 
      
 87 
     | 
    
         
            +
            class BufferTest < Test::Unit::TestCase
         
     | 
| 
      
 88 
     | 
    
         
            +
              def create_buffer(hash)
         
     | 
| 
      
 89 
     | 
    
         
            +
                buffer_conf = config_element('buffer', '', hash, [])
         
     | 
| 
      
 90 
     | 
    
         
            +
                owner = FluentPluginBufferTest::DummyOutputPlugin.new
         
     | 
| 
      
 91 
     | 
    
         
            +
                owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
         
     | 
| 
      
 92 
     | 
    
         
            +
                p = FluentPluginBufferTest::DummyPlugin.new
         
     | 
| 
      
 93 
     | 
    
         
            +
                p.owner = owner
         
     | 
| 
      
 94 
     | 
    
         
            +
                p.configure(buffer_conf)
         
     | 
| 
      
 95 
     | 
    
         
            +
                p
         
     | 
| 
      
 96 
     | 
    
         
            +
              end
         
     | 
| 
      
 97 
     | 
    
         
            +
             
     | 
| 
      
 98 
     | 
    
         
            +
  # Convenience constructor for Buffer::Metadata; all fields optional.
  def create_metadata(timekey=nil, tag=nil, variables=nil)
    Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
  end
         
     | 
| 
      
 101 
     | 
    
         
            +
             
     | 
| 
      
 102 
     | 
    
         
            +
              def create_chunk(metadata, data)
         
     | 
| 
      
 103 
     | 
    
         
            +
                c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
         
     | 
| 
      
 104 
     | 
    
         
            +
                c.append(data)
         
     | 
| 
      
 105 
     | 
    
         
            +
                c.commit
         
     | 
| 
      
 106 
     | 
    
         
            +
                c
         
     | 
| 
      
 107 
     | 
    
         
            +
              end
         
     | 
| 
      
 108 
     | 
    
         
            +
             
     | 
| 
      
 109 
     | 
    
         
            +
              def create_chunk_es(metadata, es)
         
     | 
| 
      
 110 
     | 
    
         
            +
                c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
         
     | 
| 
      
 111 
     | 
    
         
            +
                c.concat(es.to_msgpack_stream, es.size)
         
     | 
| 
      
 112 
     | 
    
         
            +
                c.commit
         
     | 
| 
      
 113 
     | 
    
         
            +
                c
         
     | 
| 
      
 114 
     | 
    
         
            +
              end
         
     | 
| 
      
 115 
     | 
    
         
            +
             
     | 
| 
      
 116 
     | 
    
         
            +
  # Reset the fluentd test harness before every test.
  setup do
    Fluent::Test.setup
  end
         
     | 
| 
      
 119 
     | 
    
         
            +
             
     | 
| 
      
 120 
     | 
    
         
            +
  # Exercises the abstract Fluent::Plugin::Buffer directly (no subclass):
  # defaults, limits, and the NotImplementedError contract.
  sub_test_case 'using base buffer class' do
    setup do
      # Configure a bare base-class buffer owned by a dummy output plugin.
      buffer_conf = config_element('buffer', '', {}, [])
      owner = FluentPluginBufferTest::DummyOutputPlugin.new
      owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
      p = Fluent::Plugin::Buffer.new
      p.owner = owner
      p.configure(buffer_conf)
      @p = p
    end

    test 'default persistency is false' do
      assert !@p.persistent?
    end

    test 'chunk bytes limit is 8MB, and total bytes limit is 512MB' do
      assert_equal 8*1024*1024, @p.chunk_limit_size
      assert_equal 512*1024*1024, @p.total_limit_size
    end

    test 'chunk records limit is ignored in default' do
      assert_nil @p.chunk_limit_records
    end

    test '#storable? checks total size of staged and enqueued(includes dequeued chunks) against total_limit_size' do
      assert_equal 512*1024*1024, @p.total_limit_size
      assert_equal 0, @p.stage_size
      assert_equal 0, @p.queue_size
      assert @p.storable?

      # One byte below the limit: still storable.
      @p.stage_size = 256 * 1024 * 1024
      @p.queue_size = 256 * 1024 * 1024 - 1
      assert @p.storable?

      # Exactly at the limit: no longer storable.
      @p.queue_size = 256 * 1024 * 1024
      assert !@p.storable?
    end

    test '#resume must be implemented by subclass' do
      assert_raise NotImplementedError do
        @p.resume
      end
    end

    test '#generate_chunk must be implemented by subclass' do
      assert_raise NotImplementedError do
        @p.generate_chunk(Object.new)
      end
    end
  end
         
     | 
| 
      
 170 
     | 
    
         
            +
             
     | 
| 
      
 171 
     | 
    
         
            +
              sub_test_case 'with default configuration and dummy implementation' do
         
     | 
| 
      
 172 
     | 
    
         
            +
    setup do
      # Started DummyPlugin buffer; its #resume seeds two staged chunks
      # (@dm2, @dm3) and three queued chunks (@dm0, @dm1 x2).
      @p = create_buffer({'queued_chunks_limit_size' => 100})
      @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      @dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
      @dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
      @dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
      @p.start
    end
         
     | 
| 
      
 180 
     | 
    
         
            +
             
     | 
| 
      
 181 
     | 
    
         
            +
    test '#start resumes buffer states and update queued numbers per metadata' do
      # A buffer that has not been started has empty stage/queue state.
      plugin = create_buffer({})

      assert_equal({}, plugin.stage)
      assert_equal([], plugin.queue)
      assert_equal({}, plugin.dequeued)
      assert_equal({}, plugin.queued_num)

      assert_equal 0, plugin.stage_size
      assert_equal 0, plugin.queue_size
      assert_equal [], plugin.timekeys

      # @p is started plugin

      # Staged chunks resumed from DummyPlugin#resume (100 bytes each).
      assert_equal [@dm2,@dm3], @p.stage.keys
      assert_equal "b" * 100, @p.stage[@dm2].read
      assert_equal "c" * 100, @p.stage[@dm3].read

      assert_equal 200, @p.stage_size

      # Queued chunks: 100 + 100 + 3 bytes.
      assert_equal 3, @p.queue.size
      assert_equal "0" * 100, @p.queue[0].read
      assert_equal "a" * 100, @p.queue[1].read
      assert_equal "a" * 3, @p.queue[2].read

      assert_equal 203, @p.queue_size

      # staged, queued
      assert_equal 1, @p.queued_num[@dm0]
      assert_equal 2, @p.queued_num[@dm1]
    end
         
     | 
| 
      
 212 
     | 
    
         
            +
             
     | 
| 
      
 213 
     | 
    
         
            +
    test '#close closes all chunks in dequeued, enqueued and staged' do
      # Add a dequeued chunk so all three collections are populated.
      dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
      cx = create_chunk(dmx, ["x" * 1024])
      @p.dequeued[cx.unique_id] = cx

      staged_chunks = @p.stage.values.dup
      queued_chunks = @p.queue.dup

      @p.close

      # DummyMemoryChunk#close sets this flag.
      assert cx.closed
      assert{ staged_chunks.all?{|c| c.closed } }
      assert{ queued_chunks.all?{|c| c.closed } }
    end

    test '#terminate initializes all internal states' do
      dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
      cx = create_chunk(dmx, ["x" * 1024])
      @p.dequeued[cx.unique_id] = cx

      @p.close

      @p.terminate

      # After terminate, every internal collection and metric is nil-ed out.
      assert_nil @p.stage
      assert_nil @p.queue
      assert_nil @p.dequeued
      assert_nil @p.queued_num
      assert_nil @p.stage_length_metrics
      assert_nil @p.stage_size_metrics
      assert_nil @p.queue_length_metrics
      assert_nil @p.queue_size_metrics
      assert_nil @p.available_buffer_space_ratios_metrics
      assert_nil @p.total_queued_size_metrics
      assert_nil @p.newest_timekey_metrics
      assert_nil @p.oldest_timekey_metrics
      assert_equal [], @p.timekeys
    end
         
     | 
| 
      
 251 
     | 
    
         
            +
             
     | 
| 
      
 252 
     | 
    
         
            +
    test '#queued_records returns total number of size in all chunks in queue' do
      assert_equal 3, @p.queue.size

      # Each resumed chunk was created from a one-element array => size 1.
      r0 = @p.queue[0].size
      assert_equal 1, r0
      r1 = @p.queue[1].size
      assert_equal 1, r1
      r2 = @p.queue[2].size
      assert_equal 1, r2

      assert_equal (r0+r1+r2), @p.queued_records
    end

    test '#queued? returns queue has any chunks or not without arguments' do
      assert @p.queued?

      # Empty the queue in place; queued? must turn false.
      @p.queue.reject!{|_c| true }
      assert !@p.queued?
    end

    test '#queued? returns queue has chunks for specified metadata with an argument' do
      # @dm0/@dm1 are queued by #resume; @dm2 is only staged.
      assert @p.queued?(@dm0)
      assert @p.queued?(@dm1)
      assert !@p.queued?(@dm2)
    end
         
     | 
| 
      
 277 
     | 
    
         
            +
             
     | 
| 
      
 278 
     | 
    
         
            +
                test '#enqueue_chunk enqueues a chunk on stage with specified metadata' do
         
     | 
| 
      
 279 
     | 
    
         
            +
                  assert_equal 2, @p.stage.size
         
     | 
| 
      
 280 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 281 
     | 
    
         
            +
                  assert_equal 3, @p.queue.size
         
     | 
| 
      
 282 
     | 
    
         
            +
                  assert_nil @p.queued_num[@dm2]
         
     | 
| 
      
 283 
     | 
    
         
            +
             
     | 
| 
      
 284 
     | 
    
         
            +
                  assert_equal 200, @p.stage_size
         
     | 
| 
      
 285 
     | 
    
         
            +
                  assert_equal 203, @p.queue_size
         
     | 
| 
      
 286 
     | 
    
         
            +
             
     | 
| 
      
 287 
     | 
    
         
            +
                  @p.enqueue_chunk(@dm2)
         
     | 
| 
      
 288 
     | 
    
         
            +
             
     | 
| 
      
 289 
     | 
    
         
            +
                  assert_equal [@dm3], @p.stage.keys
         
     | 
| 
      
 290 
     | 
    
         
            +
                  assert_equal @dm2, @p.queue.last.metadata
         
     | 
| 
      
 291 
     | 
    
         
            +
                  assert_equal 1, @p.queued_num[@dm2]
         
     | 
| 
      
 292 
     | 
    
         
            +
                  assert_equal 100, @p.stage_size
         
     | 
| 
      
 293 
     | 
    
         
            +
                  assert_equal 303, @p.queue_size
         
     | 
| 
      
 294 
     | 
    
         
            +
                end
         
     | 
| 
      
 295 
     | 
    
         
            +
             
     | 
| 
      
 296 
     | 
    
         
            +
                test '#enqueue_chunk ignores empty chunks' do
         
     | 
| 
      
 297 
     | 
    
         
            +
                  assert_equal 3, @p.queue.size
         
     | 
| 
      
 298 
     | 
    
         
            +
             
     | 
| 
      
 299 
     | 
    
         
            +
                  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 300 
     | 
    
         
            +
                  c = create_chunk(m, [''])
         
     | 
| 
      
 301 
     | 
    
         
            +
                  @p.stage[m] = c
         
     | 
| 
      
 302 
     | 
    
         
            +
                  assert @p.stage[m].empty?
         
     | 
| 
      
 303 
     | 
    
         
            +
                  assert !c.closed
         
     | 
| 
      
 304 
     | 
    
         
            +
             
     | 
| 
      
 305 
     | 
    
         
            +
                  @p.enqueue_chunk(m)
         
     | 
| 
      
 306 
     | 
    
         
            +
             
     | 
| 
      
 307 
     | 
    
         
            +
                  assert_nil @p.stage[m]
         
     | 
| 
      
 308 
     | 
    
         
            +
                  assert_equal 3, @p.queue.size
         
     | 
| 
      
 309 
     | 
    
         
            +
                  assert_nil @p.queued_num[m]
         
     | 
| 
      
 310 
     | 
    
         
            +
                  assert c.closed
         
     | 
| 
      
 311 
     | 
    
         
            +
                end
         
     | 
| 
      
 312 
     | 
    
         
            +
             
     | 
| 
      
 313 
     | 
    
         
            +
                test '#enqueue_chunk calls #enqueued! if chunk responds to it' do
         
     | 
| 
      
 314 
     | 
    
         
            +
                  assert_equal 3, @p.queue.size
         
     | 
| 
      
 315 
     | 
    
         
            +
                  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 316 
     | 
    
         
            +
                  c = create_chunk(m, ['c' * 256])
         
     | 
| 
      
 317 
     | 
    
         
            +
                  callback_called = false
         
     | 
| 
      
 318 
     | 
    
         
            +
                  (class << c; self; end).module_eval do
         
     | 
| 
      
 319 
     | 
    
         
            +
                    define_method(:enqueued!){ callback_called = true }
         
     | 
| 
      
 320 
     | 
    
         
            +
                  end
         
     | 
| 
      
 321 
     | 
    
         
            +
             
     | 
| 
      
 322 
     | 
    
         
            +
                  @p.stage[m] = c
         
     | 
| 
      
 323 
     | 
    
         
            +
                  @p.enqueue_chunk(m)
         
     | 
| 
      
 324 
     | 
    
         
            +
             
     | 
| 
      
 325 
     | 
    
         
            +
                  assert_equal c, @p.queue.last
         
     | 
| 
      
 326 
     | 
    
         
            +
                  assert callback_called
         
     | 
| 
      
 327 
     | 
    
         
            +
                end
         
     | 
| 
      
 328 
     | 
    
         
            +
             
     | 
| 
      
 329 
     | 
    
         
            +
    # #enqueue_all with a block moves only those staged chunks for which the
    # block returns truthy into the queue, preserving staging order.
    test '#enqueue_all enqueues chunks on stage which given block returns true with' do
      m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
      c1 = create_chunk(m1, ['c' * 256])
      @p.stage[m1] = c1
      m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
      c2 = create_chunk(m2, ['c' * 256])
      @p.stage[m2] = c2

      # Initial state: chunks staged by setup plus the two added above.
      assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)

      # Enqueue only chunks whose timekey is before 16:41 — everything but m2.
      @p.enqueue_all{ |m, c| m.timekey < Time.parse('2016-04-11 16:41:00 +0000').to_i }

      assert_equal [m2], @p.stage.keys
      assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1], @p.queue.map(&:metadata)
    end
         
     | 
| 
      
 345 
     | 
    
         
            +
             
     | 
| 
      
 346 
     | 
    
         
            +
    # Without a block, #enqueue_all drains the stage completely: every staged
    # chunk is appended to the queue in staging order.
    test '#enqueue_all enqueues all chunks on stage without block' do
      m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
      c1 = create_chunk(m1, ['c' * 256])
      @p.stage[m1] = c1
      m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
      c2 = create_chunk(m2, ['c' * 256])
      @p.stage[m2] = c2

      assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)

      @p.enqueue_all

      # Stage is empty; queue holds old entries followed by the staged ones.
      assert_equal [], @p.stage.keys
      assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1,m2], @p.queue.map(&:metadata)
    end
         
     | 
| 
      
 362 
     | 
    
         
            +
             
     | 
| 
      
 363 
     | 
    
         
            +
    # #dequeue_chunk pops chunks from the head of the queue one at a time,
    # tracking each dequeued chunk in @p.dequeued by its unique_id, and
    # returns nil once the queue is exhausted.
    test '#dequeue_chunk dequeues a chunk from queue if a chunk exists' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)

      m1 = @p.dequeue_chunk
      assert_equal @dm0, m1.metadata
      assert_equal @dm0, @p.dequeued[m1.unique_id].metadata

      m2 = @p.dequeue_chunk
      assert_equal @dm1, m2.metadata
      assert_equal @dm1, @p.dequeued[m2.unique_id].metadata

      m3 = @p.dequeue_chunk
      assert_equal @dm1, m3.metadata
      assert_equal @dm1, @p.dequeued[m3.unique_id].metadata

      # All three queued chunks are taken; the next call yields nothing.
      m4 = @p.dequeue_chunk
      assert_nil m4
    end
         
     | 
| 
      
 382 
     | 
    
         
            +
             
     | 
| 
      
 383 
     | 
    
         
            +
    # A taken-back chunk is returned to the HEAD of the queue (so it will be
    # retried first) and removed from the dequeued map; the call returns true.
    test '#takeback_chunk resumes a chunk from dequeued to queued at the head of queue, and returns true' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)

      m1 = @p.dequeue_chunk
      assert_equal @dm0, m1.metadata
      assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({m1.unique_id => m1}, @p.dequeued)

      assert @p.takeback_chunk(m1.unique_id)

      # Back at the head of the queue, no longer tracked as dequeued.
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
    end
         
     | 
| 
      
 398 
     | 
    
         
            +
             
     | 
| 
      
 399 
     | 
    
         
            +
    # #purge_chunk removes the identified chunk from the dequeued map and
    # invokes its purge hook (observed here via m1.purged); chunks still in
    # the queue are untouched.
    test '#purge_chunk removes a chunk specified by argument id from dequeued chunks' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)

      m0 = @p.dequeue_chunk
      m1 = @p.dequeue_chunk

      # Return m0 to the queue head; only m1 stays dequeued.
      assert @p.takeback_chunk(m0.unique_id)

      assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
      assert_equal({m1.unique_id => m1}, @p.dequeued)

      assert !m1.purged

      @p.purge_chunk(m1.unique_id)
      assert m1.purged

      # Queue unchanged; dequeued map is empty again.
      assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
    end
         
     | 
| 
      
 419 
     | 
    
         
            +
             
     | 
| 
      
 420 
     | 
    
         
            +
    # Purging a dequeued chunk also drops its metadata from the buffer when
    # no other chunk with that metadata remains on stage or in the queue.
    test '#purge_chunk removes an argument metadata if no chunks exist on stage or in queue' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)

      m0 = @p.dequeue_chunk

      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({m0.unique_id => m0}, @p.dequeued)

      assert !m0.purged

      @p.purge_chunk(m0.unique_id)
      assert m0.purged

      # m0 (the only @dm0 chunk) is gone; nothing remains dequeued.
      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
    end
         
     | 
| 
      
 437 
     | 
    
         
            +
             
     | 
| 
      
 438 
     | 
    
         
            +
    # Once a chunk has been purged its id is unknown to the buffer, so a
    # subsequent #takeback_chunk must return false and leave state untouched.
    test '#takeback_chunk returns false if specified chunk_id is already purged' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)

      m0 = @p.dequeue_chunk

      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({m0.unique_id => m0}, @p.dequeued)

      assert !m0.purged

      @p.purge_chunk(m0.unique_id)
      assert m0.purged

      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)

      # Taking back an already-purged id fails...
      assert !@p.takeback_chunk(m0.unique_id)

      # ...and modifies neither the queue nor the dequeued map.
      assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal({}, @p.dequeued)
    end
         
     | 
| 
      
 460 
     | 
    
         
            +
             
     | 
| 
      
 461 
     | 
    
         
            +
    # #clear_queue! purges every queued chunk and resets queue_size to zero,
    # while staged chunks are left intact.
    test '#clear_queue! removes all chunks in queue, but leaves staged chunks' do
      # Keep references to the queued chunks so we can verify they get purged.
      qchunks = @p.queue.dup

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal 2, @p.stage.size
      assert_equal({}, @p.dequeued)

      @p.clear_queue!

      assert_equal [], @p.queue
      assert_equal 0, @p.queue_size
      assert_equal 2, @p.stage.size
      assert_equal({}, @p.dequeued)

      # Every previously-queued chunk received its purge hook.
      assert{ qchunks.all?{ |c| c.purged } }
    end
         
     | 
| 
      
 477 
     | 
    
         
            +
             
     | 
| 
      
 478 
     | 
    
         
            +
                test '#write returns immediately if argument data is empty array' do
         
     | 
| 
      
 479 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 480 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 481 
     | 
    
         
            +
             
     | 
| 
      
 482 
     | 
    
         
            +
                  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 483 
     | 
    
         
            +
             
     | 
| 
      
 484 
     | 
    
         
            +
                  @p.write({m => []})
         
     | 
| 
      
 485 
     | 
    
         
            +
             
     | 
| 
      
 486 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 487 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 488 
     | 
    
         
            +
                end
         
     | 
| 
      
 489 
     | 
    
         
            +
             
     | 
| 
      
 490 
     | 
    
         
            +
                test '#write returns immediately if argument data is empty event stream' do
         
     | 
| 
      
 491 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 492 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 493 
     | 
    
         
            +
             
     | 
| 
      
 494 
     | 
    
         
            +
                  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 495 
     | 
    
         
            +
             
     | 
| 
      
 496 
     | 
    
         
            +
                  @p.write({m => Fluent::ArrayEventStream.new([])})
         
     | 
| 
      
 497 
     | 
    
         
            +
             
     | 
| 
      
 498 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 499 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 500 
     | 
    
         
            +
                end
         
     | 
| 
      
 501 
     | 
    
         
            +
             
     | 
| 
      
 502 
     | 
    
         
            +
                test '#write raises BufferOverflowError if buffer is not storable' do
         
     | 
| 
      
 503 
     | 
    
         
            +
                  @p.stage_size = 256 * 1024 * 1024
         
     | 
| 
      
 504 
     | 
    
         
            +
                  @p.queue_size = 256 * 1024 * 1024
         
     | 
| 
      
 505 
     | 
    
         
            +
             
     | 
| 
      
 506 
     | 
    
         
            +
                  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 507 
     | 
    
         
            +
             
     | 
| 
      
 508 
     | 
    
         
            +
                  assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
         
     | 
| 
      
 509 
     | 
    
         
            +
                    @p.write({m => ["x" * 256]})
         
     | 
| 
      
 510 
     | 
    
         
            +
                  end
         
     | 
| 
      
 511 
     | 
    
         
            +
                end
         
     | 
| 
      
 512 
     | 
    
         
            +
             
     | 
| 
      
 513 
     | 
    
         
            +
    # Writing with a metadata that already has a staged chunk appends to that
    # chunk (one append per #write call) and grows stage_size by exactly the
    # number of bytes written.
    test '#write stores data into an existing chunk with metadata specified' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      dm3data = @p.stage[@dm3].read.dup
      prev_stage_size = @p.stage_size

      assert_equal 1, @p.stage[@dm3].append_count

      @p.write({@dm3 => ["x" * 256, "y" * 256, "z" * 256]})

      # One additional append; content and stage_size grew by 3 * 256 = 768 bytes.
      assert_equal 2, @p.stage[@dm3].append_count
      assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
      assert_equal (prev_stage_size + 768), @p.stage_size

      # Queue and stage layout are otherwise unchanged.
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys
    end
         
     | 
| 
      
 531 
     | 
    
         
            +
             
     | 
| 
      
 532 
     | 
    
         
            +
    # Writing with a metadata that has no chunk yet creates a fresh staged
    # chunk; the new timekey becomes visible via #timekeys only after
    # #update_timekeys is called.
    test '#write creates new chunk and store data into it if there are no chunks for specified metadata' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
      assert !@p.timekeys.include?(timekey)

      prev_stage_size = @p.stage_size

      m = @p.metadata(timekey: timekey)

      @p.write({m => ["x" * 256, "y" * 256, "z" * 256]})

      # New chunk: single append, full content, stage_size grew by 768 bytes.
      assert_equal 1, @p.stage[m].append_count
      assert_equal ("x" * 256 + "y" * 256 + "z" * 256), @p.stage[m].read
      assert_equal (prev_stage_size + 768), @p.stage_size

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      @p.update_timekeys

      # Only now does the buffer report the new timekey.
      assert @p.timekeys.include?(timekey)
    end
         
     | 
| 
      
 556 
     | 
    
         
            +
             
     | 
| 
      
 557 
     | 
    
         
            +
    # When an append would push a staged chunk past
    # chunk_limit_size * chunk_full_threshold, #write rolls the append back,
    # enqueues the (full) chunk, and stores the data into a brand-new staged
    # chunk for the same metadata.
    test '#write tries to enqueue and store data into a new chunk if existing chunk is full' do
      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
      assert_equal 0.95, @p.chunk_full_threshold

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      row = "x" * 1024 * 1024
      small_row = "x" * 1024 * 512
      # 7.5 MiB in one append: just below the 95% threshold of the 8 MiB limit.
      @p.write({m => [row] * 7 + [small_row]})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys
      assert_equal 1, @p.stage[m].append_count

      # One more 1 MiB row overflows the threshold: the previous chunk is
      # rolled back and enqueued; the row lands in a new staged chunk.
      @p.write({m => [row]})

      assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys
      assert_equal 1, @p.stage[m].append_count
      assert_equal 1024*1024, @p.stage[m].bytesize
      assert_equal 3, @p.queue.last.append_count # 1 -> write (2) -> write_step_by_step (3)
      assert @p.queue.last.rollbacked
    end
         
     | 
| 
      
 583 
     | 
    
         
            +
             
     | 
| 
      
 584 
     | 
    
         
            +
    # If chunk#commit raises inside #write, the staged chunk must be rolled
    # back so that the failed append leaves no partial data behind.
    test '#write rollbacks if commit raises errors' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      row = "x" * 1024
      @p.write({m => [row] * 8})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      target_chunk = @p.stage[m]

      assert_equal 1, target_chunk.append_count
      assert !target_chunk.rollbacked

      # Force the next commit on this specific chunk to fail, via its
      # singleton class so other chunks are unaffected.
      (class << target_chunk; self; end).module_eval do
        define_method(:commit){ raise "yay" }
      end

      assert_raise RuntimeError.new("yay") do
        @p.write({m => [row]})
      end

      # Buffer layout is unchanged by the failed write.
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      # The append happened (count is 2) but was rolled back: the chunk's
      # content is still only the original 8 rows.
      assert_equal 2, target_chunk.append_count
      assert target_chunk.rollbacked
      assert_equal row * 8, target_chunk.read
    end
         
     | 
| 
      
 616 
     | 
    
         
            +
             
     | 
| 
      
 617 
     | 
    
         
            +
                test '#write w/ format raises BufferOverflowError if buffer is not storable' do
         
     | 
| 
      
 618 
     | 
    
         
            +
                  @p.stage_size = 256 * 1024 * 1024
         
     | 
| 
      
 619 
     | 
    
         
            +
                  @p.queue_size = 256 * 1024 * 1024
         
     | 
| 
      
 620 
     | 
    
         
            +
             
     | 
| 
      
 621 
     | 
    
         
            +
                  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 622 
     | 
    
         
            +
             
     | 
| 
      
 623 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "xxxxxxxxxxxxxx"} ] ])
         
     | 
| 
      
 624 
     | 
    
         
            +
             
     | 
| 
      
 625 
     | 
    
         
            +
                  assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
         
     | 
| 
      
 626 
     | 
    
         
            +
                    @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
         
     | 
| 
      
 627 
     | 
    
         
            +
                  end
         
     | 
| 
      
 628 
     | 
    
         
            +
                end
         
     | 
| 
      
 629 
     | 
    
         
            +
             
     | 
| 
      
 630 
     | 
    
         
            +
                test '#write w/ format stores data into an existing chunk with metadata specified' do
         
     | 
| 
      
 631 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 632 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 633 
     | 
    
         
            +
             
     | 
| 
      
 634 
     | 
    
         
            +
                  dm3data = @p.stage[@dm3].read.dup
         
     | 
| 
      
 635 
     | 
    
         
            +
                  prev_stage_size = @p.stage_size
         
     | 
| 
      
 636 
     | 
    
         
            +
             
     | 
| 
      
 637 
     | 
    
         
            +
                  assert_equal 1, @p.stage[@dm3].append_count
         
     | 
| 
      
 638 
     | 
    
         
            +
             
     | 
| 
      
 639 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new(
         
     | 
| 
      
 640 
     | 
    
         
            +
                    [
         
     | 
| 
      
 641 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 128}],
         
     | 
| 
      
 642 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "y" * 128}],
         
     | 
| 
      
 643 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "z" * 128}],
         
     | 
| 
      
 644 
     | 
    
         
            +
                    ]
         
     | 
| 
      
 645 
     | 
    
         
            +
                  )
         
     | 
| 
      
 646 
     | 
    
         
            +
             
     | 
| 
      
 647 
     | 
    
         
            +
                  @p.write({@dm3 => es}, format: ->(e){e.to_msgpack_stream})
         
     | 
| 
      
 648 
     | 
    
         
            +
             
     | 
| 
      
 649 
     | 
    
         
            +
                  assert_equal 2, @p.stage[@dm3].append_count
         
     | 
| 
      
 650 
     | 
    
         
            +
                  assert_equal (dm3data + es.to_msgpack_stream), @p.stage[@dm3].read
         
     | 
| 
      
 651 
     | 
    
         
            +
                  assert_equal (prev_stage_size + es.to_msgpack_stream.bytesize), @p.stage_size
         
     | 
| 
      
 652 
     | 
    
         
            +
             
     | 
| 
      
 653 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 654 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 655 
     | 
    
         
            +
                end
         
     | 
| 
      
 656 
     | 
    
         
            +
             
     | 
| 
      
 657 
     | 
    
         
            +
                test '#write w/ format creates new chunk and store data into it if there are not chunks for specified metadata' do
         
     | 
| 
      
 658 
     | 
    
         
            +
                  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
         
     | 
| 
      
 659 
     | 
    
         
            +
             
     | 
| 
      
 660 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 661 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 662 
     | 
    
         
            +
             
     | 
| 
      
 663 
     | 
    
         
            +
                  timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
         
     | 
| 
      
 664 
     | 
    
         
            +
                  assert !@p.timekeys.include?(timekey)
         
     | 
| 
      
 665 
     | 
    
         
            +
             
     | 
| 
      
 666 
     | 
    
         
            +
                  m = @p.metadata(timekey: timekey)
         
     | 
| 
      
 667 
     | 
    
         
            +
             
     | 
| 
      
 668 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new(
         
     | 
| 
      
 669 
     | 
    
         
            +
                    [
         
     | 
| 
      
 670 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 671 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 672 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 673 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 674 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 675 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 676 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 677 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
         
     | 
| 
      
 678 
     | 
    
         
            +
                    ]
         
     | 
| 
      
 679 
     | 
    
         
            +
                  )
         
     | 
| 
      
 680 
     | 
    
         
            +
                  @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
         
     | 
| 
      
 681 
     | 
    
         
            +
             
     | 
| 
      
 682 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 683 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3,m], @p.stage.keys
         
     | 
| 
      
 684 
     | 
    
         
            +
                  assert_equal 1, @p.stage[m].append_count
         
     | 
| 
      
 685 
     | 
    
         
            +
             
     | 
| 
      
 686 
     | 
    
         
            +
                  @p.update_timekeys
         
     | 
| 
      
 687 
     | 
    
         
            +
             
     | 
| 
      
 688 
     | 
    
         
            +
                  assert @p.timekeys.include?(timekey)
         
     | 
| 
      
 689 
     | 
    
         
            +
                end
         
     | 
| 
      
 690 
     | 
    
         
            +
             
     | 
| 
      
 691 
     | 
    
         
            +
                test '#write w/ format tries to enqueue and store data into a new chunk if existing chunk does not have enough space' do
         
     | 
| 
      
 692 
     | 
    
         
            +
                  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
         
     | 
| 
      
 693 
     | 
    
         
            +
             
     | 
| 
      
 694 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 695 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 696 
     | 
    
         
            +
             
     | 
| 
      
 697 
     | 
    
         
            +
                  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 698 
     | 
    
         
            +
             
     | 
| 
      
 699 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new(
         
     | 
| 
      
 700 
     | 
    
         
            +
                    [
         
     | 
| 
      
 701 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 702 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 703 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 704 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 705 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 706 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 707 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
         
     | 
| 
      
 708 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
         
     | 
| 
      
 709 
     | 
    
         
            +
                    ]
         
     | 
| 
      
 710 
     | 
    
         
            +
                  )
         
     | 
| 
      
 711 
     | 
    
         
            +
                  @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
         
     | 
| 
      
 712 
     | 
    
         
            +
             
     | 
| 
      
 713 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 714 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3,m], @p.stage.keys
         
     | 
| 
      
 715 
     | 
    
         
            +
                  assert_equal 1, @p.stage[m].append_count
         
     | 
| 
      
 716 
     | 
    
         
            +
             
     | 
| 
      
 717 
     | 
    
         
            +
                  es2 = Fluent::OneEventStream.new(event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 1024})
         
     | 
| 
      
 718 
     | 
    
         
            +
                  @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
         
     | 
| 
      
 719 
     | 
    
         
            +
             
     | 
| 
      
 720 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
         
     | 
| 
      
 721 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3,m], @p.stage.keys
         
     | 
| 
      
 722 
     | 
    
         
            +
                  assert_equal 1, @p.stage[m].append_count
         
     | 
| 
      
 723 
     | 
    
         
            +
                  assert_equal es2.to_msgpack_stream.bytesize, @p.stage[m].bytesize
         
     | 
| 
      
 724 
     | 
    
         
            +
                  assert_equal 2, @p.queue.last.append_count # 1 -> write (2) -> rollback&enqueue
         
     | 
| 
      
 725 
     | 
    
         
            +
                  assert @p.queue.last.rollbacked
         
     | 
| 
      
 726 
     | 
    
         
            +
                end
         
     | 
| 
      
 727 
     | 
    
         
            +
             
     | 
| 
      
 728 
     | 
    
         
            +
                test '#write w/ format enqueues chunk if it is already full after adding data' do
         
     | 
| 
      
 729 
     | 
    
         
            +
                  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
         
     | 
| 
      
 730 
     | 
    
         
            +
             
     | 
| 
      
 731 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 732 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 733 
     | 
    
         
            +
             
     | 
| 
      
 734 
     | 
    
         
            +
                  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 735 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new(
         
     | 
| 
      
 736 
     | 
    
         
            +
                    [
         
     | 
| 
      
 737 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}], # 1024 * 1024 bytes as msgpack stream
         
     | 
| 
      
 738 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
         
     | 
| 
      
 739 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
         
     | 
| 
      
 740 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
         
     | 
| 
      
 741 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
         
     | 
| 
      
 742 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
         
     | 
| 
      
 743 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
         
     | 
| 
      
 744 
     | 
    
         
            +
                      [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
         
     | 
| 
      
 745 
     | 
    
         
            +
                    ]
         
     | 
| 
      
 746 
     | 
    
         
            +
                  )
         
     | 
| 
      
 747 
     | 
    
         
            +
                  @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
         
     | 
| 
      
 748 
     | 
    
         
            +
             
     | 
| 
      
 749 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
         
     | 
| 
      
 750 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 751 
     | 
    
         
            +
                  assert_equal 1, @p.queue.last.append_count
         
     | 
| 
      
 752 
     | 
    
         
            +
                end
         
     | 
| 
      
 753 
     | 
    
         
            +
             
     | 
| 
      
 754 
     | 
    
         
            +
    # If Chunk#commit raises during #write, the buffer must roll the chunk back
    # so its content stays exactly what was committed before the failing write.
    test '#write w/ format rollbacks if commit raises errors' do
      # Precondition from setup: 3 queued chunks, 2 staged chunks.
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

      # ~7.5MiB of events: fits in one fresh staged chunk (limit is 8MiB).
      es = Fluent::ArrayEventStream.new(
        [
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
          [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
        ]
      )
      @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      target_chunk = @p.stage[m]

      assert_equal 1, target_chunk.append_count
      assert !target_chunk.rollbacked

      # Patch #commit on this one chunk's singleton class so the next commit
      # attempt inside #write raises.
      (class << target_chunk; self; end).module_eval do
        define_method(:commit){ raise "yay" }
      end

      es2 = Fluent::ArrayEventStream.new(
        [
          [event_time('2016-04-11 16:40:04 +0000'), {"message" => "z" * 1024 * 128}],
        ]
      )
      assert_raise RuntimeError.new("yay") do
        @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
      end

      # Buffer layout is unchanged by the failed write.
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3,m], @p.stage.keys

      # The second append happened (count is 2) but was rolled back, so the
      # chunk content is still only the first stream.
      assert_equal 2, target_chunk.append_count
      assert target_chunk.rollbacked
      assert_equal es.to_msgpack_stream, target_chunk.read
    end
         
     | 
| 
      
 802 
     | 
    
         
            +
             
     | 
| 
      
 803 
     | 
    
         
            +
                test '#write writes many metadata and data pairs at once' do
         
     | 
| 
      
 804 
     | 
    
         
            +
                  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 805 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 806 
     | 
    
         
            +
             
     | 
| 
      
 807 
     | 
    
         
            +
                  row = "x" * 1024
         
     | 
| 
      
 808 
     | 
    
         
            +
                  @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })
         
     | 
| 
      
 809 
     | 
    
         
            +
             
     | 
| 
      
 810 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
         
     | 
| 
      
 811 
     | 
    
         
            +
                end
         
     | 
| 
      
 812 
     | 
    
         
            +
             
     | 
| 
      
 813 
     | 
    
         
            +
    # A multi-pair #write must be all-or-nothing: if appending to any one chunk
    # fails, every chunk touched by the call is rolled back and none committed.
    test '#write does not commit on any chunks if any append operation on chunk fails' do
      assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
      assert_equal [@dm2,@dm3], @p.stage.keys

      row = "x" * 1024
      # First write succeeds and stages chunks for @dm0 and @dm1 as well.
      @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })

      assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys

      # Snapshot sizes before the failing write so we can assert nothing stuck.
      dm2_size = @p.stage[@dm2].size
      assert !@p.stage[@dm2].rollbacked
      dm3_size = @p.stage[@dm3].size
      assert !@p.stage[@dm3].rollbacked

      assert{ @p.stage[@dm0].size == 3 }
      assert !@p.stage[@dm0].rollbacked
      assert{ @p.stage[@dm1].size == 2 }
      assert !@p.stage[@dm1].rollbacked

      # Make the chunk for the last metadata (in sorted order) raise on append;
      # sorting makes the failing chunk the last one #write touches, so the
      # other three have already been appended to when the error fires.
      meta_list = [@dm0, @dm1, @dm2, @dm3].sort
      @p.stage[meta_list.last].failing = true

      assert_raise(FluentPluginBufferTest::DummyMemoryChunkError) do
        @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] })
      end

      # Every chunk kept its pre-write size and was rolled back — no partial commit.
      assert{ @p.stage[@dm2].size == dm2_size }
      assert @p.stage[@dm2].rollbacked
      assert{ @p.stage[@dm3].size == dm3_size }
      assert @p.stage[@dm3].rollbacked

      assert{ @p.stage[@dm0].size == 3 }
      assert @p.stage[@dm0].rollbacked
      assert{ @p.stage[@dm1].size == 2 }
      assert @p.stage[@dm1].rollbacked
    end
         
     | 
| 
      
 849 
     | 
    
         
            +
             
     | 
| 
      
 850 
     | 
    
         
            +
                test '#compress returns :text' do
         
     | 
| 
      
 851 
     | 
    
         
            +
                  assert_equal :text, @p.compress
         
     | 
| 
      
 852 
     | 
    
         
            +
                end
         
     | 
| 
      
 853 
     | 
    
         
            +
             
     | 
| 
      
 854 
     | 
    
         
            +
                # https://github.com/fluent/fluentd/issues/3089
         
     | 
| 
      
 855 
     | 
    
         
            +
    # Regression test for https://github.com/fluent/fluentd/issues/3089:
    # when Buffer#write retries via `ShouldRetry`, chunks built so far are
    # purged, and a purged (closed) chunk must never be committed or rolled
    # back afterwards.
    test "closed chunk should not be committed" do
      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
      assert_equal 0.95, @p.chunk_full_threshold

      purge_count = 0

      # Proxy every chunk the buffer generates: count purges, and assert
      # commit/rollback are only ever invoked on chunks that are still open.
      stub.proxy(@p).generate_chunk(anything) do |chunk|
        stub.proxy(chunk).purge do |result|
          purge_count += 1
          result
        end
        stub.proxy(chunk).commit do |result|
          assert_false(chunk.closed?)
          result
        end
        stub.proxy(chunk).rollback do |result|
          assert_false(chunk.closed?)
          result
        end
        chunk
      end

      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
      small_row = "x" * 1024 * 400
      big_row = "x" * 1024 * 1024 * 8 # just `chunk_size_limit`, it doesn't cause BufferChunkOverflowError.

      # Write 42 events in 1 event stream, last one is for triggering `ShouldRetry`
      @p.write({m => [small_row] * 40 + [big_row] + ["x"]})

      # The above event stream will be split twice by `Buffer#write_step_by_step`
      #
      # 1. `write_once`: 42 [events] * 1 [stream]
      # 2. `write_step_by_step`: 4 [events] * 10 [streams] + 2 [events] * 1 [stream]
      # 3. `write_step_by_step` (by `ShouldRetry`): 1 [event] * 42 [streams]
      #
      # The problematic data is built in the 2nd stage.
      # In the 2nd stage, 5 streams are packed in a chunk.
      # ((1024 * 400) [bytes] * 4 [events] * 5 [streams] = 8192000 [bytes] < `chunk_limit_size` (8MB)).
      # So 3 chunks are used to store all data.
      # The 1st chunk is already staged by `write_once`.
      # The 2nd & 3rd chunks are newly created as unstaged.
      # The 3rd chunk is purged before `ShouldRetry`, it's no problem:
      #   https://github.com/fluent/fluentd/blob/7e9eba736ff40ad985341be800ddc46558be75f2/lib/fluent/plugin/buffer.rb#L850
      # The 2nd chunk is purged in `rescue ShouldRetry`:
      #   https://github.com/fluent/fluentd/blob/7e9eba736ff40ad985341be800ddc46558be75f2/lib/fluent/plugin/buffer.rb#L862
      # It causes the issue described in https://github.com/fluent/fluentd/issues/3089#issuecomment-1811839198

      assert_equal 2, purge_count
    end
         
     | 
| 
      
 904 
     | 
    
         
            +
              end
         
     | 
| 
      
 905 
     | 
    
         
            +
             
     | 
| 
      
 906 
     | 
    
         
            +
              sub_test_case 'standard format with configuration for test with lower chunk limit size' do
         
     | 
| 
      
 907 
     | 
    
         
            +
    setup do
      # Lowered chunk limit so that chunk splitting is easy to trigger in tests.
      @p = create_buffer({"chunk_limit_size" => 1_280_000})
      @format = ->(e){e.to_msgpack_stream}
      @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      # 1 record is 128bytes in msgpack stream
      @es0 = es0 = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:01 +0000'), {"message" => "x" * (128 - 22)}] ] * 5000)
      # Override #resume on this buffer instance only (via its singleton
      # class): start with one staged chunk (dm0 => es0) and an empty queue.
      (class << @p; self; end).module_eval do
        define_method(:resume) {
          staged = {
            dm0 => create_chunk_es(dm0, es0).staged!,
          }
          queued = []
          return staged, queued
        }
      end
      @p.start
    end
         
     | 
| 
      
 924 
     | 
    
         
            +
             
     | 
| 
      
 925 
     | 
    
         
            +
                test '#write appends event stream into staged chunk' do
         
     | 
| 
      
 926 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 927 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 928 
     | 
    
         
            +
             
     | 
| 
      
 929 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 930 
     | 
    
         
            +
             
     | 
| 
      
 931 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 1000)
         
     | 
| 
      
 932 
     | 
    
         
            +
                  @p.write({@dm0 => es}, format: @format)
         
     | 
| 
      
 933 
     | 
    
         
            +
             
     | 
| 
      
 934 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 935 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 936 
     | 
    
         
            +
             
     | 
| 
      
 937 
     | 
    
         
            +
                  assert_equal (@es0.to_msgpack_stream + es.to_msgpack_stream), @p.stage[@dm0].read
         
     | 
| 
      
 938 
     | 
    
         
            +
                end
         
     | 
| 
      
 939 
     | 
    
         
            +
             
     | 
| 
      
 940 
     | 
    
         
            +
                test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
         
     | 
| 
      
 941 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 942 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 943 
     | 
    
         
            +
             
     | 
| 
      
 944 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 945 
     | 
    
         
            +
             
     | 
| 
      
 946 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 8000)
         
     | 
| 
      
 947 
     | 
    
         
            +
                  @p.write({@dm0 => es}, format: @format)
         
     | 
| 
      
 948 
     | 
    
         
            +
             
     | 
| 
      
 949 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 950 
     | 
    
         
            +
                  assert_equal [@dm0], @p.queue.map(&:metadata)
         
     | 
| 
      
 951 
     | 
    
         
            +
             
     | 
| 
      
 952 
     | 
    
         
            +
                  assert_equal (es.to_msgpack_stream), @p.stage[@dm0].read
         
     | 
| 
      
 953 
     | 
    
         
            +
                end
         
     | 
| 
      
 954 
     | 
    
         
            +
             
     | 
| 
      
 955 
     | 
    
         
            +
    test '#write writes event stream into many chunks excluding staged chunk if event stream is larger than chunk limit size' do
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      assert_equal 1_280_000, @p.chunk_limit_size

      # 45000 records * 128 bytes is far beyond chunk_limit_size, so the
      # stream must be split across multiple chunks.
      es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 45000)
      @p.write({@dm0 => es}, format: @format)

      # metadata whose seq is 4 is created, but overwrite with original metadata(seq=0) for next use of this chunk https://github.com/fluent/fluentd/blob/9d113029d4550ce576d8825bfa9612aa3e55bff0/lib/fluent/plugin/buffer.rb#L357
      assert_equal [@dm0], @p.stage.keys
      assert_equal 5400, @p.stage[@dm0].size
      assert_equal [@dm0, @dm0, @dm0, @dm0, @dm0], @p.queue.map(&:metadata)
      assert_equal [5000, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
      # 9900 * 4 + 5400 == 45000
    end
         
     | 
| 
      
 971 
     | 
    
         
            +
             
     | 
| 
      
 972 
     | 
    
         
            +
                test '#dequeue_chunk succeeds when chunk is splited' do
         
     | 
| 
      
 973 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 974 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 975 
     | 
    
         
            +
             
     | 
| 
      
 976 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 977 
     | 
    
         
            +
             
     | 
| 
      
 978 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 45000)
         
     | 
| 
      
 979 
     | 
    
         
            +
                  @p.write({@dm0 => es}, format: @format)
         
     | 
| 
      
 980 
     | 
    
         
            +
                  @p.enqueue_all(true)
         
     | 
| 
      
 981 
     | 
    
         
            +
             
     | 
| 
      
 982 
     | 
    
         
            +
                  dequeued_chunks = 6.times.map { |e| @p.dequeue_chunk } # splits: 45000 / 100 => 450 * ...
         
     | 
| 
      
 983 
     | 
    
         
            +
                  assert_equal [5000, 9900, 9900, 9900, 9900, 5400], dequeued_chunks.map(&:size)
         
     | 
| 
      
 984 
     | 
    
         
            +
                  assert_equal [@dm0, @dm0, @dm0, @dm0, @dm0, @dm0], dequeued_chunks.map(&:metadata)
         
     | 
| 
      
 985 
     | 
    
         
            +
                end
         
     | 
| 
      
 986 
     | 
    
         
            +
             
     | 
| 
      
 987 
     | 
    
         
            +
                test '#write raises BufferChunkOverflowError if a record is biggar than chunk limit size' do
         
     | 
| 
      
 988 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 989 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 990 
     | 
    
         
            +
             
     | 
| 
      
 991 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 992 
     | 
    
         
            +
             
     | 
| 
      
 993 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}] ])
         
     | 
| 
      
 994 
     | 
    
         
            +
                  assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
         
     | 
| 
      
 995 
     | 
    
         
            +
                    @p.write({@dm0 => es}, format: @format)
         
     | 
| 
      
 996 
     | 
    
         
            +
                  end
         
     | 
| 
      
 997 
     | 
    
         
            +
                end
         
     | 
| 
      
 998 
     | 
    
         
            +
             
     | 
| 
      
 999 
     | 
    
         
            +
    # Each data case places the oversized record(s) at a different position in
    # the stream, to verify that the small records around them survive.
    data(
      first_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
                                                 [event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
                                                 [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
      intermediate_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
                                                        [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
                                                        [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
      last_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
                                                [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
                                                [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]]),
      multiple_chunks: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]])
    )
    test '#write exceeds chunk_limit_size, raise BufferChunkOverflowError, but not lost whole messages' do |(es)|
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)

      assert_equal 1_280_000, @p.chunk_limit_size

      # Collect indexes of the oversized records; each one must be reported
      # in the raised error message.
      nth = []
      es.entries.each_with_index do |entry, index|
        if entry.last["message"].size == @p.chunk_limit_size
          nth << index
        end
      end
      messages = []
      nth.each do |n|
        messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size (1280000)"
      end

      # The error message enumerates every oversized record, joined by ", ".
      assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages.join(", ")) do
        @p.write({@dm0 => es}, format: @format)
      end
      # message a and b are concatenated and staged
      staged_messages = Fluent::MessagePackFactory.msgpack_unpacker.feed_each(@p.stage[@dm0].chunk).collect do |record|
        record.last
      end
      assert_equal([2, [{"message" => "a"}, {"message" => "b"}]],
                   [@p.stage[@dm0].size, staged_messages])
      # only es0 message is queued
      assert_equal [@dm0], @p.queue.map(&:metadata)
      assert_equal [5000], @p.queue.map(&:size)
    end
         
     | 
| 
      
 1044 
     | 
    
         
            +
             
     | 
| 
      
 1045 
     | 
    
         
            +
    test "confirm that every message which is smaller than chunk threshold does not raise BufferChunkOverflowError" do
      assert_equal [@dm0], @p.stage.keys
      assert_equal [], @p.queue.map(&:metadata)
      timestamp = event_time('2016-04-11 16:00:02 +0000')
      es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "a" * 1_000_000}],
                                         [timestamp, {"message" => "b" * 1_000_000}],
                                         [timestamp, {"message" => "c" * 1_000_000}]])

      # https://github.com/fluent/fluentd/issues/1849
      # Even though 1_000_000 < 1_280_000 (chunk_limit_size), it raised BufferChunkOverflowError before.
      # It should not be raised and message a,b,c should be stored into 3 chunks.
      assert_nothing_raised do
        @p.write({@dm0 => es}, format: @format)
      end
      messages = []
      # pick up first letter to check whether chunk is queued in expected order
      3.times do |index|
        chunk = @p.queue[index]
        es = Fluent::MessagePackEventStream.new(chunk.chunk)
        es.ensure_unpacked!
        # NOTE: reaches into MessagePackEventStream internals to read the
        # decoded records directly.
        records = es.instance_eval{ @unpacked_records }
        records.each do |record|
          messages << record["message"][0]
        end
      end
      es = Fluent::MessagePackEventStream.new(@p.stage[@dm0].chunk)
      es.ensure_unpacked!
      staged_message = es.instance_eval{ @unpacked_records }.first["message"]
      # message a and b are queued, message c is staged
      assert_equal([
                     [@dm0],
                     "c" * 1_000_000,
                     [@dm0, @dm0, @dm0],
                     [5000, 1, 1],
                     [["x"] * 5000, "a", "b"].flatten
                   ],
                   [
                     @p.stage.keys,
                     staged_message,
                     @p.queue.map(&:metadata),
                     @p.queue.map(&:size),
                     messages
                   ])
    end
         
     | 
| 
      
 1089 
     | 
    
         
            +
              end
         
     | 
| 
      
 1090 
     | 
    
         
            +
             
     | 
| 
      
 1091 
     | 
    
         
            +
              sub_test_case 'custom format with configuration for test with lower chunk limit size' do
         
     | 
| 
      
 1092 
     | 
    
         
            +
    setup do
      # Lowered chunk limit so that chunk splitting is easy to trigger; this
      # sub_test_case writes pre-formatted rows (no format: callback).
      @p = create_buffer({"chunk_limit_size" => 1_280_000})
      @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      @row = "x" * 128
      @data0 = data0 = [@row] * 5000
      # Override #resume on this buffer instance only (via its singleton
      # class): start with one staged chunk (dm0 => data0) and an empty queue.
      (class << @p; self; end).module_eval do
        define_method(:resume) {
          staged = {
            dm0 => create_chunk(dm0, data0).staged!,
          }
          queued = []
          return staged, queued
        }
      end
      @p.start
    end
         
     | 
| 
      
 1108 
     | 
    
         
            +
             
     | 
| 
      
 1109 
     | 
    
         
            +
                test '#write appends event stream into staged chunk' do
         
     | 
| 
      
 1110 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 1111 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 1112 
     | 
    
         
            +
             
     | 
| 
      
 1113 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 1114 
     | 
    
         
            +
             
     | 
| 
      
 1115 
     | 
    
         
            +
                  data = [@row] * 1000
         
     | 
| 
      
 1116 
     | 
    
         
            +
                  @p.write({@dm0 => data})
         
     | 
| 
      
 1117 
     | 
    
         
            +
             
     | 
| 
      
 1118 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 1119 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 1120 
     | 
    
         
            +
             
     | 
| 
      
 1121 
     | 
    
         
            +
                  assert_equal (@row * 6000), @p.stage[@dm0].read
         
     | 
| 
      
 1122 
     | 
    
         
            +
                end
         
     | 
| 
      
 1123 
     | 
    
         
            +
             
     | 
| 
      
 1124 
     | 
    
         
            +
                test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
         
     | 
| 
      
 1125 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 1126 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 1127 
     | 
    
         
            +
             
     | 
| 
      
 1128 
     | 
    
         
            +
                  staged_chunk_object_id = @p.stage[@dm0].object_id
         
     | 
| 
      
 1129 
     | 
    
         
            +
             
     | 
| 
      
 1130 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 1131 
     | 
    
         
            +
             
     | 
| 
      
 1132 
     | 
    
         
            +
                  data = [@row] * 8000
         
     | 
| 
      
 1133 
     | 
    
         
            +
                  @p.write({@dm0 => data})
         
     | 
| 
      
 1134 
     | 
    
         
            +
             
     | 
| 
      
 1135 
     | 
    
         
            +
                  assert_equal [@dm0], @p.queue.map(&:metadata)
         
     | 
| 
      
 1136 
     | 
    
         
            +
                  assert_equal [staged_chunk_object_id], @p.queue.map(&:object_id)
         
     | 
| 
      
 1137 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 1138 
     | 
    
         
            +
             
     | 
| 
      
 1139 
     | 
    
         
            +
                  assert_equal [9800], @p.queue.map(&:size)
         
     | 
| 
      
 1140 
     | 
    
         
            +
                  assert_equal 3200, @p.stage[@dm0].size
         
     | 
| 
      
 1141 
     | 
    
         
            +
                  # 9800 + 3200 == 5000 + 8000
         
     | 
| 
      
 1142 
     | 
    
         
            +
                end
         
     | 
| 
      
 1143 
     | 
    
         
            +
             
     | 
| 
      
 1144 
     | 
    
         
            +
                test '#write writes event stream into many chunks including staging chunk if event stream is larger than chunk limit size' do
         
     | 
| 
      
 1145 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 1146 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 1147 
     | 
    
         
            +
             
     | 
| 
      
 1148 
     | 
    
         
            +
                  staged_chunk_object_id = @p.stage[@dm0].object_id
         
     | 
| 
      
 1149 
     | 
    
         
            +
             
     | 
| 
      
 1150 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 1151 
     | 
    
         
            +
             
     | 
| 
      
 1152 
     | 
    
         
            +
                  assert_equal 5000, @p.stage[@dm0].size
         
     | 
| 
      
 1153 
     | 
    
         
            +
             
     | 
| 
      
 1154 
     | 
    
         
            +
                  data = [@row] * 45000
         
     | 
| 
      
 1155 
     | 
    
         
            +
                  @p.write({@dm0 => data})
         
     | 
| 
      
 1156 
     | 
    
         
            +
             
     | 
| 
      
 1157 
     | 
    
         
            +
                  assert_equal staged_chunk_object_id, @p.queue.first.object_id
         
     | 
| 
      
 1158 
     | 
    
         
            +
             
     | 
| 
      
 1159 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 1160 
     | 
    
         
            +
                  assert_equal 900, @p.stage[@dm0].size
         
     | 
| 
      
 1161 
     | 
    
         
            +
                  assert_equal [@dm0, @dm0, @dm0, @dm0, @dm0], @p.queue.map(&:metadata)
         
     | 
| 
      
 1162 
     | 
    
         
            +
                  assert_equal [9500, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
         
     | 
| 
      
 1163 
     | 
    
         
            +
                  ##### 900 + 9500 + 9900 * 4 == 5000 + 45000
         
     | 
| 
      
 1164 
     | 
    
         
            +
                end
         
     | 
| 
      
 1165 
     | 
    
         
            +
             
     | 
| 
      
 1166 
     | 
    
         
            +
                test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
         
     | 
| 
      
 1167 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 1168 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 1169 
     | 
    
         
            +
             
     | 
| 
      
 1170 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 1171 
     | 
    
         
            +
             
     | 
| 
      
 1172 
     | 
    
         
            +
                  es = ["x" * 1_280_000 + "x" * 300]
         
     | 
| 
      
 1173 
     | 
    
         
            +
                  assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
         
     | 
| 
      
 1174 
     | 
    
         
            +
                    @p.write({@dm0 => es})
         
     | 
| 
      
 1175 
     | 
    
         
            +
                  end
         
     | 
| 
      
 1176 
     | 
    
         
            +
                end
         
     | 
| 
      
 1177 
     | 
    
         
            +
             
     | 
| 
      
 1178 
     | 
    
         
            +
                test 'confirm that every array message which is smaller than chunk threshold does not raise BufferChunkOverflowError' do
         
     | 
| 
      
 1179 
     | 
    
         
            +
                  assert_equal [@dm0], @p.stage.keys
         
     | 
| 
      
 1180 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 1181 
     | 
    
         
            +
             
     | 
| 
      
 1182 
     | 
    
         
            +
                  assert_equal 1_280_000, @p.chunk_limit_size
         
     | 
| 
      
 1183 
     | 
    
         
            +
             
     | 
| 
      
 1184 
     | 
    
         
            +
                  es = ["a" * 1_000_000, "b" * 1_000_000, "c" * 1_000_000]
         
     | 
| 
      
 1185 
     | 
    
         
            +
                  assert_nothing_raised do
         
     | 
| 
      
 1186 
     | 
    
         
            +
                    @p.write({@dm0 => es})
         
     | 
| 
      
 1187 
     | 
    
         
            +
                  end
         
     | 
| 
      
 1188 
     | 
    
         
            +
                  queue_messages = @p.queue.collect do |chunk|
         
     | 
| 
      
 1189 
     | 
    
         
            +
                    # collect first character of each message
         
     | 
| 
      
 1190 
     | 
    
         
            +
                    chunk.chunk[0]
         
     | 
| 
      
 1191 
     | 
    
         
            +
                  end
         
     | 
| 
      
 1192 
     | 
    
         
            +
                  assert_equal([
         
     | 
| 
      
 1193 
     | 
    
         
            +
                                 [@dm0],
         
     | 
| 
      
 1194 
     | 
    
         
            +
                                 1,
         
     | 
| 
      
 1195 
     | 
    
         
            +
                                 "c",
         
     | 
| 
      
 1196 
     | 
    
         
            +
                                 [@dm0, @dm0, @dm0],
         
     | 
| 
      
 1197 
     | 
    
         
            +
                                 [5000, 1, 1],
         
     | 
| 
      
 1198 
     | 
    
         
            +
                                 ["x", "a", "b"]
         
     | 
| 
      
 1199 
     | 
    
         
            +
                               ],
         
     | 
| 
      
 1200 
     | 
    
         
            +
                               [
         
     | 
| 
      
 1201 
     | 
    
         
            +
                                 @p.stage.keys,
         
     | 
| 
      
 1202 
     | 
    
         
            +
                                 @p.stage[@dm0].size,
         
     | 
| 
      
 1203 
     | 
    
         
            +
                                 @p.stage[@dm0].chunk[0],
         
     | 
| 
      
 1204 
     | 
    
         
            +
                                 @p.queue.map(&:metadata),
         
     | 
| 
      
 1205 
     | 
    
         
            +
                                 @p.queue.map(&:size),
         
     | 
| 
      
 1206 
     | 
    
         
            +
                                 queue_messages
         
     | 
| 
      
 1207 
     | 
    
         
            +
                               ])
         
     | 
| 
      
 1208 
     | 
    
         
            +
                end
         
     | 
| 
      
 1209 
     | 
    
         
            +
              end
         
     | 
| 
      
 1210 
     | 
    
         
            +
             
     | 
| 
      
 1211 
     | 
    
         
            +
              sub_test_case 'with configuration for test with lower limits' do
         
     | 
| 
      
 1212 
     | 
    
         
            +
                setup do
         
     | 
| 
      
 1213 
     | 
    
         
            +
                  @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240})
         
     | 
| 
      
 1214 
     | 
    
         
            +
                  @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1215 
     | 
    
         
            +
                  @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1216 
     | 
    
         
            +
                  @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1217 
     | 
    
         
            +
                  @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1218 
     | 
    
         
            +
                  (class << @p; self; end).module_eval do
         
     | 
| 
      
 1219 
     | 
    
         
            +
                    define_method(:resume) {
         
     | 
| 
      
 1220 
     | 
    
         
            +
                      staged = {
         
     | 
| 
      
 1221 
     | 
    
         
            +
                        dm2 => create_chunk(dm2, ["b" * 128] * 7).staged!,
         
     | 
| 
      
 1222 
     | 
    
         
            +
                        dm3 => create_chunk(dm3, ["c" * 128] * 5).staged!,
         
     | 
| 
      
 1223 
     | 
    
         
            +
                      }
         
     | 
| 
      
 1224 
     | 
    
         
            +
                      queued = [
         
     | 
| 
      
 1225 
     | 
    
         
            +
                        create_chunk(dm0, ["0" * 128] * 8).enqueued!,
         
     | 
| 
      
 1226 
     | 
    
         
            +
                        create_chunk(dm0, ["0" * 128] * 8).enqueued!,
         
     | 
| 
      
 1227 
     | 
    
         
            +
                        create_chunk(dm0, ["0" * 128] * 8).enqueued!,
         
     | 
| 
      
 1228 
     | 
    
         
            +
                        create_chunk(dm0, ["0" * 128] * 8).enqueued!,
         
     | 
| 
      
 1229 
     | 
    
         
            +
                        create_chunk(dm0, ["0" * 128] * 8).enqueued!,
         
     | 
| 
      
 1230 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 8).enqueued!,
         
     | 
| 
      
 1231 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 8).enqueued!,
         
     | 
| 
      
 1232 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 8).enqueued!, # 8th queued chunk
         
     | 
| 
      
 1233 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 3).enqueued!,
         
     | 
| 
      
 1234 
     | 
    
         
            +
                      ]
         
     | 
| 
      
 1235 
     | 
    
         
            +
                      return staged, queued
         
     | 
| 
      
 1236 
     | 
    
         
            +
                    }
         
     | 
| 
      
 1237 
     | 
    
         
            +
                  end
         
     | 
| 
      
 1238 
     | 
    
         
            +
                  @p.start
         
     | 
| 
      
 1239 
     | 
    
         
            +
                end
         
     | 
| 
      
 1240 
     | 
    
         
            +
             
     | 
| 
      
 1241 
     | 
    
         
            +
                test '#storable? returns false when too many data exist' do
         
     | 
| 
      
 1242 
     | 
    
         
            +
                  assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0,@dm1,@dm1,@dm1,@dm1], @p.queue.map(&:metadata)
         
     | 
| 
      
 1243 
     | 
    
         
            +
                  assert_equal [@dm2,@dm3], @p.stage.keys
         
     | 
| 
      
 1244 
     | 
    
         
            +
             
     | 
| 
      
 1245 
     | 
    
         
            +
                  assert_equal 128*8*8+128*3, @p.queue_size
         
     | 
| 
      
 1246 
     | 
    
         
            +
                  assert_equal 128*7+128*5, @p.stage_size
         
     | 
| 
      
 1247 
     | 
    
         
            +
             
     | 
| 
      
 1248 
     | 
    
         
            +
                  assert @p.storable?
         
     | 
| 
      
 1249 
     | 
    
         
            +
             
     | 
| 
      
 1250 
     | 
    
         
            +
                  dm3 = @p.metadata(timekey: @dm3.timekey)
         
     | 
| 
      
 1251 
     | 
    
         
            +
                  @p.write({dm3 => ["c" * 128]})
         
     | 
| 
      
 1252 
     | 
    
         
            +
             
     | 
| 
      
 1253 
     | 
    
         
            +
                  assert_equal 10240, (@p.stage_size + @p.queue_size)
         
     | 
| 
      
 1254 
     | 
    
         
            +
                  assert !@p.storable?
         
     | 
| 
      
 1255 
     | 
    
         
            +
                end
         
     | 
| 
      
 1256 
     | 
    
         
            +
             
     | 
| 
      
 1257 
     | 
    
         
            +
                test '#chunk_size_over? returns true if chunk size is bigger than limit' do
         
     | 
| 
      
 1258 
     | 
    
         
            +
                  m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 1259 
     | 
    
         
            +
             
     | 
| 
      
 1260 
     | 
    
         
            +
                  c1 = create_chunk(m, ["a" * 128] * 8)
         
     | 
| 
      
 1261 
     | 
    
         
            +
                  assert !@p.chunk_size_over?(c1)
         
     | 
| 
      
 1262 
     | 
    
         
            +
             
     | 
| 
      
 1263 
     | 
    
         
            +
                  c2 = create_chunk(m, ["a" * 128] * 9)
         
     | 
| 
      
 1264 
     | 
    
         
            +
                  assert @p.chunk_size_over?(c2)
         
     | 
| 
      
 1265 
     | 
    
         
            +
             
     | 
| 
      
 1266 
     | 
    
         
            +
                  c3 = create_chunk(m, ["a" * 128] * 8 + ["a"])
         
     | 
| 
      
 1267 
     | 
    
         
            +
                  assert @p.chunk_size_over?(c3)
         
     | 
| 
      
 1268 
     | 
    
         
            +
                end
         
     | 
| 
      
 1269 
     | 
    
         
            +
             
     | 
| 
      
 1270 
     | 
    
         
            +
                test '#chunk_size_full? returns true if chunk size is enough big against limit' do
         
     | 
| 
      
 1271 
     | 
    
         
            +
                  m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 1272 
     | 
    
         
            +
             
     | 
| 
      
 1273 
     | 
    
         
            +
                  c1 = create_chunk(m, ["a" * 128] * 7)
         
     | 
| 
      
 1274 
     | 
    
         
            +
                  assert !@p.chunk_size_full?(c1)
         
     | 
| 
      
 1275 
     | 
    
         
            +
             
     | 
| 
      
 1276 
     | 
    
         
            +
                  c2 = create_chunk(m, ["a" * 128] * 8)
         
     | 
| 
      
 1277 
     | 
    
         
            +
                  assert @p.chunk_size_full?(c2)
         
     | 
| 
      
 1278 
     | 
    
         
            +
             
     | 
| 
      
 1279 
     | 
    
         
            +
                  assert_equal 0.95, @p.chunk_full_threshold
         
     | 
| 
      
 1280 
     | 
    
         
            +
                  c3 = create_chunk(m, ["a" * 128] * 6 + ["a" * 64])
         
     | 
| 
      
 1281 
     | 
    
         
            +
                  assert !@p.chunk_size_full?(c3)
         
     | 
| 
      
 1282 
     | 
    
         
            +
                end
         
     | 
| 
      
 1283 
     | 
    
         
            +
              end
         
     | 
| 
      
 1284 
     | 
    
         
            +
             
     | 
| 
      
 1285 
     | 
    
         
            +
              sub_test_case 'with configuration includes chunk_limit_records' do
         
     | 
| 
      
 1286 
     | 
    
         
            +
                setup do
         
     | 
| 
      
 1287 
     | 
    
         
            +
                  @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "chunk_limit_records" => 6})
         
     | 
| 
      
 1288 
     | 
    
         
            +
                  @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1289 
     | 
    
         
            +
                  @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1290 
     | 
    
         
            +
                  @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1291 
     | 
    
         
            +
                  @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1292 
     | 
    
         
            +
                  (class << @p; self; end).module_eval do
         
     | 
| 
      
 1293 
     | 
    
         
            +
                    define_method(:resume) {
         
     | 
| 
      
 1294 
     | 
    
         
            +
                      staged = {
         
     | 
| 
      
 1295 
     | 
    
         
            +
                        dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
         
     | 
| 
      
 1296 
     | 
    
         
            +
                        dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
         
     | 
| 
      
 1297 
     | 
    
         
            +
                      }
         
     | 
| 
      
 1298 
     | 
    
         
            +
                      queued = [
         
     | 
| 
      
 1299 
     | 
    
         
            +
                        create_chunk(dm0, ["0" * 128] * 6).enqueued!,
         
     | 
| 
      
 1300 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 6).enqueued!,
         
     | 
| 
      
 1301 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 6).enqueued!,
         
     | 
| 
      
 1302 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 3).enqueued!,
         
     | 
| 
      
 1303 
     | 
    
         
            +
                      ]
         
     | 
| 
      
 1304 
     | 
    
         
            +
                      return staged, queued
         
     | 
| 
      
 1305 
     | 
    
         
            +
                    }
         
     | 
| 
      
 1306 
     | 
    
         
            +
                  end
         
     | 
| 
      
 1307 
     | 
    
         
            +
                  @p.start
         
     | 
| 
      
 1308 
     | 
    
         
            +
                end
         
     | 
| 
      
 1309 
     | 
    
         
            +
             
     | 
| 
      
 1310 
     | 
    
         
            +
                test '#chunk_size_over? returns true if too many records exists in a chunk even if its bytes is less than limit' do
         
     | 
| 
      
 1311 
     | 
    
         
            +
                  assert_equal 6, @p.chunk_limit_records
         
     | 
| 
      
 1312 
     | 
    
         
            +
             
     | 
| 
      
 1313 
     | 
    
         
            +
                  m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 1314 
     | 
    
         
            +
             
     | 
| 
      
 1315 
     | 
    
         
            +
                  c1 = create_chunk(m, ["a" * 128] * 6)
         
     | 
| 
      
 1316 
     | 
    
         
            +
                  assert_equal 6, c1.size
         
     | 
| 
      
 1317 
     | 
    
         
            +
                  assert !@p.chunk_size_over?(c1)
         
     | 
| 
      
 1318 
     | 
    
         
            +
             
     | 
| 
      
 1319 
     | 
    
         
            +
                  c2 = create_chunk(m, ["a" * 128] * 7)
         
     | 
| 
      
 1320 
     | 
    
         
            +
                  assert @p.chunk_size_over?(c2)
         
     | 
| 
      
 1321 
     | 
    
         
            +
             
     | 
| 
      
 1322 
     | 
    
         
            +
                  c3 = create_chunk(m, ["a" * 128] * 6 + ["a"])
         
     | 
| 
      
 1323 
     | 
    
         
            +
                  assert @p.chunk_size_over?(c3)
         
     | 
| 
      
 1324 
     | 
    
         
            +
                end
         
     | 
| 
      
 1325 
     | 
    
         
            +
             
     | 
| 
      
 1326 
     | 
    
         
            +
                test '#chunk_size_full? returns true if enough many records exists in a chunk even if its bytes is less than limit' do
         
     | 
| 
      
 1327 
     | 
    
         
            +
                  assert_equal 6, @p.chunk_limit_records
         
     | 
| 
      
 1328 
     | 
    
         
            +
             
     | 
| 
      
 1329 
     | 
    
         
            +
                  m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
         
     | 
| 
      
 1330 
     | 
    
         
            +
             
     | 
| 
      
 1331 
     | 
    
         
            +
                  c1 = create_chunk(m, ["a" * 128] * 5)
         
     | 
| 
      
 1332 
     | 
    
         
            +
                  assert_equal 5, c1.size
         
     | 
| 
      
 1333 
     | 
    
         
            +
                  assert !@p.chunk_size_full?(c1)
         
     | 
| 
      
 1334 
     | 
    
         
            +
             
     | 
| 
      
 1335 
     | 
    
         
            +
                  c2 = create_chunk(m, ["a" * 128] * 6)
         
     | 
| 
      
 1336 
     | 
    
         
            +
                  assert @p.chunk_size_full?(c2)
         
     | 
| 
      
 1337 
     | 
    
         
            +
             
     | 
| 
      
 1338 
     | 
    
         
            +
                  c3 = create_chunk(m, ["a" * 128] * 5 + ["a"])
         
     | 
| 
      
 1339 
     | 
    
         
            +
                  assert @p.chunk_size_full?(c3)
         
     | 
| 
      
 1340 
     | 
    
         
            +
                end
         
     | 
| 
      
 1341 
     | 
    
         
            +
              end
         
     | 
| 
      
 1342 
     | 
    
         
            +
             
     | 
| 
      
 1343 
     | 
    
         
            +
              sub_test_case 'with configuration includes queue_limit_length' do
         
     | 
| 
      
 1344 
     | 
    
         
            +
                setup do
         
     | 
| 
      
 1345 
     | 
    
         
            +
                  @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "queue_limit_length" => 5})
         
     | 
| 
      
 1346 
     | 
    
         
            +
                  @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1347 
     | 
    
         
            +
                  @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1348 
     | 
    
         
            +
                  @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1349 
     | 
    
         
            +
                  @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1350 
     | 
    
         
            +
                  (class << @p; self; end).module_eval do
         
     | 
| 
      
 1351 
     | 
    
         
            +
                    define_method(:resume) {
         
     | 
| 
      
 1352 
     | 
    
         
            +
                      staged = {
         
     | 
| 
      
 1353 
     | 
    
         
            +
                        dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
         
     | 
| 
      
 1354 
     | 
    
         
            +
                        dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
         
     | 
| 
      
 1355 
     | 
    
         
            +
                      }
         
     | 
| 
      
 1356 
     | 
    
         
            +
                      queued = [
         
     | 
| 
      
 1357 
     | 
    
         
            +
                        create_chunk(dm0, ["0" * 128] * 6).enqueued!,
         
     | 
| 
      
 1358 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 6).enqueued!,
         
     | 
| 
      
 1359 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 6).enqueued!,
         
     | 
| 
      
 1360 
     | 
    
         
            +
                        create_chunk(dm1, ["a" * 128] * 3).enqueued!,
         
     | 
| 
      
 1361 
     | 
    
         
            +
                      ]
         
     | 
| 
      
 1362 
     | 
    
         
            +
                      return staged, queued
         
     | 
| 
      
 1363 
     | 
    
         
            +
                    }
         
     | 
| 
      
 1364 
     | 
    
         
            +
                  end
         
     | 
| 
      
 1365 
     | 
    
         
            +
                  @p.start
         
     | 
| 
      
 1366 
     | 
    
         
            +
                end
         
     | 
| 
      
 1367 
     | 
    
         
            +
             
     | 
| 
      
 1368 
     | 
    
         
            +
                test '#configure will overwrite standard configuration if queue_limit_length' do
         
     | 
| 
      
 1369 
     | 
    
         
            +
                  assert_equal 1024, @p.chunk_limit_size
         
     | 
| 
      
 1370 
     | 
    
         
            +
                  assert_equal 5, @p.queue_limit_length
         
     | 
| 
      
 1371 
     | 
    
         
            +
                  assert_equal (1024*5), @p.total_limit_size
         
     | 
| 
      
 1372 
     | 
    
         
            +
                end
         
     | 
| 
      
 1373 
     | 
    
         
            +
              end
         
     | 
| 
      
 1374 
     | 
    
         
            +
             
     | 
| 
      
 1375 
     | 
    
         
            +
              sub_test_case 'when compress is gzip' do
         
     | 
| 
      
 1376 
     | 
    
         
            +
                setup do
         
     | 
| 
      
 1377 
     | 
    
         
            +
                  @p = create_buffer({'compress' => 'gzip'})
         
     | 
| 
      
 1378 
     | 
    
         
            +
                  @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1379 
     | 
    
         
            +
                end
         
     | 
| 
      
 1380 
     | 
    
         
            +
             
     | 
| 
      
 1381 
     | 
    
         
            +
                test '#compress returns :gzip' do
         
     | 
| 
      
 1382 
     | 
    
         
            +
                  assert_equal :gzip, @p.compress
         
     | 
| 
      
 1383 
     | 
    
         
            +
                end
         
     | 
| 
      
 1384 
     | 
    
         
            +
             
     | 
| 
      
 1385 
     | 
    
         
            +
                test 'create decompressable chunk' do
         
     | 
| 
      
 1386 
     | 
    
         
            +
                  chunk = @p.generate_chunk(create_metadata)
         
     | 
| 
      
 1387 
     | 
    
         
            +
                  assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::Decompressable)
         
     | 
| 
      
 1388 
     | 
    
         
            +
                end
         
     | 
| 
      
 1389 
     | 
    
         
            +
             
     | 
| 
      
 1390 
     | 
    
         
            +
                test '#write compressed data which exceeds chunk_limit_size, it raises BufferChunkOverflowError' do
         
     | 
| 
      
 1391 
     | 
    
         
            +
                  @p = create_buffer({'compress' => 'gzip', 'chunk_limit_size' => 70})
         
     | 
| 
      
 1392 
     | 
    
         
            +
                  timestamp = event_time('2016-04-11 16:00:02 +0000')
         
     | 
| 
      
 1393 
     | 
    
         
            +
                  es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}], # overflow
         
     | 
| 
      
 1394 
     | 
    
         
            +
                                                     [timestamp, {"message" => "aaa"}],
         
     | 
| 
      
 1395 
     | 
    
         
            +
                                                     [timestamp, {"message" => "bbb"}]])
         
     | 
| 
      
 1396 
     | 
    
         
            +
                  assert_equal [], @p.queue.map(&:metadata)
         
     | 
| 
      
 1397 
     | 
    
         
            +
                  assert_equal 70, @p.chunk_limit_size
         
     | 
| 
      
 1398 
     | 
    
         
            +
             
     | 
| 
      
 1399 
     | 
    
         
            +
                  # calculate the actual boundary value. it varies on machine
         
     | 
| 
      
 1400 
     | 
    
         
            +
                  c = @p.generate_chunk(create_metadata)
         
     | 
| 
      
 1401 
     | 
    
         
            +
                  c.append(Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}]]), compress: :gzip)
         
     | 
| 
      
 1402 
     | 
    
         
            +
                  overflow_bytes = c.bytesize
         
     | 
| 
      
 1403 
     | 
    
         
            +
             
     | 
| 
      
 1404 
     | 
    
         
            +
                  messages = "concatenated/appended a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size (70)"
         
     | 
| 
      
 1405 
     | 
    
         
            +
                  assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages) do
         
     | 
| 
      
 1406 
     | 
    
         
            +
                    # test format == nil && compress == :gzip
         
     | 
| 
      
 1407 
     | 
    
         
            +
                    @p.write({@dm0 => es})
         
     | 
| 
      
 1408 
     | 
    
         
            +
                  end
         
     | 
| 
      
 1409 
     | 
    
         
            +
                  # message a and b occupies each chunks in full, so both of messages are queued (no staged chunk)
         
     | 
| 
      
 1410 
     | 
    
         
            +
                  assert_equal([2, [@dm0, @dm0], [1, 1], nil],
         
     | 
| 
      
 1411 
     | 
    
         
            +
                               [@p.queue.size, @p.queue.map(&:metadata), @p.queue.map(&:size), @p.stage[@dm0]])
         
     | 
| 
      
 1412 
     | 
    
         
            +
                end
         
     | 
| 
      
 1413 
     | 
    
         
            +
              end
         
     | 
| 
      
 1414 
     | 
    
         
            +
             
     | 
| 
      
 1415 
     | 
    
         
            +
              sub_test_case '#statistics' do
         
     | 
| 
      
 1416 
     | 
    
         
            +
                setup do
         
     | 
| 
      
 1417 
     | 
    
         
            +
                  @p = create_buffer({ "total_limit_size" => 1024 })
         
     | 
| 
      
 1418 
     | 
    
         
            +
                  dm = create_metadata(Time.parse('2020-03-13 16:00:00 +0000').to_i, nil, nil)
         
     | 
| 
      
 1419 
     | 
    
         
            +
             
     | 
| 
      
 1420 
     | 
    
         
            +
                  (class << @p; self; end).module_eval do
         
     | 
| 
      
 1421 
     | 
    
         
            +
                    define_method(:resume) {
         
     | 
| 
      
 1422 
     | 
    
         
            +
                      queued = [create_chunk(dm, ["a" * (1024 - 102)]).enqueued!]
         
     | 
| 
      
 1423 
     | 
    
         
            +
                      return {}, queued
         
     | 
| 
      
 1424 
     | 
    
         
            +
                    }
         
     | 
| 
      
 1425 
     | 
    
         
            +
                  end
         
     | 
| 
      
 1426 
     | 
    
         
            +
             
     | 
| 
      
 1427 
     | 
    
         
            +
                  @p.start
         
     | 
| 
      
 1428 
     | 
    
         
            +
                end
         
     | 
| 
      
 1429 
     | 
    
         
            +
             
     | 
| 
      
 1430 
     | 
    
         
            +
                test 'returns available_buffer_space_ratios' do
         
     | 
| 
      
 1431 
     | 
    
         
            +
                  assert_equal 10.0, @p.statistics['buffer']['available_buffer_space_ratios']
         
     | 
| 
      
 1432 
     | 
    
         
            +
                end
         
     | 
| 
      
 1433 
     | 
    
         
            +
              end
         
     | 
| 
      
 1434 
     | 
    
         
            +
            end
         
     |