fluentd 1.14.4-x64-mingw-ucrt
Potentially problematic release: this version of fluentd has been flagged as possibly problematic.
- checksums.yaml +7 -0
- data/.deepsource.toml +13 -0
- data/.drone.yml +35 -0
- data/.github/ISSUE_TEMPLATE/bug_report.yaml +70 -0
- data/.github/ISSUE_TEMPLATE/config.yml +5 -0
- data/.github/ISSUE_TEMPLATE/feature_request.yaml +38 -0
- data/.github/ISSUE_TEMPLATE.md +17 -0
- data/.github/PULL_REQUEST_TEMPLATE.md +14 -0
- data/.github/workflows/issue-auto-closer.yml +12 -0
- data/.github/workflows/linux-test.yaml +36 -0
- data/.github/workflows/macos-test.yaml +30 -0
- data/.github/workflows/stale-actions.yml +22 -0
- data/.github/workflows/windows-test.yaml +46 -0
- data/.gitignore +30 -0
- data/.gitlab-ci.yml +103 -0
- data/ADOPTERS.md +5 -0
- data/AUTHORS +2 -0
- data/CHANGELOG.md +2409 -0
- data/CONTRIBUTING.md +45 -0
- data/GOVERNANCE.md +55 -0
- data/Gemfile +9 -0
- data/GithubWorkflow.md +78 -0
- data/LICENSE +202 -0
- data/MAINTAINERS.md +11 -0
- data/README.md +97 -0
- data/Rakefile +79 -0
- data/SECURITY.md +18 -0
- data/bin/fluent-binlog-reader +7 -0
- data/bin/fluent-ca-generate +6 -0
- data/bin/fluent-cap-ctl +7 -0
- data/bin/fluent-cat +5 -0
- data/bin/fluent-ctl +7 -0
- data/bin/fluent-debug +5 -0
- data/bin/fluent-gem +9 -0
- data/bin/fluent-plugin-config-format +5 -0
- data/bin/fluent-plugin-generate +5 -0
- data/bin/fluentd +15 -0
- data/code-of-conduct.md +3 -0
- data/docs/SECURITY_AUDIT.pdf +0 -0
- data/example/copy_roundrobin.conf +39 -0
- data/example/counter.conf +18 -0
- data/example/filter_stdout.conf +22 -0
- data/example/in_forward.conf +14 -0
- data/example/in_forward_client.conf +37 -0
- data/example/in_forward_shared_key.conf +15 -0
- data/example/in_forward_tls.conf +14 -0
- data/example/in_forward_users.conf +24 -0
- data/example/in_forward_workers.conf +21 -0
- data/example/in_http.conf +16 -0
- data/example/in_out_forward.conf +17 -0
- data/example/in_sample_blocks.conf +17 -0
- data/example/in_sample_with_compression.conf +23 -0
- data/example/in_syslog.conf +15 -0
- data/example/in_tail.conf +14 -0
- data/example/in_tcp.conf +13 -0
- data/example/in_udp.conf +13 -0
- data/example/logevents.conf +25 -0
- data/example/multi_filters.conf +61 -0
- data/example/out_copy.conf +20 -0
- data/example/out_exec_filter.conf +42 -0
- data/example/out_file.conf +13 -0
- data/example/out_forward.conf +35 -0
- data/example/out_forward_buf_file.conf +23 -0
- data/example/out_forward_client.conf +109 -0
- data/example/out_forward_heartbeat_none.conf +16 -0
- data/example/out_forward_sd.conf +17 -0
- data/example/out_forward_shared_key.conf +36 -0
- data/example/out_forward_tls.conf +18 -0
- data/example/out_forward_users.conf +65 -0
- data/example/out_null.conf +36 -0
- data/example/sd.yaml +8 -0
- data/example/secondary_file.conf +42 -0
- data/example/suppress_config_dump.conf +7 -0
- data/example/v0_12_filter.conf +78 -0
- data/example/v1_literal_example.conf +36 -0
- data/example/worker_section.conf +36 -0
- data/fluent.conf +139 -0
- data/fluentd.gemspec +55 -0
- data/lib/fluent/agent.rb +168 -0
- data/lib/fluent/capability.rb +87 -0
- data/lib/fluent/clock.rb +66 -0
- data/lib/fluent/command/binlog_reader.rb +244 -0
- data/lib/fluent/command/bundler_injection.rb +45 -0
- data/lib/fluent/command/ca_generate.rb +184 -0
- data/lib/fluent/command/cap_ctl.rb +174 -0
- data/lib/fluent/command/cat.rb +365 -0
- data/lib/fluent/command/ctl.rb +177 -0
- data/lib/fluent/command/debug.rb +103 -0
- data/lib/fluent/command/fluentd.rb +374 -0
- data/lib/fluent/command/plugin_config_formatter.rb +308 -0
- data/lib/fluent/command/plugin_generator.rb +365 -0
- data/lib/fluent/compat/call_super_mixin.rb +76 -0
- data/lib/fluent/compat/detach_process_mixin.rb +33 -0
- data/lib/fluent/compat/exec_util.rb +129 -0
- data/lib/fluent/compat/file_util.rb +54 -0
- data/lib/fluent/compat/filter.rb +68 -0
- data/lib/fluent/compat/formatter.rb +111 -0
- data/lib/fluent/compat/formatter_utils.rb +85 -0
- data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
- data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
- data/lib/fluent/compat/input.rb +49 -0
- data/lib/fluent/compat/output.rb +721 -0
- data/lib/fluent/compat/output_chain.rb +60 -0
- data/lib/fluent/compat/parser.rb +310 -0
- data/lib/fluent/compat/parser_utils.rb +40 -0
- data/lib/fluent/compat/propagate_default.rb +62 -0
- data/lib/fluent/compat/record_filter_mixin.rb +34 -0
- data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
- data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
- data/lib/fluent/compat/socket_util.rb +165 -0
- data/lib/fluent/compat/string_util.rb +34 -0
- data/lib/fluent/compat/structured_format_mixin.rb +26 -0
- data/lib/fluent/compat/type_converter.rb +90 -0
- data/lib/fluent/config/basic_parser.rb +123 -0
- data/lib/fluent/config/configure_proxy.rb +424 -0
- data/lib/fluent/config/dsl.rb +152 -0
- data/lib/fluent/config/element.rb +265 -0
- data/lib/fluent/config/error.rb +32 -0
- data/lib/fluent/config/literal_parser.rb +286 -0
- data/lib/fluent/config/parser.rb +107 -0
- data/lib/fluent/config/section.rb +272 -0
- data/lib/fluent/config/types.rb +249 -0
- data/lib/fluent/config/v1_parser.rb +192 -0
- data/lib/fluent/config.rb +76 -0
- data/lib/fluent/configurable.rb +201 -0
- data/lib/fluent/counter/base_socket.rb +44 -0
- data/lib/fluent/counter/client.rb +297 -0
- data/lib/fluent/counter/error.rb +86 -0
- data/lib/fluent/counter/mutex_hash.rb +163 -0
- data/lib/fluent/counter/server.rb +273 -0
- data/lib/fluent/counter/store.rb +205 -0
- data/lib/fluent/counter/validator.rb +145 -0
- data/lib/fluent/counter.rb +23 -0
- data/lib/fluent/daemon.rb +15 -0
- data/lib/fluent/daemonizer.rb +88 -0
- data/lib/fluent/engine.rb +253 -0
- data/lib/fluent/env.rb +40 -0
- data/lib/fluent/error.rb +34 -0
- data/lib/fluent/event.rb +326 -0
- data/lib/fluent/event_router.rb +297 -0
- data/lib/fluent/ext_monitor_require.rb +28 -0
- data/lib/fluent/filter.rb +21 -0
- data/lib/fluent/fluent_log_event_router.rb +141 -0
- data/lib/fluent/formatter.rb +23 -0
- data/lib/fluent/input.rb +21 -0
- data/lib/fluent/label.rb +46 -0
- data/lib/fluent/load.rb +34 -0
- data/lib/fluent/log.rb +713 -0
- data/lib/fluent/match.rb +187 -0
- data/lib/fluent/mixin.rb +31 -0
- data/lib/fluent/msgpack_factory.rb +106 -0
- data/lib/fluent/oj_options.rb +62 -0
- data/lib/fluent/output.rb +29 -0
- data/lib/fluent/output_chain.rb +23 -0
- data/lib/fluent/parser.rb +23 -0
- data/lib/fluent/plugin/bare_output.rb +104 -0
- data/lib/fluent/plugin/base.rb +197 -0
- data/lib/fluent/plugin/buf_file.rb +213 -0
- data/lib/fluent/plugin/buf_file_single.rb +225 -0
- data/lib/fluent/plugin/buf_memory.rb +34 -0
- data/lib/fluent/plugin/buffer/chunk.rb +240 -0
- data/lib/fluent/plugin/buffer/file_chunk.rb +413 -0
- data/lib/fluent/plugin/buffer/file_single_chunk.rb +311 -0
- data/lib/fluent/plugin/buffer/memory_chunk.rb +91 -0
- data/lib/fluent/plugin/buffer.rb +918 -0
- data/lib/fluent/plugin/compressable.rb +96 -0
- data/lib/fluent/plugin/exec_util.rb +22 -0
- data/lib/fluent/plugin/file_util.rb +22 -0
- data/lib/fluent/plugin/file_wrapper.rb +187 -0
- data/lib/fluent/plugin/filter.rb +127 -0
- data/lib/fluent/plugin/filter_grep.rb +189 -0
- data/lib/fluent/plugin/filter_parser.rb +130 -0
- data/lib/fluent/plugin/filter_record_transformer.rb +324 -0
- data/lib/fluent/plugin/filter_stdout.rb +53 -0
- data/lib/fluent/plugin/formatter.rb +75 -0
- data/lib/fluent/plugin/formatter_csv.rb +78 -0
- data/lib/fluent/plugin/formatter_hash.rb +35 -0
- data/lib/fluent/plugin/formatter_json.rb +59 -0
- data/lib/fluent/plugin/formatter_ltsv.rb +44 -0
- data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
- data/lib/fluent/plugin/formatter_out_file.rb +53 -0
- data/lib/fluent/plugin/formatter_single_value.rb +36 -0
- data/lib/fluent/plugin/formatter_stdout.rb +76 -0
- data/lib/fluent/plugin/formatter_tsv.rb +40 -0
- data/lib/fluent/plugin/in_debug_agent.rb +71 -0
- data/lib/fluent/plugin/in_dummy.rb +18 -0
- data/lib/fluent/plugin/in_exec.rb +110 -0
- data/lib/fluent/plugin/in_forward.rb +473 -0
- data/lib/fluent/plugin/in_gc_stat.rb +72 -0
- data/lib/fluent/plugin/in_http.rb +667 -0
- data/lib/fluent/plugin/in_monitor_agent.rb +412 -0
- data/lib/fluent/plugin/in_object_space.rb +93 -0
- data/lib/fluent/plugin/in_sample.rb +141 -0
- data/lib/fluent/plugin/in_syslog.rb +276 -0
- data/lib/fluent/plugin/in_tail/position_file.rb +269 -0
- data/lib/fluent/plugin/in_tail.rb +1228 -0
- data/lib/fluent/plugin/in_tcp.rb +181 -0
- data/lib/fluent/plugin/in_udp.rb +92 -0
- data/lib/fluent/plugin/in_unix.rb +195 -0
- data/lib/fluent/plugin/input.rb +75 -0
- data/lib/fluent/plugin/metrics.rb +119 -0
- data/lib/fluent/plugin/metrics_local.rb +96 -0
- data/lib/fluent/plugin/multi_output.rb +195 -0
- data/lib/fluent/plugin/out_copy.rb +120 -0
- data/lib/fluent/plugin/out_exec.rb +105 -0
- data/lib/fluent/plugin/out_exec_filter.rb +319 -0
- data/lib/fluent/plugin/out_file.rb +334 -0
- data/lib/fluent/plugin/out_forward/ack_handler.rb +161 -0
- data/lib/fluent/plugin/out_forward/connection_manager.rb +113 -0
- data/lib/fluent/plugin/out_forward/error.rb +28 -0
- data/lib/fluent/plugin/out_forward/failure_detector.rb +84 -0
- data/lib/fluent/plugin/out_forward/handshake_protocol.rb +125 -0
- data/lib/fluent/plugin/out_forward/load_balancer.rb +114 -0
- data/lib/fluent/plugin/out_forward/socket_cache.rb +140 -0
- data/lib/fluent/plugin/out_forward.rb +826 -0
- data/lib/fluent/plugin/out_http.rb +275 -0
- data/lib/fluent/plugin/out_null.rb +74 -0
- data/lib/fluent/plugin/out_relabel.rb +32 -0
- data/lib/fluent/plugin/out_roundrobin.rb +84 -0
- data/lib/fluent/plugin/out_secondary_file.rb +131 -0
- data/lib/fluent/plugin/out_stdout.rb +74 -0
- data/lib/fluent/plugin/out_stream.rb +130 -0
- data/lib/fluent/plugin/output.rb +1556 -0
- data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
- data/lib/fluent/plugin/parser.rb +275 -0
- data/lib/fluent/plugin/parser_apache.rb +28 -0
- data/lib/fluent/plugin/parser_apache2.rb +88 -0
- data/lib/fluent/plugin/parser_apache_error.rb +26 -0
- data/lib/fluent/plugin/parser_csv.rb +114 -0
- data/lib/fluent/plugin/parser_json.rb +96 -0
- data/lib/fluent/plugin/parser_ltsv.rb +51 -0
- data/lib/fluent/plugin/parser_msgpack.rb +50 -0
- data/lib/fluent/plugin/parser_multiline.rb +152 -0
- data/lib/fluent/plugin/parser_nginx.rb +28 -0
- data/lib/fluent/plugin/parser_none.rb +36 -0
- data/lib/fluent/plugin/parser_regexp.rb +68 -0
- data/lib/fluent/plugin/parser_syslog.rb +496 -0
- data/lib/fluent/plugin/parser_tsv.rb +42 -0
- data/lib/fluent/plugin/sd_file.rb +156 -0
- data/lib/fluent/plugin/sd_srv.rb +135 -0
- data/lib/fluent/plugin/sd_static.rb +58 -0
- data/lib/fluent/plugin/service_discovery.rb +65 -0
- data/lib/fluent/plugin/socket_util.rb +22 -0
- data/lib/fluent/plugin/storage.rb +84 -0
- data/lib/fluent/plugin/storage_local.rb +162 -0
- data/lib/fluent/plugin/string_util.rb +22 -0
- data/lib/fluent/plugin.rb +206 -0
- data/lib/fluent/plugin_helper/cert_option.rb +191 -0
- data/lib/fluent/plugin_helper/child_process.rb +366 -0
- data/lib/fluent/plugin_helper/compat_parameters.rb +343 -0
- data/lib/fluent/plugin_helper/counter.rb +51 -0
- data/lib/fluent/plugin_helper/event_emitter.rb +100 -0
- data/lib/fluent/plugin_helper/event_loop.rb +170 -0
- data/lib/fluent/plugin_helper/extract.rb +104 -0
- data/lib/fluent/plugin_helper/formatter.rb +147 -0
- data/lib/fluent/plugin_helper/http_server/app.rb +79 -0
- data/lib/fluent/plugin_helper/http_server/compat/server.rb +92 -0
- data/lib/fluent/plugin_helper/http_server/compat/ssl_context_extractor.rb +52 -0
- data/lib/fluent/plugin_helper/http_server/compat/webrick_handler.rb +58 -0
- data/lib/fluent/plugin_helper/http_server/methods.rb +35 -0
- data/lib/fluent/plugin_helper/http_server/request.rb +42 -0
- data/lib/fluent/plugin_helper/http_server/router.rb +54 -0
- data/lib/fluent/plugin_helper/http_server/server.rb +93 -0
- data/lib/fluent/plugin_helper/http_server/ssl_context_builder.rb +41 -0
- data/lib/fluent/plugin_helper/http_server.rb +135 -0
- data/lib/fluent/plugin_helper/inject.rb +154 -0
- data/lib/fluent/plugin_helper/metrics.rb +129 -0
- data/lib/fluent/plugin_helper/parser.rb +147 -0
- data/lib/fluent/plugin_helper/record_accessor.rb +207 -0
- data/lib/fluent/plugin_helper/retry_state.rb +209 -0
- data/lib/fluent/plugin_helper/server.rb +801 -0
- data/lib/fluent/plugin_helper/service_discovery/manager.rb +146 -0
- data/lib/fluent/plugin_helper/service_discovery/round_robin_balancer.rb +43 -0
- data/lib/fluent/plugin_helper/service_discovery.rb +125 -0
- data/lib/fluent/plugin_helper/socket.rb +277 -0
- data/lib/fluent/plugin_helper/socket_option.rb +98 -0
- data/lib/fluent/plugin_helper/storage.rb +349 -0
- data/lib/fluent/plugin_helper/thread.rb +180 -0
- data/lib/fluent/plugin_helper/timer.rb +92 -0
- data/lib/fluent/plugin_helper.rb +75 -0
- data/lib/fluent/plugin_id.rb +93 -0
- data/lib/fluent/process.rb +22 -0
- data/lib/fluent/registry.rb +116 -0
- data/lib/fluent/root_agent.rb +372 -0
- data/lib/fluent/rpc.rb +94 -0
- data/lib/fluent/static_config_analysis.rb +194 -0
- data/lib/fluent/supervisor.rb +1054 -0
- data/lib/fluent/system_config.rb +187 -0
- data/lib/fluent/test/base.rb +78 -0
- data/lib/fluent/test/driver/base.rb +225 -0
- data/lib/fluent/test/driver/base_owned.rb +83 -0
- data/lib/fluent/test/driver/base_owner.rb +135 -0
- data/lib/fluent/test/driver/event_feeder.rb +98 -0
- data/lib/fluent/test/driver/filter.rb +57 -0
- data/lib/fluent/test/driver/formatter.rb +30 -0
- data/lib/fluent/test/driver/input.rb +31 -0
- data/lib/fluent/test/driver/multi_output.rb +53 -0
- data/lib/fluent/test/driver/output.rb +102 -0
- data/lib/fluent/test/driver/parser.rb +30 -0
- data/lib/fluent/test/driver/storage.rb +30 -0
- data/lib/fluent/test/driver/test_event_router.rb +45 -0
- data/lib/fluent/test/filter_test.rb +77 -0
- data/lib/fluent/test/formatter_test.rb +65 -0
- data/lib/fluent/test/helpers.rb +134 -0
- data/lib/fluent/test/input_test.rb +174 -0
- data/lib/fluent/test/log.rb +79 -0
- data/lib/fluent/test/output_test.rb +156 -0
- data/lib/fluent/test/parser_test.rb +70 -0
- data/lib/fluent/test/startup_shutdown.rb +46 -0
- data/lib/fluent/test.rb +58 -0
- data/lib/fluent/time.rb +512 -0
- data/lib/fluent/timezone.rb +171 -0
- data/lib/fluent/tls.rb +81 -0
- data/lib/fluent/unique_id.rb +39 -0
- data/lib/fluent/variable_store.rb +40 -0
- data/lib/fluent/version.rb +21 -0
- data/lib/fluent/winsvc.rb +103 -0
- data/templates/new_gem/Gemfile +3 -0
- data/templates/new_gem/README.md.erb +43 -0
- data/templates/new_gem/Rakefile +13 -0
- data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
- data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
- data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
- data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
- data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
- data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
- data/templates/new_gem/lib/fluent/plugin/storage.rb.erb +40 -0
- data/templates/new_gem/test/helper.rb.erb +8 -0
- data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
- data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
- data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
- data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
- data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
- data/templates/new_gem/test/plugin/test_storage.rb.erb +18 -0
- data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
- data/templates/plugin_config_formatter/param.md-table.erb +10 -0
- data/templates/plugin_config_formatter/param.md.erb +34 -0
- data/templates/plugin_config_formatter/section.md.erb +12 -0
- data/test/command/test_binlog_reader.rb +362 -0
- data/test/command/test_ca_generate.rb +70 -0
- data/test/command/test_cap_ctl.rb +100 -0
- data/test/command/test_cat.rb +128 -0
- data/test/command/test_ctl.rb +57 -0
- data/test/command/test_fluentd.rb +1106 -0
- data/test/command/test_plugin_config_formatter.rb +398 -0
- data/test/command/test_plugin_generator.rb +109 -0
- data/test/compat/test_calls_super.rb +166 -0
- data/test/compat/test_parser.rb +92 -0
- data/test/config/assertions.rb +42 -0
- data/test/config/test_config_parser.rb +551 -0
- data/test/config/test_configurable.rb +1784 -0
- data/test/config/test_configure_proxy.rb +604 -0
- data/test/config/test_dsl.rb +415 -0
- data/test/config/test_element.rb +518 -0
- data/test/config/test_literal_parser.rb +309 -0
- data/test/config/test_plugin_configuration.rb +56 -0
- data/test/config/test_section.rb +191 -0
- data/test/config/test_system_config.rb +199 -0
- data/test/config/test_types.rb +408 -0
- data/test/counter/test_client.rb +563 -0
- data/test/counter/test_error.rb +44 -0
- data/test/counter/test_mutex_hash.rb +179 -0
- data/test/counter/test_server.rb +589 -0
- data/test/counter/test_store.rb +258 -0
- data/test/counter/test_validator.rb +137 -0
- data/test/helper.rb +155 -0
- data/test/helpers/fuzzy_assert.rb +89 -0
- data/test/helpers/process_extenstion.rb +33 -0
- data/test/plugin/data/2010/01/20100102-030405.log +0 -0
- data/test/plugin/data/2010/01/20100102-030406.log +0 -0
- data/test/plugin/data/2010/01/20100102.log +0 -0
- data/test/plugin/data/log/bar +0 -0
- data/test/plugin/data/log/foo/bar.log +0 -0
- data/test/plugin/data/log/foo/bar2 +0 -0
- data/test/plugin/data/log/test.log +0 -0
- data/test/plugin/data/sd_file/config +11 -0
- data/test/plugin/data/sd_file/config.json +17 -0
- data/test/plugin/data/sd_file/config.yaml +11 -0
- data/test/plugin/data/sd_file/config.yml +11 -0
- data/test/plugin/data/sd_file/invalid_config.yml +7 -0
- data/test/plugin/in_tail/test_fifo.rb +121 -0
- data/test/plugin/in_tail/test_io_handler.rb +140 -0
- data/test/plugin/in_tail/test_position_file.rb +379 -0
- data/test/plugin/out_forward/test_ack_handler.rb +101 -0
- data/test/plugin/out_forward/test_connection_manager.rb +145 -0
- data/test/plugin/out_forward/test_handshake_protocol.rb +112 -0
- data/test/plugin/out_forward/test_load_balancer.rb +106 -0
- data/test/plugin/out_forward/test_socket_cache.rb +149 -0
- data/test/plugin/test_bare_output.rb +131 -0
- data/test/plugin/test_base.rb +115 -0
- data/test/plugin/test_buf_file.rb +1275 -0
- data/test/plugin/test_buf_file_single.rb +833 -0
- data/test/plugin/test_buf_memory.rb +42 -0
- data/test/plugin/test_buffer.rb +1383 -0
- data/test/plugin/test_buffer_chunk.rb +198 -0
- data/test/plugin/test_buffer_file_chunk.rb +871 -0
- data/test/plugin/test_buffer_file_single_chunk.rb +611 -0
- data/test/plugin/test_buffer_memory_chunk.rb +339 -0
- data/test/plugin/test_compressable.rb +87 -0
- data/test/plugin/test_file_util.rb +96 -0
- data/test/plugin/test_file_wrapper.rb +126 -0
- data/test/plugin/test_filter.rb +368 -0
- data/test/plugin/test_filter_grep.rb +697 -0
- data/test/plugin/test_filter_parser.rb +731 -0
- data/test/plugin/test_filter_record_transformer.rb +577 -0
- data/test/plugin/test_filter_stdout.rb +207 -0
- data/test/plugin/test_formatter_csv.rb +136 -0
- data/test/plugin/test_formatter_hash.rb +38 -0
- data/test/plugin/test_formatter_json.rb +61 -0
- data/test/plugin/test_formatter_ltsv.rb +70 -0
- data/test/plugin/test_formatter_msgpack.rb +28 -0
- data/test/plugin/test_formatter_out_file.rb +116 -0
- data/test/plugin/test_formatter_single_value.rb +44 -0
- data/test/plugin/test_formatter_tsv.rb +76 -0
- data/test/plugin/test_in_debug_agent.rb +49 -0
- data/test/plugin/test_in_exec.rb +261 -0
- data/test/plugin/test_in_forward.rb +1180 -0
- data/test/plugin/test_in_gc_stat.rb +62 -0
- data/test/plugin/test_in_http.rb +1080 -0
- data/test/plugin/test_in_monitor_agent.rb +923 -0
- data/test/plugin/test_in_object_space.rb +60 -0
- data/test/plugin/test_in_sample.rb +190 -0
- data/test/plugin/test_in_syslog.rb +505 -0
- data/test/plugin/test_in_tail.rb +2363 -0
- data/test/plugin/test_in_tcp.rb +243 -0
- data/test/plugin/test_in_udp.rb +268 -0
- data/test/plugin/test_in_unix.rb +181 -0
- data/test/plugin/test_input.rb +137 -0
- data/test/plugin/test_metadata.rb +89 -0
- data/test/plugin/test_metrics.rb +294 -0
- data/test/plugin/test_metrics_local.rb +96 -0
- data/test/plugin/test_multi_output.rb +204 -0
- data/test/plugin/test_out_copy.rb +308 -0
- data/test/plugin/test_out_exec.rb +312 -0
- data/test/plugin/test_out_exec_filter.rb +606 -0
- data/test/plugin/test_out_file.rb +1037 -0
- data/test/plugin/test_out_forward.rb +1348 -0
- data/test/plugin/test_out_http.rb +428 -0
- data/test/plugin/test_out_null.rb +105 -0
- data/test/plugin/test_out_relabel.rb +28 -0
- data/test/plugin/test_out_roundrobin.rb +146 -0
- data/test/plugin/test_out_secondary_file.rb +458 -0
- data/test/plugin/test_out_stdout.rb +205 -0
- data/test/plugin/test_out_stream.rb +103 -0
- data/test/plugin/test_output.rb +1065 -0
- data/test/plugin/test_output_as_buffered.rb +2024 -0
- data/test/plugin/test_output_as_buffered_backup.rb +363 -0
- data/test/plugin/test_output_as_buffered_compress.rb +165 -0
- data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
- data/test/plugin/test_output_as_buffered_retries.rb +919 -0
- data/test/plugin/test_output_as_buffered_secondary.rb +882 -0
- data/test/plugin/test_output_as_standard.rb +374 -0
- data/test/plugin/test_owned_by.rb +35 -0
- data/test/plugin/test_parser.rb +399 -0
- data/test/plugin/test_parser_apache.rb +42 -0
- data/test/plugin/test_parser_apache2.rb +47 -0
- data/test/plugin/test_parser_apache_error.rb +45 -0
- data/test/plugin/test_parser_csv.rb +200 -0
- data/test/plugin/test_parser_json.rb +138 -0
- data/test/plugin/test_parser_labeled_tsv.rb +160 -0
- data/test/plugin/test_parser_multiline.rb +111 -0
- data/test/plugin/test_parser_nginx.rb +88 -0
- data/test/plugin/test_parser_none.rb +52 -0
- data/test/plugin/test_parser_regexp.rb +289 -0
- data/test/plugin/test_parser_syslog.rb +650 -0
- data/test/plugin/test_parser_tsv.rb +122 -0
- data/test/plugin/test_sd_file.rb +228 -0
- data/test/plugin/test_sd_srv.rb +230 -0
- data/test/plugin/test_storage.rb +167 -0
- data/test/plugin/test_storage_local.rb +335 -0
- data/test/plugin/test_string_util.rb +26 -0
- data/test/plugin_helper/data/cert/cert-key.pem +27 -0
- data/test/plugin_helper/data/cert/cert-with-CRLF.pem +19 -0
- data/test/plugin_helper/data/cert/cert-with-no-newline.pem +19 -0
- data/test/plugin_helper/data/cert/cert.pem +19 -0
- data/test/plugin_helper/data/cert/cert_chains/ca-cert-key.pem +27 -0
- data/test/plugin_helper/data/cert/cert_chains/ca-cert.pem +20 -0
- data/test/plugin_helper/data/cert/cert_chains/cert-key.pem +27 -0
- data/test/plugin_helper/data/cert/cert_chains/cert.pem +40 -0
- data/test/plugin_helper/data/cert/empty.pem +0 -0
- data/test/plugin_helper/data/cert/generate_cert.rb +125 -0
- data/test/plugin_helper/data/cert/with_ca/ca-cert-key-pass.pem +30 -0
- data/test/plugin_helper/data/cert/with_ca/ca-cert-key.pem +27 -0
- data/test/plugin_helper/data/cert/with_ca/ca-cert-pass.pem +20 -0
- data/test/plugin_helper/data/cert/with_ca/ca-cert.pem +20 -0
- data/test/plugin_helper/data/cert/with_ca/cert-key-pass.pem +30 -0
- data/test/plugin_helper/data/cert/with_ca/cert-key.pem +27 -0
- data/test/plugin_helper/data/cert/with_ca/cert-pass.pem +21 -0
- data/test/plugin_helper/data/cert/with_ca/cert.pem +21 -0
- data/test/plugin_helper/data/cert/without_ca/cert-key-pass.pem +30 -0
- data/test/plugin_helper/data/cert/without_ca/cert-key.pem +27 -0
- data/test/plugin_helper/data/cert/without_ca/cert-pass.pem +20 -0
- data/test/plugin_helper/data/cert/without_ca/cert.pem +20 -0
- data/test/plugin_helper/http_server/test_app.rb +65 -0
- data/test/plugin_helper/http_server/test_route.rb +32 -0
- data/test/plugin_helper/service_discovery/test_manager.rb +93 -0
- data/test/plugin_helper/service_discovery/test_round_robin_balancer.rb +21 -0
- data/test/plugin_helper/test_cert_option.rb +25 -0
- data/test/plugin_helper/test_child_process.rb +840 -0
- data/test/plugin_helper/test_compat_parameters.rb +358 -0
- data/test/plugin_helper/test_event_emitter.rb +80 -0
- data/test/plugin_helper/test_event_loop.rb +52 -0
- data/test/plugin_helper/test_extract.rb +194 -0
- data/test/plugin_helper/test_formatter.rb +255 -0
- data/test/plugin_helper/test_http_server_helper.rb +372 -0
- data/test/plugin_helper/test_inject.rb +561 -0
- data/test/plugin_helper/test_metrics.rb +137 -0
- data/test/plugin_helper/test_parser.rb +264 -0
- data/test/plugin_helper/test_record_accessor.rb +238 -0
- data/test/plugin_helper/test_retry_state.rb +442 -0
- data/test/plugin_helper/test_server.rb +1823 -0
- data/test/plugin_helper/test_service_discovery.rb +165 -0
- data/test/plugin_helper/test_socket.rb +146 -0
- data/test/plugin_helper/test_storage.rb +542 -0
- data/test/plugin_helper/test_thread.rb +164 -0
- data/test/plugin_helper/test_timer.rb +130 -0
- data/test/scripts/exec_script.rb +32 -0
- data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
- data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
- data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
- data/test/scripts/fluent/plugin/out_test.rb +81 -0
- data/test/scripts/fluent/plugin/out_test2.rb +80 -0
- data/test/scripts/fluent/plugin/parser_known.rb +4 -0
- data/test/test_capability.rb +74 -0
- data/test/test_clock.rb +164 -0
- data/test/test_config.rb +202 -0
- data/test/test_configdsl.rb +148 -0
- data/test/test_daemonizer.rb +91 -0
- data/test/test_engine.rb +203 -0
- data/test/test_event.rb +531 -0
- data/test/test_event_router.rb +331 -0
- data/test/test_event_time.rb +199 -0
- data/test/test_filter.rb +121 -0
- data/test/test_fluent_log_event_router.rb +99 -0
- data/test/test_formatter.rb +366 -0
- data/test/test_input.rb +31 -0
- data/test/test_log.rb +994 -0
- data/test/test_logger_initializer.rb +46 -0
- data/test/test_match.rb +148 -0
- data/test/test_mixin.rb +351 -0
- data/test/test_msgpack_factory.rb +18 -0
- data/test/test_oj_options.rb +55 -0
- data/test/test_output.rb +278 -0
- data/test/test_plugin.rb +251 -0
- data/test/test_plugin_classes.rb +370 -0
- data/test/test_plugin_helper.rb +81 -0
- data/test/test_plugin_id.rb +119 -0
- data/test/test_process.rb +14 -0
- data/test/test_root_agent.rb +951 -0
- data/test/test_static_config_analysis.rb +177 -0
- data/test/test_supervisor.rb +601 -0
- data/test/test_test_drivers.rb +136 -0
- data/test/test_time_formatter.rb +301 -0
- data/test/test_time_parser.rb +362 -0
- data/test/test_tls.rb +65 -0
- data/test/test_unique_id.rb +47 -0
- data/test/test_variable_store.rb +65 -0
- metadata +1261 -0
data/lib/fluent/plugin/buffer.rb
@@ -0,0 +1,918 @@
#
# Fluentd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require 'fluent/plugin/base'
require 'fluent/plugin/owned_by_mixin'
require 'fluent/plugin_id'
require 'fluent/plugin_helper'
require 'fluent/unique_id'
require 'fluent/ext_monitor_require'

module Fluent
  module Plugin
    class Buffer < Base
      include OwnedByMixin
      include UniqueId::Mixin
      include PluginId
      include MonitorMixin
      include PluginHelper::Mixin # for metrics

      class BufferError < StandardError; end
      class BufferOverflowError < BufferError; end
      class BufferChunkOverflowError < BufferError; end # A record size is larger than chunk size limit

      MINIMUM_APPEND_ATTEMPT_RECORDS = 10

      DEFAULT_CHUNK_LIMIT_SIZE = 8 * 1024 * 1024 # 8MB
      DEFAULT_TOTAL_LIMIT_SIZE = 512 * 1024 * 1024 # 512MB, same with v0.12 (BufferedOutput + buf_memory: 64 x 8MB)

      DEFAULT_CHUNK_FULL_THRESHOLD = 0.95

      configured_in :buffer

      helpers_internal :metrics

      # TODO: system total buffer limit size in bytes by SystemConfig

      config_param :chunk_limit_size, :size, default: DEFAULT_CHUNK_LIMIT_SIZE
      config_param :total_limit_size, :size, default: DEFAULT_TOTAL_LIMIT_SIZE

      # If user specify this value and (chunk_size * queue_length) is smaller than total_size,
      # then total_size is automatically configured to that value
      config_param :queue_limit_length, :integer, default: nil

      # optional new limitations
      config_param :chunk_limit_records, :integer, default: nil

      # if chunk size (or records) is 95% or more after #write, then that chunk will be enqueued
      config_param :chunk_full_threshold, :float, default: DEFAULT_CHUNK_FULL_THRESHOLD

      desc 'The max number of queued chunks.'
      config_param :queued_chunks_limit_size, :integer, default: nil

      desc 'Compress buffered data.'
      config_param :compress, :enum, list: [:text, :gzip], default: :text

      Metadata = Struct.new(:timekey, :tag, :variables, :seq) do
        def initialize(timekey, tag, variables)
          super(timekey, tag, variables, 0)
        end

        def dup_next
          m = dup
          m.seq = seq + 1
          m
        end

        def empty?
          timekey.nil? && tag.nil? && variables.nil?
        end

        def cmp_variables(v1, v2)
          if v1.nil? && v2.nil?
            return 0
          elsif v1.nil? # v2 is non-nil
            return -1
          elsif v2.nil? # v1 is non-nil
            return 1
          end
          # both of v1 and v2 are non-nil
          v1_sorted_keys = v1.keys.sort
          v2_sorted_keys = v2.keys.sort
          if v1_sorted_keys != v2_sorted_keys
            if v1_sorted_keys.size == v2_sorted_keys.size
              v1_sorted_keys <=> v2_sorted_keys
            else
              v1_sorted_keys.size <=> v2_sorted_keys.size
            end
          else
            v1_sorted_keys.each do |k|
              a = v1[k]
              b = v2[k]
              if a && b && a != b
                return a <=> b
              elsif a && b || (!a && !b) # same value (including both are nil)
                next
              elsif a # b is nil
                return 1
              else # a is nil (but b is non-nil)
                return -1
              end
            end

            0
          end
        end

        def <=>(o)
          timekey2 = o.timekey
          tag2 = o.tag
          variables2 = o.variables
          if (!!timekey ^ !!timekey2) || (!!tag ^ !!tag2) || (!!variables ^ !!variables2)
            # One has value in a field, but another doesn't have value in same field
            # This case occurs very rarely
            if timekey == timekey2 # including the case of nil == nil
              if tag == tag2
                cmp_variables(variables, variables2)
              elsif tag.nil?
                -1
              elsif tag2.nil?
                1
              else
                tag <=> tag2
              end
            elsif timekey.nil?
              -1
            elsif timekey2.nil?
              1
            else
              timekey <=> timekey2
            end
          else
            # objects have values in same field pairs (comparison with non-nil and nil doesn't occur here)
            (timekey <=> timekey2 || 0).nonzero? || # if `a <=> b` is nil, then both are nil
              (tag <=> tag2 || 0).nonzero? ||
              cmp_variables(variables, variables2)
          end
        end

        # This is an optimization code. Current Struct's implementation is comparing all data.
        # https://github.com/ruby/ruby/blob/0623e2b7cc621b1733a760b72af246b06c30cf96/struct.c#L1200-L1203
        # Actually this overhead is very small but this class is generated *per chunk* (and used in hash object).
        # This means that this class is one of the most called object in Fluentd.
        # See https://github.com/fluent/fluentd/pull/2560
        def hash
          timekey.hash
        end
      end

      # for metrics
      attr_reader :stage_size_metrics, :stage_length_metrics, :queue_size_metrics, :queue_length_metrics
      attr_reader :available_buffer_space_ratios_metrics, :total_queued_size_metrics
      attr_reader :newest_timekey_metrics, :oldest_timekey_metrics
      # for tests
      attr_reader :stage, :queue, :dequeued, :queued_num

      def initialize
        super

        @chunk_limit_size = nil
        @total_limit_size = nil
        @queue_limit_length = nil
        @chunk_limit_records = nil

        @stage = {}    #=> Hash (metadata -> chunk) : not flushed yet
        @queue = []    #=> Array (chunks)          : already flushed (not written)
        @dequeued = {} #=> Hash (unique_id -> chunk): already written (not purged)
        @queued_num = {} # metadata => int (number of queued chunks)
        @dequeued_num = {} # metadata => int (number of dequeued chunks)

        @stage_length_metrics = nil
        @stage_size_metrics = nil
        @queue_length_metrics = nil
        @queue_size_metrics = nil
        @available_buffer_space_ratios_metrics = nil
        @total_queued_size_metrics = nil
        @newest_timekey_metrics = nil
        @oldest_timekey_metrics = nil
        @timekeys = Hash.new(0)
        @enable_update_timekeys = false
        @mutex = Mutex.new
      end

      def stage_size
        @stage_size_metrics.get
      end

      def stage_size=(value)
        @stage_size_metrics.set(value)
      end

      def queue_size
        @queue_size_metrics.get
      end

      def queue_size=(value)
        @queue_size_metrics.set(value)
      end

      def persistent?
        false
      end

      def configure(conf)
        super

        unless @queue_limit_length.nil?
          @total_limit_size = @chunk_limit_size * @queue_limit_length
        end
        @stage_length_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "stage_length",
                                               help_text: 'Length of stage buffers', prefer_gauge: true)
        @stage_length_metrics.set(0)
        @stage_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "stage_byte_size",
                                             help_text: 'Total size of stage buffers', prefer_gauge: true)
        @stage_size_metrics.set(0) # Ensure zero.
        @queue_length_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "queue_length",
                                               help_text: 'Length of queue buffers', prefer_gauge: true)
        @queue_length_metrics.set(0)
        @queue_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "queue_byte_size",
                                             help_text: 'Total size of queue buffers', prefer_gauge: true)
        @queue_size_metrics.set(0) # Ensure zero.
        @available_buffer_space_ratios_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "available_buffer_space_ratios",
                                                                help_text: 'Ratio of available space in buffer', prefer_gauge: true)
        @available_buffer_space_ratios_metrics.set(100) # Default is 100%.
        @total_queued_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "total_queued_size",
                                                    help_text: 'Total size of stage and queue buffers', prefer_gauge: true)
        @total_queued_size_metrics.set(0)
        @newest_timekey_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "newest_timekey",
                                                 help_text: 'Newest timekey in buffer', prefer_gauge: true)
        @oldest_timekey_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "oldest_timekey",
                                                 help_text: 'Oldest timekey in buffer', prefer_gauge: true)
      end

      def enable_update_timekeys
        @enable_update_timekeys = true
      end

      def start
        super

        @stage, @queue = resume
        @stage.each_pair do |metadata, chunk|
          @stage_size_metrics.add(chunk.bytesize)
        end
        @queue.each do |chunk|
          @queued_num[chunk.metadata] ||= 0
          @queued_num[chunk.metadata] += 1
          @queue_size_metrics.add(chunk.bytesize)
        end
        update_timekeys
        log.debug "buffer started", instance: self.object_id, stage_size: @stage_size_metrics.get, queue_size: @queue_size_metrics.get
      end

      def close
        super
        synchronize do
          log.debug "closing buffer", instance: self.object_id
          @dequeued.each_pair do |chunk_id, chunk|
            chunk.close
          end
          until @queue.empty?
            @queue.shift.close
          end
          @stage.each_pair do |metadata, chunk|
            chunk.close
          end
        end
      end

      def terminate
        super
        @dequeued = @stage = @queue = @queued_num = nil
        @stage_length_metrics = @stage_size_metrics = @queue_length_metrics = @queue_size_metrics = nil
        @available_buffer_space_ratios_metrics = @total_queued_size_metrics = nil
        @newest_timekey_metrics = @oldest_timekey_metrics = nil
        @timekeys.clear
      end

      def storable?
        @total_limit_size > @stage_size_metrics.get + @queue_size_metrics.get
      end

      ## TODO: for back pressure feature
      # def used?(ratio)
      #   @total_limit_size * ratio > @stage_size_metrics.get + @queue_size_metrics.get
      # end

      def resume
        # return {}, []
        raise NotImplementedError, "Implement this method in child class"
      end

      def generate_chunk(metadata)
        raise NotImplementedError, "Implement this method in child class"
      end

      def new_metadata(timekey: nil, tag: nil, variables: nil)
        Metadata.new(timekey, tag, variables)
      end

      # Keep this method for existing code
      def metadata(timekey: nil, tag: nil, variables: nil)
        Metadata.new(timekey, tag, variables)
      end

      def timekeys
        @timekeys.keys
      end

      # metadata MUST have consistent object_id for each variation
      # data MUST be Array of serialized events, or EventStream
      # metadata_and_data MUST be a hash of { metadata => data }
      def write(metadata_and_data, format: nil, size: nil, enqueue: false)
        return if metadata_and_data.size < 1
        raise BufferOverflowError, "buffer space has too many data" unless storable?

        log.on_trace { log.trace "writing events into buffer", instance: self.object_id, metadata_size: metadata_and_data.size }

        operated_chunks = []
        unstaged_chunks = {} # metadata => [chunk, chunk, ...]
        chunks_to_enqueue = []
        staged_bytesizes_by_chunk = {}
        # track internal BufferChunkOverflowError in write_step_by_step
        buffer_chunk_overflow_errors = []

        begin
          # sort metadata to get lock of chunks in same order with other threads
          metadata_and_data.keys.sort.each do |metadata|
            data = metadata_and_data[metadata]
            write_once(metadata, data, format: format, size: size) do |chunk, adding_bytesize, error|
              chunk.mon_enter # add lock to prevent to be committed/rollbacked from other threads
              operated_chunks << chunk
              if chunk.staged?
                #
                # https://github.com/fluent/fluentd/issues/2712
                # write_once is supposed to write to a chunk only once
                # but this block **may** run multiple times from write_step_by_step and previous write may be rollbacked
                # So we should be counting the stage_size only for the last successful write
                #
                staged_bytesizes_by_chunk[chunk] = adding_bytesize
              elsif chunk.unstaged?
                unstaged_chunks[metadata] ||= []
                unstaged_chunks[metadata] << chunk
              end
              if error && !error.empty?
                buffer_chunk_overflow_errors << error
              end
            end
          end

          return if operated_chunks.empty?

          # Now, this thread acquires many locks of chunks... getting buffer-global lock causes dead lock.
          # Any operations needs buffer-global lock (including enqueueing) should be done after releasing locks.

          first_chunk = operated_chunks.shift
          # Following commits for other chunks also can finish successfully if the first commit operation
          # finishes without any exceptions.
          # In most cases, #commit just requires very small disk spaces, so major failure reason are
          # permission errors, disk failures and other permanent(fatal) errors.
          begin
            first_chunk.commit
            if enqueue || first_chunk.unstaged? || chunk_size_full?(first_chunk)
              chunks_to_enqueue << first_chunk
            end
            first_chunk.mon_exit
          rescue
            operated_chunks.unshift(first_chunk)
            raise
          end

          errors = []
          # Buffer plugin estimates there's no serious error cause: will commit for all chunks eigher way
          operated_chunks.each do |chunk|
            begin
              chunk.commit
              if enqueue || chunk.unstaged? || chunk_size_full?(chunk)
                chunks_to_enqueue << chunk
              end
              chunk.mon_exit
            rescue => e
              chunk.rollback
              chunk.mon_exit
              errors << e
            end
          end

          # All locks about chunks are released.

          #
          # Now update the stage, stage_size with proper locking
          # FIX FOR stage_size miscomputation - https://github.com/fluent/fluentd/issues/2712
          #
          staged_bytesizes_by_chunk.each do |chunk, bytesize|
            chunk.synchronize do
              synchronize { @stage_size_metrics.add(bytesize) }
              log.on_trace { log.trace { "chunk #{chunk.path} size_added: #{bytesize} new_size: #{chunk.bytesize}" } }
            end
          end

          chunks_to_enqueue.each do |c|
            if c.staged? && (enqueue || chunk_size_full?(c))
              m = c.metadata
              enqueue_chunk(m)
              if unstaged_chunks[m]
                u = unstaged_chunks[m].pop
                u.synchronize do
                  if u.unstaged? && !chunk_size_full?(u)
                    # `u.metadata.seq` and `m.seq` can be different but Buffer#enqueue_chunk expect them to be the same value
                    u.metadata.seq = 0
                    synchronize {
                      @stage[m] = u.staged!
                      @stage_size_metrics.add(u.bytesize)
                    }
                  end
                end
              end
            elsif c.unstaged?
              enqueue_unstaged_chunk(c)
            else
              # previously staged chunk is already enqueued, closed or purged.
              # no problem.
            end
          end

          operated_chunks.clear if errors.empty?

          if errors.size > 0
            log.warn "error occurs in committing chunks: only first one raised", errors: errors.map(&:class)
            raise errors.first
          end
        ensure
          operated_chunks.each do |chunk|
            chunk.rollback rescue nil # nothing possible to do for #rollback failure
            if chunk.unstaged?
              chunk.purge rescue nil # to prevent leakage of unstaged chunks
            end
            chunk.mon_exit rescue nil # this may raise ThreadError for chunks already committed
          end
          unless buffer_chunk_overflow_errors.empty?
            # Notify delayed BufferChunkOverflowError here
            raise BufferChunkOverflowError, buffer_chunk_overflow_errors.join(", ")
          end
        end
      end

      def queue_full?
        synchronize { @queue.size } >= @queued_chunks_limit_size
      end

      def queued_records
        synchronize { @queue.reduce(0){|r, chunk| r + chunk.size } }
      end

      def queued?(metadata = nil, optimistic: false)
        if optimistic
          optimistic_queued?(metadata)
        else
          synchronize do
            optimistic_queued?(metadata)
          end
        end
      end

      def enqueue_chunk(metadata)
        log.on_trace { log.trace "enqueueing chunk", instance: self.object_id, metadata: metadata }

        chunk = synchronize do
          @stage.delete(metadata)
        end
        return nil unless chunk

        chunk.synchronize do
          synchronize do
            if chunk.empty?
              chunk.close
            else
              chunk.metadata.seq = 0 # metadata.seq should be 0 for counting @queued_num
              @queue << chunk
              @queued_num[metadata] = @queued_num.fetch(metadata, 0) + 1
              chunk.enqueued!
            end
            bytesize = chunk.bytesize
            @stage_size_metrics.sub(bytesize)
            @queue_size_metrics.add(bytesize)
          end
        end
        nil
      end

      def enqueue_unstaged_chunk(chunk)
        log.on_trace { log.trace "enqueueing unstaged chunk", instance: self.object_id, metadata: chunk.metadata }

        synchronize do
          chunk.synchronize do
            metadata = chunk.metadata
            metadata.seq = 0 # metadata.seq should be 0 for counting @queued_num
            @queue << chunk
            @queued_num[metadata] = @queued_num.fetch(metadata, 0) + 1
            chunk.enqueued!
          end
          @queue_size_metrics.add(chunk.bytesize)
        end
      end

      def update_timekeys
        synchronize do
          chunks = @stage.values
          chunks.concat(@queue)
          @timekeys = chunks.each_with_object({}) do |chunk, keys|
            if chunk.metadata && chunk.metadata.timekey
              t = chunk.metadata.timekey
              keys[t] = keys.fetch(t, 0) + 1
            end
          end
        end
      end

      # At flush_at_shutdown, all staged chunks should be enqueued for buffer flush. Set true to force_enqueue for it.
      def enqueue_all(force_enqueue = false)
        log.on_trace { log.trace "enqueueing all chunks in buffer", instance: self.object_id }
        update_timekeys if @enable_update_timekeys

        if block_given?
          synchronize{ @stage.keys }.each do |metadata|
            return if !force_enqueue && queue_full?
            # NOTE: The following line might cause data race depending on Ruby implementations except CRuby
            # cf. https://github.com/fluent/fluentd/pull/1721#discussion_r146170251
            chunk = @stage[metadata]
            next unless chunk
            v = yield metadata, chunk
            enqueue_chunk(metadata) if v
          end
        else
          synchronize{ @stage.keys }.each do |metadata|
            return if !force_enqueue && queue_full?
            enqueue_chunk(metadata)
          end
        end
      end

      def dequeue_chunk
        return nil if @queue.empty?
        log.on_trace { log.trace "dequeueing a chunk", instance: self.object_id }

        synchronize do
          chunk = @queue.shift

          # this buffer is dequeued by other thread just before "synchronize" in this thread
          return nil unless chunk

          @dequeued[chunk.unique_id] = chunk
          @queued_num[chunk.metadata] -= 1 # BUG if nil, 0 or subzero
          @dequeued_num[chunk.metadata] ||= 0
          @dequeued_num[chunk.metadata] += 1
          log.trace "chunk dequeued", instance: self.object_id, metadata: chunk.metadata
          chunk
        end
      end

      def takeback_chunk(chunk_id)
        log.on_trace { log.trace "taking back a chunk", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id) }

        synchronize do
          chunk = @dequeued.delete(chunk_id)
          return false unless chunk # already purged by other thread
          @queue.unshift(chunk)
          log.trace "chunk taken back", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: chunk.metadata
          @queued_num[chunk.metadata] += 1 # BUG if nil
          @dequeued_num[chunk.metadata] -= 1
        end
        true
      end

      def purge_chunk(chunk_id)
        metadata = nil
        synchronize do
          chunk = @dequeued.delete(chunk_id)
          return nil unless chunk # purged by other threads

          metadata = chunk.metadata
          log.on_trace { log.trace "purging a chunk", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: metadata }

          begin
            bytesize = chunk.bytesize
            chunk.purge
            @queue_size_metrics.sub(bytesize)
          rescue => e
            log.error "failed to purge buffer chunk", chunk_id: dump_unique_id_hex(chunk_id), error_class: e.class, error: e
            log.error_backtrace
          end

          @dequeued_num[chunk.metadata] -= 1
          if metadata && !@stage[metadata] && (!@queued_num[metadata] || @queued_num[metadata] < 1) && @dequeued_num[metadata].zero?
            @queued_num.delete(metadata)
            @dequeued_num.delete(metadata)
          end
          log.trace "chunk purged", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: metadata
        end

        nil
      end

      def clear_queue!
        log.on_trace { log.trace "clearing queue", instance: self.object_id }

        synchronize do
          until @queue.empty?
            begin
              q = @queue.shift
              log.trace("purging a chunk in queue"){ {id: dump_unique_id_hex(chunk.unique_id), bytesize: chunk.bytesize, size: chunk.size} }
              q.purge
            rescue => e
              log.error "unexpected error while clearing buffer queue", error_class: e.class, error: e
              log.error_backtrace
            end
          end
          @queue_size_metrics.set(0)
        end
      end

      def chunk_size_over?(chunk)
        chunk.bytesize > @chunk_limit_size || (@chunk_limit_records && chunk.size > @chunk_limit_records)
      end

      def chunk_size_full?(chunk)
        chunk.bytesize >= @chunk_limit_size * @chunk_full_threshold || (@chunk_limit_records && chunk.size >= @chunk_limit_records * @chunk_full_threshold)
      end

      class ShouldRetry < StandardError; end

      # write once into a chunk
      # 1. append whole data into existing chunk
      # 2. commit it & return unless chunk_size_over?
      # 3. enqueue existing chunk & retry whole method if chunk was not empty
      # 4. go to step_by_step writing

      def write_once(metadata, data, format: nil, size: nil, &block)
        return if data.empty?

        stored = false
        adding_bytesize = nil

        chunk = synchronize { @stage[metadata] ||= generate_chunk(metadata).staged! }
        enqueue_chunk_before_retry = false
        chunk.synchronize do
          # retry this method if chunk is already queued (between getting chunk and entering critical section)
          raise ShouldRetry unless chunk.staged?

          empty_chunk = chunk.empty?

          original_bytesize = chunk.bytesize
          begin
            if format
              serialized = format.call(data)
              chunk.concat(serialized, size ? size.call : data.size)
            else
              chunk.append(data, compress: @compress)
            end
            adding_bytesize = chunk.bytesize - original_bytesize

            if chunk_size_over?(chunk)
              if format && empty_chunk
                if chunk.bytesize > @chunk_limit_size
                  log.warn "chunk bytes limit exceeds for an emitted event stream: #{adding_bytesize}bytes"
                else
                  log.warn "chunk size limit exceeds for an emitted event stream: #{chunk.size}records"
                end
              end
              chunk.rollback

              if format && !empty_chunk
                # Event streams should be appended into a chunk at once
                # as far as possible, to improve performance of formatting.
                # Event stream may be a MessagePackEventStream. We don't want to split it into
                # 2 or more chunks (except for a case that the event stream is larger than chunk limit).
                enqueue_chunk_before_retry = true
                raise ShouldRetry
              end
            else
              stored = true
            end
          rescue
            chunk.rollback
            raise
          end

          if stored
            block.call(chunk, adding_bytesize)
          end
        end

        unless stored
          # try step-by-step appending if data can't be stored into existing a chunk in non-bulk mode
          #
          # 1/10 size of original event stream (splits_count == 10) seems enough small
          # to try emitting events into existing chunk.
          # it does not matter to split event stream into very small splits, because chunks have less
          # overhead to write data many times (even about file buffer chunks).
          write_step_by_step(metadata, data, format, 10, &block)
        end
      rescue ShouldRetry
        enqueue_chunk(metadata) if enqueue_chunk_before_retry
        retry
      end

      # EventStream can be split into many streams
      # because (es1 + es2).to_msgpack_stream == es1.to_msgpack_stream + es2.to_msgpack_stream

      # 1. split event streams into many (10 -> 100 -> 1000 -> ...) chunks
      # 2. append splits into the staged chunks as much as possible
      # 3. create unstaged chunk and append rest splits -> repeat it for all splits

      def write_step_by_step(metadata, data, format, splits_count, &block)
        splits = []
        errors = []
        if splits_count > data.size
          splits_count = data.size
        end
        slice_size = if data.size % splits_count == 0
                       data.size / splits_count
                     else
                       data.size / (splits_count - 1)
                     end
        slice_origin = 0
        while slice_origin < data.size
          splits << data.slice(slice_origin, slice_size)
          slice_origin += slice_size
        end

        # This method will append events into the staged chunk at first.
        # Then, will generate chunks not staged (not queued) to append rest data.
        staged_chunk_used = false
        modified_chunks = []
        modified_metadata = metadata
        get_next_chunk = ->(){
          c = if staged_chunk_used
                # Staging new chunk here is bad idea:
                # Recovering whole state including newly staged chunks is much harder than current implementation.
                modified_metadata = modified_metadata.dup_next
                generate_chunk(modified_metadata)
              else
                synchronize { @stage[modified_metadata] ||= generate_chunk(modified_metadata).staged! }
              end
          modified_chunks << c
          c
        }

        writing_splits_index = 0
        enqueue_chunk_before_retry = false

        while writing_splits_index < splits.size
          chunk = get_next_chunk.call
          chunk.synchronize do
            raise ShouldRetry unless chunk.writable?
            staged_chunk_used = true if chunk.staged?

            original_bytesize = committed_bytesize = chunk.bytesize
            begin
              while writing_splits_index < splits.size
                split = splits[writing_splits_index]
                formatted_split = format ? format.call(split) : nil

                if split.size == 1 # Check BufferChunkOverflowError
                  determined_bytesize = nil
                  if @compress != :text
                    determined_bytesize = nil
                  elsif formatted_split
                    determined_bytesize = formatted_split.bytesize
                  elsif split.first.respond_to?(:bytesize)
                    determined_bytesize = split.first.bytesize
                  end

                  if determined_bytesize && determined_bytesize > @chunk_limit_size
                    # It is a obvious case that BufferChunkOverflowError should be raised here.
                    # But if it raises here, already processed 'split' or
                    # the proceeding 'split' will be lost completely.
                    # So it is a last resort to delay raising such a exception
                    errors << "a #{determined_bytesize} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
                    writing_splits_index += 1
                    next
                  end

                  if determined_bytesize.nil? || chunk.bytesize + determined_bytesize > @chunk_limit_size
                    # The split will (might) cause size over so keep already processed
                    # 'split' content here (allow performance regression a bit).
                    chunk.commit
                    committed_bytesize = chunk.bytesize
                  end
                end

                if format
                  chunk.concat(formatted_split, split.size)
                else
                  chunk.append(split, compress: @compress)
                end
                adding_bytes = chunk.bytesize - committed_bytesize

                if chunk_size_over?(chunk) # split size is larger than difference between size_full? and size_over?
                  chunk.rollback
                  committed_bytesize = chunk.bytesize

                  if split.size == 1 # Check BufferChunkOverflowError again
                    if adding_bytes > @chunk_limit_size
                      errors << "concatenated/appended a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
                      writing_splits_index += 1
                      next
                    else
                      # As already processed content is kept after rollback, then unstaged chunk should be queued.
                      # After that, re-process current split again.
                      # New chunk should be allocated, to do it, modify @stage and so on.
                      synchronize { @stage.delete(modified_metadata) }
                      staged_chunk_used = false
                      chunk.unstaged!
                      break
                    end
                  end

                  if chunk_size_full?(chunk) || split.size == 1
                    enqueue_chunk_before_retry = true
                  else
                    splits_count *= 10
                  end

                  raise ShouldRetry
                end

                writing_splits_index += 1

                if chunk_size_full?(chunk)
                  break
                end
              end
            rescue
              chunk.purge if chunk.unstaged? # unstaged chunk will leak unless purge it
              raise
            end

            block.call(chunk, chunk.bytesize - original_bytesize, errors)
            errors = []
          end
        end
      rescue ShouldRetry
        modified_chunks.each do |mc|
          mc.rollback rescue nil
          if mc.unstaged?
            mc.purge rescue nil
          end
        end
        enqueue_chunk(metadata) if enqueue_chunk_before_retry
        retry
      end

      STATS_KEYS = [
        'stage_length',
        'stage_byte_size',
        'queue_length',
        'queue_byte_size',
        'available_buffer_space_ratios',
        'total_queued_size',
        'oldest_timekey',
        'newest_timekey'
      ]

      def statistics
        stage_size, queue_size = @stage_size_metrics.get, @queue_size_metrics.get
        buffer_space = 1.0 - ((stage_size + queue_size * 1.0) / @total_limit_size)
        @stage_length_metrics.set(@stage.size)
        @queue_length_metrics.set(@queue.size)
        @available_buffer_space_ratios_metrics.set(buffer_space * 100)
        @total_queued_size_metrics.set(stage_size + queue_size)
        stats = {
          'stage_length' => @stage_length_metrics.get,
          'stage_byte_size' => stage_size,
          'queue_length' => @queue_length_metrics.get,
          'queue_byte_size' => queue_size,
          'available_buffer_space_ratios' => @available_buffer_space_ratios_metrics.get.round(1),
          'total_queued_size' => @total_queued_size_metrics.get,
        }

        tkeys = timekeys
        if (m = tkeys.min)
          @oldest_timekey_metrics.set(m)
          stats['oldest_timekey'] = @oldest_timekey_metrics.get
        end
        if (m = tkeys.max)
          @newest_timekey_metrics.set(m)
          stats['newest_timekey'] = @newest_timekey_metrics.get
        end

        { 'buffer' => stats }
      end

      private

      def optimistic_queued?(metadata = nil)
        if metadata
          n = @queued_num[metadata]
          n && n.nonzero?
        else
          !@queue.empty?
        end
      end
    end
  end
end
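The resume and generate_chunk methods above are the two hooks Fluent::Plugin::Buffer leaves to concrete buffer plugins; in this release they are implemented by data/lib/fluent/plugin/buf_memory.rb and data/lib/fluent/plugin/buf_file.rb. As a rough illustration of how a subclass plugs into this base class, a minimal in-memory variant mirroring what buf_memory.rb does might look like the sketch below; the class name TrivialMemoryBuffer and the plugin name 'trivial_memory' are hypothetical and not part of the gem.

require 'fluent/plugin'
require 'fluent/plugin/buffer'
require 'fluent/plugin/buffer/memory_chunk'

module Fluent
  module Plugin
    # Illustrative sketch only -- not shipped in this gem.
    class TrivialMemoryBuffer < Fluent::Plugin::Buffer
      Fluent::Plugin.register_buffer('trivial_memory', self)

      def resume
        # Nothing is persisted across restarts: start with an empty stage and queue.
        return {}, []
      end

      def generate_chunk(metadata)
        # Chunks hold the buffered payload; MemoryChunk keeps it on the heap.
        Fluent::Plugin::Buffer::MemoryChunk.new(metadata, compress: @compress)
      end
    end
  end
end

File-backed buffers differ mainly in these two methods: resume rebuilds the stage and queue from chunk files found on disk, and generate_chunk creates a file-backed chunk under the configured path.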