fluentd 1.14.4-x64-mingw-ucrt

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (558) hide show
  1. checksums.yaml +7 -0
  2. data/.deepsource.toml +13 -0
  3. data/.drone.yml +35 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.yaml +70 -0
  5. data/.github/ISSUE_TEMPLATE/config.yml +5 -0
  6. data/.github/ISSUE_TEMPLATE/feature_request.yaml +38 -0
  7. data/.github/ISSUE_TEMPLATE.md +17 -0
  8. data/.github/PULL_REQUEST_TEMPLATE.md +14 -0
  9. data/.github/workflows/issue-auto-closer.yml +12 -0
  10. data/.github/workflows/linux-test.yaml +36 -0
  11. data/.github/workflows/macos-test.yaml +30 -0
  12. data/.github/workflows/stale-actions.yml +22 -0
  13. data/.github/workflows/windows-test.yaml +46 -0
  14. data/.gitignore +30 -0
  15. data/.gitlab-ci.yml +103 -0
  16. data/ADOPTERS.md +5 -0
  17. data/AUTHORS +2 -0
  18. data/CHANGELOG.md +2409 -0
  19. data/CONTRIBUTING.md +45 -0
  20. data/GOVERNANCE.md +55 -0
  21. data/Gemfile +9 -0
  22. data/GithubWorkflow.md +78 -0
  23. data/LICENSE +202 -0
  24. data/MAINTAINERS.md +11 -0
  25. data/README.md +97 -0
  26. data/Rakefile +79 -0
  27. data/SECURITY.md +18 -0
  28. data/bin/fluent-binlog-reader +7 -0
  29. data/bin/fluent-ca-generate +6 -0
  30. data/bin/fluent-cap-ctl +7 -0
  31. data/bin/fluent-cat +5 -0
  32. data/bin/fluent-ctl +7 -0
  33. data/bin/fluent-debug +5 -0
  34. data/bin/fluent-gem +9 -0
  35. data/bin/fluent-plugin-config-format +5 -0
  36. data/bin/fluent-plugin-generate +5 -0
  37. data/bin/fluentd +15 -0
  38. data/code-of-conduct.md +3 -0
  39. data/docs/SECURITY_AUDIT.pdf +0 -0
  40. data/example/copy_roundrobin.conf +39 -0
  41. data/example/counter.conf +18 -0
  42. data/example/filter_stdout.conf +22 -0
  43. data/example/in_forward.conf +14 -0
  44. data/example/in_forward_client.conf +37 -0
  45. data/example/in_forward_shared_key.conf +15 -0
  46. data/example/in_forward_tls.conf +14 -0
  47. data/example/in_forward_users.conf +24 -0
  48. data/example/in_forward_workers.conf +21 -0
  49. data/example/in_http.conf +16 -0
  50. data/example/in_out_forward.conf +17 -0
  51. data/example/in_sample_blocks.conf +17 -0
  52. data/example/in_sample_with_compression.conf +23 -0
  53. data/example/in_syslog.conf +15 -0
  54. data/example/in_tail.conf +14 -0
  55. data/example/in_tcp.conf +13 -0
  56. data/example/in_udp.conf +13 -0
  57. data/example/logevents.conf +25 -0
  58. data/example/multi_filters.conf +61 -0
  59. data/example/out_copy.conf +20 -0
  60. data/example/out_exec_filter.conf +42 -0
  61. data/example/out_file.conf +13 -0
  62. data/example/out_forward.conf +35 -0
  63. data/example/out_forward_buf_file.conf +23 -0
  64. data/example/out_forward_client.conf +109 -0
  65. data/example/out_forward_heartbeat_none.conf +16 -0
  66. data/example/out_forward_sd.conf +17 -0
  67. data/example/out_forward_shared_key.conf +36 -0
  68. data/example/out_forward_tls.conf +18 -0
  69. data/example/out_forward_users.conf +65 -0
  70. data/example/out_null.conf +36 -0
  71. data/example/sd.yaml +8 -0
  72. data/example/secondary_file.conf +42 -0
  73. data/example/suppress_config_dump.conf +7 -0
  74. data/example/v0_12_filter.conf +78 -0
  75. data/example/v1_literal_example.conf +36 -0
  76. data/example/worker_section.conf +36 -0
  77. data/fluent.conf +139 -0
  78. data/fluentd.gemspec +55 -0
  79. data/lib/fluent/agent.rb +168 -0
  80. data/lib/fluent/capability.rb +87 -0
  81. data/lib/fluent/clock.rb +66 -0
  82. data/lib/fluent/command/binlog_reader.rb +244 -0
  83. data/lib/fluent/command/bundler_injection.rb +45 -0
  84. data/lib/fluent/command/ca_generate.rb +184 -0
  85. data/lib/fluent/command/cap_ctl.rb +174 -0
  86. data/lib/fluent/command/cat.rb +365 -0
  87. data/lib/fluent/command/ctl.rb +177 -0
  88. data/lib/fluent/command/debug.rb +103 -0
  89. data/lib/fluent/command/fluentd.rb +374 -0
  90. data/lib/fluent/command/plugin_config_formatter.rb +308 -0
  91. data/lib/fluent/command/plugin_generator.rb +365 -0
  92. data/lib/fluent/compat/call_super_mixin.rb +76 -0
  93. data/lib/fluent/compat/detach_process_mixin.rb +33 -0
  94. data/lib/fluent/compat/exec_util.rb +129 -0
  95. data/lib/fluent/compat/file_util.rb +54 -0
  96. data/lib/fluent/compat/filter.rb +68 -0
  97. data/lib/fluent/compat/formatter.rb +111 -0
  98. data/lib/fluent/compat/formatter_utils.rb +85 -0
  99. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  100. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  101. data/lib/fluent/compat/input.rb +49 -0
  102. data/lib/fluent/compat/output.rb +721 -0
  103. data/lib/fluent/compat/output_chain.rb +60 -0
  104. data/lib/fluent/compat/parser.rb +310 -0
  105. data/lib/fluent/compat/parser_utils.rb +40 -0
  106. data/lib/fluent/compat/propagate_default.rb +62 -0
  107. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  108. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  109. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  110. data/lib/fluent/compat/socket_util.rb +165 -0
  111. data/lib/fluent/compat/string_util.rb +34 -0
  112. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  113. data/lib/fluent/compat/type_converter.rb +90 -0
  114. data/lib/fluent/config/basic_parser.rb +123 -0
  115. data/lib/fluent/config/configure_proxy.rb +424 -0
  116. data/lib/fluent/config/dsl.rb +152 -0
  117. data/lib/fluent/config/element.rb +265 -0
  118. data/lib/fluent/config/error.rb +32 -0
  119. data/lib/fluent/config/literal_parser.rb +286 -0
  120. data/lib/fluent/config/parser.rb +107 -0
  121. data/lib/fluent/config/section.rb +272 -0
  122. data/lib/fluent/config/types.rb +249 -0
  123. data/lib/fluent/config/v1_parser.rb +192 -0
  124. data/lib/fluent/config.rb +76 -0
  125. data/lib/fluent/configurable.rb +201 -0
  126. data/lib/fluent/counter/base_socket.rb +44 -0
  127. data/lib/fluent/counter/client.rb +297 -0
  128. data/lib/fluent/counter/error.rb +86 -0
  129. data/lib/fluent/counter/mutex_hash.rb +163 -0
  130. data/lib/fluent/counter/server.rb +273 -0
  131. data/lib/fluent/counter/store.rb +205 -0
  132. data/lib/fluent/counter/validator.rb +145 -0
  133. data/lib/fluent/counter.rb +23 -0
  134. data/lib/fluent/daemon.rb +15 -0
  135. data/lib/fluent/daemonizer.rb +88 -0
  136. data/lib/fluent/engine.rb +253 -0
  137. data/lib/fluent/env.rb +40 -0
  138. data/lib/fluent/error.rb +34 -0
  139. data/lib/fluent/event.rb +326 -0
  140. data/lib/fluent/event_router.rb +297 -0
  141. data/lib/fluent/ext_monitor_require.rb +28 -0
  142. data/lib/fluent/filter.rb +21 -0
  143. data/lib/fluent/fluent_log_event_router.rb +141 -0
  144. data/lib/fluent/formatter.rb +23 -0
  145. data/lib/fluent/input.rb +21 -0
  146. data/lib/fluent/label.rb +46 -0
  147. data/lib/fluent/load.rb +34 -0
  148. data/lib/fluent/log.rb +713 -0
  149. data/lib/fluent/match.rb +187 -0
  150. data/lib/fluent/mixin.rb +31 -0
  151. data/lib/fluent/msgpack_factory.rb +106 -0
  152. data/lib/fluent/oj_options.rb +62 -0
  153. data/lib/fluent/output.rb +29 -0
  154. data/lib/fluent/output_chain.rb +23 -0
  155. data/lib/fluent/parser.rb +23 -0
  156. data/lib/fluent/plugin/bare_output.rb +104 -0
  157. data/lib/fluent/plugin/base.rb +197 -0
  158. data/lib/fluent/plugin/buf_file.rb +213 -0
  159. data/lib/fluent/plugin/buf_file_single.rb +225 -0
  160. data/lib/fluent/plugin/buf_memory.rb +34 -0
  161. data/lib/fluent/plugin/buffer/chunk.rb +240 -0
  162. data/lib/fluent/plugin/buffer/file_chunk.rb +413 -0
  163. data/lib/fluent/plugin/buffer/file_single_chunk.rb +311 -0
  164. data/lib/fluent/plugin/buffer/memory_chunk.rb +91 -0
  165. data/lib/fluent/plugin/buffer.rb +918 -0
  166. data/lib/fluent/plugin/compressable.rb +96 -0
  167. data/lib/fluent/plugin/exec_util.rb +22 -0
  168. data/lib/fluent/plugin/file_util.rb +22 -0
  169. data/lib/fluent/plugin/file_wrapper.rb +187 -0
  170. data/lib/fluent/plugin/filter.rb +127 -0
  171. data/lib/fluent/plugin/filter_grep.rb +189 -0
  172. data/lib/fluent/plugin/filter_parser.rb +130 -0
  173. data/lib/fluent/plugin/filter_record_transformer.rb +324 -0
  174. data/lib/fluent/plugin/filter_stdout.rb +53 -0
  175. data/lib/fluent/plugin/formatter.rb +75 -0
  176. data/lib/fluent/plugin/formatter_csv.rb +78 -0
  177. data/lib/fluent/plugin/formatter_hash.rb +35 -0
  178. data/lib/fluent/plugin/formatter_json.rb +59 -0
  179. data/lib/fluent/plugin/formatter_ltsv.rb +44 -0
  180. data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
  181. data/lib/fluent/plugin/formatter_out_file.rb +53 -0
  182. data/lib/fluent/plugin/formatter_single_value.rb +36 -0
  183. data/lib/fluent/plugin/formatter_stdout.rb +76 -0
  184. data/lib/fluent/plugin/formatter_tsv.rb +40 -0
  185. data/lib/fluent/plugin/in_debug_agent.rb +71 -0
  186. data/lib/fluent/plugin/in_dummy.rb +18 -0
  187. data/lib/fluent/plugin/in_exec.rb +110 -0
  188. data/lib/fluent/plugin/in_forward.rb +473 -0
  189. data/lib/fluent/plugin/in_gc_stat.rb +72 -0
  190. data/lib/fluent/plugin/in_http.rb +667 -0
  191. data/lib/fluent/plugin/in_monitor_agent.rb +412 -0
  192. data/lib/fluent/plugin/in_object_space.rb +93 -0
  193. data/lib/fluent/plugin/in_sample.rb +141 -0
  194. data/lib/fluent/plugin/in_syslog.rb +276 -0
  195. data/lib/fluent/plugin/in_tail/position_file.rb +269 -0
  196. data/lib/fluent/plugin/in_tail.rb +1228 -0
  197. data/lib/fluent/plugin/in_tcp.rb +181 -0
  198. data/lib/fluent/plugin/in_udp.rb +92 -0
  199. data/lib/fluent/plugin/in_unix.rb +195 -0
  200. data/lib/fluent/plugin/input.rb +75 -0
  201. data/lib/fluent/plugin/metrics.rb +119 -0
  202. data/lib/fluent/plugin/metrics_local.rb +96 -0
  203. data/lib/fluent/plugin/multi_output.rb +195 -0
  204. data/lib/fluent/plugin/out_copy.rb +120 -0
  205. data/lib/fluent/plugin/out_exec.rb +105 -0
  206. data/lib/fluent/plugin/out_exec_filter.rb +319 -0
  207. data/lib/fluent/plugin/out_file.rb +334 -0
  208. data/lib/fluent/plugin/out_forward/ack_handler.rb +161 -0
  209. data/lib/fluent/plugin/out_forward/connection_manager.rb +113 -0
  210. data/lib/fluent/plugin/out_forward/error.rb +28 -0
  211. data/lib/fluent/plugin/out_forward/failure_detector.rb +84 -0
  212. data/lib/fluent/plugin/out_forward/handshake_protocol.rb +125 -0
  213. data/lib/fluent/plugin/out_forward/load_balancer.rb +114 -0
  214. data/lib/fluent/plugin/out_forward/socket_cache.rb +140 -0
  215. data/lib/fluent/plugin/out_forward.rb +826 -0
  216. data/lib/fluent/plugin/out_http.rb +275 -0
  217. data/lib/fluent/plugin/out_null.rb +74 -0
  218. data/lib/fluent/plugin/out_relabel.rb +32 -0
  219. data/lib/fluent/plugin/out_roundrobin.rb +84 -0
  220. data/lib/fluent/plugin/out_secondary_file.rb +131 -0
  221. data/lib/fluent/plugin/out_stdout.rb +74 -0
  222. data/lib/fluent/plugin/out_stream.rb +130 -0
  223. data/lib/fluent/plugin/output.rb +1556 -0
  224. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  225. data/lib/fluent/plugin/parser.rb +275 -0
  226. data/lib/fluent/plugin/parser_apache.rb +28 -0
  227. data/lib/fluent/plugin/parser_apache2.rb +88 -0
  228. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  229. data/lib/fluent/plugin/parser_csv.rb +114 -0
  230. data/lib/fluent/plugin/parser_json.rb +96 -0
  231. data/lib/fluent/plugin/parser_ltsv.rb +51 -0
  232. data/lib/fluent/plugin/parser_msgpack.rb +50 -0
  233. data/lib/fluent/plugin/parser_multiline.rb +152 -0
  234. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  235. data/lib/fluent/plugin/parser_none.rb +36 -0
  236. data/lib/fluent/plugin/parser_regexp.rb +68 -0
  237. data/lib/fluent/plugin/parser_syslog.rb +496 -0
  238. data/lib/fluent/plugin/parser_tsv.rb +42 -0
  239. data/lib/fluent/plugin/sd_file.rb +156 -0
  240. data/lib/fluent/plugin/sd_srv.rb +135 -0
  241. data/lib/fluent/plugin/sd_static.rb +58 -0
  242. data/lib/fluent/plugin/service_discovery.rb +65 -0
  243. data/lib/fluent/plugin/socket_util.rb +22 -0
  244. data/lib/fluent/plugin/storage.rb +84 -0
  245. data/lib/fluent/plugin/storage_local.rb +162 -0
  246. data/lib/fluent/plugin/string_util.rb +22 -0
  247. data/lib/fluent/plugin.rb +206 -0
  248. data/lib/fluent/plugin_helper/cert_option.rb +191 -0
  249. data/lib/fluent/plugin_helper/child_process.rb +366 -0
  250. data/lib/fluent/plugin_helper/compat_parameters.rb +343 -0
  251. data/lib/fluent/plugin_helper/counter.rb +51 -0
  252. data/lib/fluent/plugin_helper/event_emitter.rb +100 -0
  253. data/lib/fluent/plugin_helper/event_loop.rb +170 -0
  254. data/lib/fluent/plugin_helper/extract.rb +104 -0
  255. data/lib/fluent/plugin_helper/formatter.rb +147 -0
  256. data/lib/fluent/plugin_helper/http_server/app.rb +79 -0
  257. data/lib/fluent/plugin_helper/http_server/compat/server.rb +92 -0
  258. data/lib/fluent/plugin_helper/http_server/compat/ssl_context_extractor.rb +52 -0
  259. data/lib/fluent/plugin_helper/http_server/compat/webrick_handler.rb +58 -0
  260. data/lib/fluent/plugin_helper/http_server/methods.rb +35 -0
  261. data/lib/fluent/plugin_helper/http_server/request.rb +42 -0
  262. data/lib/fluent/plugin_helper/http_server/router.rb +54 -0
  263. data/lib/fluent/plugin_helper/http_server/server.rb +93 -0
  264. data/lib/fluent/plugin_helper/http_server/ssl_context_builder.rb +41 -0
  265. data/lib/fluent/plugin_helper/http_server.rb +135 -0
  266. data/lib/fluent/plugin_helper/inject.rb +154 -0
  267. data/lib/fluent/plugin_helper/metrics.rb +129 -0
  268. data/lib/fluent/plugin_helper/parser.rb +147 -0
  269. data/lib/fluent/plugin_helper/record_accessor.rb +207 -0
  270. data/lib/fluent/plugin_helper/retry_state.rb +209 -0
  271. data/lib/fluent/plugin_helper/server.rb +801 -0
  272. data/lib/fluent/plugin_helper/service_discovery/manager.rb +146 -0
  273. data/lib/fluent/plugin_helper/service_discovery/round_robin_balancer.rb +43 -0
  274. data/lib/fluent/plugin_helper/service_discovery.rb +125 -0
  275. data/lib/fluent/plugin_helper/socket.rb +277 -0
  276. data/lib/fluent/plugin_helper/socket_option.rb +98 -0
  277. data/lib/fluent/plugin_helper/storage.rb +349 -0
  278. data/lib/fluent/plugin_helper/thread.rb +180 -0
  279. data/lib/fluent/plugin_helper/timer.rb +92 -0
  280. data/lib/fluent/plugin_helper.rb +75 -0
  281. data/lib/fluent/plugin_id.rb +93 -0
  282. data/lib/fluent/process.rb +22 -0
  283. data/lib/fluent/registry.rb +116 -0
  284. data/lib/fluent/root_agent.rb +372 -0
  285. data/lib/fluent/rpc.rb +94 -0
  286. data/lib/fluent/static_config_analysis.rb +194 -0
  287. data/lib/fluent/supervisor.rb +1054 -0
  288. data/lib/fluent/system_config.rb +187 -0
  289. data/lib/fluent/test/base.rb +78 -0
  290. data/lib/fluent/test/driver/base.rb +225 -0
  291. data/lib/fluent/test/driver/base_owned.rb +83 -0
  292. data/lib/fluent/test/driver/base_owner.rb +135 -0
  293. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  294. data/lib/fluent/test/driver/filter.rb +57 -0
  295. data/lib/fluent/test/driver/formatter.rb +30 -0
  296. data/lib/fluent/test/driver/input.rb +31 -0
  297. data/lib/fluent/test/driver/multi_output.rb +53 -0
  298. data/lib/fluent/test/driver/output.rb +102 -0
  299. data/lib/fluent/test/driver/parser.rb +30 -0
  300. data/lib/fluent/test/driver/storage.rb +30 -0
  301. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  302. data/lib/fluent/test/filter_test.rb +77 -0
  303. data/lib/fluent/test/formatter_test.rb +65 -0
  304. data/lib/fluent/test/helpers.rb +134 -0
  305. data/lib/fluent/test/input_test.rb +174 -0
  306. data/lib/fluent/test/log.rb +79 -0
  307. data/lib/fluent/test/output_test.rb +156 -0
  308. data/lib/fluent/test/parser_test.rb +70 -0
  309. data/lib/fluent/test/startup_shutdown.rb +46 -0
  310. data/lib/fluent/test.rb +58 -0
  311. data/lib/fluent/time.rb +512 -0
  312. data/lib/fluent/timezone.rb +171 -0
  313. data/lib/fluent/tls.rb +81 -0
  314. data/lib/fluent/unique_id.rb +39 -0
  315. data/lib/fluent/variable_store.rb +40 -0
  316. data/lib/fluent/version.rb +21 -0
  317. data/lib/fluent/winsvc.rb +103 -0
  318. data/templates/new_gem/Gemfile +3 -0
  319. data/templates/new_gem/README.md.erb +43 -0
  320. data/templates/new_gem/Rakefile +13 -0
  321. data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
  322. data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
  323. data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
  324. data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
  325. data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
  326. data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
  327. data/templates/new_gem/lib/fluent/plugin/storage.rb.erb +40 -0
  328. data/templates/new_gem/test/helper.rb.erb +8 -0
  329. data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
  330. data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
  331. data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
  332. data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
  333. data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
  334. data/templates/new_gem/test/plugin/test_storage.rb.erb +18 -0
  335. data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
  336. data/templates/plugin_config_formatter/param.md-table.erb +10 -0
  337. data/templates/plugin_config_formatter/param.md.erb +34 -0
  338. data/templates/plugin_config_formatter/section.md.erb +12 -0
  339. data/test/command/test_binlog_reader.rb +362 -0
  340. data/test/command/test_ca_generate.rb +70 -0
  341. data/test/command/test_cap_ctl.rb +100 -0
  342. data/test/command/test_cat.rb +128 -0
  343. data/test/command/test_ctl.rb +57 -0
  344. data/test/command/test_fluentd.rb +1106 -0
  345. data/test/command/test_plugin_config_formatter.rb +398 -0
  346. data/test/command/test_plugin_generator.rb +109 -0
  347. data/test/compat/test_calls_super.rb +166 -0
  348. data/test/compat/test_parser.rb +92 -0
  349. data/test/config/assertions.rb +42 -0
  350. data/test/config/test_config_parser.rb +551 -0
  351. data/test/config/test_configurable.rb +1784 -0
  352. data/test/config/test_configure_proxy.rb +604 -0
  353. data/test/config/test_dsl.rb +415 -0
  354. data/test/config/test_element.rb +518 -0
  355. data/test/config/test_literal_parser.rb +309 -0
  356. data/test/config/test_plugin_configuration.rb +56 -0
  357. data/test/config/test_section.rb +191 -0
  358. data/test/config/test_system_config.rb +199 -0
  359. data/test/config/test_types.rb +408 -0
  360. data/test/counter/test_client.rb +563 -0
  361. data/test/counter/test_error.rb +44 -0
  362. data/test/counter/test_mutex_hash.rb +179 -0
  363. data/test/counter/test_server.rb +589 -0
  364. data/test/counter/test_store.rb +258 -0
  365. data/test/counter/test_validator.rb +137 -0
  366. data/test/helper.rb +155 -0
  367. data/test/helpers/fuzzy_assert.rb +89 -0
  368. data/test/helpers/process_extenstion.rb +33 -0
  369. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  370. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  371. data/test/plugin/data/2010/01/20100102.log +0 -0
  372. data/test/plugin/data/log/bar +0 -0
  373. data/test/plugin/data/log/foo/bar.log +0 -0
  374. data/test/plugin/data/log/foo/bar2 +0 -0
  375. data/test/plugin/data/log/test.log +0 -0
  376. data/test/plugin/data/sd_file/config +11 -0
  377. data/test/plugin/data/sd_file/config.json +17 -0
  378. data/test/plugin/data/sd_file/config.yaml +11 -0
  379. data/test/plugin/data/sd_file/config.yml +11 -0
  380. data/test/plugin/data/sd_file/invalid_config.yml +7 -0
  381. data/test/plugin/in_tail/test_fifo.rb +121 -0
  382. data/test/plugin/in_tail/test_io_handler.rb +140 -0
  383. data/test/plugin/in_tail/test_position_file.rb +379 -0
  384. data/test/plugin/out_forward/test_ack_handler.rb +101 -0
  385. data/test/plugin/out_forward/test_connection_manager.rb +145 -0
  386. data/test/plugin/out_forward/test_handshake_protocol.rb +112 -0
  387. data/test/plugin/out_forward/test_load_balancer.rb +106 -0
  388. data/test/plugin/out_forward/test_socket_cache.rb +149 -0
  389. data/test/plugin/test_bare_output.rb +131 -0
  390. data/test/plugin/test_base.rb +115 -0
  391. data/test/plugin/test_buf_file.rb +1275 -0
  392. data/test/plugin/test_buf_file_single.rb +833 -0
  393. data/test/plugin/test_buf_memory.rb +42 -0
  394. data/test/plugin/test_buffer.rb +1383 -0
  395. data/test/plugin/test_buffer_chunk.rb +198 -0
  396. data/test/plugin/test_buffer_file_chunk.rb +871 -0
  397. data/test/plugin/test_buffer_file_single_chunk.rb +611 -0
  398. data/test/plugin/test_buffer_memory_chunk.rb +339 -0
  399. data/test/plugin/test_compressable.rb +87 -0
  400. data/test/plugin/test_file_util.rb +96 -0
  401. data/test/plugin/test_file_wrapper.rb +126 -0
  402. data/test/plugin/test_filter.rb +368 -0
  403. data/test/plugin/test_filter_grep.rb +697 -0
  404. data/test/plugin/test_filter_parser.rb +731 -0
  405. data/test/plugin/test_filter_record_transformer.rb +577 -0
  406. data/test/plugin/test_filter_stdout.rb +207 -0
  407. data/test/plugin/test_formatter_csv.rb +136 -0
  408. data/test/plugin/test_formatter_hash.rb +38 -0
  409. data/test/plugin/test_formatter_json.rb +61 -0
  410. data/test/plugin/test_formatter_ltsv.rb +70 -0
  411. data/test/plugin/test_formatter_msgpack.rb +28 -0
  412. data/test/plugin/test_formatter_out_file.rb +116 -0
  413. data/test/plugin/test_formatter_single_value.rb +44 -0
  414. data/test/plugin/test_formatter_tsv.rb +76 -0
  415. data/test/plugin/test_in_debug_agent.rb +49 -0
  416. data/test/plugin/test_in_exec.rb +261 -0
  417. data/test/plugin/test_in_forward.rb +1180 -0
  418. data/test/plugin/test_in_gc_stat.rb +62 -0
  419. data/test/plugin/test_in_http.rb +1080 -0
  420. data/test/plugin/test_in_monitor_agent.rb +923 -0
  421. data/test/plugin/test_in_object_space.rb +60 -0
  422. data/test/plugin/test_in_sample.rb +190 -0
  423. data/test/plugin/test_in_syslog.rb +505 -0
  424. data/test/plugin/test_in_tail.rb +2363 -0
  425. data/test/plugin/test_in_tcp.rb +243 -0
  426. data/test/plugin/test_in_udp.rb +268 -0
  427. data/test/plugin/test_in_unix.rb +181 -0
  428. data/test/plugin/test_input.rb +137 -0
  429. data/test/plugin/test_metadata.rb +89 -0
  430. data/test/plugin/test_metrics.rb +294 -0
  431. data/test/plugin/test_metrics_local.rb +96 -0
  432. data/test/plugin/test_multi_output.rb +204 -0
  433. data/test/plugin/test_out_copy.rb +308 -0
  434. data/test/plugin/test_out_exec.rb +312 -0
  435. data/test/plugin/test_out_exec_filter.rb +606 -0
  436. data/test/plugin/test_out_file.rb +1037 -0
  437. data/test/plugin/test_out_forward.rb +1348 -0
  438. data/test/plugin/test_out_http.rb +428 -0
  439. data/test/plugin/test_out_null.rb +105 -0
  440. data/test/plugin/test_out_relabel.rb +28 -0
  441. data/test/plugin/test_out_roundrobin.rb +146 -0
  442. data/test/plugin/test_out_secondary_file.rb +458 -0
  443. data/test/plugin/test_out_stdout.rb +205 -0
  444. data/test/plugin/test_out_stream.rb +103 -0
  445. data/test/plugin/test_output.rb +1065 -0
  446. data/test/plugin/test_output_as_buffered.rb +2024 -0
  447. data/test/plugin/test_output_as_buffered_backup.rb +363 -0
  448. data/test/plugin/test_output_as_buffered_compress.rb +165 -0
  449. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  450. data/test/plugin/test_output_as_buffered_retries.rb +919 -0
  451. data/test/plugin/test_output_as_buffered_secondary.rb +882 -0
  452. data/test/plugin/test_output_as_standard.rb +374 -0
  453. data/test/plugin/test_owned_by.rb +35 -0
  454. data/test/plugin/test_parser.rb +399 -0
  455. data/test/plugin/test_parser_apache.rb +42 -0
  456. data/test/plugin/test_parser_apache2.rb +47 -0
  457. data/test/plugin/test_parser_apache_error.rb +45 -0
  458. data/test/plugin/test_parser_csv.rb +200 -0
  459. data/test/plugin/test_parser_json.rb +138 -0
  460. data/test/plugin/test_parser_labeled_tsv.rb +160 -0
  461. data/test/plugin/test_parser_multiline.rb +111 -0
  462. data/test/plugin/test_parser_nginx.rb +88 -0
  463. data/test/plugin/test_parser_none.rb +52 -0
  464. data/test/plugin/test_parser_regexp.rb +289 -0
  465. data/test/plugin/test_parser_syslog.rb +650 -0
  466. data/test/plugin/test_parser_tsv.rb +122 -0
  467. data/test/plugin/test_sd_file.rb +228 -0
  468. data/test/plugin/test_sd_srv.rb +230 -0
  469. data/test/plugin/test_storage.rb +167 -0
  470. data/test/plugin/test_storage_local.rb +335 -0
  471. data/test/plugin/test_string_util.rb +26 -0
  472. data/test/plugin_helper/data/cert/cert-key.pem +27 -0
  473. data/test/plugin_helper/data/cert/cert-with-CRLF.pem +19 -0
  474. data/test/plugin_helper/data/cert/cert-with-no-newline.pem +19 -0
  475. data/test/plugin_helper/data/cert/cert.pem +19 -0
  476. data/test/plugin_helper/data/cert/cert_chains/ca-cert-key.pem +27 -0
  477. data/test/plugin_helper/data/cert/cert_chains/ca-cert.pem +20 -0
  478. data/test/plugin_helper/data/cert/cert_chains/cert-key.pem +27 -0
  479. data/test/plugin_helper/data/cert/cert_chains/cert.pem +40 -0
  480. data/test/plugin_helper/data/cert/empty.pem +0 -0
  481. data/test/plugin_helper/data/cert/generate_cert.rb +125 -0
  482. data/test/plugin_helper/data/cert/with_ca/ca-cert-key-pass.pem +30 -0
  483. data/test/plugin_helper/data/cert/with_ca/ca-cert-key.pem +27 -0
  484. data/test/plugin_helper/data/cert/with_ca/ca-cert-pass.pem +20 -0
  485. data/test/plugin_helper/data/cert/with_ca/ca-cert.pem +20 -0
  486. data/test/plugin_helper/data/cert/with_ca/cert-key-pass.pem +30 -0
  487. data/test/plugin_helper/data/cert/with_ca/cert-key.pem +27 -0
  488. data/test/plugin_helper/data/cert/with_ca/cert-pass.pem +21 -0
  489. data/test/plugin_helper/data/cert/with_ca/cert.pem +21 -0
  490. data/test/plugin_helper/data/cert/without_ca/cert-key-pass.pem +30 -0
  491. data/test/plugin_helper/data/cert/without_ca/cert-key.pem +27 -0
  492. data/test/plugin_helper/data/cert/without_ca/cert-pass.pem +20 -0
  493. data/test/plugin_helper/data/cert/without_ca/cert.pem +20 -0
  494. data/test/plugin_helper/http_server/test_app.rb +65 -0
  495. data/test/plugin_helper/http_server/test_route.rb +32 -0
  496. data/test/plugin_helper/service_discovery/test_manager.rb +93 -0
  497. data/test/plugin_helper/service_discovery/test_round_robin_balancer.rb +21 -0
  498. data/test/plugin_helper/test_cert_option.rb +25 -0
  499. data/test/plugin_helper/test_child_process.rb +840 -0
  500. data/test/plugin_helper/test_compat_parameters.rb +358 -0
  501. data/test/plugin_helper/test_event_emitter.rb +80 -0
  502. data/test/plugin_helper/test_event_loop.rb +52 -0
  503. data/test/plugin_helper/test_extract.rb +194 -0
  504. data/test/plugin_helper/test_formatter.rb +255 -0
  505. data/test/plugin_helper/test_http_server_helper.rb +372 -0
  506. data/test/plugin_helper/test_inject.rb +561 -0
  507. data/test/plugin_helper/test_metrics.rb +137 -0
  508. data/test/plugin_helper/test_parser.rb +264 -0
  509. data/test/plugin_helper/test_record_accessor.rb +238 -0
  510. data/test/plugin_helper/test_retry_state.rb +442 -0
  511. data/test/plugin_helper/test_server.rb +1823 -0
  512. data/test/plugin_helper/test_service_discovery.rb +165 -0
  513. data/test/plugin_helper/test_socket.rb +146 -0
  514. data/test/plugin_helper/test_storage.rb +542 -0
  515. data/test/plugin_helper/test_thread.rb +164 -0
  516. data/test/plugin_helper/test_timer.rb +130 -0
  517. data/test/scripts/exec_script.rb +32 -0
  518. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
  519. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
  520. data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
  521. data/test/scripts/fluent/plugin/out_test.rb +81 -0
  522. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  523. data/test/scripts/fluent/plugin/parser_known.rb +4 -0
  524. data/test/test_capability.rb +74 -0
  525. data/test/test_clock.rb +164 -0
  526. data/test/test_config.rb +202 -0
  527. data/test/test_configdsl.rb +148 -0
  528. data/test/test_daemonizer.rb +91 -0
  529. data/test/test_engine.rb +203 -0
  530. data/test/test_event.rb +531 -0
  531. data/test/test_event_router.rb +331 -0
  532. data/test/test_event_time.rb +199 -0
  533. data/test/test_filter.rb +121 -0
  534. data/test/test_fluent_log_event_router.rb +99 -0
  535. data/test/test_formatter.rb +366 -0
  536. data/test/test_input.rb +31 -0
  537. data/test/test_log.rb +994 -0
  538. data/test/test_logger_initializer.rb +46 -0
  539. data/test/test_match.rb +148 -0
  540. data/test/test_mixin.rb +351 -0
  541. data/test/test_msgpack_factory.rb +18 -0
  542. data/test/test_oj_options.rb +55 -0
  543. data/test/test_output.rb +278 -0
  544. data/test/test_plugin.rb +251 -0
  545. data/test/test_plugin_classes.rb +370 -0
  546. data/test/test_plugin_helper.rb +81 -0
  547. data/test/test_plugin_id.rb +119 -0
  548. data/test/test_process.rb +14 -0
  549. data/test/test_root_agent.rb +951 -0
  550. data/test/test_static_config_analysis.rb +177 -0
  551. data/test/test_supervisor.rb +601 -0
  552. data/test/test_test_drivers.rb +136 -0
  553. data/test/test_time_formatter.rb +301 -0
  554. data/test/test_time_parser.rb +362 -0
  555. data/test/test_tls.rb +65 -0
  556. data/test/test_unique_id.rb +47 -0
  557. data/test/test_variable_store.rb +65 -0
  558. metadata +1261 -0
@@ -0,0 +1,1383 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/buffer'
3
+ require 'fluent/plugin/buffer/memory_chunk'
4
+ require 'fluent/plugin/compressable'
5
+ require 'fluent/plugin/buffer/chunk'
6
+ require 'fluent/event'
7
+ require 'flexmock/test_unit'
8
+
9
+ require 'fluent/log'
10
+ require 'fluent/plugin_id'
11
+
12
+ require 'time'
13
+
14
# Test doubles shared by the buffer test cases below.
module FluentPluginBufferTest
  # Minimal owner plugin: supplies a plugin id and a logger to the buffer.
  class DummyOutputPlugin < Fluent::Plugin::Base
    include Fluent::PluginId
    include Fluent::PluginLoggerMixin
  end

  class DummyMemoryChunkError < StandardError; end

  # Memory chunk that records lifecycle calls (append/rollback/close/purge)
  # and can be switched into a failing mode to simulate concat errors.
  class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
    attr_reader :append_count, :rollbacked, :closed, :purged, :chunk
    attr_accessor :failing

    def initialize(metadata, compress: :text)
      super
      @append_count = 0
      @rollbacked = false
      @closed = false
      @purged = false
      @failing = false
    end

    def concat(data, size)
      # Count the attempt before the injected failure fires: tests rely on
      # append_count advancing even when the concat raises.
      @append_count += 1
      raise DummyMemoryChunkError if @failing
      super
    end

    def rollback
      super
      @rollbacked = true
    end

    def close
      super
      @closed = true
    end

    def purge
      super
      @purged = true
    end
  end

  # Buffer subclass whose #resume yields a fixed set of staged/queued chunks,
  # so tests start from a known buffer state.
  class DummyPlugin < Fluent::Plugin::Buffer
    def create_metadata(timekey=nil, tag=nil, variables=nil)
      Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
    end

    # Committed chunk pre-filled via #append.
    def create_chunk(metadata, data)
      chunk = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
      chunk.append(data)
      chunk.commit
      chunk
    end

    # Committed chunk pre-filled via #concat from an event stream.
    def create_chunk_es(metadata, es)
      chunk = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
      chunk.concat(es.to_msgpack_stream, es.size)
      chunk.commit
      chunk
    end

    def resume
      meta00 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
      meta10 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
      meta20 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
      meta30 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
      staged = {
        meta20 => create_chunk(meta20, ["b" * 100]).staged!,
        meta30 => create_chunk(meta30, ["c" * 100]).staged!,
      }
      queued = [
        create_chunk(meta00, ["0" * 100]).enqueued!,
        create_chunk(meta10, ["a" * 100]).enqueued!,
        create_chunk(meta10, ["a" * 3]).enqueued!,
      ]
      return staged, queued
    end

    def generate_chunk(metadata)
      DummyMemoryChunk.new(metadata, compress: @compress)
    end
  end
end
86
+
87
+ class BufferTest < Test::Unit::TestCase
88
# Builds a DummyPlugin buffer configured from +hash+, attached to a freshly
# configured dummy owner plugin (mirrors how an output plugin owns a buffer).
def create_buffer(hash)
  conf = config_element('buffer', '', hash, [])
  owner = FluentPluginBufferTest::DummyOutputPlugin.new
  owner.configure(config_element('ROOT', '', {}, [ conf ]))
  buffer = FluentPluginBufferTest::DummyPlugin.new
  buffer.owner = owner
  buffer.configure(conf)
  buffer
end
97
+
98
# Convenience wrapper around Buffer::Metadata construction.
def create_metadata(timekey=nil, tag=nil, variables=nil)
  Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
101
+
102
# Returns a committed DummyMemoryChunk holding +data+ (appended rows).
def create_chunk(metadata, data)
  chunk = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
  chunk.append(data)
  chunk.commit
  chunk
end
108
+
109
# Returns a committed DummyMemoryChunk filled from an event stream via #concat.
def create_chunk_es(metadata, es)
  chunk = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
  chunk.concat(es.to_msgpack_stream, es.size)
  chunk.commit
  chunk
end
115
+
116
setup do
  # Reset fluentd test helpers (engine/log state) before each test.
  Fluent::Test.setup
end
119
+
120
# Behavior of the abstract Fluent::Plugin::Buffer base class itself:
# defaults, storability accounting, and the subclass contract.
sub_test_case 'using base buffer class' do
  setup do
    conf = config_element('buffer', '', {}, [])
    owner = FluentPluginBufferTest::DummyOutputPlugin.new
    owner.configure(config_element('ROOT', '', {}, [ conf ]))
    buffer = Fluent::Plugin::Buffer.new
    buffer.owner = owner
    buffer.configure(conf)
    @p = buffer
  end

  test 'default persistency is false' do
    assert !@p.persistent?
  end

  test 'chunk bytes limit is 8MB, and total bytes limit is 512MB' do
    assert_equal 8*1024*1024, @p.chunk_limit_size
    assert_equal 512*1024*1024, @p.total_limit_size
  end

  test 'chunk records limit is ignored in default' do
    assert_nil @p.chunk_limit_records
  end

  test '#storable? checks total size of staged and enqueued(includes dequeued chunks) against total_limit_size' do
    assert_equal 512*1024*1024, @p.total_limit_size
    assert_equal 0, @p.stage_size
    assert_equal 0, @p.queue_size
    assert @p.storable?

    # One byte under the limit is still storable...
    @p.stage_size = 256 * 1024 * 1024
    @p.queue_size = 256 * 1024 * 1024 - 1
    assert @p.storable?

    # ...reaching the limit exactly is not.
    @p.queue_size = 256 * 1024 * 1024
    assert !@p.storable?
  end

  test '#resume must be implemented by subclass' do
    assert_raise NotImplementedError do
      @p.resume
    end
  end

  test '#generate_chunk must be implemented by subclass' do
    assert_raise NotImplementedError do
      @p.generate_chunk(Object.new)
    end
  end
end
170
+
171
# Exercises the full buffer lifecycle against DummyPlugin, which resumes with
# two staged chunks (@dm2, @dm3) and three queued chunks (@dm0, @dm1, @dm1).
sub_test_case 'with default configuration and dummy implementation' do
  setup do
    @p = create_buffer({'queued_chunks_limit_size' => 100})
    @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    @dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
    @dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
    @dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
    @p.start
  end

  test '#start resumes buffer states and update queued numbers per metadata' do
    # A buffer that is merely configured (not started) has empty state.
    plugin = create_buffer({})

    assert_equal({}, plugin.stage)
    assert_equal([], plugin.queue)
    assert_equal({}, plugin.dequeued)
    assert_equal({}, plugin.queued_num)

    assert_equal 0, plugin.stage_size
    assert_equal 0, plugin.queue_size
    assert_equal [], plugin.timekeys

    # @p is started plugin

    assert_equal [@dm2,@dm3], @p.stage.keys
    assert_equal "b" * 100, @p.stage[@dm2].read
    assert_equal "c" * 100, @p.stage[@dm3].read

    assert_equal 200, @p.stage_size

    assert_equal 3, @p.queue.size
    assert_equal "0" * 100, @p.queue[0].read
    assert_equal "a" * 100, @p.queue[1].read
    assert_equal "a" * 3, @p.queue[2].read

    assert_equal 203, @p.queue_size

    # staged, queued
    assert_equal 1, @p.queued_num[@dm0]
    assert_equal 2, @p.queued_num[@dm1]
  end

  test '#close closes all chunks in dequeued, enqueued and staged' do
    dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
    cx = create_chunk(dmx, ["x" * 1024])
    @p.dequeued[cx.unique_id] = cx

    staged_chunks = @p.stage.values.dup
    queued_chunks = @p.queue.dup

    @p.close

    assert cx.closed
    assert{ staged_chunks.all?{|c| c.closed } }
    assert{ queued_chunks.all?{|c| c.closed } }
  end

  test '#terminate initializes all internal states' do
    dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
    cx = create_chunk(dmx, ["x" * 1024])
    @p.dequeued[cx.unique_id] = cx

    @p.close

    @p.terminate

    assert_nil @p.stage
    assert_nil @p.queue
    assert_nil @p.dequeued
    assert_nil @p.queued_num
    assert_nil @p.stage_length_metrics
    assert_nil @p.stage_size_metrics
    assert_nil @p.queue_length_metrics
    assert_nil @p.queue_size_metrics
    assert_nil @p.available_buffer_space_ratios_metrics
    assert_nil @p.total_queued_size_metrics
    assert_nil @p.newest_timekey_metrics
    assert_nil @p.oldest_timekey_metrics
    assert_equal [], @p.timekeys
  end

  test '#queued_records returns total number of size in all chunks in queue' do
    assert_equal 3, @p.queue.size

    r0 = @p.queue[0].size
    assert_equal 1, r0
    r1 = @p.queue[1].size
    assert_equal 1, r1
    r2 = @p.queue[2].size
    assert_equal 1, r2

    assert_equal (r0+r1+r2), @p.queued_records
  end

  test '#queued? returns queue has any chunks or not without arguments' do
    assert @p.queued?

    @p.queue.reject!{|_c| true }
    assert !@p.queued?
  end

  test '#queued? returns queue has chunks for specified metadata with an argument' do
    assert @p.queued?(@dm0)
    assert @p.queued?(@dm1)
    assert !@p.queued?(@dm2)
  end

  test '#enqueue_chunk enqueues a chunk on stage with specified metadata' do
    assert_equal 2, @p.stage.size
    assert_equal [@dm2,@dm3], @p.stage.keys
    assert_equal 3, @p.queue.size
    assert_nil @p.queued_num[@dm2]

    assert_equal 200, @p.stage_size
    assert_equal 203, @p.queue_size

    @p.enqueue_chunk(@dm2)

    # The chunk moved from stage to the tail of the queue; sizes follow it.
    assert_equal [@dm3], @p.stage.keys
    assert_equal @dm2, @p.queue.last.metadata
    assert_equal 1, @p.queued_num[@dm2]
    assert_equal 100, @p.stage_size
    assert_equal 303, @p.queue_size
  end

  test '#enqueue_chunk ignores empty chunks' do
    assert_equal 3, @p.queue.size

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
    c = create_chunk(m, [''])
    @p.stage[m] = c
    assert @p.stage[m].empty?
    assert !c.closed

    @p.enqueue_chunk(m)

    # Empty chunk is dropped (closed), not queued.
    assert_nil @p.stage[m]
    assert_equal 3, @p.queue.size
    assert_nil @p.queued_num[m]
    assert c.closed
  end

  test '#enqueue_chunk calls #enqueued! if chunk responds to it' do
    assert_equal 3, @p.queue.size
    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
    c = create_chunk(m, ['c' * 256])
    callback_called = false
    (class << c; self; end).module_eval do
      define_method(:enqueued!){ callback_called = true }
    end

    @p.stage[m] = c
    @p.enqueue_chunk(m)

    assert_equal c, @p.queue.last
    assert callback_called
  end

  test '#enqueue_all enqueues chunks on stage which given block returns true with' do
    m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
    c1 = create_chunk(m1, ['c' * 256])
    @p.stage[m1] = c1
    m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
    c2 = create_chunk(m2, ['c' * 256])
    @p.stage[m2] = c2

    assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)

    # Only m2's timekey (16:50) is >= the cutoff, so it stays staged.
    @p.enqueue_all{ |m, c| m.timekey < Time.parse('2016-04-11 16:41:00 +0000').to_i }

    assert_equal [m2], @p.stage.keys
    assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1], @p.queue.map(&:metadata)
  end

  test '#enqueue_all enqueues all chunks on stage without block' do
    m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
    c1 = create_chunk(m1, ['c' * 256])
    @p.stage[m1] = c1
    m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
    c2 = create_chunk(m2, ['c' * 256])
    @p.stage[m2] = c2

    assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)

    @p.enqueue_all

    assert_equal [], @p.stage.keys
    assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1,m2], @p.queue.map(&:metadata)
  end

  test '#dequeue_chunk dequeues a chunk from queue if a chunk exists' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)

    m1 = @p.dequeue_chunk
    assert_equal @dm0, m1.metadata
    assert_equal @dm0, @p.dequeued[m1.unique_id].metadata

    m2 = @p.dequeue_chunk
    assert_equal @dm1, m2.metadata
    assert_equal @dm1, @p.dequeued[m2.unique_id].metadata

    m3 = @p.dequeue_chunk
    assert_equal @dm1, m3.metadata
    assert_equal @dm1, @p.dequeued[m3.unique_id].metadata

    # Queue exhausted.
    m4 = @p.dequeue_chunk
    assert_nil m4
  end

  test '#takeback_chunk resumes a chunk from dequeued to queued at the head of queue, and returns true' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)

    m1 = @p.dequeue_chunk
    assert_equal @dm0, m1.metadata
    assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
    assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({m1.unique_id => m1}, @p.dequeued)

    assert @p.takeback_chunk(m1.unique_id)

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)
  end

  test '#purge_chunk removes a chunk specified by argument id from dequeued chunks' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)

    m0 = @p.dequeue_chunk
    m1 = @p.dequeue_chunk

    assert @p.takeback_chunk(m0.unique_id)

    assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
    assert_equal({m1.unique_id => m1}, @p.dequeued)

    assert !m1.purged

    @p.purge_chunk(m1.unique_id)
    assert m1.purged

    assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)
  end

  test '#purge_chunk removes an argument metadata if no chunks exist on stage or in queue' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)

    m0 = @p.dequeue_chunk

    assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({m0.unique_id => m0}, @p.dequeued)

    assert !m0.purged

    @p.purge_chunk(m0.unique_id)
    assert m0.purged

    assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)
  end

  test '#takeback_chunk returns false if specified chunk_id is already purged' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)

    m0 = @p.dequeue_chunk

    assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({m0.unique_id => m0}, @p.dequeued)

    assert !m0.purged

    @p.purge_chunk(m0.unique_id)
    assert m0.purged

    assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)

    # Takeback of an already-purged id is a no-op returning false.
    assert !@p.takeback_chunk(m0.unique_id)

    assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal({}, @p.dequeued)
  end

  test '#clear_queue! removes all chunks in queue, but leaves staged chunks' do
    qchunks = @p.queue.dup

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal 2, @p.stage.size
    assert_equal({}, @p.dequeued)

    @p.clear_queue!

    assert_equal [], @p.queue
    assert_equal 0, @p.queue_size
    assert_equal 2, @p.stage.size
    assert_equal({}, @p.dequeued)

    assert{ qchunks.all?{ |c| c.purged } }
  end

  test '#write returns immediately if argument data is empty array' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

    @p.write({m => []})

    # Nothing staged, nothing queued.
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys
  end

  test '#write returns immediately if argument data is empty event stream' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

    @p.write({m => Fluent::ArrayEventStream.new([])})

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys
  end

  test '#write raises BufferOverflowError if buffer is not storable' do
    @p.stage_size = 256 * 1024 * 1024
    @p.queue_size = 256 * 1024 * 1024

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

    assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
      @p.write({m => ["x" * 256]})
    end
  end

  test '#write stores data into an existing chunk with metadata specified' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    dm3data = @p.stage[@dm3].read.dup
    prev_stage_size = @p.stage_size

    assert_equal 1, @p.stage[@dm3].append_count

    @p.write({@dm3 => ["x" * 256, "y" * 256, "z" * 256]})

    assert_equal 2, @p.stage[@dm3].append_count
    assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
    assert_equal (prev_stage_size + 768), @p.stage_size

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys
  end

  test '#write creates new chunk and store data into it if there are no chunks for specified metadata' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
    assert !@p.timekeys.include?(timekey)

    prev_stage_size = @p.stage_size

    m = @p.metadata(timekey: timekey)

    @p.write({m => ["x" * 256, "y" * 256, "z" * 256]})

    assert_equal 1, @p.stage[m].append_count
    assert_equal ("x" * 256 + "y" * 256 + "z" * 256), @p.stage[m].read
    assert_equal (prev_stage_size + 768), @p.stage_size

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys

    @p.update_timekeys

    assert @p.timekeys.include?(timekey)
  end

  test '#write tries to enqueue and store data into a new chunk if existing chunk is full' do
    assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
    assert_equal 0.95, @p.chunk_full_threshold

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

    # 7.5MB staged: under the 8MB limit but over the 95% full threshold.
    row = "x" * 1024 * 1024
    small_row = "x" * 1024 * 512
    @p.write({m => [row] * 7 + [small_row]})

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys
    assert_equal 1, @p.stage[m].append_count

    @p.write({m => [row]})

    assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys
    assert_equal 1, @p.stage[m].append_count
    assert_equal 1024*1024, @p.stage[m].bytesize
    assert_equal 3, @p.queue.last.append_count # 1 -> write (2) -> write_step_by_step (3)
    assert @p.queue.last.rollbacked
  end

  test '#write rollbacks if commit raises errors' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

    row = "x" * 1024
    @p.write({m => [row] * 8})

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys

    target_chunk = @p.stage[m]

    assert_equal 1, target_chunk.append_count
    assert !target_chunk.rollbacked

    # Force the next commit to fail on this specific chunk.
    (class << target_chunk; self; end).module_eval do
      define_method(:commit){ raise "yay" }
    end

    assert_raise RuntimeError.new("yay") do
      @p.write({m => [row]})
    end

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys

    assert_equal 2, target_chunk.append_count
    assert target_chunk.rollbacked
    assert_equal row * 8, target_chunk.read
  end

  test '#write w/ format raises BufferOverflowError if buffer is not storable' do
    @p.stage_size = 256 * 1024 * 1024
    @p.queue_size = 256 * 1024 * 1024

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

    es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "xxxxxxxxxxxxxx"} ] ])

    assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
      @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
    end
  end

  test '#write w/ format stores data into an existing chunk with metadata specified' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    dm3data = @p.stage[@dm3].read.dup
    prev_stage_size = @p.stage_size

    assert_equal 1, @p.stage[@dm3].append_count

    es = Fluent::ArrayEventStream.new(
      [
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 128}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "y" * 128}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "z" * 128}],
      ]
    )

    @p.write({@dm3 => es}, format: ->(e){e.to_msgpack_stream})

    assert_equal 2, @p.stage[@dm3].append_count
    assert_equal (dm3data + es.to_msgpack_stream), @p.stage[@dm3].read
    assert_equal (prev_stage_size + es.to_msgpack_stream.bytesize), @p.stage_size

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys
  end

  test '#write w/ format creates new chunk and store data into it if there are not chunks for specified metadata' do
    assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
    assert !@p.timekeys.include?(timekey)

    m = @p.metadata(timekey: timekey)

    es = Fluent::ArrayEventStream.new(
      [
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
      ]
    )
    @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys
    assert_equal 1, @p.stage[m].append_count

    @p.update_timekeys

    assert @p.timekeys.include?(timekey)
  end

  test '#write w/ format tries to enqueue and store data into a new chunk if existing chunk does not have enough space' do
    assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

    es = Fluent::ArrayEventStream.new(
      [
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
      ]
    )
    @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys
    assert_equal 1, @p.stage[m].append_count

    es2 = Fluent::OneEventStream.new(event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 1024})
    @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})

    assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys
    assert_equal 1, @p.stage[m].append_count
    assert_equal es2.to_msgpack_stream.bytesize, @p.stage[m].bytesize
    assert_equal 2, @p.queue.last.append_count # 1 -> write (2) -> rollback&enqueue
    assert @p.queue.last.rollbacked
  end

  test '#write w/ format enqueues chunk if it is already full after adding data' do
    assert_equal 8 * 1024 * 1024, @p.chunk_limit_size

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
    es = Fluent::ArrayEventStream.new(
      [
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}], # 1024 * 1024 bytes as msgpack stream
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
      ]
    )
    @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

    assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys
    assert_equal 1, @p.queue.last.append_count
  end

  test '#write w/ format rollbacks if commit raises errors' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)

    es = Fluent::ArrayEventStream.new(
      [
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
        [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
      ]
    )
    @p.write({m => es}, format: ->(e){e.to_msgpack_stream})

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys

    target_chunk = @p.stage[m]

    assert_equal 1, target_chunk.append_count
    assert !target_chunk.rollbacked

    (class << target_chunk; self; end).module_eval do
      define_method(:commit){ raise "yay" }
    end

    es2 = Fluent::ArrayEventStream.new(
      [
        [event_time('2016-04-11 16:40:04 +0000'), {"message" => "z" * 1024 * 128}],
      ]
    )
    assert_raise RuntimeError.new("yay") do
      @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
    end

    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3,m], @p.stage.keys

    assert_equal 2, target_chunk.append_count
    assert target_chunk.rollbacked
    assert_equal es.to_msgpack_stream, target_chunk.read
  end

  test '#write writes many metadata and data pairs at once' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    row = "x" * 1024
    @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })

    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
  end

  test '#write does not commit on any chunks if any append operation on chunk fails' do
    assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    row = "x" * 1024
    @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })

    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys

    dm2_size = @p.stage[@dm2].size
    assert !@p.stage[@dm2].rollbacked
    dm3_size = @p.stage[@dm3].size
    assert !@p.stage[@dm3].rollbacked

    assert{ @p.stage[@dm0].size == 3 }
    assert !@p.stage[@dm0].rollbacked
    assert{ @p.stage[@dm1].size == 2 }
    assert !@p.stage[@dm1].rollbacked

    # Make the last chunk (in metadata sort order) fail its concat.
    meta_list = [@dm0, @dm1, @dm2, @dm3].sort
    @p.stage[meta_list.last].failing = true

    assert_raise(FluentPluginBufferTest::DummyMemoryChunkError) do
      @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] })
    end

    # Every chunk involved in the failed multi-metadata write is rolled back.
    assert{ @p.stage[@dm2].size == dm2_size }
    assert @p.stage[@dm2].rollbacked
    assert{ @p.stage[@dm3].size == dm3_size }
    assert @p.stage[@dm3].rollbacked

    assert{ @p.stage[@dm0].size == 3 }
    assert @p.stage[@dm0].rollbacked
    assert{ @p.stage[@dm1].size == 2 }
    assert @p.stage[@dm1].rollbacked
  end

  test '#compress returns :text' do
    assert_equal :text, @p.compress
  end
end
854
+
855
# Standard-format (msgpack event stream) write behavior with a lowered
# chunk_limit_size (1,280,000 bytes) so chunk splitting and overflow can be
# exercised with small event streams. Each test starts from one staged chunk
# (@dm0, 5000 records of 128 bytes each) and an empty queue.
# NOTE: fixed typos in two test names ("splited" -> "split", "biggar" -> "bigger").
sub_test_case 'standard format with configuration for test with lower chunk limit size' do
  setup do
    @p = create_buffer({"chunk_limit_size" => 1_280_000})
    # Standard format: serialize an event stream into a msgpack stream.
    @format = ->(e){e.to_msgpack_stream}
    @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    # 1 record is 128 bytes in msgpack stream
    @es0 = es0 = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:01 +0000'), {"message" => "x" * (128 - 22)}] ] * 5000)
    # Stub #resume on this instance only, so the buffer starts with a known
    # staged chunk and an empty queue.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        staged = {
          dm0 => create_chunk_es(dm0, es0).staged!,
        }
        queued = []
        return staged, queued
      }
    end
    @p.start
  end

  test '#write appends event stream into staged chunk' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    # 1000 more records still fit into the staged chunk (no enqueue expected)
    es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 1000)
    @p.write({@dm0 => es}, format: @format)

    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal (@es0.to_msgpack_stream + es.to_msgpack_stream), @p.stage[@dm0].read
  end

  test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    # 8000 records (~1MB) do not fit next to the 5000 staged ones:
    # the old staged chunk is enqueued and a new staged chunk takes the data
    es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 8000)
    @p.write({@dm0 => es}, format: @format)

    assert_equal [@dm0], @p.stage.keys
    assert_equal [@dm0], @p.queue.map(&:metadata)

    assert_equal (es.to_msgpack_stream), @p.stage[@dm0].read
  end

  test '#write writes event stream into many chunks excluding staged chunk if event stream is larger than chunk limit size' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 45000)
    @p.write({@dm0 => es}, format: @format)

    # metadata whose seq is 4 is created, but overwrite with original metadata(seq=0) for next use of this chunk https://github.com/fluent/fluentd/blob/9d113029d4550ce576d8825bfa9612aa3e55bff0/lib/fluent/plugin/buffer.rb#L357
    assert_equal [@dm0], @p.stage.keys
    assert_equal 5400, @p.stage[@dm0].size
    assert_equal [@dm0, @dm0, @dm0, @dm0, @dm0], @p.queue.map(&:metadata)
    assert_equal [5000, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
    # 9900 * 4 + 5400 == 45000
  end

  test '#dequeue_chunk succeeds when chunk is split' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 45000)
    @p.write({@dm0 => es}, format: @format)
    @p.enqueue_all(true)

    # all 6 chunks (5 queued by the split + 1 force-enqueued) are dequeuable
    dequeued_chunks = 6.times.map { |e| @p.dequeue_chunk } # splits: 45000 / 100 => 450 * ...
    assert_equal [5000, 9900, 9900, 9900, 9900, 5400], dequeued_chunks.map(&:size)
    assert_equal [@dm0, @dm0, @dm0, @dm0, @dm0, @dm0], dequeued_chunks.map(&:metadata)
  end

  test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    # a single record whose message alone equals chunk_limit_size can never fit
    es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}] ])
    assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
      @p.write({@dm0 => es}, format: @format)
    end
  end

  # Oversized records in various positions of the stream: overflow must be
  # reported, but the records that DO fit must not be lost.
  data(
    first_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
                                               [event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
                                               [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
    intermediate_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
                                                      [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
                                                      [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
    last_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
                                              [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
                                              [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]]),
    multiple_chunks: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
                                                   [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
                                                   [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
                                                   [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]])
  )
  test '#write exceeds chunk_limit_size, raise BufferChunkOverflowError, but not lost whole messages' do |(es)|
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    # collect indexes of the oversized records to build the expected message
    nth = []
    es.entries.each_with_index do |entry, index|
      if entry.last["message"].size == @p.chunk_limit_size
        nth << index
      end
    end
    messages = []
    nth.each do |n|
      messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size (1280000)"
    end

    assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages.join(", ")) do
      @p.write({@dm0 => es}, format: @format)
    end
    # message a and b are concatenated and staged
    staged_messages = Fluent::MessagePackFactory.msgpack_unpacker.feed_each(@p.stage[@dm0].chunk).collect do |record|
      record.last
    end
    assert_equal([2, [{"message" => "a"}, {"message" => "b"}]],
                 [@p.stage[@dm0].size, staged_messages])
    # only es0 message is queued
    assert_equal [@dm0], @p.queue.map(&:metadata)
    assert_equal [5000], @p.queue.map(&:size)
  end

  test "confirm that every message which is smaller than chunk threshold does not raise BufferChunkOverflowError" do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)
    timestamp = event_time('2016-04-11 16:00:02 +0000')
    es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "a" * 1_000_000}],
                                       [timestamp, {"message" => "b" * 1_000_000}],
                                       [timestamp, {"message" => "c" * 1_000_000}]])

    # https://github.com/fluent/fluentd/issues/1849
    # Even though 1_000_000 < 1_280_000 (chunk_limit_size), it raised BufferChunkOverflowError before.
    # It should not be raised and message a,b,c should be stored into 3 chunks.
    assert_nothing_raised do
      @p.write({@dm0 => es}, format: @format)
    end
    messages = []
    # pick up first letter to check whether chunk is queued in expected order
    3.times do |index|
      chunk = @p.queue[index]
      es = Fluent::MessagePackEventStream.new(chunk.chunk)
      es.ensure_unpacked!
      records = es.instance_eval{ @unpacked_records }
      records.each do |record|
        messages << record["message"][0]
      end
    end
    es = Fluent::MessagePackEventStream.new(@p.stage[@dm0].chunk)
    es.ensure_unpacked!
    staged_message = es.instance_eval{ @unpacked_records }.first["message"]
    # message a and b are queued, message c is staged
    assert_equal([
                   [@dm0],
                   "c" * 1_000_000,
                   [@dm0, @dm0, @dm0],
                   [5000, 1, 1],
                   [["x"] * 5000, "a", "b"].flatten
                 ],
                 [
                   @p.stage.keys,
                   staged_message,
                   @p.queue.map(&:metadata),
                   @p.queue.map(&:size),
                   messages
                 ])
  end
end
1039
+
1040
# Custom-format write behavior (raw arrays of pre-formatted strings, no
# format proc) with a lowered chunk_limit_size (1,280,000 bytes). Each test
# starts from one staged chunk (@dm0, 5000 rows of 128 bytes) and an empty queue.
sub_test_case 'custom format with configuration for test with lower chunk limit size' do
  setup do
    @p = create_buffer({"chunk_limit_size" => 1_280_000})
    @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    # one pre-formatted row is exactly 128 bytes
    @row = "x" * 128
    @data0 = data0 = [@row] * 5000
    # Stub #resume on this instance only, so the buffer starts with a known
    # staged chunk and an empty queue.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        staged = {
          dm0 => create_chunk(dm0, data0).staged!,
        }
        queued = []
        return staged, queued
      }
    end
    @p.start
  end

  test '#write appends event stream into staged chunk' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    data = [@row] * 1000
    @p.write({@dm0 => data})

    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    # 5000 original rows + 1000 appended rows, all in the staged chunk
    assert_equal (@row * 6000), @p.stage[@dm0].read
  end

  test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    staged_chunk_object_id = @p.stage[@dm0].object_id

    assert_equal 1_280_000, @p.chunk_limit_size

    data = [@row] * 8000
    @p.write({@dm0 => data})

    # the previously staged chunk (same object) is filled up and enqueued
    assert_equal [@dm0], @p.queue.map(&:metadata)
    assert_equal [staged_chunk_object_id], @p.queue.map(&:object_id)
    assert_equal [@dm0], @p.stage.keys

    assert_equal [9800], @p.queue.map(&:size)
    assert_equal 3200, @p.stage[@dm0].size
    # 9800 + 3200 == 5000 + 8000
  end

  test '#write writes event stream into many chunks including staging chunk if event stream is larger than chunk limit size' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    staged_chunk_object_id = @p.stage[@dm0].object_id

    assert_equal 1_280_000, @p.chunk_limit_size

    assert_equal 5000, @p.stage[@dm0].size

    data = [@row] * 45000
    @p.write({@dm0 => data})

    # with custom format the existing staged chunk participates in the split
    # and becomes the first queued chunk
    assert_equal staged_chunk_object_id, @p.queue.first.object_id

    assert_equal [@dm0], @p.stage.keys
    assert_equal 900, @p.stage[@dm0].size
    assert_equal [@dm0, @dm0, @dm0, @dm0, @dm0], @p.queue.map(&:metadata)
    assert_equal [9500, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
    ##### 900 + 9500 + 9900 * 4 == 5000 + 45000
  end

  test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    # a single string larger than chunk_limit_size can never fit in one chunk
    es = ["x" * 1_280_000 + "x" * 300]
    assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
      @p.write({@dm0 => es})
    end
  end

  test 'confirm that every array message which is smaller than chunk threshold does not raise BufferChunkOverflowError' do
    assert_equal [@dm0], @p.stage.keys
    assert_equal [], @p.queue.map(&:metadata)

    assert_equal 1_280_000, @p.chunk_limit_size

    # each element is under the limit, so no overflow should be raised even
    # though the three together exceed one chunk
    es = ["a" * 1_000_000, "b" * 1_000_000, "c" * 1_000_000]
    assert_nothing_raised do
      @p.write({@dm0 => es})
    end
    queue_messages = @p.queue.collect do |chunk|
      # collect first character of each message
      chunk.chunk[0]
    end
    assert_equal([
                   [@dm0],
                   1,
                   "c",
                   [@dm0, @dm0, @dm0],
                   [5000, 1, 1],
                   ["x", "a", "b"]
                 ],
                 [
                   @p.stage.keys,
                   @p.stage[@dm0].size,
                   @p.stage[@dm0].chunk[0],
                   @p.queue.map(&:metadata),
                   @p.queue.map(&:size),
                   queue_messages
                 ])
  end
end
1159
+
1160
# Buffer limit predicates (#storable?, #chunk_size_over?, #chunk_size_full?)
# with small limits: chunk_limit_size=1024 bytes, total_limit_size=10240 bytes.
sub_test_case 'with configuration for test with lower limits' do
  setup do
    @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240})
    @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
    @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
    @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
    # Stub #resume on this instance only: 2 staged chunks (dm2, dm3) and
    # 9 queued chunks (5 x dm0, 4 x dm1), nearly filling total_limit_size.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        staged = {
          dm2 => create_chunk(dm2, ["b" * 128] * 7).staged!,
          dm3 => create_chunk(dm3, ["c" * 128] * 5).staged!,
        }
        queued = [
          create_chunk(dm0, ["0" * 128] * 8).enqueued!,
          create_chunk(dm0, ["0" * 128] * 8).enqueued!,
          create_chunk(dm0, ["0" * 128] * 8).enqueued!,
          create_chunk(dm0, ["0" * 128] * 8).enqueued!,
          create_chunk(dm0, ["0" * 128] * 8).enqueued!,
          create_chunk(dm1, ["a" * 128] * 8).enqueued!,
          create_chunk(dm1, ["a" * 128] * 8).enqueued!,
          create_chunk(dm1, ["a" * 128] * 8).enqueued!, # 8th queued chunk
          create_chunk(dm1, ["a" * 128] * 3).enqueued!,
        ]
        return staged, queued
      }
    end
    @p.start
  end

  test '#storable? returns false when too many data exist' do
    assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0,@dm1,@dm1,@dm1,@dm1], @p.queue.map(&:metadata)
    assert_equal [@dm2,@dm3], @p.stage.keys

    assert_equal 128*8*8+128*3, @p.queue_size
    assert_equal 128*7+128*5, @p.stage_size

    assert @p.storable?

    dm3 = @p.metadata(timekey: @dm3.timekey)
    @p.write({dm3 => ["c" * 128]})

    # writing one more 128-byte row hits total_limit_size exactly
    assert_equal 10240, (@p.stage_size + @p.queue_size)
    assert !@p.storable?
  end

  test '#chunk_size_over? returns true if chunk size is bigger than limit' do
    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    # 8 * 128 == 1024 bytes: exactly at the limit, not over
    c1 = create_chunk(m, ["a" * 128] * 8)
    assert !@p.chunk_size_over?(c1)

    c2 = create_chunk(m, ["a" * 128] * 9)
    assert @p.chunk_size_over?(c2)

    # even a single extra byte beyond the limit counts as over
    c3 = create_chunk(m, ["a" * 128] * 8 + ["a"])
    assert @p.chunk_size_over?(c3)
  end

  test '#chunk_size_full? returns true if chunk size is enough big against limit' do
    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    c1 = create_chunk(m, ["a" * 128] * 7)
    assert !@p.chunk_size_full?(c1)

    c2 = create_chunk(m, ["a" * 128] * 8)
    assert @p.chunk_size_full?(c2)

    # "full" means >= 95% of chunk_limit_size; 6.5 rows (832 bytes) is below it
    assert_equal 0.95, @p.chunk_full_threshold
    c3 = create_chunk(m, ["a" * 128] * 6 + ["a" * 64])
    assert !@p.chunk_size_full?(c3)
  end
end
1233
+
1234
# chunk_limit_records caps the number of records per chunk, independently of
# the byte-size limit (chunk_limit_size).
sub_test_case 'with configuration includes chunk_limit_records' do
  setup do
    @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "chunk_limit_records" => 6})
    @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
    @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
    @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
    # Stub #resume on this instance only: 2 staged chunks and 4 queued chunks.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        staged = {
          dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
          dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
        }
        queued = [
          create_chunk(dm0, ["0" * 128] * 6).enqueued!,
          create_chunk(dm1, ["a" * 128] * 6).enqueued!,
          create_chunk(dm1, ["a" * 128] * 6).enqueued!,
          create_chunk(dm1, ["a" * 128] * 3).enqueued!,
        ]
        return staged, queued
      }
    end
    @p.start
  end

  test '#chunk_size_over? returns true if too many records exists in a chunk even if its bytes is less than limit' do
    assert_equal 6, @p.chunk_limit_records

    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    # exactly 6 records: at the record limit, not over
    c1 = create_chunk(m, ["a" * 128] * 6)
    assert_equal 6, c1.size
    assert !@p.chunk_size_over?(c1)

    c2 = create_chunk(m, ["a" * 128] * 7)
    assert @p.chunk_size_over?(c2)

    # a 7th record counts as over regardless of its byte size
    c3 = create_chunk(m, ["a" * 128] * 6 + ["a"])
    assert @p.chunk_size_over?(c3)
  end

  test '#chunk_size_full? returns true if enough many records exists in a chunk even if its bytes is less than limit' do
    assert_equal 6, @p.chunk_limit_records

    m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)

    c1 = create_chunk(m, ["a" * 128] * 5)
    assert_equal 5, c1.size
    assert !@p.chunk_size_full?(c1)

    # reaching chunk_limit_records makes the chunk "full"
    c2 = create_chunk(m, ["a" * 128] * 6)
    assert @p.chunk_size_full?(c2)

    c3 = create_chunk(m, ["a" * 128] * 5 + ["a"])
    assert @p.chunk_size_full?(c3)
  end
end
1291
+
1292
# queue_limit_length (deprecated) should override total_limit_size so that it
# becomes chunk_limit_size * queue_limit_length.
sub_test_case 'with configuration includes queue_limit_length' do
  setup do
    @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "queue_limit_length" => 5})
    @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
    @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
    @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
    @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
    # Stub #resume on this instance only: 2 staged chunks and 4 queued chunks,
    # built data-driven from (metadata, fill letter, record count) triples.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        staged = {
          dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
          dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
        }
        queued = [
          [dm0, "0", 6],
          [dm1, "a", 6],
          [dm1, "a", 6],
          [dm1, "a", 3],
        ].map { |meta, letter, count| create_chunk(meta, [letter * 128] * count).enqueued! }
        return staged, queued
      }
    end
    @p.start
  end

  test '#configure will overwrite standard configuration if queue_limit_length' do
    assert_equal 1024, @p.chunk_limit_size
    assert_equal 5, @p.queue_limit_length
    # total_limit_size is recomputed from queue_limit_length, ignoring the
    # explicitly configured 10240
    assert_equal (1024*5), @p.total_limit_size
  end
end
1323
+
1324
# Behavior of a buffer configured with compress: gzip.
sub_test_case 'when compress is gzip' do
  setup do
    @p = create_buffer({'compress' => 'gzip'})
    @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
  end

  test '#compress returns :gzip' do
    assert_equal :gzip, @p.compress
  end

  test 'create decompressable chunk' do
    # generated chunks must be extended with the Decompressable mixin
    chunk = @p.generate_chunk(create_metadata)
    assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::Decompressable)
  end

  test '#write compressed data which exceeds chunk_limit_size, it raises BufferChunkOverflowError' do
    # use a tiny chunk_limit_size so a single compressed record overflows
    @p = create_buffer({'compress' => 'gzip', 'chunk_limit_size' => 70})
    timestamp = event_time('2016-04-11 16:00:02 +0000')
    es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}], # overflow
                                       [timestamp, {"message" => "aaa"}],
                                       [timestamp, {"message" => "bbb"}]])
    assert_equal [], @p.queue.map(&:metadata)
    assert_equal 70, @p.chunk_limit_size

    # calculate the actual boundary value. it varies on machine
    c = @p.generate_chunk(create_metadata)
    c.append(Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}]]), compress: :gzip)
    overflow_bytes = c.bytesize

    messages = "concatenated/appended a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size (70)"
    assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages) do
      # test format == nil && compress == :gzip
      @p.write({@dm0 => es})
    end
    # message a and b occupies each chunks in full, so both of messages are queued (no staged chunk)
    assert_equal([2, [@dm0, @dm0], [1, 1], nil],
                 [@p.queue.size, @p.queue.map(&:metadata), @p.queue.map(&:size), @p.stage[@dm0]])
  end
end
1363
+
1364
# #statistics should report the remaining buffer capacity as a percentage.
sub_test_case '#statistics' do
  setup do
    @p = create_buffer({ "total_limit_size" => 1024 })
    meta = create_metadata(Time.parse('2020-03-13 16:00:00 +0000').to_i, nil, nil)

    # Stub #resume on this instance only: one queued chunk occupying 90% of
    # the total limit (1024 - 102 == 922 bytes), leaving 10% free.
    (class << @p; self; end).module_eval do
      define_method(:resume) {
        chunk = create_chunk(meta, ["a" * (1024 - 102)]).enqueued!
        return {}, [chunk]
      }
    end

    @p.start
  end

  test 'returns available_buffer_space_ratios' do
    assert_equal 10.0, @p.statistics['buffer']['available_buffer_space_ratios']
  end
end
1383
+ end