fluentd 0.12.40 → 1.6.2

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. See the release notes and changelog below for more details.

Files changed (428)
  1. checksums.yaml +5 -5
  2. data/.github/ISSUE_TEMPLATE/bug_report.md +39 -0
  3. data/.github/ISSUE_TEMPLATE/feature_request.md +23 -0
  4. data/.github/ISSUE_TEMPLATE.md +17 -0
  5. data/.github/PULL_REQUEST_TEMPLATE.md +13 -0
  6. data/.gitignore +5 -0
  7. data/.gitlab/cicd-template.yaml +10 -0
  8. data/.gitlab-ci.yml +147 -0
  9. data/.travis.yml +56 -20
  10. data/ADOPTERS.md +5 -0
  11. data/CHANGELOG.md +1369 -0
  12. data/CONTRIBUTING.md +16 -5
  13. data/GOVERNANCE.md +55 -0
  14. data/Gemfile +5 -0
  15. data/GithubWorkflow.md +78 -0
  16. data/LICENSE +202 -0
  17. data/MAINTAINERS.md +7 -0
  18. data/README.md +23 -11
  19. data/Rakefile +48 -2
  20. data/Vagrantfile +17 -0
  21. data/appveyor.yml +37 -0
  22. data/bin/fluent-binlog-reader +7 -0
  23. data/bin/fluent-ca-generate +6 -0
  24. data/bin/fluent-plugin-config-format +5 -0
  25. data/bin/fluent-plugin-generate +5 -0
  26. data/bin/fluentd +3 -0
  27. data/code-of-conduct.md +3 -0
  28. data/example/copy_roundrobin.conf +39 -0
  29. data/example/counter.conf +18 -0
  30. data/example/in_dummy_blocks.conf +17 -0
  31. data/example/in_dummy_with_compression.conf +23 -0
  32. data/example/in_forward.conf +7 -0
  33. data/example/in_forward_client.conf +37 -0
  34. data/example/in_forward_shared_key.conf +15 -0
  35. data/example/in_forward_tls.conf +14 -0
  36. data/example/in_forward_users.conf +24 -0
  37. data/example/in_forward_workers.conf +21 -0
  38. data/example/in_http.conf +3 -1
  39. data/example/in_out_forward.conf +17 -0
  40. data/example/logevents.conf +25 -0
  41. data/example/multi_filters.conf +61 -0
  42. data/example/out_exec_filter.conf +42 -0
  43. data/example/out_forward.conf +13 -13
  44. data/example/out_forward_buf_file.conf +23 -0
  45. data/example/out_forward_client.conf +109 -0
  46. data/example/out_forward_heartbeat_none.conf +16 -0
  47. data/example/out_forward_shared_key.conf +36 -0
  48. data/example/out_forward_tls.conf +18 -0
  49. data/example/out_forward_users.conf +65 -0
  50. data/example/out_null.conf +36 -0
  51. data/example/secondary_file.conf +42 -0
  52. data/example/suppress_config_dump.conf +7 -0
  53. data/example/worker_section.conf +36 -0
  54. data/fluent.conf +29 -0
  55. data/fluentd.gemspec +21 -11
  56. data/lib/fluent/agent.rb +67 -90
  57. data/lib/fluent/clock.rb +62 -0
  58. data/lib/fluent/command/binlog_reader.rb +244 -0
  59. data/lib/fluent/command/ca_generate.rb +181 -0
  60. data/lib/fluent/command/cat.rb +42 -18
  61. data/lib/fluent/command/debug.rb +12 -10
  62. data/lib/fluent/command/fluentd.rb +153 -5
  63. data/lib/fluent/command/plugin_config_formatter.rb +292 -0
  64. data/lib/fluent/command/plugin_generator.rb +324 -0
  65. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  66. data/lib/fluent/compat/detach_process_mixin.rb +33 -0
  67. data/lib/fluent/compat/exec_util.rb +129 -0
  68. data/lib/fluent/compat/file_util.rb +54 -0
  69. data/lib/fluent/compat/filter.rb +68 -0
  70. data/lib/fluent/compat/formatter.rb +111 -0
  71. data/lib/fluent/compat/formatter_utils.rb +85 -0
  72. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  73. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  74. data/lib/fluent/compat/input.rb +49 -0
  75. data/lib/fluent/compat/output.rb +718 -0
  76. data/lib/fluent/compat/output_chain.rb +60 -0
  77. data/lib/fluent/compat/parser.rb +310 -0
  78. data/lib/fluent/compat/parser_utils.rb +40 -0
  79. data/lib/fluent/compat/propagate_default.rb +62 -0
  80. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  81. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  82. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  83. data/lib/fluent/compat/socket_util.rb +165 -0
  84. data/lib/fluent/compat/string_util.rb +34 -0
  85. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  86. data/lib/fluent/compat/type_converter.rb +90 -0
  87. data/lib/fluent/config/configure_proxy.rb +210 -62
  88. data/lib/fluent/config/dsl.rb +12 -5
  89. data/lib/fluent/config/element.rb +107 -9
  90. data/lib/fluent/config/literal_parser.rb +9 -3
  91. data/lib/fluent/config/parser.rb +4 -4
  92. data/lib/fluent/config/section.rb +51 -14
  93. data/lib/fluent/config/types.rb +28 -13
  94. data/lib/fluent/config/v1_parser.rb +3 -5
  95. data/lib/fluent/config.rb +23 -20
  96. data/lib/fluent/configurable.rb +79 -21
  97. data/lib/fluent/counter/base_socket.rb +46 -0
  98. data/lib/fluent/counter/client.rb +297 -0
  99. data/lib/fluent/counter/error.rb +86 -0
  100. data/lib/fluent/counter/mutex_hash.rb +163 -0
  101. data/lib/fluent/counter/server.rb +273 -0
  102. data/lib/fluent/counter/store.rb +205 -0
  103. data/lib/fluent/counter/validator.rb +145 -0
  104. data/lib/fluent/counter.rb +23 -0
  105. data/lib/fluent/daemon.rb +15 -0
  106. data/lib/fluent/engine.rb +102 -65
  107. data/lib/fluent/env.rb +7 -3
  108. data/lib/fluent/error.rb +30 -0
  109. data/lib/fluent/event.rb +197 -21
  110. data/lib/fluent/event_router.rb +93 -10
  111. data/lib/fluent/filter.rb +2 -50
  112. data/lib/fluent/formatter.rb +4 -293
  113. data/lib/fluent/input.rb +2 -32
  114. data/lib/fluent/label.rb +10 -2
  115. data/lib/fluent/load.rb +3 -3
  116. data/lib/fluent/log.rb +348 -81
  117. data/lib/fluent/match.rb +37 -36
  118. data/lib/fluent/mixin.rb +12 -176
  119. data/lib/fluent/msgpack_factory.rb +62 -0
  120. data/lib/fluent/output.rb +10 -612
  121. data/lib/fluent/output_chain.rb +23 -0
  122. data/lib/fluent/parser.rb +4 -800
  123. data/lib/fluent/plugin/bare_output.rb +63 -0
  124. data/lib/fluent/plugin/base.rb +192 -0
  125. data/lib/fluent/plugin/buf_file.rb +128 -174
  126. data/lib/fluent/plugin/buf_memory.rb +9 -92
  127. data/lib/fluent/plugin/buffer/chunk.rb +221 -0
  128. data/lib/fluent/plugin/buffer/file_chunk.rb +383 -0
  129. data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
  130. data/lib/fluent/plugin/buffer.rb +779 -0
  131. data/lib/fluent/plugin/compressable.rb +92 -0
  132. data/lib/fluent/plugin/exec_util.rb +3 -108
  133. data/lib/fluent/plugin/file_util.rb +4 -34
  134. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  135. data/lib/fluent/plugin/filter.rb +93 -0
  136. data/lib/fluent/plugin/filter_grep.rb +117 -34
  137. data/lib/fluent/plugin/filter_parser.rb +85 -62
  138. data/lib/fluent/plugin/filter_record_transformer.rb +27 -39
  139. data/lib/fluent/plugin/filter_stdout.rb +15 -12
  140. data/lib/fluent/plugin/formatter.rb +50 -0
  141. data/lib/fluent/plugin/formatter_csv.rb +52 -0
  142. data/lib/fluent/plugin/formatter_hash.rb +33 -0
  143. data/lib/fluent/plugin/formatter_json.rb +55 -0
  144. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  145. data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
  146. data/lib/fluent/plugin/formatter_out_file.rb +51 -0
  147. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  148. data/lib/fluent/plugin/formatter_stdout.rb +76 -0
  149. data/lib/fluent/plugin/formatter_tsv.rb +38 -0
  150. data/lib/fluent/plugin/in_debug_agent.rb +17 -6
  151. data/lib/fluent/plugin/in_dummy.rb +47 -20
  152. data/lib/fluent/plugin/in_exec.rb +55 -123
  153. data/lib/fluent/plugin/in_forward.rb +299 -216
  154. data/lib/fluent/plugin/in_gc_stat.rb +14 -36
  155. data/lib/fluent/plugin/in_http.rb +204 -91
  156. data/lib/fluent/plugin/in_monitor_agent.rb +186 -258
  157. data/lib/fluent/plugin/in_object_space.rb +13 -41
  158. data/lib/fluent/plugin/in_syslog.rb +112 -134
  159. data/lib/fluent/plugin/in_tail.rb +408 -745
  160. data/lib/fluent/plugin/in_tcp.rb +66 -9
  161. data/lib/fluent/plugin/in_udp.rb +60 -11
  162. data/lib/fluent/plugin/{in_stream.rb → in_unix.rb} +8 -4
  163. data/lib/fluent/plugin/input.rb +37 -0
  164. data/lib/fluent/plugin/multi_output.rb +158 -0
  165. data/lib/fluent/plugin/out_copy.rb +23 -35
  166. data/lib/fluent/plugin/out_exec.rb +67 -70
  167. data/lib/fluent/plugin/out_exec_filter.rb +204 -271
  168. data/lib/fluent/plugin/out_file.rb +267 -73
  169. data/lib/fluent/plugin/out_forward.rb +854 -325
  170. data/lib/fluent/plugin/out_null.rb +42 -9
  171. data/lib/fluent/plugin/out_relabel.rb +9 -5
  172. data/lib/fluent/plugin/out_roundrobin.rb +18 -37
  173. data/lib/fluent/plugin/out_secondary_file.rb +133 -0
  174. data/lib/fluent/plugin/out_stdout.rb +43 -10
  175. data/lib/fluent/plugin/out_stream.rb +7 -2
  176. data/lib/fluent/plugin/output.rb +1498 -0
  177. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  178. data/lib/fluent/plugin/parser.rb +191 -0
  179. data/lib/fluent/plugin/parser_apache.rb +28 -0
  180. data/lib/fluent/plugin/parser_apache2.rb +88 -0
  181. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  182. data/lib/fluent/plugin/parser_csv.rb +39 -0
  183. data/lib/fluent/plugin/parser_json.rb +94 -0
  184. data/lib/fluent/plugin/parser_ltsv.rb +49 -0
  185. data/lib/fluent/plugin/parser_msgpack.rb +50 -0
  186. data/lib/fluent/plugin/parser_multiline.rb +106 -0
  187. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  188. data/lib/fluent/plugin/parser_none.rb +36 -0
  189. data/lib/fluent/plugin/parser_regexp.rb +68 -0
  190. data/lib/fluent/plugin/parser_syslog.rb +142 -0
  191. data/lib/fluent/plugin/parser_tsv.rb +42 -0
  192. data/lib/fluent/plugin/socket_util.rb +3 -143
  193. data/lib/fluent/plugin/storage.rb +84 -0
  194. data/lib/fluent/plugin/storage_local.rb +164 -0
  195. data/lib/fluent/plugin/string_util.rb +3 -15
  196. data/lib/fluent/plugin.rb +122 -121
  197. data/lib/fluent/plugin_helper/cert_option.rb +178 -0
  198. data/lib/fluent/plugin_helper/child_process.rb +364 -0
  199. data/lib/fluent/plugin_helper/compat_parameters.rb +333 -0
  200. data/lib/fluent/plugin_helper/counter.rb +51 -0
  201. data/lib/fluent/plugin_helper/event_emitter.rb +93 -0
  202. data/lib/fluent/plugin_helper/event_loop.rb +170 -0
  203. data/lib/fluent/plugin_helper/extract.rb +104 -0
  204. data/lib/fluent/plugin_helper/formatter.rb +147 -0
  205. data/lib/fluent/plugin_helper/http_server/app.rb +79 -0
  206. data/lib/fluent/plugin_helper/http_server/compat/server.rb +81 -0
  207. data/lib/fluent/plugin_helper/http_server/compat/webrick_handler.rb +58 -0
  208. data/lib/fluent/plugin_helper/http_server/methods.rb +35 -0
  209. data/lib/fluent/plugin_helper/http_server/request.rb +42 -0
  210. data/lib/fluent/plugin_helper/http_server/router.rb +54 -0
  211. data/lib/fluent/plugin_helper/http_server/server.rb +87 -0
  212. data/lib/fluent/plugin_helper/http_server.rb +76 -0
  213. data/lib/fluent/plugin_helper/inject.rb +151 -0
  214. data/lib/fluent/plugin_helper/parser.rb +147 -0
  215. data/lib/fluent/plugin_helper/record_accessor.rb +210 -0
  216. data/lib/fluent/plugin_helper/retry_state.rb +205 -0
  217. data/lib/fluent/plugin_helper/server.rb +807 -0
  218. data/lib/fluent/plugin_helper/socket.rb +250 -0
  219. data/lib/fluent/plugin_helper/socket_option.rb +80 -0
  220. data/lib/fluent/plugin_helper/storage.rb +349 -0
  221. data/lib/fluent/plugin_helper/thread.rb +179 -0
  222. data/lib/fluent/plugin_helper/timer.rb +92 -0
  223. data/lib/fluent/plugin_helper.rb +73 -0
  224. data/lib/fluent/plugin_id.rb +80 -0
  225. data/lib/fluent/process.rb +3 -489
  226. data/lib/fluent/registry.rb +52 -10
  227. data/lib/fluent/root_agent.rb +204 -42
  228. data/lib/fluent/supervisor.rb +597 -359
  229. data/lib/fluent/system_config.rb +131 -42
  230. data/lib/fluent/test/base.rb +6 -54
  231. data/lib/fluent/test/driver/base.rb +224 -0
  232. data/lib/fluent/test/driver/base_owned.rb +70 -0
  233. data/lib/fluent/test/driver/base_owner.rb +135 -0
  234. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  235. data/lib/fluent/test/driver/filter.rb +57 -0
  236. data/lib/fluent/test/driver/formatter.rb +30 -0
  237. data/lib/fluent/test/driver/input.rb +31 -0
  238. data/lib/fluent/test/driver/multi_output.rb +53 -0
  239. data/lib/fluent/test/driver/output.rb +102 -0
  240. data/lib/fluent/test/driver/parser.rb +30 -0
  241. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  242. data/lib/fluent/test/filter_test.rb +0 -1
  243. data/lib/fluent/test/formatter_test.rb +4 -1
  244. data/lib/fluent/test/helpers.rb +58 -10
  245. data/lib/fluent/test/input_test.rb +27 -19
  246. data/lib/fluent/test/log.rb +79 -0
  247. data/lib/fluent/test/output_test.rb +28 -39
  248. data/lib/fluent/test/parser_test.rb +3 -1
  249. data/lib/fluent/test/startup_shutdown.rb +46 -0
  250. data/lib/fluent/test.rb +33 -1
  251. data/lib/fluent/time.rb +450 -1
  252. data/lib/fluent/timezone.rb +27 -3
  253. data/lib/fluent/{status.rb → unique_id.rb} +15 -24
  254. data/lib/fluent/version.rb +1 -1
  255. data/lib/fluent/winsvc.rb +85 -0
  256. data/templates/new_gem/Gemfile +3 -0
  257. data/templates/new_gem/README.md.erb +43 -0
  258. data/templates/new_gem/Rakefile +13 -0
  259. data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
  260. data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
  261. data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
  262. data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
  263. data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
  264. data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
  265. data/templates/new_gem/test/helper.rb.erb +8 -0
  266. data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
  267. data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
  268. data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
  269. data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
  270. data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
  271. data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
  272. data/templates/plugin_config_formatter/param.md.erb +34 -0
  273. data/templates/plugin_config_formatter/section.md.erb +12 -0
  274. data/test/command/test_binlog_reader.rb +346 -0
  275. data/test/command/test_ca_generate.rb +70 -0
  276. data/test/command/test_fluentd.rb +901 -0
  277. data/test/command/test_plugin_config_formatter.rb +276 -0
  278. data/test/command/test_plugin_generator.rb +92 -0
  279. data/test/compat/test_calls_super.rb +166 -0
  280. data/test/compat/test_parser.rb +92 -0
  281. data/test/config/test_config_parser.rb +126 -2
  282. data/test/config/test_configurable.rb +946 -187
  283. data/test/config/test_configure_proxy.rb +424 -74
  284. data/test/config/test_dsl.rb +11 -11
  285. data/test/config/test_element.rb +500 -0
  286. data/test/config/test_literal_parser.rb +8 -0
  287. data/test/config/test_plugin_configuration.rb +56 -0
  288. data/test/config/test_section.rb +79 -7
  289. data/test/config/test_system_config.rb +122 -35
  290. data/test/config/test_types.rb +38 -0
  291. data/test/counter/test_client.rb +559 -0
  292. data/test/counter/test_error.rb +44 -0
  293. data/test/counter/test_mutex_hash.rb +179 -0
  294. data/test/counter/test_server.rb +589 -0
  295. data/test/counter/test_store.rb +258 -0
  296. data/test/counter/test_validator.rb +137 -0
  297. data/test/helper.rb +89 -6
  298. data/test/helpers/fuzzy_assert.rb +89 -0
  299. data/test/plugin/test_bare_output.rb +118 -0
  300. data/test/plugin/test_base.rb +115 -0
  301. data/test/plugin/test_buf_file.rb +823 -460
  302. data/test/plugin/test_buf_memory.rb +32 -194
  303. data/test/plugin/test_buffer.rb +1233 -0
  304. data/test/plugin/test_buffer_chunk.rb +198 -0
  305. data/test/plugin/test_buffer_file_chunk.rb +844 -0
  306. data/test/plugin/test_buffer_memory_chunk.rb +338 -0
  307. data/test/plugin/test_compressable.rb +84 -0
  308. data/test/plugin/test_filter.rb +357 -0
  309. data/test/plugin/test_filter_grep.rb +540 -29
  310. data/test/plugin/test_filter_parser.rb +439 -452
  311. data/test/plugin/test_filter_record_transformer.rb +123 -166
  312. data/test/plugin/test_filter_stdout.rb +160 -72
  313. data/test/plugin/test_formatter_csv.rb +111 -0
  314. data/test/plugin/test_formatter_hash.rb +35 -0
  315. data/test/plugin/test_formatter_json.rb +51 -0
  316. data/test/plugin/test_formatter_ltsv.rb +62 -0
  317. data/test/plugin/test_formatter_msgpack.rb +28 -0
  318. data/test/plugin/test_formatter_out_file.rb +95 -0
  319. data/test/plugin/test_formatter_single_value.rb +38 -0
  320. data/test/plugin/test_formatter_tsv.rb +68 -0
  321. data/test/plugin/test_in_debug_agent.rb +24 -1
  322. data/test/plugin/test_in_dummy.rb +111 -18
  323. data/test/plugin/test_in_exec.rb +200 -113
  324. data/test/plugin/test_in_forward.rb +990 -387
  325. data/test/plugin/test_in_gc_stat.rb +10 -8
  326. data/test/plugin/test_in_http.rb +600 -224
  327. data/test/plugin/test_in_monitor_agent.rb +690 -0
  328. data/test/plugin/test_in_object_space.rb +24 -8
  329. data/test/plugin/test_in_syslog.rb +154 -215
  330. data/test/plugin/test_in_tail.rb +1006 -707
  331. data/test/plugin/test_in_tcp.rb +125 -48
  332. data/test/plugin/test_in_udp.rb +204 -63
  333. data/test/plugin/{test_in_stream.rb → test_in_unix.rb} +14 -13
  334. data/test/plugin/test_input.rb +126 -0
  335. data/test/plugin/test_metadata.rb +89 -0
  336. data/test/plugin/test_multi_output.rb +180 -0
  337. data/test/plugin/test_out_copy.rb +117 -112
  338. data/test/plugin/test_out_exec.rb +258 -53
  339. data/test/plugin/test_out_exec_filter.rb +538 -115
  340. data/test/plugin/test_out_file.rb +865 -178
  341. data/test/plugin/test_out_forward.rb +998 -210
  342. data/test/plugin/test_out_null.rb +105 -0
  343. data/test/plugin/test_out_relabel.rb +28 -0
  344. data/test/plugin/test_out_roundrobin.rb +36 -29
  345. data/test/plugin/test_out_secondary_file.rb +458 -0
  346. data/test/plugin/test_out_stdout.rb +135 -37
  347. data/test/plugin/test_out_stream.rb +18 -0
  348. data/test/plugin/test_output.rb +984 -0
  349. data/test/plugin/test_output_as_buffered.rb +2021 -0
  350. data/test/plugin/test_output_as_buffered_backup.rb +312 -0
  351. data/test/plugin/test_output_as_buffered_compress.rb +165 -0
  352. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  353. data/test/plugin/test_output_as_buffered_retries.rb +911 -0
  354. data/test/plugin/test_output_as_buffered_secondary.rb +874 -0
  355. data/test/plugin/test_output_as_standard.rb +374 -0
  356. data/test/plugin/test_owned_by.rb +35 -0
  357. data/test/plugin/test_parser.rb +359 -0
  358. data/test/plugin/test_parser_apache.rb +42 -0
  359. data/test/plugin/test_parser_apache2.rb +47 -0
  360. data/test/plugin/test_parser_apache_error.rb +45 -0
  361. data/test/plugin/test_parser_csv.rb +103 -0
  362. data/test/plugin/test_parser_json.rb +138 -0
  363. data/test/plugin/test_parser_labeled_tsv.rb +145 -0
  364. data/test/plugin/test_parser_multiline.rb +100 -0
  365. data/test/plugin/test_parser_nginx.rb +88 -0
  366. data/test/plugin/test_parser_none.rb +52 -0
  367. data/test/plugin/test_parser_regexp.rb +289 -0
  368. data/test/plugin/test_parser_syslog.rb +441 -0
  369. data/test/plugin/test_parser_tsv.rb +122 -0
  370. data/test/plugin/test_storage.rb +167 -0
  371. data/test/plugin/test_storage_local.rb +335 -0
  372. data/test/plugin_helper/data/cert/cert-key.pem +27 -0
  373. data/test/plugin_helper/data/cert/cert-with-no-newline.pem +19 -0
  374. data/test/plugin_helper/data/cert/cert.pem +19 -0
  375. data/test/plugin_helper/http_server/test_app.rb +65 -0
  376. data/test/plugin_helper/http_server/test_route.rb +32 -0
  377. data/test/plugin_helper/test_cert_option.rb +16 -0
  378. data/test/plugin_helper/test_child_process.rb +794 -0
  379. data/test/plugin_helper/test_compat_parameters.rb +353 -0
  380. data/test/plugin_helper/test_event_emitter.rb +51 -0
  381. data/test/plugin_helper/test_event_loop.rb +52 -0
  382. data/test/plugin_helper/test_extract.rb +194 -0
  383. data/test/plugin_helper/test_formatter.rb +255 -0
  384. data/test/plugin_helper/test_http_server_helper.rb +205 -0
  385. data/test/plugin_helper/test_inject.rb +519 -0
  386. data/test/plugin_helper/test_parser.rb +264 -0
  387. data/test/plugin_helper/test_record_accessor.rb +197 -0
  388. data/test/plugin_helper/test_retry_state.rb +442 -0
  389. data/test/plugin_helper/test_server.rb +1714 -0
  390. data/test/plugin_helper/test_storage.rb +542 -0
  391. data/test/plugin_helper/test_thread.rb +164 -0
  392. data/test/plugin_helper/test_timer.rb +132 -0
  393. data/test/scripts/exec_script.rb +0 -6
  394. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
  395. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
  396. data/test/scripts/fluent/plugin/out_test.rb +23 -15
  397. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  398. data/test/test_clock.rb +164 -0
  399. data/test/test_config.rb +16 -7
  400. data/test/test_configdsl.rb +2 -2
  401. data/test/test_event.rb +360 -13
  402. data/test/test_event_router.rb +108 -11
  403. data/test/test_event_time.rb +199 -0
  404. data/test/test_filter.rb +48 -6
  405. data/test/test_formatter.rb +11 -391
  406. data/test/test_input.rb +1 -1
  407. data/test/test_log.rb +591 -31
  408. data/test/test_mixin.rb +1 -1
  409. data/test/test_output.rb +121 -185
  410. data/test/test_plugin.rb +251 -0
  411. data/test/test_plugin_classes.rb +177 -10
  412. data/test/test_plugin_helper.rb +81 -0
  413. data/test/test_plugin_id.rb +101 -0
  414. data/test/test_process.rb +8 -42
  415. data/test/test_root_agent.rb +766 -21
  416. data/test/test_supervisor.rb +481 -0
  417. data/test/test_test_drivers.rb +135 -0
  418. data/test/test_time_formatter.rb +282 -0
  419. data/test/test_time_parser.rb +231 -0
  420. data/test/test_unique_id.rb +47 -0
  421. metadata +454 -60
  422. data/COPYING +0 -14
  423. data/ChangeLog +0 -666
  424. data/lib/fluent/buffer.rb +0 -365
  425. data/lib/fluent/plugin/in_status.rb +0 -76
  426. data/test/plugin/test_in_status.rb +0 -38
  427. data/test/test_buffer.rb +0 -624
  428. data/test/test_parser.rb +0 -1305
@@ -0,0 +1,1233 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/buffer'
3
+ require 'fluent/plugin/buffer/memory_chunk'
4
+ require 'fluent/plugin/compressable'
5
+ require 'fluent/plugin/buffer/chunk'
6
+ require 'fluent/event'
7
+ require 'flexmock/test_unit'
8
+
9
+ require 'fluent/log'
10
+ require 'fluent/plugin_id'
11
+
12
+ require 'time'
13
+
14
+ module FluentPluginBufferTest
15
+ class DummyOutputPlugin < Fluent::Plugin::Base
16
+ include Fluent::PluginId
17
+ include Fluent::PluginLoggerMixin
18
+ end
19
+ class DummyMemoryChunkError < StandardError; end
20
+ class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
21
+ attr_reader :append_count, :rollbacked, :closed, :purged
22
+ attr_accessor :failing
23
+ def initialize(metadata, compress: :text)
24
+ super
25
+ @append_count = 0
26
+ @rollbacked = false
27
+ @closed = false
28
+ @purged = false
29
+ @failing = false
30
+ end
31
+ def concat(data, size)
32
+ @append_count += 1
33
+ raise DummyMemoryChunkError if @failing
34
+ super
35
+ end
36
+ def rollback
37
+ super
38
+ @rollbacked = true
39
+ end
40
+ def close
41
+ super
42
+ @closed = true
43
+ end
44
+ def purge
45
+ super
46
+ @purged = true
47
+ end
48
+ end
49
+ class DummyPlugin < Fluent::Plugin::Buffer
50
+ def create_metadata(timekey=nil, tag=nil, variables=nil)
51
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
52
+ end
53
+ def create_chunk(metadata, data)
54
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
55
+ c.append(data)
56
+ c.commit
57
+ c
58
+ end
59
+ def create_chunk_es(metadata, es)
60
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
61
+ c.concat(es.to_msgpack_stream, es.size)
62
+ c.commit
63
+ c
64
+ end
65
+ def resume
66
+ dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
67
+ dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
68
+ dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
69
+ dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
70
+ staged = {
71
+ dm2 => create_chunk(dm2, ["b" * 100]).staged!,
72
+ dm3 => create_chunk(dm3, ["c" * 100]).staged!,
73
+ }
74
+ queued = [
75
+ create_chunk(dm0, ["0" * 100]).enqueued!,
76
+ create_chunk(dm1, ["a" * 100]).enqueued!,
77
+ create_chunk(dm1, ["a" * 3]).enqueued!,
78
+ ]
79
+ return staged, queued
80
+ end
81
+ def generate_chunk(metadata)
82
+ DummyMemoryChunk.new(metadata, compress: @compress)
83
+ end
84
+ end
85
+ end
86
+
87
+ class BufferTest < Test::Unit::TestCase
88
+ def create_buffer(hash)
89
+ buffer_conf = config_element('buffer', '', hash, [])
90
+ owner = FluentPluginBufferTest::DummyOutputPlugin.new
91
+ owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
92
+ p = FluentPluginBufferTest::DummyPlugin.new
93
+ p.owner = owner
94
+ p.configure(buffer_conf)
95
+ p
96
+ end
97
+
98
+ def create_metadata(timekey=nil, tag=nil, variables=nil)
99
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
100
+ end
101
+
102
+ def create_chunk(metadata, data)
103
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
104
+ c.append(data)
105
+ c.commit
106
+ c
107
+ end
108
+
109
+ def create_chunk_es(metadata, es)
110
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
111
+ c.concat(es.to_msgpack_stream, es.size)
112
+ c.commit
113
+ c
114
+ end
115
+
116
+ setup do
117
+ Fluent::Test.setup
118
+ end
119
+
120
+ sub_test_case 'using base buffer class' do
121
+ setup do
122
+ buffer_conf = config_element('buffer', '', {}, [])
123
+ owner = FluentPluginBufferTest::DummyOutputPlugin.new
124
+ owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
125
+ p = Fluent::Plugin::Buffer.new
126
+ p.owner = owner
127
+ p.configure(buffer_conf)
128
+ @p = p
129
+ end
130
+
131
+ test 'default persistency is false' do
132
+ assert !@p.persistent?
133
+ end
134
+
135
+ test 'chunk bytes limit is 8MB, and total bytes limit is 512MB' do
136
+ assert_equal 8*1024*1024, @p.chunk_limit_size
137
+ assert_equal 512*1024*1024, @p.total_limit_size
138
+ end
139
+
140
+ test 'chunk records limit is ignored in default' do
141
+ assert_nil @p.chunk_limit_records
142
+ end
143
+
144
+ test '#storable? checks total size of staged and enqueued(includes dequeued chunks) against total_limit_size' do
145
+ assert_equal 512*1024*1024, @p.total_limit_size
146
+ assert_equal 0, @p.stage_size
147
+ assert_equal 0, @p.queue_size
148
+ assert @p.storable?
149
+
150
+ @p.stage_size = 256 * 1024 * 1024
151
+ @p.queue_size = 256 * 1024 * 1024 - 1
152
+ assert @p.storable?
153
+
154
+ @p.queue_size = 256 * 1024 * 1024
155
+ assert !@p.storable?
156
+ end
157
+
158
+ test '#resume must be implemented by subclass' do
159
+ assert_raise NotImplementedError do
160
+ @p.resume
161
+ end
162
+ end
163
+
164
+ test '#generate_chunk must be implemented by subclass' do
165
+ assert_raise NotImplementedError do
166
+ @p.generate_chunk(Object.new)
167
+ end
168
+ end
169
+ end
170
+
171
+ sub_test_case 'with default configuration and dummy implementation' do
172
+ setup do
173
+ @p = create_buffer({'queued_chunks_limit_size' => 100})
174
+ @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
175
+ @dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
176
+ @dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
177
+ @dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
178
+ @p.start
179
+ end
180
+
181
+ test '#start resumes buffer states and update queued numbers per metadata' do
182
+ plugin = create_buffer({})
183
+
184
+ assert_equal({}, plugin.stage)
185
+ assert_equal([], plugin.queue)
186
+ assert_equal({}, plugin.dequeued)
187
+ assert_equal({}, plugin.queued_num)
188
+ assert_equal([], plugin.metadata_list)
189
+
190
+ assert_equal 0, plugin.stage_size
191
+ assert_equal 0, plugin.queue_size
192
+ assert_equal [], plugin.timekeys
193
+
194
+ # @p is started plugin
195
+
196
+ assert_equal [@dm2,@dm3], @p.stage.keys
197
+ assert_equal "b" * 100, @p.stage[@dm2].read
198
+ assert_equal "c" * 100, @p.stage[@dm3].read
199
+
200
+ assert_equal 200, @p.stage_size
201
+
202
+ assert_equal 3, @p.queue.size
203
+ assert_equal "0" * 100, @p.queue[0].read
204
+ assert_equal "a" * 100, @p.queue[1].read
205
+ assert_equal "a" * 3, @p.queue[2].read
206
+
207
+ assert_equal 203, @p.queue_size
208
+
209
+ # staged, queued
210
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
211
+ assert_equal 1, @p.queued_num[@dm0]
212
+ assert_equal 2, @p.queued_num[@dm1]
213
+ end
214
+
215
+ test '#close closes all chunks in dequeued, enqueued and staged' do
216
+ dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
217
+ cx = create_chunk(dmx, ["x" * 1024])
218
+ @p.dequeued[cx.unique_id] = cx
219
+
220
+ staged_chunks = @p.stage.values.dup
221
+ queued_chunks = @p.queue.dup
222
+
223
+ @p.close
224
+
225
+ assert cx.closed
226
+ assert{ staged_chunks.all?{|c| c.closed } }
227
+ assert{ queued_chunks.all?{|c| c.closed } }
228
+ end
229
+
230
+ test '#terminate initializes all internal states' do
231
+ dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
232
+ cx = create_chunk(dmx, ["x" * 1024])
233
+ @p.dequeued[cx.unique_id] = cx
234
+
235
+ @p.close
236
+
237
+ @p.terminate
238
+
239
+ assert_nil @p.stage
240
+ assert_nil @p.queue
241
+ assert_nil @p.dequeued
242
+ assert_nil @p.queued_num
243
+ assert_nil @p.instance_eval{ @metadata_list } # #metadata_list does #dup for @metadata_list
244
+ assert_equal 0, @p.stage_size
245
+ assert_equal 0, @p.queue_size
246
+ assert_equal [], @p.timekeys
247
+ end
248
+
249
+ test '#metadata_list returns list of metadata on stage or in queue' do
250
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
251
+ end
252
+
253
+ test '#new_metadata creates metadata instance without inserting metadata_list' do
254
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
255
+ _m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
256
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
257
+ end
258
+
259
+ test '#add_metadata adds unknown metadata into list, or return known metadata if already exists' do
260
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
261
+
262
+ m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
263
+ _mx = @p.add_metadata(m)
264
+ assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
265
+ assert_equal m.object_id, m.object_id
266
+
267
+ my = @p.add_metadata(@dm1)
268
+ assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
269
+ assert_equal @dm1, my
270
+ assert{ @dm1.object_id != my.object_id } # 'my' is an object created in #resume
271
+ end
272
+
273
+ test '#metadata is utility method to create-add-and-return metadata' do
274
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
275
+
276
+ m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
277
+ assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
278
+ m2 = @p.metadata(timekey: @dm3.timekey)
279
+ assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
280
+ assert_equal @dm3, m2
281
+ end
282
+
283
+ test '#queued_records returns total number of size in all chunks in queue' do
284
+ assert_equal 3, @p.queue.size
285
+
286
+ r0 = @p.queue[0].size
287
+ assert_equal 1, r0
288
+ r1 = @p.queue[1].size
289
+ assert_equal 1, r1
290
+ r2 = @p.queue[2].size
291
+ assert_equal 1, r2
292
+
293
+ assert_equal (r0+r1+r2), @p.queued_records
294
+ end
295
+
296
+ test '#queued? returns queue has any chunks or not without arguments' do
297
+ assert @p.queued?
298
+
299
+ @p.queue.reject!{|_c| true }
300
+ assert !@p.queued?
301
+ end
302
+
303
+ test '#queued? returns queue has chunks for specified metadata with an argument' do
304
+ assert @p.queued?(@dm0)
305
+ assert @p.queued?(@dm1)
306
+ assert !@p.queued?(@dm2)
307
+ end
308
+
309
+ test '#enqueue_chunk enqueues a chunk on stage with specified metadata' do
310
+ assert_equal 2, @p.stage.size
311
+ assert_equal [@dm2,@dm3], @p.stage.keys
312
+ assert_equal 3, @p.queue.size
313
+ assert_nil @p.queued_num[@dm2]
314
+
315
+ assert_equal 200, @p.stage_size
316
+ assert_equal 203, @p.queue_size
317
+
318
+ @p.enqueue_chunk(@dm2)
319
+
320
+ assert_equal [@dm3], @p.stage.keys
321
+ assert_equal @dm2, @p.queue.last.metadata
322
+ assert_equal 1, @p.queued_num[@dm2]
323
+ assert_equal 100, @p.stage_size
324
+ assert_equal 303, @p.queue_size
325
+ end
326
+
327
+ test '#enqueue_chunk ignores empty chunks' do
328
+ assert_equal 3, @p.queue.size
329
+
330
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
331
+ c = create_chunk(m, [''])
332
+ @p.stage[m] = c
333
+ assert @p.stage[m].empty?
334
+ assert !c.closed
335
+
336
+ @p.enqueue_chunk(m)
337
+
338
+ assert_nil @p.stage[m]
339
+ assert_equal 3, @p.queue.size
340
+ assert_nil @p.queued_num[m]
341
+ assert c.closed
342
+ end
343
+
344
+ test '#enqueue_chunk calls #enqueued! if chunk responds to it' do
345
+ assert_equal 3, @p.queue.size
346
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
347
+ c = create_chunk(m, ['c' * 256])
348
+ callback_called = false
349
+ (class << c; self; end).module_eval do
350
+ define_method(:enqueued!){ callback_called = true }
351
+ end
352
+
353
+ @p.stage[m] = c
354
+ @p.enqueue_chunk(m)
355
+
356
+ assert_equal c, @p.queue.last
357
+ assert callback_called
358
+ end
359
+
360
+ test '#enqueue_all enqueues chunks on stage which given block returns true with' do
361
+ m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
362
+ c1 = create_chunk(m1, ['c' * 256])
363
+ @p.stage[m1] = c1
364
+ m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
365
+ c2 = create_chunk(m2, ['c' * 256])
366
+ @p.stage[m2] = c2
367
+
368
+ assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
369
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
370
+
371
+ @p.enqueue_all{ |m, c| m.timekey < Time.parse('2016-04-11 16:41:00 +0000').to_i }
372
+
373
+ assert_equal [m2], @p.stage.keys
374
+ assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1], @p.queue.map(&:metadata)
375
+ end
376
+
377
+ test '#enqueue_all enqueues all chunks on stage without block' do
378
+ m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
379
+ c1 = create_chunk(m1, ['c' * 256])
380
+ @p.stage[m1] = c1
381
+ m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
382
+ c2 = create_chunk(m2, ['c' * 256])
383
+ @p.stage[m2] = c2
384
+
385
+ assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
386
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
387
+
388
+ @p.enqueue_all
389
+
390
+ assert_equal [], @p.stage.keys
391
+ assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1,m2], @p.queue.map(&:metadata)
392
+ end
393
+
394
+ test '#dequeue_chunk dequeues a chunk from queue if a chunk exists' do
395
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
396
+ assert_equal({}, @p.dequeued)
397
+
398
+ m1 = @p.dequeue_chunk
399
+ assert_equal @dm0, m1.metadata
400
+ assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
401
+
402
+ m2 = @p.dequeue_chunk
403
+ assert_equal @dm1, m2.metadata
404
+ assert_equal @dm1, @p.dequeued[m2.unique_id].metadata
405
+
406
+ m3 = @p.dequeue_chunk
407
+ assert_equal @dm1, m3.metadata
408
+ assert_equal @dm1, @p.dequeued[m3.unique_id].metadata
409
+
410
+ m4 = @p.dequeue_chunk
411
+ assert_nil m4
412
+ end
413
+
414
+ test '#takeback_chunk resumes a chunk from dequeued to queued at the head of queue, and returns true' do
415
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
416
+ assert_equal({}, @p.dequeued)
417
+
418
+ m1 = @p.dequeue_chunk
419
+ assert_equal @dm0, m1.metadata
420
+ assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
421
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
422
+ assert_equal({m1.unique_id => m1}, @p.dequeued)
423
+
424
+ assert @p.takeback_chunk(m1.unique_id)
425
+
426
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
427
+ assert_equal({}, @p.dequeued)
428
+ end
429
+
430
+ test '#purge_chunk removes a chunk specified by argument id from dequeued chunks' do
431
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
432
+ assert_equal({}, @p.dequeued)
433
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
434
+
435
+ m0 = @p.dequeue_chunk
436
+ m1 = @p.dequeue_chunk
437
+
438
+ assert @p.takeback_chunk(m0.unique_id)
439
+
440
+ assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
441
+ assert_equal({m1.unique_id => m1}, @p.dequeued)
442
+
443
+ assert !m1.purged
444
+
445
+ @p.purge_chunk(m1.unique_id)
446
+ assert m1.purged
447
+
448
+ assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
449
+ assert_equal({}, @p.dequeued)
450
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
451
+ end
452
+
453
+ test '#purge_chunk removes an argument metadata from metadata_list if no chunks exist on stage or in queue' do
454
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
455
+ assert_equal({}, @p.dequeued)
456
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
457
+
458
+ m0 = @p.dequeue_chunk
459
+
460
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
461
+ assert_equal({m0.unique_id => m0}, @p.dequeued)
462
+
463
+ assert !m0.purged
464
+
465
+ @p.purge_chunk(m0.unique_id)
466
+ assert m0.purged
467
+
468
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
469
+ assert_equal({}, @p.dequeued)
470
+ assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
471
+ end
472
+
473
+ test '#takeback_chunk returns false if specified chunk_id is already purged' do
474
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
475
+ assert_equal({}, @p.dequeued)
476
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
477
+
478
+ m0 = @p.dequeue_chunk
479
+
480
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
481
+ assert_equal({m0.unique_id => m0}, @p.dequeued)
482
+
483
+ assert !m0.purged
484
+
485
+ @p.purge_chunk(m0.unique_id)
486
+ assert m0.purged
487
+
488
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
489
+ assert_equal({}, @p.dequeued)
490
+ assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
491
+
492
+ assert !@p.takeback_chunk(m0.unique_id)
493
+
494
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
495
+ assert_equal({}, @p.dequeued)
496
+ assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
497
+ end
498
+
499
+ test '#clear_queue! removes all chunks in queue, but leaves staged chunks' do
500
+ qchunks = @p.queue.dup
501
+
502
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
503
+ assert_equal 2, @p.stage.size
504
+ assert_equal({}, @p.dequeued)
505
+
506
+ @p.clear_queue!
507
+
508
+ assert_equal [], @p.queue
509
+ assert_equal 0, @p.queue_size
510
+ assert_equal 2, @p.stage.size
511
+ assert_equal({}, @p.dequeued)
512
+
513
+ assert{ qchunks.all?{ |c| c.purged } }
514
+ end
515
+
516
+ test '#write returns immediately if argument data is empty array' do
517
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
518
+ assert_equal [@dm2,@dm3], @p.stage.keys
519
+
520
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
521
+
522
+ @p.write({m => []})
523
+
524
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
525
+ assert_equal [@dm2,@dm3], @p.stage.keys
526
+ end
527
+
528
+ test '#write returns immediately if argument data is empty event stream' do
529
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
530
+ assert_equal [@dm2,@dm3], @p.stage.keys
531
+
532
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
533
+
534
+ @p.write({m => Fluent::ArrayEventStream.new([])})
535
+
536
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
537
+ assert_equal [@dm2,@dm3], @p.stage.keys
538
+ end
539
+
540
+ test '#write raises BufferOverflowError if buffer is not storable' do
541
+ @p.stage_size = 256 * 1024 * 1024
542
+ @p.queue_size = 256 * 1024 * 1024
543
+
544
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
545
+
546
+ assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
547
+ @p.write({m => ["x" * 256]})
548
+ end
549
+ end
550
+
551
+ test '#write stores data into an existing chunk with metadata specified' do
552
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
553
+ assert_equal [@dm2,@dm3], @p.stage.keys
554
+
555
+ dm3data = @p.stage[@dm3].read.dup
556
+ prev_stage_size = @p.stage_size
557
+
558
+ assert_equal 1, @p.stage[@dm3].append_count
559
+
560
+ @p.write({@dm3 => ["x" * 256, "y" * 256, "z" * 256]})
561
+
562
+ assert_equal 2, @p.stage[@dm3].append_count
563
+ assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
564
+ assert_equal (prev_stage_size + 768), @p.stage_size
565
+
566
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
567
+ assert_equal [@dm2,@dm3], @p.stage.keys
568
+ end
569
+
570
+ test '#write creates new chunk and store data into it if there are no chunks for specified metadata' do
571
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
572
+ assert_equal [@dm2,@dm3], @p.stage.keys
573
+
574
+ timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
575
+ assert !@p.timekeys.include?(timekey)
576
+
577
+ prev_stage_size = @p.stage_size
578
+
579
+ m = @p.metadata(timekey: timekey)
580
+
581
+ @p.write({m => ["x" * 256, "y" * 256, "z" * 256]})
582
+
583
+ assert_equal 1, @p.stage[m].append_count
584
+ assert_equal ("x" * 256 + "y" * 256 + "z" * 256), @p.stage[m].read
585
+ assert_equal (prev_stage_size + 768), @p.stage_size
586
+
587
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
588
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
589
+
590
+ assert @p.timekeys.include?(timekey)
591
+ end
592
+
593
+ test '#write tries to enqueue and store data into a new chunk if existing chunk is full' do
594
+ assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
595
+ assert_equal 0.95, @p.chunk_full_threshold
596
+
597
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
598
+ assert_equal [@dm2,@dm3], @p.stage.keys
599
+
600
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
601
+
602
+ row = "x" * 1024 * 1024
603
+ small_row = "x" * 1024 * 512
604
+ @p.write({m => [row] * 7 + [small_row]})
605
+
606
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
607
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
608
+ assert_equal 1, @p.stage[m].append_count
609
+
610
+ @p.write({m => [row]})
611
+
612
+ assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
613
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
614
+ assert_equal 1, @p.stage[m].append_count
615
+ assert_equal 1024*1024, @p.stage[m].bytesize
616
+ assert_equal 3, @p.queue.last.append_count # 1 -> write (2) -> write_step_by_step (3)
617
+ assert @p.queue.last.rollbacked
618
+ end
619
+
620
+ test '#write rollbacks if commit raises errors' do
621
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
622
+ assert_equal [@dm2,@dm3], @p.stage.keys
623
+
624
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
625
+
626
+ row = "x" * 1024
627
+ @p.write({m => [row] * 8})
628
+
629
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
630
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
631
+
632
+ target_chunk = @p.stage[m]
633
+
634
+ assert_equal 1, target_chunk.append_count
635
+ assert !target_chunk.rollbacked
636
+
637
+ (class << target_chunk; self; end).module_eval do
638
+ define_method(:commit){ raise "yay" }
639
+ end
640
+
641
+ assert_raise RuntimeError.new("yay") do
642
+ @p.write({m => [row]})
643
+ end
644
+
645
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
646
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
647
+
648
+ assert_equal 2, target_chunk.append_count
649
+ assert target_chunk.rollbacked
650
+ assert_equal row * 8, target_chunk.read
651
+ end
652
+
653
+ test '#write w/ format raises BufferOverflowError if buffer is not storable' do
654
+ @p.stage_size = 256 * 1024 * 1024
655
+ @p.queue_size = 256 * 1024 * 1024
656
+
657
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
658
+
659
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "xxxxxxxxxxxxxx"} ] ])
660
+
661
+ assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
662
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
663
+ end
664
+ end
665
+
666
+ test '#write w/ format stores data into an existing chunk with metadata specified' do
667
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
668
+ assert_equal [@dm2,@dm3], @p.stage.keys
669
+
670
+ dm3data = @p.stage[@dm3].read.dup
671
+ prev_stage_size = @p.stage_size
672
+
673
+ assert_equal 1, @p.stage[@dm3].append_count
674
+
675
+ es = Fluent::ArrayEventStream.new(
676
+ [
677
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 128}],
678
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "y" * 128}],
679
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "z" * 128}],
680
+ ]
681
+ )
682
+
683
+ @p.write({@dm3 => es}, format: ->(e){e.to_msgpack_stream})
684
+
685
+ assert_equal 2, @p.stage[@dm3].append_count
686
+ assert_equal (dm3data + es.to_msgpack_stream), @p.stage[@dm3].read
687
+ assert_equal (prev_stage_size + es.to_msgpack_stream.bytesize), @p.stage_size
688
+
689
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
690
+ assert_equal [@dm2,@dm3], @p.stage.keys
691
+ end
692
+
693
+ test '#write w/ format creates new chunk and store data into it if there are not chunks for specified metadata' do
694
+ assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
695
+
696
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
697
+ assert_equal [@dm2,@dm3], @p.stage.keys
698
+
699
+ timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
700
+ assert !@p.timekeys.include?(timekey)
701
+
702
+ m = @p.metadata(timekey: timekey)
703
+
704
+ es = Fluent::ArrayEventStream.new(
705
+ [
706
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
707
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
708
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
709
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
710
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
711
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
712
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
713
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
714
+ ]
715
+ )
716
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
717
+
718
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
719
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
720
+ assert_equal 1, @p.stage[m].append_count
721
+
722
+ assert @p.timekeys.include?(timekey)
723
+ end
724
+
725
+ test '#write w/ format tries to enqueue and store data into a new chunk if existing chunk does not have enough space' do
726
+ assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
727
+
728
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
729
+ assert_equal [@dm2,@dm3], @p.stage.keys
730
+
731
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
732
+
733
+ es = Fluent::ArrayEventStream.new(
734
+ [
735
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
736
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
737
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
738
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
739
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
740
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
741
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
742
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
743
+ ]
744
+ )
745
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
746
+
747
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
748
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
749
+ assert_equal 1, @p.stage[m].append_count
750
+
751
+ es2 = Fluent::OneEventStream.new(event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 1024})
752
+ @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
753
+
754
+ assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
755
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
756
+ assert_equal 1, @p.stage[m].append_count
757
+ assert_equal es2.to_msgpack_stream.bytesize, @p.stage[m].bytesize
758
+ assert_equal 2, @p.queue.last.append_count # 1 -> write (2) -> rollback&enqueue
759
+ assert @p.queue.last.rollbacked
760
+ end
761
+
762
+ test '#write w/ format enqueues chunk if it is already full after adding data' do
763
+ assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
764
+
765
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
766
+ assert_equal [@dm2,@dm3], @p.stage.keys
767
+
768
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
769
+ es = Fluent::ArrayEventStream.new(
770
+ [
771
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}], # 1024 * 1024 bytes as msgpack stream
772
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
773
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
774
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
775
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
776
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
777
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
778
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
779
+ ]
780
+ )
781
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
782
+
783
+ assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
784
+ assert_equal [@dm2,@dm3], @p.stage.keys
785
+ assert_equal 1, @p.queue.last.append_count
786
+ end
787
+
788
+ test '#write w/ format rollbacks if commit raises errors' do
789
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
790
+ assert_equal [@dm2,@dm3], @p.stage.keys
791
+
792
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
793
+
794
+ es = Fluent::ArrayEventStream.new(
795
+ [
796
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
797
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
798
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
799
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
800
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
801
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
802
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
803
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
804
+ ]
805
+ )
806
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
807
+
808
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
809
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
810
+
811
+ target_chunk = @p.stage[m]
812
+
813
+ assert_equal 1, target_chunk.append_count
814
+ assert !target_chunk.rollbacked
815
+
816
+ (class << target_chunk; self; end).module_eval do
817
+ define_method(:commit){ raise "yay" }
818
+ end
819
+
820
+ es2 = Fluent::ArrayEventStream.new(
821
+ [
822
+ [event_time('2016-04-11 16:40:04 +0000'), {"message" => "z" * 1024 * 128}],
823
+ ]
824
+ )
825
+ assert_raise RuntimeError.new("yay") do
826
+ @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
827
+ end
828
+
829
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
830
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
831
+
832
+ assert_equal 2, target_chunk.append_count
833
+ assert target_chunk.rollbacked
834
+ assert_equal es.to_msgpack_stream, target_chunk.read
835
+ end
836
+
837
+ test '#write writes many metadata and data pairs at once' do
838
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
839
+ assert_equal [@dm2,@dm3], @p.stage.keys
840
+
841
+ row = "x" * 1024
842
+ @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })
843
+
844
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
845
+ end
846
+
847
+ test '#write does not commit on any chunks if any append operation on chunk fails' do
848
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
849
+ assert_equal [@dm2,@dm3], @p.stage.keys
850
+
851
+ row = "x" * 1024
852
+ @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })
853
+
854
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
855
+
856
+ dm2_size = @p.stage[@dm2].size
857
+ assert !@p.stage[@dm2].rollbacked
858
+ dm3_size = @p.stage[@dm3].size
859
+ assert !@p.stage[@dm3].rollbacked
860
+
861
+ assert{ @p.stage[@dm0].size == 3 }
862
+ assert !@p.stage[@dm0].rollbacked
863
+ assert{ @p.stage[@dm1].size == 2 }
864
+ assert !@p.stage[@dm1].rollbacked
865
+
866
+ meta_list = [@dm0, @dm1, @dm2, @dm3].sort
867
+ @p.stage[meta_list.last].failing = true
868
+
869
+ assert_raise(FluentPluginBufferTest::DummyMemoryChunkError) do
870
+ @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] })
871
+ end
872
+
873
+ assert{ @p.stage[@dm2].size == dm2_size }
874
+ assert @p.stage[@dm2].rollbacked
875
+ assert{ @p.stage[@dm3].size == dm3_size }
876
+ assert @p.stage[@dm3].rollbacked
877
+
878
+ assert{ @p.stage[@dm0].size == 3 }
879
+ assert @p.stage[@dm0].rollbacked
880
+ assert{ @p.stage[@dm1].size == 2 }
881
+ assert @p.stage[@dm1].rollbacked
882
+ end
883
+
884
+ test '#compress returns :text' do
885
+ assert_equal :text, @p.compress
886
+ end
887
+ end
888
+
889
+ sub_test_case 'standard format with configuration for test with lower chunk limit size' do
890
+ setup do
891
+ @p = create_buffer({"chunk_limit_size" => 1_280_000})
892
+ @format = ->(e){e.to_msgpack_stream}
893
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
894
+ # 1 record is 128bytes in msgpack stream
895
+ @es0 = es0 = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:01 +0000'), {"message" => "x" * (128 - 22)}] ] * 5000)
896
+ (class << @p; self; end).module_eval do
897
+ define_method(:resume) {
898
+ staged = {
899
+ dm0 => create_chunk_es(dm0, es0).staged!,
900
+ }
901
+ queued = []
902
+ return staged, queued
903
+ }
904
+ end
905
+ @p.start
906
+ end
907
+
908
+ test '#write appends event stream into staged chunk' do
909
+ assert_equal [@dm0], @p.stage.keys
910
+ assert_equal [], @p.queue.map(&:metadata)
911
+
912
+ assert_equal 1_280_000, @p.chunk_limit_size
913
+
914
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 1000)
915
+ @p.write({@dm0 => es}, format: @format)
916
+
917
+ assert_equal [@dm0], @p.stage.keys
918
+ assert_equal [], @p.queue.map(&:metadata)
919
+
920
+ assert_equal (@es0.to_msgpack_stream + es.to_msgpack_stream), @p.stage[@dm0].read
921
+ end
922
+
923
+ test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
924
+ assert_equal [@dm0], @p.stage.keys
925
+ assert_equal [], @p.queue.map(&:metadata)
926
+
927
+ assert_equal 1_280_000, @p.chunk_limit_size
928
+
929
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 8000)
930
+ @p.write({@dm0 => es}, format: @format)
931
+
932
+ assert_equal [@dm0], @p.stage.keys
933
+ assert_equal [@dm0], @p.queue.map(&:metadata)
934
+
935
+ assert_equal (es.to_msgpack_stream), @p.stage[@dm0].read
936
+ end
937
+
938
+ test '#write writes event stream into many chunks excluding staged chunk if event stream is larger than chunk limit size' do
939
+ assert_equal [@dm0], @p.stage.keys
940
+ assert_equal [], @p.queue.map(&:metadata)
941
+
942
+ assert_equal 1_280_000, @p.chunk_limit_size
943
+
944
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 45000)
945
+ @p.write({@dm0 => es}, format: @format)
946
+
947
+ assert_equal [@dm0], @p.stage.keys
948
+ assert_equal 5400, @p.stage[@dm0].size
949
+ assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0], @p.queue.map(&:metadata)
950
+ assert_equal [5000, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
951
+ # 9900 * 4 + 5400 == 45000
952
+ end
953
+
954
+ test '#write raises BufferChunkOverflowError if a record is biggar than chunk limit size' do
955
+ assert_equal [@dm0], @p.stage.keys
956
+ assert_equal [], @p.queue.map(&:metadata)
957
+
958
+ assert_equal 1_280_000, @p.chunk_limit_size
959
+
960
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}] ])
961
+ assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
962
+ @p.write({@dm0 => es}, format: @format)
963
+ end
964
+ end
965
+ end
966
+
967
+ sub_test_case 'custom format with configuration for test with lower chunk limit size' do
968
+ setup do
969
+ @p = create_buffer({"chunk_limit_size" => 1_280_000})
970
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
971
+ @row = "x" * 128
972
+ @data0 = data0 = [@row] * 5000
973
+ (class << @p; self; end).module_eval do
974
+ define_method(:resume) {
975
+ staged = {
976
+ dm0 => create_chunk(dm0, data0).staged!,
977
+ }
978
+ queued = []
979
+ return staged, queued
980
+ }
981
+ end
982
+ @p.start
983
+ end
984
+
985
+ test '#write appends event stream into staged chunk' do
986
+ assert_equal [@dm0], @p.stage.keys
987
+ assert_equal [], @p.queue.map(&:metadata)
988
+
989
+ assert_equal 1_280_000, @p.chunk_limit_size
990
+
991
+ data = [@row] * 1000
992
+ @p.write({@dm0 => data})
993
+
994
+ assert_equal [@dm0], @p.stage.keys
995
+ assert_equal [], @p.queue.map(&:metadata)
996
+
997
+ assert_equal (@row * 6000), @p.stage[@dm0].read
998
+ end
999
+
1000
+ test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
1001
+ assert_equal [@dm0], @p.stage.keys
1002
+ assert_equal [], @p.queue.map(&:metadata)
1003
+
1004
+ staged_chunk_object_id = @p.stage[@dm0].object_id
1005
+
1006
+ assert_equal 1_280_000, @p.chunk_limit_size
1007
+
1008
+ data = [@row] * 8000
1009
+ @p.write({@dm0 => data})
1010
+
1011
+ assert_equal [@dm0], @p.queue.map(&:metadata)
1012
+ assert_equal [staged_chunk_object_id], @p.queue.map(&:object_id)
1013
+ assert_equal [@dm0], @p.stage.keys
1014
+
1015
+ assert_equal [9800], @p.queue.map(&:size)
1016
+ assert_equal 3200, @p.stage[@dm0].size
1017
+ # 9800 + 3200 == 5000 + 8000
1018
+ end
1019
+
1020
+ test '#write writes event stream into many chunks including staging chunk if event stream is larger than chunk limit size' do
1021
+ assert_equal [@dm0], @p.stage.keys
1022
+ assert_equal [], @p.queue.map(&:metadata)
1023
+
1024
+ staged_chunk_object_id = @p.stage[@dm0].object_id
1025
+
1026
+ assert_equal 1_280_000, @p.chunk_limit_size
1027
+
1028
+ assert_equal 5000, @p.stage[@dm0].size
1029
+
1030
+ data = [@row] * 45000
1031
+ @p.write({@dm0 => data})
1032
+
1033
+ assert_equal staged_chunk_object_id, @p.queue.first.object_id
1034
+
1035
+ assert_equal [@dm0], @p.stage.keys
1036
+ assert_equal 900, @p.stage[@dm0].size
1037
+ assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0], @p.queue.map(&:metadata)
1038
+ assert_equal [9500, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
1039
+ ##### 900 + 9500 + 9900 * 4 == 5000 + 45000
1040
+ end
1041
+
1042
+ test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
1043
+ assert_equal [@dm0], @p.stage.keys
1044
+ assert_equal [], @p.queue.map(&:metadata)
1045
+
1046
+ assert_equal 1_280_000, @p.chunk_limit_size
1047
+
1048
+ es = ["x" * 1_280_000 + "x" * 300]
1049
+ assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
1050
+ @p.write({@dm0 => es})
1051
+ end
1052
+ end
1053
+ end
1054
+
1055
+ sub_test_case 'with configuration for test with lower limits' do
1056
+ setup do
1057
+ @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240})
1058
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
1059
+ @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
1060
+ @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
1061
+ @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
1062
+ (class << @p; self; end).module_eval do
1063
+ define_method(:resume) {
1064
+ staged = {
1065
+ dm2 => create_chunk(dm2, ["b" * 128] * 7).staged!,
1066
+ dm3 => create_chunk(dm3, ["c" * 128] * 5).staged!,
1067
+ }
1068
+ queued = [
1069
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1070
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1071
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1072
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1073
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1074
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!,
1075
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!,
1076
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!, # 8th queued chunk
1077
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
1078
+ ]
1079
+ return staged, queued
1080
+ }
1081
+ end
1082
+ @p.start
1083
+ end
1084
+
1085
+ test '#storable? returns false when too many data exist' do
1086
+ assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0,@dm1,@dm1,@dm1,@dm1], @p.queue.map(&:metadata)
1087
+ assert_equal [@dm2,@dm3], @p.stage.keys
1088
+
1089
+ assert_equal 128*8*8+128*3, @p.queue_size
1090
+ assert_equal 128*7+128*5, @p.stage_size
1091
+
1092
+ assert @p.storable?
1093
+
1094
+ dm3 = @p.metadata(timekey: @dm3.timekey)
1095
+ @p.write({dm3 => ["c" * 128]})
1096
+
1097
+ assert_equal 10240, (@p.stage_size + @p.queue_size)
1098
+ assert !@p.storable?
1099
+ end
1100
+
1101
+ test '#chunk_size_over? returns true if chunk size is bigger than limit' do
1102
+ m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
1103
+
1104
+ c1 = create_chunk(m, ["a" * 128] * 8)
1105
+ assert !@p.chunk_size_over?(c1)
1106
+
1107
+ c2 = create_chunk(m, ["a" * 128] * 9)
1108
+ assert @p.chunk_size_over?(c2)
1109
+
1110
+ c3 = create_chunk(m, ["a" * 128] * 8 + ["a"])
1111
+ assert @p.chunk_size_over?(c3)
1112
+ end
1113
+
1114
+ test '#chunk_size_full? returns true if chunk size is enough big against limit' do
1115
+ m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
1116
+
1117
+ c1 = create_chunk(m, ["a" * 128] * 7)
1118
+ assert !@p.chunk_size_full?(c1)
1119
+
1120
+ c2 = create_chunk(m, ["a" * 128] * 8)
1121
+ assert @p.chunk_size_full?(c2)
1122
+
1123
+ assert_equal 0.95, @p.chunk_full_threshold
1124
+ c3 = create_chunk(m, ["a" * 128] * 6 + ["a" * 64])
1125
+ assert !@p.chunk_size_full?(c3)
1126
+ end
1127
+ end
1128
+
1129
+ sub_test_case 'with configuration includes chunk_limit_records' do
1130
+ setup do
1131
+ @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "chunk_limit_records" => 6})
1132
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
1133
+ @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
1134
+ @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
1135
+ @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
1136
+ (class << @p; self; end).module_eval do
1137
+ define_method(:resume) {
1138
+ staged = {
1139
+ dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
1140
+ dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
1141
+ }
1142
+ queued = [
1143
+ create_chunk(dm0, ["0" * 128] * 6).enqueued!,
1144
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
1145
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
1146
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
1147
+ ]
1148
+ return staged, queued
1149
+ }
1150
+ end
1151
+ @p.start
1152
+ end
1153
+
1154
+ test '#chunk_size_over? returns true if too many records exists in a chunk even if its bytes is less than limit' do
1155
+ assert_equal 6, @p.chunk_limit_records
1156
+
1157
+ m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
1158
+
1159
+ c1 = create_chunk(m, ["a" * 128] * 6)
1160
+ assert_equal 6, c1.size
1161
+ assert !@p.chunk_size_over?(c1)
1162
+
1163
+ c2 = create_chunk(m, ["a" * 128] * 7)
1164
+ assert @p.chunk_size_over?(c2)
1165
+
1166
+ c3 = create_chunk(m, ["a" * 128] * 6 + ["a"])
1167
+ assert @p.chunk_size_over?(c3)
1168
+ end
1169
+
1170
+ test '#chunk_size_full? returns true if enough many records exists in a chunk even if its bytes is less than limit' do
1171
+ assert_equal 6, @p.chunk_limit_records
1172
+
1173
+ m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
1174
+
1175
+ c1 = create_chunk(m, ["a" * 128] * 5)
1176
+ assert_equal 5, c1.size
1177
+ assert !@p.chunk_size_full?(c1)
1178
+
1179
+ c2 = create_chunk(m, ["a" * 128] * 6)
1180
+ assert @p.chunk_size_full?(c2)
1181
+
1182
+ c3 = create_chunk(m, ["a" * 128] * 5 + ["a"])
1183
+ assert @p.chunk_size_full?(c3)
1184
+ end
1185
+ end
1186
+
1187
+ sub_test_case 'with configuration includes queue_limit_length' do
1188
+ setup do
1189
+ @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "queue_limit_length" => 5})
1190
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
1191
+ @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
1192
+ @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
1193
+ @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
1194
+ (class << @p; self; end).module_eval do
1195
+ define_method(:resume) {
1196
+ staged = {
1197
+ dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
1198
+ dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
1199
+ }
1200
+ queued = [
1201
+ create_chunk(dm0, ["0" * 128] * 6).enqueued!,
1202
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
1203
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
1204
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
1205
+ ]
1206
+ return staged, queued
1207
+ }
1208
+ end
1209
+ @p.start
1210
+ end
1211
+
1212
+ test '#configure will overwrite standard configuration if queue_limit_length' do
1213
+ assert_equal 1024, @p.chunk_limit_size
1214
+ assert_equal 5, @p.queue_limit_length
1215
+ assert_equal (1024*5), @p.total_limit_size
1216
+ end
1217
+ end
1218
+
1219
+ sub_test_case 'when compress is gzip' do
1220
+ setup do
1221
+ @p = create_buffer({'compress' => 'gzip'})
1222
+ end
1223
+
1224
+ test '#compress returns :gzip' do
1225
+ assert_equal :gzip, @p.compress
1226
+ end
1227
+
1228
+ test 'create decompressable chunk' do
1229
+ chunk = @p.generate_chunk(create_metadata)
1230
+ assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::Decompressable)
1231
+ end
1232
+ end
1233
+ end