fluentd-hubspot 0.14.14.1

Files changed (396)
  1. data/.github/ISSUE_TEMPLATE.md +6 -0
  2. data/.gitignore +28 -0
  3. data/.travis.yml +51 -0
  4. data/AUTHORS +2 -0
  5. data/CONTRIBUTING.md +42 -0
  6. data/COPYING +14 -0
  7. data/ChangeLog +593 -0
  8. data/Gemfile +9 -0
  9. data/README.md +76 -0
  10. data/Rakefile +74 -0
  11. data/Vagrantfile +17 -0
  12. data/appveyor.yml +43 -0
  13. data/bin/fluent-binlog-reader +7 -0
  14. data/bin/fluent-debug +5 -0
  15. data/bin/fluent-plugin-config-format +5 -0
  16. data/bin/fluent-plugin-generate +5 -0
  17. data/code-of-conduct.md +3 -0
  18. data/example/copy_roundrobin.conf +39 -0
  19. data/example/filter_stdout.conf +22 -0
  20. data/example/in_dummy_blocks.conf +17 -0
  21. data/example/in_dummy_with_compression.conf +23 -0
  22. data/example/in_forward.conf +14 -0
  23. data/example/in_forward_client.conf +37 -0
  24. data/example/in_forward_shared_key.conf +15 -0
  25. data/example/in_forward_tls.conf +14 -0
  26. data/example/in_forward_users.conf +24 -0
  27. data/example/in_forward_workers.conf +21 -0
  28. data/example/in_http.conf +14 -0
  29. data/example/in_out_forward.conf +17 -0
  30. data/example/in_syslog.conf +15 -0
  31. data/example/in_tail.conf +14 -0
  32. data/example/in_tcp.conf +13 -0
  33. data/example/in_udp.conf +13 -0
  34. data/example/logevents.conf +25 -0
  35. data/example/multi_filters.conf +61 -0
  36. data/example/out_copy.conf +20 -0
  37. data/example/out_exec_filter.conf +42 -0
  38. data/example/out_file.conf +13 -0
  39. data/example/out_forward.conf +35 -0
  40. data/example/out_forward_buf_file.conf +23 -0
  41. data/example/out_forward_client.conf +109 -0
  42. data/example/out_forward_heartbeat_none.conf +16 -0
  43. data/example/out_forward_shared_key.conf +36 -0
  44. data/example/out_forward_tls.conf +18 -0
  45. data/example/out_forward_users.conf +65 -0
  46. data/example/out_null.conf +36 -0
  47. data/example/secondary_file.conf +41 -0
  48. data/example/suppress_config_dump.conf +7 -0
  49. data/example/v0_12_filter.conf +78 -0
  50. data/example/v1_literal_example.conf +36 -0
  51. data/fluent.conf +139 -0
  52. data/fluentd.gemspec +51 -0
  53. data/lib/fluent/agent.rb +163 -0
  54. data/lib/fluent/clock.rb +62 -0
  55. data/lib/fluent/command/binlog_reader.rb +234 -0
  56. data/lib/fluent/command/bundler_injection.rb +45 -0
  57. data/lib/fluent/command/cat.rb +330 -0
  58. data/lib/fluent/command/debug.rb +102 -0
  59. data/lib/fluent/command/fluentd.rb +301 -0
  60. data/lib/fluent/command/plugin_config_formatter.rb +258 -0
  61. data/lib/fluent/command/plugin_generator.rb +301 -0
  62. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  63. data/lib/fluent/compat/detach_process_mixin.rb +25 -0
  64. data/lib/fluent/compat/exec_util.rb +129 -0
  65. data/lib/fluent/compat/file_util.rb +54 -0
  66. data/lib/fluent/compat/filter.rb +68 -0
  67. data/lib/fluent/compat/formatter.rb +111 -0
  68. data/lib/fluent/compat/formatter_utils.rb +85 -0
  69. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  70. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  71. data/lib/fluent/compat/input.rb +59 -0
  72. data/lib/fluent/compat/output.rb +728 -0
  73. data/lib/fluent/compat/output_chain.rb +60 -0
  74. data/lib/fluent/compat/parser.rb +310 -0
  75. data/lib/fluent/compat/parser_utils.rb +40 -0
  76. data/lib/fluent/compat/propagate_default.rb +62 -0
  77. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  78. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  79. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  80. data/lib/fluent/compat/socket_util.rb +165 -0
  81. data/lib/fluent/compat/string_util.rb +34 -0
  82. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  83. data/lib/fluent/compat/type_converter.rb +90 -0
  84. data/lib/fluent/config.rb +56 -0
  85. data/lib/fluent/config/basic_parser.rb +123 -0
  86. data/lib/fluent/config/configure_proxy.rb +418 -0
  87. data/lib/fluent/config/dsl.rb +149 -0
  88. data/lib/fluent/config/element.rb +218 -0
  89. data/lib/fluent/config/error.rb +26 -0
  90. data/lib/fluent/config/literal_parser.rb +251 -0
  91. data/lib/fluent/config/parser.rb +107 -0
  92. data/lib/fluent/config/section.rb +223 -0
  93. data/lib/fluent/config/types.rb +136 -0
  94. data/lib/fluent/config/v1_parser.rb +190 -0
  95. data/lib/fluent/configurable.rb +200 -0
  96. data/lib/fluent/daemon.rb +15 -0
  97. data/lib/fluent/engine.rb +266 -0
  98. data/lib/fluent/env.rb +28 -0
  99. data/lib/fluent/error.rb +30 -0
  100. data/lib/fluent/event.rb +334 -0
  101. data/lib/fluent/event_router.rb +269 -0
  102. data/lib/fluent/filter.rb +21 -0
  103. data/lib/fluent/formatter.rb +23 -0
  104. data/lib/fluent/input.rb +21 -0
  105. data/lib/fluent/label.rb +46 -0
  106. data/lib/fluent/load.rb +35 -0
  107. data/lib/fluent/log.rb +546 -0
  108. data/lib/fluent/match.rb +178 -0
  109. data/lib/fluent/mixin.rb +31 -0
  110. data/lib/fluent/msgpack_factory.rb +62 -0
  111. data/lib/fluent/output.rb +29 -0
  112. data/lib/fluent/output_chain.rb +23 -0
  113. data/lib/fluent/parser.rb +23 -0
  114. data/lib/fluent/plugin.rb +183 -0
  115. data/lib/fluent/plugin/bare_output.rb +63 -0
  116. data/lib/fluent/plugin/base.rb +165 -0
  117. data/lib/fluent/plugin/buf_file.rb +184 -0
  118. data/lib/fluent/plugin/buf_memory.rb +34 -0
  119. data/lib/fluent/plugin/buffer.rb +617 -0
  120. data/lib/fluent/plugin/buffer/chunk.rb +221 -0
  121. data/lib/fluent/plugin/buffer/file_chunk.rb +364 -0
  122. data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
  123. data/lib/fluent/plugin/compressable.rb +92 -0
  124. data/lib/fluent/plugin/exec_util.rb +22 -0
  125. data/lib/fluent/plugin/file_util.rb +22 -0
  126. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  127. data/lib/fluent/plugin/filter.rb +93 -0
  128. data/lib/fluent/plugin/filter_grep.rb +75 -0
  129. data/lib/fluent/plugin/filter_parser.rb +119 -0
  130. data/lib/fluent/plugin/filter_record_transformer.rb +322 -0
  131. data/lib/fluent/plugin/filter_stdout.rb +53 -0
  132. data/lib/fluent/plugin/formatter.rb +50 -0
  133. data/lib/fluent/plugin/formatter_csv.rb +52 -0
  134. data/lib/fluent/plugin/formatter_hash.rb +33 -0
  135. data/lib/fluent/plugin/formatter_json.rb +55 -0
  136. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  137. data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
  138. data/lib/fluent/plugin/formatter_out_file.rb +51 -0
  139. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  140. data/lib/fluent/plugin/formatter_stdout.rb +75 -0
  141. data/lib/fluent/plugin/formatter_tsv.rb +34 -0
  142. data/lib/fluent/plugin/in_debug_agent.rb +64 -0
  143. data/lib/fluent/plugin/in_dummy.rb +139 -0
  144. data/lib/fluent/plugin/in_exec.rb +108 -0
  145. data/lib/fluent/plugin/in_forward.rb +455 -0
  146. data/lib/fluent/plugin/in_gc_stat.rb +56 -0
  147. data/lib/fluent/plugin/in_http.rb +433 -0
  148. data/lib/fluent/plugin/in_monitor_agent.rb +448 -0
  149. data/lib/fluent/plugin/in_object_space.rb +93 -0
  150. data/lib/fluent/plugin/in_syslog.rb +209 -0
  151. data/lib/fluent/plugin/in_tail.rb +905 -0
  152. data/lib/fluent/plugin/in_tcp.rb +85 -0
  153. data/lib/fluent/plugin/in_udp.rb +81 -0
  154. data/lib/fluent/plugin/in_unix.rb +201 -0
  155. data/lib/fluent/plugin/input.rb +37 -0
  156. data/lib/fluent/plugin/multi_output.rb +157 -0
  157. data/lib/fluent/plugin/out_copy.rb +46 -0
  158. data/lib/fluent/plugin/out_exec.rb +105 -0
  159. data/lib/fluent/plugin/out_exec_filter.rb +317 -0
  160. data/lib/fluent/plugin/out_file.rb +302 -0
  161. data/lib/fluent/plugin/out_forward.rb +912 -0
  162. data/lib/fluent/plugin/out_null.rb +74 -0
  163. data/lib/fluent/plugin/out_relabel.rb +32 -0
  164. data/lib/fluent/plugin/out_roundrobin.rb +84 -0
  165. data/lib/fluent/plugin/out_secondary_file.rb +133 -0
  166. data/lib/fluent/plugin/out_stdout.rb +75 -0
  167. data/lib/fluent/plugin/out_stream.rb +130 -0
  168. data/lib/fluent/plugin/output.rb +1291 -0
  169. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  170. data/lib/fluent/plugin/parser.rb +191 -0
  171. data/lib/fluent/plugin/parser_apache.rb +28 -0
  172. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  173. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  174. data/lib/fluent/plugin/parser_csv.rb +39 -0
  175. data/lib/fluent/plugin/parser_json.rb +81 -0
  176. data/lib/fluent/plugin/parser_ltsv.rb +42 -0
  177. data/lib/fluent/plugin/parser_msgpack.rb +50 -0
  178. data/lib/fluent/plugin/parser_multiline.rb +105 -0
  179. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  180. data/lib/fluent/plugin/parser_none.rb +36 -0
  181. data/lib/fluent/plugin/parser_regexp.rb +63 -0
  182. data/lib/fluent/plugin/parser_syslog.rb +121 -0
  183. data/lib/fluent/plugin/parser_tsv.rb +42 -0
  184. data/lib/fluent/plugin/socket_util.rb +22 -0
  185. data/lib/fluent/plugin/storage.rb +84 -0
  186. data/lib/fluent/plugin/storage_local.rb +159 -0
  187. data/lib/fluent/plugin/string_util.rb +22 -0
  188. data/lib/fluent/plugin_helper.rb +70 -0
  189. data/lib/fluent/plugin_helper/cert_option.rb +159 -0
  190. data/lib/fluent/plugin_helper/child_process.rb +364 -0
  191. data/lib/fluent/plugin_helper/compat_parameters.rb +331 -0
  192. data/lib/fluent/plugin_helper/event_emitter.rb +93 -0
  193. data/lib/fluent/plugin_helper/event_loop.rb +161 -0
  194. data/lib/fluent/plugin_helper/extract.rb +104 -0
  195. data/lib/fluent/plugin_helper/formatter.rb +147 -0
  196. data/lib/fluent/plugin_helper/inject.rb +151 -0
  197. data/lib/fluent/plugin_helper/parser.rb +147 -0
  198. data/lib/fluent/plugin_helper/retry_state.rb +201 -0
  199. data/lib/fluent/plugin_helper/server.rb +738 -0
  200. data/lib/fluent/plugin_helper/socket.rb +241 -0
  201. data/lib/fluent/plugin_helper/socket_option.rb +69 -0
  202. data/lib/fluent/plugin_helper/storage.rb +349 -0
  203. data/lib/fluent/plugin_helper/thread.rb +179 -0
  204. data/lib/fluent/plugin_helper/timer.rb +91 -0
  205. data/lib/fluent/plugin_id.rb +80 -0
  206. data/lib/fluent/process.rb +22 -0
  207. data/lib/fluent/registry.rb +116 -0
  208. data/lib/fluent/root_agent.rb +323 -0
  209. data/lib/fluent/rpc.rb +94 -0
  210. data/lib/fluent/supervisor.rb +741 -0
  211. data/lib/fluent/system_config.rb +159 -0
  212. data/lib/fluent/test.rb +58 -0
  213. data/lib/fluent/test/base.rb +78 -0
  214. data/lib/fluent/test/driver/base.rb +224 -0
  215. data/lib/fluent/test/driver/base_owned.rb +70 -0
  216. data/lib/fluent/test/driver/base_owner.rb +135 -0
  217. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  218. data/lib/fluent/test/driver/filter.rb +57 -0
  219. data/lib/fluent/test/driver/formatter.rb +30 -0
  220. data/lib/fluent/test/driver/input.rb +31 -0
  221. data/lib/fluent/test/driver/multi_output.rb +53 -0
  222. data/lib/fluent/test/driver/output.rb +102 -0
  223. data/lib/fluent/test/driver/parser.rb +30 -0
  224. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  225. data/lib/fluent/test/filter_test.rb +77 -0
  226. data/lib/fluent/test/formatter_test.rb +65 -0
  227. data/lib/fluent/test/helpers.rb +134 -0
  228. data/lib/fluent/test/input_test.rb +174 -0
  229. data/lib/fluent/test/log.rb +79 -0
  230. data/lib/fluent/test/output_test.rb +156 -0
  231. data/lib/fluent/test/parser_test.rb +70 -0
  232. data/lib/fluent/test/startup_shutdown.rb +46 -0
  233. data/lib/fluent/time.rb +412 -0
  234. data/lib/fluent/timezone.rb +133 -0
  235. data/lib/fluent/unique_id.rb +39 -0
  236. data/lib/fluent/version.rb +21 -0
  237. data/lib/fluent/winsvc.rb +71 -0
  238. data/templates/new_gem/Gemfile +3 -0
  239. data/templates/new_gem/README.md.erb +43 -0
  240. data/templates/new_gem/Rakefile +13 -0
  241. data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
  242. data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
  243. data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
  244. data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
  245. data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
  246. data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
  247. data/templates/new_gem/test/helper.rb.erb +8 -0
  248. data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
  249. data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
  250. data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
  251. data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
  252. data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
  253. data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
  254. data/templates/plugin_config_formatter/param.md.erb +34 -0
  255. data/templates/plugin_config_formatter/section.md.erb +12 -0
  256. data/test/command/test_binlog_reader.rb +346 -0
  257. data/test/command/test_fluentd.rb +618 -0
  258. data/test/command/test_plugin_config_formatter.rb +275 -0
  259. data/test/command/test_plugin_generator.rb +66 -0
  260. data/test/compat/test_calls_super.rb +166 -0
  261. data/test/compat/test_parser.rb +92 -0
  262. data/test/config/assertions.rb +42 -0
  263. data/test/config/test_config_parser.rb +513 -0
  264. data/test/config/test_configurable.rb +1587 -0
  265. data/test/config/test_configure_proxy.rb +566 -0
  266. data/test/config/test_dsl.rb +415 -0
  267. data/test/config/test_element.rb +403 -0
  268. data/test/config/test_literal_parser.rb +297 -0
  269. data/test/config/test_section.rb +184 -0
  270. data/test/config/test_system_config.rb +168 -0
  271. data/test/config/test_types.rb +191 -0
  272. data/test/helper.rb +153 -0
  273. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  274. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  275. data/test/plugin/data/2010/01/20100102.log +0 -0
  276. data/test/plugin/data/log/bar +0 -0
  277. data/test/plugin/data/log/foo/bar.log +0 -0
  278. data/test/plugin/data/log/foo/bar2 +0 -0
  279. data/test/plugin/data/log/test.log +0 -0
  280. data/test/plugin/test_bare_output.rb +118 -0
  281. data/test/plugin/test_base.rb +115 -0
  282. data/test/plugin/test_buf_file.rb +843 -0
  283. data/test/plugin/test_buf_memory.rb +42 -0
  284. data/test/plugin/test_buffer.rb +1220 -0
  285. data/test/plugin/test_buffer_chunk.rb +198 -0
  286. data/test/plugin/test_buffer_file_chunk.rb +844 -0
  287. data/test/plugin/test_buffer_memory_chunk.rb +338 -0
  288. data/test/plugin/test_compressable.rb +84 -0
  289. data/test/plugin/test_file_util.rb +96 -0
  290. data/test/plugin/test_filter.rb +357 -0
  291. data/test/plugin/test_filter_grep.rb +119 -0
  292. data/test/plugin/test_filter_parser.rb +700 -0
  293. data/test/plugin/test_filter_record_transformer.rb +556 -0
  294. data/test/plugin/test_filter_stdout.rb +202 -0
  295. data/test/plugin/test_formatter_csv.rb +111 -0
  296. data/test/plugin/test_formatter_hash.rb +35 -0
  297. data/test/plugin/test_formatter_json.rb +51 -0
  298. data/test/plugin/test_formatter_ltsv.rb +59 -0
  299. data/test/plugin/test_formatter_msgpack.rb +28 -0
  300. data/test/plugin/test_formatter_out_file.rb +95 -0
  301. data/test/plugin/test_formatter_single_value.rb +38 -0
  302. data/test/plugin/test_in_debug_agent.rb +28 -0
  303. data/test/plugin/test_in_dummy.rb +192 -0
  304. data/test/plugin/test_in_exec.rb +245 -0
  305. data/test/plugin/test_in_forward.rb +1120 -0
  306. data/test/plugin/test_in_gc_stat.rb +39 -0
  307. data/test/plugin/test_in_http.rb +588 -0
  308. data/test/plugin/test_in_monitor_agent.rb +516 -0
  309. data/test/plugin/test_in_object_space.rb +64 -0
  310. data/test/plugin/test_in_syslog.rb +271 -0
  311. data/test/plugin/test_in_tail.rb +1216 -0
  312. data/test/plugin/test_in_tcp.rb +118 -0
  313. data/test/plugin/test_in_udp.rb +152 -0
  314. data/test/plugin/test_in_unix.rb +126 -0
  315. data/test/plugin/test_input.rb +126 -0
  316. data/test/plugin/test_multi_output.rb +180 -0
  317. data/test/plugin/test_out_copy.rb +160 -0
  318. data/test/plugin/test_out_exec.rb +310 -0
  319. data/test/plugin/test_out_exec_filter.rb +613 -0
  320. data/test/plugin/test_out_file.rb +873 -0
  321. data/test/plugin/test_out_forward.rb +685 -0
  322. data/test/plugin/test_out_null.rb +105 -0
  323. data/test/plugin/test_out_relabel.rb +28 -0
  324. data/test/plugin/test_out_roundrobin.rb +146 -0
  325. data/test/plugin/test_out_secondary_file.rb +442 -0
  326. data/test/plugin/test_out_stdout.rb +170 -0
  327. data/test/plugin/test_out_stream.rb +93 -0
  328. data/test/plugin/test_output.rb +870 -0
  329. data/test/plugin/test_output_as_buffered.rb +1932 -0
  330. data/test/plugin/test_output_as_buffered_compress.rb +165 -0
  331. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  332. data/test/plugin/test_output_as_buffered_retries.rb +839 -0
  333. data/test/plugin/test_output_as_buffered_secondary.rb +877 -0
  334. data/test/plugin/test_output_as_standard.rb +374 -0
  335. data/test/plugin/test_owned_by.rb +35 -0
  336. data/test/plugin/test_parser.rb +359 -0
  337. data/test/plugin/test_parser_apache.rb +42 -0
  338. data/test/plugin/test_parser_apache2.rb +46 -0
  339. data/test/plugin/test_parser_apache_error.rb +45 -0
  340. data/test/plugin/test_parser_csv.rb +103 -0
  341. data/test/plugin/test_parser_json.rb +114 -0
  342. data/test/plugin/test_parser_labeled_tsv.rb +128 -0
  343. data/test/plugin/test_parser_multiline.rb +100 -0
  344. data/test/plugin/test_parser_nginx.rb +48 -0
  345. data/test/plugin/test_parser_none.rb +52 -0
  346. data/test/plugin/test_parser_regexp.rb +281 -0
  347. data/test/plugin/test_parser_syslog.rb +242 -0
  348. data/test/plugin/test_parser_tsv.rb +122 -0
  349. data/test/plugin/test_storage.rb +167 -0
  350. data/test/plugin/test_storage_local.rb +335 -0
  351. data/test/plugin/test_string_util.rb +26 -0
  352. data/test/plugin_helper/test_child_process.rb +794 -0
  353. data/test/plugin_helper/test_compat_parameters.rb +331 -0
  354. data/test/plugin_helper/test_event_emitter.rb +51 -0
  355. data/test/plugin_helper/test_event_loop.rb +52 -0
  356. data/test/plugin_helper/test_extract.rb +194 -0
  357. data/test/plugin_helper/test_formatter.rb +255 -0
  358. data/test/plugin_helper/test_inject.rb +519 -0
  359. data/test/plugin_helper/test_parser.rb +264 -0
  360. data/test/plugin_helper/test_retry_state.rb +422 -0
  361. data/test/plugin_helper/test_server.rb +1677 -0
  362. data/test/plugin_helper/test_storage.rb +542 -0
  363. data/test/plugin_helper/test_thread.rb +164 -0
  364. data/test/plugin_helper/test_timer.rb +132 -0
  365. data/test/scripts/exec_script.rb +32 -0
  366. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
  367. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
  368. data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
  369. data/test/scripts/fluent/plugin/out_test.rb +81 -0
  370. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  371. data/test/scripts/fluent/plugin/parser_known.rb +4 -0
  372. data/test/test_clock.rb +164 -0
  373. data/test/test_config.rb +179 -0
  374. data/test/test_configdsl.rb +148 -0
  375. data/test/test_event.rb +515 -0
  376. data/test/test_event_router.rb +331 -0
  377. data/test/test_event_time.rb +186 -0
  378. data/test/test_filter.rb +121 -0
  379. data/test/test_formatter.rb +312 -0
  380. data/test/test_input.rb +31 -0
  381. data/test/test_log.rb +828 -0
  382. data/test/test_match.rb +137 -0
  383. data/test/test_mixin.rb +351 -0
  384. data/test/test_output.rb +273 -0
  385. data/test/test_plugin.rb +251 -0
  386. data/test/test_plugin_classes.rb +253 -0
  387. data/test/test_plugin_helper.rb +81 -0
  388. data/test/test_plugin_id.rb +101 -0
  389. data/test/test_process.rb +14 -0
  390. data/test/test_root_agent.rb +611 -0
  391. data/test/test_supervisor.rb +373 -0
  392. data/test/test_test_drivers.rb +135 -0
  393. data/test/test_time_formatter.rb +282 -0
  394. data/test/test_time_parser.rb +211 -0
  395. data/test/test_unique_id.rb +47 -0
  396. metadata +898 -0
data/test/plugin/test_buf_memory.rb
@@ -0,0 +1,42 @@
+ require_relative '../helper'
+ require 'fluent/plugin/buf_memory'
+ require 'fluent/plugin/output'
+ require 'flexmock/test_unit'
+
+ module FluentPluginMemoryBufferTest
+   class DummyOutputPlugin < Fluent::Plugin::Output
+   end
+ end
+
+ class MemoryBufferTest < Test::Unit::TestCase
+   setup do
+     Fluent::Test.setup
+     @d = FluentPluginMemoryBufferTest::DummyOutputPlugin.new
+     @p = Fluent::Plugin::MemoryBuffer.new
+     @p.owner = @d
+   end
+
+   test 'this is non persistent plugin' do
+     assert !@p.persistent?
+   end
+
+   test '#resume always returns empty stage and queue' do
+     ary = @p.resume
+     assert_equal({}, ary[0])
+     assert_equal([], ary[1])
+   end
+
+   test '#generate_chunk returns memory chunk instance' do
+     m1 = Fluent::Plugin::Buffer::Metadata.new(nil, nil, nil)
+     c1 = @p.generate_chunk(m1)
+     assert c1.is_a? Fluent::Plugin::Buffer::MemoryChunk
+     assert_equal m1, c1.metadata
+
+     require 'time'
+     t2 = Time.parse('2016-04-08 19:55:00 +0900').to_i
+     m2 = Fluent::Plugin::Buffer::Metadata.new(t2, 'test.tag', {k1: 'v1', k2: 0})
+     c2 = @p.generate_chunk(m2)
+     assert c2.is_a? Fluent::Plugin::Buffer::MemoryChunk
+     assert_equal m2, c2.metadata
+   end
+ end
data/test/plugin/test_buffer.rb
@@ -0,0 +1,1220 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/buffer'
3
+ require 'fluent/plugin/buffer/memory_chunk'
4
+ require 'fluent/plugin/compressable'
5
+ require 'fluent/plugin/buffer/chunk'
6
+ require 'fluent/event'
7
+ require 'flexmock/test_unit'
8
+
9
+ require 'fluent/log'
10
+ require 'fluent/plugin_id'
11
+
12
+ require 'time'
13
+
14
+ module FluentPluginBufferTest
15
+ class DummyOutputPlugin < Fluent::Plugin::Base
16
+ include Fluent::PluginId
17
+ include Fluent::PluginLoggerMixin
18
+ end
19
+ class DummyMemoryChunkError < StandardError; end
20
+ class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
21
+ attr_reader :append_count, :rollbacked, :closed, :purged
22
+ attr_accessor :failing
23
+ def initialize(metadata, compress: :text)
24
+ super
25
+ @append_count = 0
26
+ @rollbacked = false
27
+ @closed = false
28
+ @purged = false
29
+ @failing = false
30
+ end
31
+ def concat(data, size)
32
+ @append_count += 1
33
+ raise DummyMemoryChunkError if @failing
34
+ super
35
+ end
36
+ def rollback
37
+ super
38
+ @rollbacked = true
39
+ end
40
+ def close
41
+ super
42
+ @closed = true
43
+ end
44
+ def purge
45
+ super
46
+ @purged = true
47
+ end
48
+ end
49
+ class DummyPlugin < Fluent::Plugin::Buffer
50
+ def create_metadata(timekey=nil, tag=nil, variables=nil)
51
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
52
+ end
53
+ def create_chunk(metadata, data)
54
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
55
+ c.append(data)
56
+ c.commit
57
+ c
58
+ end
59
+ def create_chunk_es(metadata, es)
60
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
61
+ c.concat(es.to_msgpack_stream, es.size)
62
+ c.commit
63
+ c
64
+ end
65
+ def resume
66
+ dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
67
+ dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
68
+ dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
69
+ dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
70
+ staged = {
71
+ dm2 => create_chunk(dm2, ["b" * 100]).staged!,
72
+ dm3 => create_chunk(dm3, ["c" * 100]).staged!,
73
+ }
74
+ queued = [
75
+ create_chunk(dm0, ["0" * 100]).enqueued!,
76
+ create_chunk(dm1, ["a" * 100]).enqueued!,
77
+ create_chunk(dm1, ["a" * 3]).enqueued!,
78
+ ]
79
+ return staged, queued
80
+ end
81
+ def generate_chunk(metadata)
82
+ DummyMemoryChunk.new(metadata, compress: @compress)
83
+ end
84
+ end
85
+ end
86
+
87
+ class BufferTest < Test::Unit::TestCase
88
+ def create_buffer(hash)
89
+ buffer_conf = config_element('buffer', '', hash, [])
90
+ owner = FluentPluginBufferTest::DummyOutputPlugin.new
91
+ owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
92
+ p = FluentPluginBufferTest::DummyPlugin.new
93
+ p.owner = owner
94
+ p.configure(buffer_conf)
95
+ p
96
+ end
97
+
98
+ def create_metadata(timekey=nil, tag=nil, variables=nil)
99
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
100
+ end
101
+
102
+ def create_chunk(metadata, data)
103
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
104
+ c.append(data)
105
+ c.commit
106
+ c
107
+ end
108
+
109
+ def create_chunk_es(metadata, es)
110
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
111
+ c.concat(es.to_msgpack_stream, es.size)
112
+ c.commit
113
+ c
114
+ end
115
+
116
+ setup do
117
+ Fluent::Test.setup
118
+ end
119
+
120
+ sub_test_case 'using base buffer class' do
121
+ setup do
122
+ buffer_conf = config_element('buffer', '', {}, [])
123
+ owner = FluentPluginBufferTest::DummyOutputPlugin.new
124
+ owner.configure(config_element('ROOT', '', {}, [ buffer_conf ]))
125
+ p = Fluent::Plugin::Buffer.new
126
+ p.owner = owner
127
+ p.configure(buffer_conf)
128
+ @p = p
129
+ end
130
+
131
+ test 'default persistency is false' do
132
+ assert !@p.persistent?
133
+ end
134
+
135
+ test 'chunk bytes limit is 8MB, and total bytes limit is 512MB' do
136
+ assert_equal 8*1024*1024, @p.chunk_limit_size
137
+ assert_equal 512*1024*1024, @p.total_limit_size
138
+ end
139
+
140
+ test 'chunk records limit is ignored in default' do
141
+ assert_nil @p.chunk_limit_records
142
+ end
143
+
144
+ test '#storable? checks total size of staged and enqueued(includes dequeued chunks) against total_limit_size' do
145
+ assert_equal 512*1024*1024, @p.total_limit_size
146
+ assert_equal 0, @p.stage_size
147
+ assert_equal 0, @p.queue_size
148
+ assert @p.storable?
149
+
150
+ @p.stage_size = 256 * 1024 * 1024
151
+ @p.queue_size = 256 * 1024 * 1024 - 1
152
+ assert @p.storable?
153
+
154
+ @p.queue_size = 256 * 1024 * 1024
155
+ assert !@p.storable?
156
+ end
157
+
158
+ test '#resume must be implemented by subclass' do
159
+ assert_raise NotImplementedError do
160
+ @p.resume
161
+ end
162
+ end
163
+
164
+ test '#generate_chunk must be implemented by subclass' do
165
+ assert_raise NotImplementedError do
166
+ @p.generate_chunk(Object.new)
167
+ end
168
+ end
169
+ end
170
+
171
+ sub_test_case 'with default configuration and dummy implementation' do
172
+ setup do
173
+ @p = create_buffer({})
174
+ @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
175
+ @dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
176
+ @dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
177
+ @dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
178
+ @p.start
179
+ end
180
+
181
+ test '#start resumes buffer states and update queued numbers per metadata' do
182
+ plugin = create_buffer({})
183
+
184
+ assert_equal({}, plugin.stage)
185
+ assert_equal([], plugin.queue)
186
+ assert_equal({}, plugin.dequeued)
187
+ assert_equal({}, plugin.queued_num)
188
+ assert_equal([], plugin.metadata_list)
189
+
190
+ assert_equal 0, plugin.stage_size
191
+ assert_equal 0, plugin.queue_size
192
+
193
+ # @p is started plugin
194
+
195
+ assert_equal [@dm2,@dm3], @p.stage.keys
196
+ assert_equal "b" * 100, @p.stage[@dm2].read
197
+ assert_equal "c" * 100, @p.stage[@dm3].read
198
+
199
+ assert_equal 200, @p.stage_size
200
+
201
+ assert_equal 3, @p.queue.size
202
+ assert_equal "0" * 100, @p.queue[0].read
203
+ assert_equal "a" * 100, @p.queue[1].read
204
+ assert_equal "a" * 3, @p.queue[2].read
205
+
206
+ assert_equal 203, @p.queue_size
207
+
208
+ # staged, queued
209
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
210
+ assert_equal 1, @p.queued_num[@dm0]
211
+ assert_equal 2, @p.queued_num[@dm1]
212
+ end
213
+
214
+ test '#close closes all chunks in in dequeued, enqueued and staged' do
215
+ dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
216
+ cx = create_chunk(dmx, ["x" * 1024])
217
+ @p.dequeued[cx.unique_id] = cx
218
+
219
+ staged_chunks = @p.stage.values.dup
220
+ queued_chunks = @p.queue.dup
221
+
222
+ @p.close
223
+
224
+ assert cx.closed
225
+ assert{ staged_chunks.all?{|c| c.closed } }
226
+ assert{ queued_chunks.all?{|c| c.closed } }
227
+ end
228
+
229
+ test '#terminate initializes all internal states' do
230
+ dmx = create_metadata(Time.parse('2016-04-11 15:50:00 +0000').to_i, nil, nil)
231
+ cx = create_chunk(dmx, ["x" * 1024])
232
+ @p.dequeued[cx.unique_id] = cx
233
+
234
+ @p.close
235
+
236
+ @p.terminate
237
+
238
+ assert_nil @p.stage
239
+ assert_nil @p.queue
240
+ assert_nil @p.dequeued
241
+ assert_nil @p.queued_num
242
+ assert_nil @p.instance_eval{ @metadata_list } # #metadata_list does #dup for @metadata_list
243
+ assert_equal 0, @p.stage_size
244
+ assert_equal 0, @p.queue_size
245
+ end
246
+
247
+ test '#metadata_list returns list of metadata on stage or in queue' do
248
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
249
+ end
250
+
251
+ test '#new_metadata creates metadata instance without inserting metadata_list' do
252
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
253
+ _m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
254
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
255
+ end
256
+
257
+ test '#add_metadata adds unknown metadata into list, or return known metadata if already exists' do
258
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
259
+
260
+ m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
261
+ _mx = @p.add_metadata(m)
262
+ assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
263
+ assert_equal m.object_id, m.object_id
264
+
265
+ my = @p.add_metadata(@dm1)
266
+ assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
267
+ assert_equal @dm1, my
268
+ assert{ @dm1.object_id != my.object_id } # 'my' is an object created in #resume
269
+ end
270
+
271
+ test '#metadata is utility method to create-add-and-return metadata' do
272
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
273
+
274
+ m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
275
+ assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
276
+ m2 = @p.metadata(timekey: @dm3.timekey)
277
+ assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
278
+ assert_equal @dm3, m2
279
+ end
280
+
281
+ test '#queued_records returns total number of size in all chunks in queue' do
282
+ assert_equal 3, @p.queue.size
283
+
284
+ r0 = @p.queue[0].size
285
+ assert_equal 1, r0
286
+ r1 = @p.queue[1].size
287
+ assert_equal 1, r1
288
+ r2 = @p.queue[2].size
289
+ assert_equal 1, r2
290
+
291
+ assert_equal (r0+r1+r2), @p.queued_records
292
+ end
293
+
294
+ test '#queued? returns queue has any chunks or not without arguments' do
295
+ assert @p.queued?
296
+
297
+ @p.queue.reject!{|_c| true }
298
+ assert !@p.queued?
299
+ end
300
+
301
+ test '#queued? returns queue has chunks for specified metadata with an argument' do
302
+ assert @p.queued?(@dm0)
303
+ assert @p.queued?(@dm1)
304
+ assert !@p.queued?(@dm2)
305
+ end
306
+
307
+ test '#enqueue_chunk enqueues a chunk on stage with specified metadata' do
308
+ assert_equal 2, @p.stage.size
309
+ assert_equal [@dm2,@dm3], @p.stage.keys
310
+ assert_equal 3, @p.queue.size
311
+ assert_nil @p.queued_num[@dm2]
312
+
313
+ assert_equal 200, @p.stage_size
314
+ assert_equal 203, @p.queue_size
315
+
316
+ @p.enqueue_chunk(@dm2)
317
+
318
+ assert_equal [@dm3], @p.stage.keys
319
+ assert_equal @dm2, @p.queue.last.metadata
320
+ assert_equal 1, @p.queued_num[@dm2]
321
+ assert_equal 100, @p.stage_size
322
+ assert_equal 303, @p.queue_size
323
+ end
324
+
325
+ test '#enqueue_chunk ignores empty chunks' do
326
+ assert_equal 3, @p.queue.size
327
+
328
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
329
+ c = create_chunk(m, [''])
330
+ @p.stage[m] = c
331
+ assert @p.stage[m].empty?
332
+ assert !c.closed
333
+
334
+ @p.enqueue_chunk(m)
335
+
336
+ assert_nil @p.stage[m]
337
+ assert_equal 3, @p.queue.size
338
+ assert_nil @p.queued_num[m]
339
+ assert c.closed
340
+ end
341
+
342
+ test '#enqueue_chunk calls #enqueued! if chunk responds to it' do
343
+ assert_equal 3, @p.queue.size
344
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
345
+ c = create_chunk(m, ['c' * 256])
346
+ callback_called = false
347
+ (class << c; self; end).module_eval do
348
+ define_method(:enqueued!){ callback_called = true }
349
+ end
350
+
351
+ @p.stage[m] = c
352
+ @p.enqueue_chunk(m)
353
+
354
+ assert_equal c, @p.queue.last
355
+ assert callback_called
356
+ end
357
+
358
+ test '#enqueue_all enqueues chunks on stage which given block returns true with' do
359
+ m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
360
+ c1 = create_chunk(m1, ['c' * 256])
361
+ @p.stage[m1] = c1
362
+ m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
363
+ c2 = create_chunk(m2, ['c' * 256])
364
+ @p.stage[m2] = c2
365
+
366
+ assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
367
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
368
+
369
+ @p.enqueue_all{ |m, c| m.timekey < Time.parse('2016-04-11 16:41:00 +0000').to_i }
370
+
371
+ assert_equal [m2], @p.stage.keys
372
+ assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1], @p.queue.map(&:metadata)
373
+ end
374
+
375
+ test '#enqueue_all enqueues all chunks on stage without block' do
376
+ m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
377
+ c1 = create_chunk(m1, ['c' * 256])
378
+ @p.stage[m1] = c1
379
+ m2 = @p.metadata(timekey: Time.parse('2016-04-11 16:50:00 +0000').to_i)
380
+ c2 = create_chunk(m2, ['c' * 256])
381
+ @p.stage[m2] = c2
382
+
383
+ assert_equal [@dm2,@dm3,m1,m2], @p.stage.keys
384
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
385
+
386
+ @p.enqueue_all
387
+
388
+ assert_equal [], @p.stage.keys
389
+ assert_equal [@dm0,@dm1,@dm1,@dm2,@dm3,m1,m2], @p.queue.map(&:metadata)
390
+ end
391
+
392
+ test '#dequeue_chunk dequeues a chunk from queue if a chunk exists' do
393
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
394
+ assert_equal({}, @p.dequeued)
395
+
396
+ m1 = @p.dequeue_chunk
397
+ assert_equal @dm0, m1.metadata
398
+ assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
399
+
400
+ m2 = @p.dequeue_chunk
401
+ assert_equal @dm1, m2.metadata
402
+ assert_equal @dm1, @p.dequeued[m2.unique_id].metadata
403
+
404
+ m3 = @p.dequeue_chunk
405
+ assert_equal @dm1, m3.metadata
406
+ assert_equal @dm1, @p.dequeued[m3.unique_id].metadata
407
+
408
+ m4 = @p.dequeue_chunk
409
+ assert_nil m4
410
+ end
411
+
412
+ test '#takeback_chunk resumes a chunk from dequeued to queued at the head of queue, and returns true' do
413
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
414
+ assert_equal({}, @p.dequeued)
415
+
416
+ m1 = @p.dequeue_chunk
417
+ assert_equal @dm0, m1.metadata
418
+ assert_equal @dm0, @p.dequeued[m1.unique_id].metadata
419
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
420
+ assert_equal({m1.unique_id => m1}, @p.dequeued)
421
+
422
+ assert @p.takeback_chunk(m1.unique_id)
423
+
424
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
425
+ assert_equal({}, @p.dequeued)
426
+ end
427
+
428
+ test '#purge_chunk removes a chunk specified by argument id from dequeued chunks' do
429
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
430
+ assert_equal({}, @p.dequeued)
431
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
432
+
433
+ m0 = @p.dequeue_chunk
434
+ m1 = @p.dequeue_chunk
435
+
436
+ assert @p.takeback_chunk(m0.unique_id)
437
+
438
+ assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
439
+ assert_equal({m1.unique_id => m1}, @p.dequeued)
440
+
441
+ assert !m1.purged
442
+
443
+ @p.purge_chunk(m1.unique_id)
444
+ assert m1.purged
445
+
446
+ assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
447
+ assert_equal({}, @p.dequeued)
448
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
449
+ end
450
+
451
+ test '#purge_chunk removes an argument metadata from metadata_list if no chunks exist on stage or in queue' do
452
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
453
+ assert_equal({}, @p.dequeued)
454
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
455
+
456
+ m0 = @p.dequeue_chunk
457
+
458
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
459
+ assert_equal({m0.unique_id => m0}, @p.dequeued)
460
+
461
+ assert !m0.purged
462
+
463
+ @p.purge_chunk(m0.unique_id)
464
+ assert m0.purged
465
+
466
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
467
+ assert_equal({}, @p.dequeued)
468
+ assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
469
+ end
470
+
471
+ test '#takeback_chunk returns false if specified chunk_id is already purged' do
472
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
473
+ assert_equal({}, @p.dequeued)
474
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
475
+
476
+ m0 = @p.dequeue_chunk
477
+
478
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
479
+ assert_equal({m0.unique_id => m0}, @p.dequeued)
480
+
481
+ assert !m0.purged
482
+
483
+ @p.purge_chunk(m0.unique_id)
484
+ assert m0.purged
485
+
486
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
487
+ assert_equal({}, @p.dequeued)
488
+ assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
489
+
490
+ assert !@p.takeback_chunk(m0.unique_id)
491
+
492
+ assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
493
+ assert_equal({}, @p.dequeued)
494
+ assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
495
+ end
496
+
497
+ test '#clear_queue! removes all chunks in queue, but leaves staged chunks' do
498
+ qchunks = @p.queue.dup
499
+
500
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
501
+ assert_equal 2, @p.stage.size
502
+ assert_equal({}, @p.dequeued)
503
+
504
+ @p.clear_queue!
505
+
506
+ assert_equal [], @p.queue
507
+ assert_equal 0, @p.queue_size
508
+ assert_equal 2, @p.stage.size
509
+ assert_equal({}, @p.dequeued)
510
+
511
+ assert{ qchunks.all?{ |c| c.purged } }
512
+ end
513
+
514
+ test '#write returns immediately if argument data is empty array' do
515
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
516
+ assert_equal [@dm2,@dm3], @p.stage.keys
517
+
518
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
519
+
520
+ @p.write({m => []})
521
+
522
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
523
+ assert_equal [@dm2,@dm3], @p.stage.keys
524
+ end
525
+
526
+ test '#write returns immediately if argument data is empty event stream' do
527
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
528
+ assert_equal [@dm2,@dm3], @p.stage.keys
529
+
530
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
531
+
532
+ @p.write({m => Fluent::ArrayEventStream.new([])})
533
+
534
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
535
+ assert_equal [@dm2,@dm3], @p.stage.keys
536
+ end
537
+
538
+ test '#write raises BufferOverflowError if buffer is not storable' do
539
+ @p.stage_size = 256 * 1024 * 1024
540
+ @p.queue_size = 256 * 1024 * 1024
541
+
542
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
543
+
544
+ assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
545
+ @p.write({m => ["x" * 256]})
546
+ end
547
+ end
548
+
549
+ test '#write stores data into an existing chunk with metadata specified' do
550
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
551
+ assert_equal [@dm2,@dm3], @p.stage.keys
552
+
553
+ dm3data = @p.stage[@dm3].read.dup
554
+ prev_stage_size = @p.stage_size
555
+
556
+ assert_equal 1, @p.stage[@dm3].append_count
557
+
558
+ @p.write({@dm3 => ["x" * 256, "y" * 256, "z" * 256]})
559
+
560
+ assert_equal 2, @p.stage[@dm3].append_count
561
+ assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
562
+ assert_equal (prev_stage_size + 768), @p.stage_size
563
+
564
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
565
+ assert_equal [@dm2,@dm3], @p.stage.keys
566
+ end
567
+
568
+ test '#write creates new chunk and store data into it if there are no chunks for specified metadata' do
569
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
570
+ assert_equal [@dm2,@dm3], @p.stage.keys
571
+
572
+ prev_stage_size = @p.stage_size
573
+
574
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
575
+
576
+ @p.write({m => ["x" * 256, "y" * 256, "z" * 256]})
577
+
578
+ assert_equal 1, @p.stage[m].append_count
579
+ assert_equal ("x" * 256 + "y" * 256 + "z" * 256), @p.stage[m].read
580
+ assert_equal (prev_stage_size + 768), @p.stage_size
581
+
582
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
583
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
584
+ end
585
+
586
+ test '#write tries to enqueue and store data into a new chunk if existing chunk is full' do
587
+ assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
588
+ assert_equal 0.95, @p.chunk_full_threshold
589
+
590
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
591
+ assert_equal [@dm2,@dm3], @p.stage.keys
592
+
593
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
594
+
595
+ row = "x" * 1024 * 1024
596
+ small_row = "x" * 1024 * 512
597
+ @p.write({m => [row] * 7 + [small_row]})
598
+
599
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
600
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
601
+ assert_equal 1, @p.stage[m].append_count
602
+
603
+ @p.write({m => [row]})
604
+
605
+ assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
606
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
607
+ assert_equal 1, @p.stage[m].append_count
608
+ assert_equal 1024*1024, @p.stage[m].bytesize
609
+ assert_equal 3, @p.queue.last.append_count # 1 -> write (2) -> write_step_by_step (3)
610
+ assert @p.queue.last.rollbacked
611
+ end
612
+
613
+ test '#write rollbacks if commit raises errors' do
614
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
615
+ assert_equal [@dm2,@dm3], @p.stage.keys
616
+
617
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
618
+
619
+ row = "x" * 1024
620
+ @p.write({m => [row] * 8})
621
+
622
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
623
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
624
+
625
+ target_chunk = @p.stage[m]
626
+
627
+ assert_equal 1, target_chunk.append_count
628
+ assert !target_chunk.rollbacked
629
+
630
+ (class << target_chunk; self; end).module_eval do
631
+ define_method(:commit){ raise "yay" }
632
+ end
633
+
634
+ assert_raise RuntimeError.new("yay") do
635
+ @p.write({m => [row]})
636
+ end
637
+
638
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
639
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
640
+
641
+ assert_equal 2, target_chunk.append_count
642
+ assert target_chunk.rollbacked
643
+ assert_equal row * 8, target_chunk.read
644
+ end
645
+
646
+ test '#write w/ format raises BufferOverflowError if buffer is not storable' do
647
+ @p.stage_size = 256 * 1024 * 1024
648
+ @p.queue_size = 256 * 1024 * 1024
649
+
650
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
651
+
652
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "xxxxxxxxxxxxxx"} ] ])
653
+
654
+ assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
655
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
656
+ end
657
+ end
658
+
659
+ test '#write w/ format stores data into an existing chunk with metadata specified' do
660
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
661
+ assert_equal [@dm2,@dm3], @p.stage.keys
662
+
663
+ dm3data = @p.stage[@dm3].read.dup
664
+ prev_stage_size = @p.stage_size
665
+
666
+ assert_equal 1, @p.stage[@dm3].append_count
667
+
668
+ es = Fluent::ArrayEventStream.new(
669
+ [
670
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 128}],
671
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "y" * 128}],
672
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "z" * 128}],
673
+ ]
674
+ )
675
+
676
+ @p.write({@dm3 => es}, format: ->(e){e.to_msgpack_stream})
677
+
678
+ assert_equal 2, @p.stage[@dm3].append_count
679
+ assert_equal (dm3data + es.to_msgpack_stream), @p.stage[@dm3].read
680
+ assert_equal (prev_stage_size + es.to_msgpack_stream.bytesize), @p.stage_size
681
+
682
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
683
+ assert_equal [@dm2,@dm3], @p.stage.keys
684
+ end
685
+
686
+ test '#write w/ format creates new chunk and store data into it if there are not chunks for specified metadata' do
687
+ assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
688
+
689
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
690
+ assert_equal [@dm2,@dm3], @p.stage.keys
691
+
692
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
693
+
694
+ es = Fluent::ArrayEventStream.new(
695
+ [
696
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
697
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
698
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
699
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
700
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
701
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
702
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
703
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
704
+ ]
705
+ )
706
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
707
+
708
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
709
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
710
+ assert_equal 1, @p.stage[m].append_count
711
+ end
712
+
713
+ test '#write w/ format tries to enqueue and store data into a new chunk if existing chunk does not have enough space' do
714
+ assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
715
+
716
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
717
+ assert_equal [@dm2,@dm3], @p.stage.keys
718
+
719
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
720
+
721
+ es = Fluent::ArrayEventStream.new(
722
+ [
723
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
724
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
725
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
726
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
727
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
728
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
729
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
730
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
731
+ ]
732
+ )
733
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
734
+
735
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
736
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
737
+ assert_equal 1, @p.stage[m].append_count
738
+
739
+ es2 = Fluent::OneEventStream.new(event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 1024})
740
+ @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
741
+
742
+ assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
743
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
744
+ assert_equal 1, @p.stage[m].append_count
745
+ assert_equal es2.to_msgpack_stream.bytesize, @p.stage[m].bytesize
746
+ assert_equal 2, @p.queue.last.append_count # 1 -> write (2) -> rollback&enqueue
747
+ assert @p.queue.last.rollbacked
748
+ end
749
+
750
+ test '#write w/ format enqueues chunk if it is already full after adding data' do
751
+ assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
752
+
753
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
754
+ assert_equal [@dm2,@dm3], @p.stage.keys
755
+
756
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
757
+ es = Fluent::ArrayEventStream.new(
758
+ [
759
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}], # 1024 * 1024 bytes as msgpack stream
760
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
761
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
762
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
763
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
764
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
765
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
766
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
767
+ ]
768
+ )
769
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
770
+
771
+ assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
772
+ assert_equal [@dm2,@dm3], @p.stage.keys
773
+ assert_equal 1, @p.queue.last.append_count
774
+ end
775
+
776
+ test '#write w/ format rollbacks if commit raises errors' do
777
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
778
+ assert_equal [@dm2,@dm3], @p.stage.keys
779
+
780
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
781
+
782
+ es = Fluent::ArrayEventStream.new(
783
+ [
784
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
785
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
786
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
787
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
788
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
789
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
790
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
791
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
792
+ ]
793
+ )
794
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
795
+
796
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
797
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
798
+
799
+ target_chunk = @p.stage[m]
800
+
801
+ assert_equal 1, target_chunk.append_count
802
+ assert !target_chunk.rollbacked
803
+
804
+ (class << target_chunk; self; end).module_eval do
805
+ define_method(:commit){ raise "yay" }
806
+ end
807
+
808
+ es2 = Fluent::ArrayEventStream.new(
809
+ [
810
+ [event_time('2016-04-11 16:40:04 +0000'), {"message" => "z" * 1024 * 128}],
811
+ ]
812
+ )
813
+ assert_raise RuntimeError.new("yay") do
814
+ @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
815
+ end
816
+
817
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
818
+ assert_equal [@dm2,@dm3,m], @p.stage.keys
819
+
820
+ assert_equal 2, target_chunk.append_count
821
+ assert target_chunk.rollbacked
822
+ assert_equal es.to_msgpack_stream, target_chunk.read
823
+ end
824
+
825
+ test '#write writes many metadata and data pairs at once' do
826
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
827
+ assert_equal [@dm2,@dm3], @p.stage.keys
828
+
829
+ row = "x" * 1024
830
+ @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })
831
+
832
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
833
+ end
834
+
835
+ test '#write does not commit on any chunks if any append operation on chunk fails' do
836
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
837
+ assert_equal [@dm2,@dm3], @p.stage.keys
838
+
839
+ row = "x" * 1024
840
+ @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })
841
+
842
+ assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
843
+
844
+ dm2_size = @p.stage[@dm2].size
845
+ assert !@p.stage[@dm2].rollbacked
846
+ dm3_size = @p.stage[@dm3].size
847
+ assert !@p.stage[@dm3].rollbacked
848
+
849
+ assert{ @p.stage[@dm0].size == 3 }
850
+ assert !@p.stage[@dm0].rollbacked
851
+ assert{ @p.stage[@dm1].size == 2 }
852
+ assert !@p.stage[@dm1].rollbacked
853
+
854
+ @p.stage[@dm1].failing = true
855
+
856
+ assert_raise(FluentPluginBufferTest::DummyMemoryChunkError) do
857
+ @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] })
858
+ end
859
+
860
+ assert{ @p.stage[@dm2].size == dm2_size }
861
+ assert @p.stage[@dm2].rollbacked
862
+ assert{ @p.stage[@dm3].size == dm3_size }
863
+ assert @p.stage[@dm3].rollbacked
864
+
865
+ assert{ @p.stage[@dm0].size == 3 }
866
+ assert @p.stage[@dm0].rollbacked
867
+ assert{ @p.stage[@dm1].size == 2 }
868
+ assert @p.stage[@dm1].rollbacked
869
+ end
870
+
871
+ test '#compress returns :text' do
872
+ assert_equal :text, @p.compress
873
+ end
874
+ end
875
+
876
+ sub_test_case 'standard format with configuration for test with lower chunk limit size' do
877
+ setup do
878
+ @p = create_buffer({"chunk_limit_size" => 1_280_000})
879
+ @format = ->(e){e.to_msgpack_stream}
880
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
881
+ # 1 record is 128bytes in msgpack stream
882
+ @es0 = es0 = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:01 +0000'), {"message" => "x" * (128 - 22)}] ] * 5000)
883
+ (class << @p; self; end).module_eval do
884
+ define_method(:resume) {
885
+ staged = {
886
+ dm0 => create_chunk_es(dm0, es0).staged!,
887
+ }
888
+ queued = []
889
+ return staged, queued
890
+ }
891
+ end
892
+ @p.start
893
+ end
894
+
895
+ test '#write appends event stream into staged chunk' do
896
+ assert_equal [@dm0], @p.stage.keys
897
+ assert_equal [], @p.queue.map(&:metadata)
898
+
899
+ assert_equal 1_280_000, @p.chunk_limit_size
900
+
901
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 1000)
902
+ @p.write({@dm0 => es}, format: @format)
903
+
904
+ assert_equal [@dm0], @p.stage.keys
905
+ assert_equal [], @p.queue.map(&:metadata)
906
+
907
+ assert_equal (@es0.to_msgpack_stream + es.to_msgpack_stream), @p.stage[@dm0].read
908
+ end
909
+
910
+ test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
911
+ assert_equal [@dm0], @p.stage.keys
912
+ assert_equal [], @p.queue.map(&:metadata)
913
+
914
+ assert_equal 1_280_000, @p.chunk_limit_size
915
+
916
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 8000)
917
+ @p.write({@dm0 => es}, format: @format)
918
+
919
+ assert_equal [@dm0], @p.stage.keys
920
+ assert_equal [@dm0], @p.queue.map(&:metadata)
921
+
922
+ assert_equal (es.to_msgpack_stream), @p.stage[@dm0].read
923
+ end
924
+
925
+ test '#write writes event stream into many chunks excluding staged chunk if event stream is larger than chunk limit size' do
926
+ assert_equal [@dm0], @p.stage.keys
927
+ assert_equal [], @p.queue.map(&:metadata)
928
+
929
+ assert_equal 1_280_000, @p.chunk_limit_size
930
+
931
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 45000)
932
+ @p.write({@dm0 => es}, format: @format)
933
+
934
+ assert_equal [@dm0], @p.stage.keys
935
+ assert_equal 5400, @p.stage[@dm0].size
936
+ assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0], @p.queue.map(&:metadata)
937
+ assert_equal [5000, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
938
+ # 9900 * 4 + 5400 == 45000
939
+ end
940
+
941
+ test '#write raises BufferChunkOverflowError if a record is biggar than chunk limit size' do
942
+ assert_equal [@dm0], @p.stage.keys
943
+ assert_equal [], @p.queue.map(&:metadata)
944
+
945
+ assert_equal 1_280_000, @p.chunk_limit_size
946
+
947
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}] ])
948
+ assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
949
+ @p.write({@dm0 => es}, format: @format)
950
+ end
951
+ end
952
+ end
+
+ sub_test_case 'custom format with configuration for test with lower chunk limit size' do
+ setup do
+ @p = create_buffer({"chunk_limit_size" => 1_280_000})
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
+ @row = "x" * 128
+ @data0 = data0 = [@row] * 5000
+ (class << @p; self; end).module_eval do
+ define_method(:resume) {
+ staged = {
+ dm0 => create_chunk(dm0, data0).staged!,
+ }
+ queued = []
+ return staged, queued
+ }
+ end
+ @p.start
+ end
+
+ test '#write appends event stream into staged chunk' do
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal [], @p.queue.map(&:metadata)
+
+ assert_equal 1_280_000, @p.chunk_limit_size
+
+ data = [@row] * 1000
+ @p.write({@dm0 => data})
+
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal [], @p.queue.map(&:metadata)
+
+ assert_equal (@row * 6000), @p.stage[@dm0].read
+ end
+
+ test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal [], @p.queue.map(&:metadata)
+
+ staged_chunk_object_id = @p.stage[@dm0].object_id
+
+ assert_equal 1_280_000, @p.chunk_limit_size
+
+ data = [@row] * 8000
+ @p.write({@dm0 => data})
+
+ assert_equal [@dm0], @p.queue.map(&:metadata)
+ assert_equal [staged_chunk_object_id], @p.queue.map(&:object_id)
+ assert_equal [@dm0], @p.stage.keys
+
+ assert_equal [9800], @p.queue.map(&:size)
+ assert_equal 3200, @p.stage[@dm0].size
+ # 9800 + 3200 == 5000 + 8000
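+ # (the staged chunk grew from 5_000 to 9_800 rows before being enqueued;
+ #  the remaining 3_200 rows opened a fresh staged chunk)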
+ end
+
+ test '#write writes event stream into many chunks including staged chunk if event stream is larger than chunk limit size' do
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal [], @p.queue.map(&:metadata)
+
+ staged_chunk_object_id = @p.stage[@dm0].object_id
+
+ assert_equal 1_280_000, @p.chunk_limit_size
+
+ assert_equal 5000, @p.stage[@dm0].size
+
+ data = [@row] * 45000
+ @p.write({@dm0 => data})
+
+ assert_equal staged_chunk_object_id, @p.queue.first.object_id
+
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal 900, @p.stage[@dm0].size
+ assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0], @p.queue.map(&:metadata)
+ assert_equal [9500, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
+ # 900 + 9500 + 9900 * 4 == 5000 + 45000
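+ # (the staged chunk was topped up from 5_000 to 9_500 rows, exactly the 0.95 full
+ #  threshold of 10_000 records, and enqueued first; 900 of the new rows remain staged)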
+ end
+
+ test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal [], @p.queue.map(&:metadata)
+
+ assert_equal 1_280_000, @p.chunk_limit_size
+
+ es = ["x" * 1_280_000 + "x" * 300]
+ assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
+ @p.write({@dm0 => es})
+ end
+ end
+ end
+
+ sub_test_case 'with configuration for test with lower limits' do
+ setup do
+ @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240})
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
+ @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
+ @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
+ @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
+ (class << @p; self; end).module_eval do
+ define_method(:resume) {
+ staged = {
+ dm2 => create_chunk(dm2, ["b" * 128] * 7).staged!,
+ dm3 => create_chunk(dm3, ["c" * 128] * 5).staged!,
+ }
+ queued = [
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!, # 8th queued chunk
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
+ ]
+ return staged, queued
+ }
+ end
+ @p.start
+ end
+
+ test '#storable? returns false when too much data exists' do
+ assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0,@dm1,@dm1,@dm1,@dm1], @p.queue.map(&:metadata)
+ assert_equal [@dm2,@dm3], @p.stage.keys
+
+ assert_equal 128*8*8+128*3, @p.queue_size
+ assert_equal 128*7+128*5, @p.stage_size
+
+ assert @p.storable?
+
+ dm3 = @p.metadata(timekey: @dm3.timekey)
+ @p.write({dm3 => ["c" * 128]})
+
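+ # queue (128*8*8 + 128*3 = 8_576) + stage (128*7 + 128*5 = 1_536) + the new 128-byte row
+ # adds up to 10_240, which is exactly total_limit_size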
+ assert_equal 10240, (@p.stage_size + @p.queue_size)
+ assert !@p.storable?
+ end
+
+ test '#chunk_size_over? returns true if chunk size is bigger than limit' do
+ m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
+
+ c1 = create_chunk(m, ["a" * 128] * 8)
+ assert !@p.chunk_size_over?(c1)
+
+ c2 = create_chunk(m, ["a" * 128] * 9)
+ assert @p.chunk_size_over?(c2)
+
+ c3 = create_chunk(m, ["a" * 128] * 8 + ["a"])
+ assert @p.chunk_size_over?(c3)
+ end
+
+ test '#chunk_size_full? returns true if chunk size is big enough against limit' do
+ m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
+
+ c1 = create_chunk(m, ["a" * 128] * 7)
+ assert !@p.chunk_size_full?(c1)
+
+ c2 = create_chunk(m, ["a" * 128] * 8)
+ assert @p.chunk_size_full?(c2)
+
+ assert_equal 0.95, @p.chunk_full_threshold
+ c3 = create_chunk(m, ["a" * 128] * 6 + ["a" * 64])
+ assert !@p.chunk_size_full?(c3)
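+ # with chunk_limit_size 1024, "full" means bytesize >= 1024 * 0.95 = 972.8:
+ # c1 (896 bytes) and c3 (832 bytes) fall short, while c2 (1024 bytes) does not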
+ end
+ end
+
+ sub_test_case 'with configuration including chunk_limit_records' do
+ setup do
+ @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "chunk_limit_records" => 6})
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
+ @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
+ @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
+ @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
+ (class << @p; self; end).module_eval do
+ define_method(:resume) {
+ staged = {
+ dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
+ dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
+ }
+ queued = [
+ create_chunk(dm0, ["0" * 128] * 6).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
+ ]
+ return staged, queued
+ }
+ end
+ @p.start
+ end
+
+ test '#chunk_size_over? returns true if too many records exist in a chunk even if its bytesize is less than limit' do
+ assert_equal 6, @p.chunk_limit_records
+
+ m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
+
+ c1 = create_chunk(m, ["a" * 128] * 6)
+ assert_equal 6, c1.size
+ assert !@p.chunk_size_over?(c1)
+
+ c2 = create_chunk(m, ["a" * 128] * 7)
+ assert @p.chunk_size_over?(c2)
+
+ c3 = create_chunk(m, ["a" * 128] * 6 + ["a"])
+ assert @p.chunk_size_over?(c3)
+ end
+
+ test '#chunk_size_full? returns true if enough records exist in a chunk even if its bytesize is less than limit' do
+ assert_equal 6, @p.chunk_limit_records
+
+ m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
+
+ c1 = create_chunk(m, ["a" * 128] * 5)
+ assert_equal 5, c1.size
+ assert !@p.chunk_size_full?(c1)
+
+ c2 = create_chunk(m, ["a" * 128] * 6)
+ assert @p.chunk_size_full?(c2)
+
+ c3 = create_chunk(m, ["a" * 128] * 5 + ["a"])
+ assert @p.chunk_size_full?(c3)
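+ # the 0.95 full threshold presumably applies to record counts as well:
+ # 6 records >= 6 * 0.95 = 5.7 counts as full, while c1's 5 records fall short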
+ end
+ end
+
+ sub_test_case 'with configuration including queue_limit_length' do
+ setup do
+ @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "queue_limit_length" => 5})
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
+ @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
+ @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
+ @dm3 = dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
+ (class << @p; self; end).module_eval do
+ define_method(:resume) {
+ staged = {
+ dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
+ dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
+ }
+ queued = [
+ create_chunk(dm0, ["0" * 128] * 6).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
+ ]
+ return staged, queued
+ }
+ end
+ @p.start
+ end
+
+ test '#configure overwrites the standard configuration if queue_limit_length is given' do
+ assert_equal 1024, @p.chunk_limit_size
+ assert_equal 5, @p.queue_limit_length
+ assert_equal (1024*5), @p.total_limit_size
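+ # total_limit_size is recomputed as chunk_limit_size * queue_limit_length = 5_120 bytes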
+ end
+ end
+
+ sub_test_case 'when compress is gzip' do
+ setup do
+ @p = create_buffer({'compress' => 'gzip'})
+ end
+
+ test '#compress returns :gzip' do
+ assert_equal :gzip, @p.compress
+ end
+
+ test '#generate_chunk creates a decompressable chunk' do
+ chunk = @p.generate_chunk(create_metadata)
+ assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::Decompressable)
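+ # a gzip buffer extends each generated chunk with the Decompressable mixin,
+ # which presumably lets callers read the payload in compressed or decompressed form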
+ end
+ end
+ end