fluentd-hubspot 0.14.14.1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (396) hide show
  1. data/.github/ISSUE_TEMPLATE.md +6 -0
  2. data/.gitignore +28 -0
  3. data/.travis.yml +51 -0
  4. data/AUTHORS +2 -0
  5. data/CONTRIBUTING.md +42 -0
  6. data/COPYING +14 -0
  7. data/ChangeLog +593 -0
  8. data/Gemfile +9 -0
  9. data/README.md +76 -0
  10. data/Rakefile +74 -0
  11. data/Vagrantfile +17 -0
  12. data/appveyor.yml +43 -0
  13. data/bin/fluent-binlog-reader +7 -0
  14. data/bin/fluent-debug +5 -0
  15. data/bin/fluent-plugin-config-format +5 -0
  16. data/bin/fluent-plugin-generate +5 -0
  17. data/code-of-conduct.md +3 -0
  18. data/example/copy_roundrobin.conf +39 -0
  19. data/example/filter_stdout.conf +22 -0
  20. data/example/in_dummy_blocks.conf +17 -0
  21. data/example/in_dummy_with_compression.conf +23 -0
  22. data/example/in_forward.conf +14 -0
  23. data/example/in_forward_client.conf +37 -0
  24. data/example/in_forward_shared_key.conf +15 -0
  25. data/example/in_forward_tls.conf +14 -0
  26. data/example/in_forward_users.conf +24 -0
  27. data/example/in_forward_workers.conf +21 -0
  28. data/example/in_http.conf +14 -0
  29. data/example/in_out_forward.conf +17 -0
  30. data/example/in_syslog.conf +15 -0
  31. data/example/in_tail.conf +14 -0
  32. data/example/in_tcp.conf +13 -0
  33. data/example/in_udp.conf +13 -0
  34. data/example/logevents.conf +25 -0
  35. data/example/multi_filters.conf +61 -0
  36. data/example/out_copy.conf +20 -0
  37. data/example/out_exec_filter.conf +42 -0
  38. data/example/out_file.conf +13 -0
  39. data/example/out_forward.conf +35 -0
  40. data/example/out_forward_buf_file.conf +23 -0
  41. data/example/out_forward_client.conf +109 -0
  42. data/example/out_forward_heartbeat_none.conf +16 -0
  43. data/example/out_forward_shared_key.conf +36 -0
  44. data/example/out_forward_tls.conf +18 -0
  45. data/example/out_forward_users.conf +65 -0
  46. data/example/out_null.conf +36 -0
  47. data/example/secondary_file.conf +41 -0
  48. data/example/suppress_config_dump.conf +7 -0
  49. data/example/v0_12_filter.conf +78 -0
  50. data/example/v1_literal_example.conf +36 -0
  51. data/fluent.conf +139 -0
  52. data/fluentd.gemspec +51 -0
  53. data/lib/fluent/agent.rb +163 -0
  54. data/lib/fluent/clock.rb +62 -0
  55. data/lib/fluent/command/binlog_reader.rb +234 -0
  56. data/lib/fluent/command/bundler_injection.rb +45 -0
  57. data/lib/fluent/command/cat.rb +330 -0
  58. data/lib/fluent/command/debug.rb +102 -0
  59. data/lib/fluent/command/fluentd.rb +301 -0
  60. data/lib/fluent/command/plugin_config_formatter.rb +258 -0
  61. data/lib/fluent/command/plugin_generator.rb +301 -0
  62. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  63. data/lib/fluent/compat/detach_process_mixin.rb +25 -0
  64. data/lib/fluent/compat/exec_util.rb +129 -0
  65. data/lib/fluent/compat/file_util.rb +54 -0
  66. data/lib/fluent/compat/filter.rb +68 -0
  67. data/lib/fluent/compat/formatter.rb +111 -0
  68. data/lib/fluent/compat/formatter_utils.rb +85 -0
  69. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  70. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  71. data/lib/fluent/compat/input.rb +59 -0
  72. data/lib/fluent/compat/output.rb +728 -0
  73. data/lib/fluent/compat/output_chain.rb +60 -0
  74. data/lib/fluent/compat/parser.rb +310 -0
  75. data/lib/fluent/compat/parser_utils.rb +40 -0
  76. data/lib/fluent/compat/propagate_default.rb +62 -0
  77. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  78. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  79. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  80. data/lib/fluent/compat/socket_util.rb +165 -0
  81. data/lib/fluent/compat/string_util.rb +34 -0
  82. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  83. data/lib/fluent/compat/type_converter.rb +90 -0
  84. data/lib/fluent/config.rb +56 -0
  85. data/lib/fluent/config/basic_parser.rb +123 -0
  86. data/lib/fluent/config/configure_proxy.rb +418 -0
  87. data/lib/fluent/config/dsl.rb +149 -0
  88. data/lib/fluent/config/element.rb +218 -0
  89. data/lib/fluent/config/error.rb +26 -0
  90. data/lib/fluent/config/literal_parser.rb +251 -0
  91. data/lib/fluent/config/parser.rb +107 -0
  92. data/lib/fluent/config/section.rb +223 -0
  93. data/lib/fluent/config/types.rb +136 -0
  94. data/lib/fluent/config/v1_parser.rb +190 -0
  95. data/lib/fluent/configurable.rb +200 -0
  96. data/lib/fluent/daemon.rb +15 -0
  97. data/lib/fluent/engine.rb +266 -0
  98. data/lib/fluent/env.rb +28 -0
  99. data/lib/fluent/error.rb +30 -0
  100. data/lib/fluent/event.rb +334 -0
  101. data/lib/fluent/event_router.rb +269 -0
  102. data/lib/fluent/filter.rb +21 -0
  103. data/lib/fluent/formatter.rb +23 -0
  104. data/lib/fluent/input.rb +21 -0
  105. data/lib/fluent/label.rb +46 -0
  106. data/lib/fluent/load.rb +35 -0
  107. data/lib/fluent/log.rb +546 -0
  108. data/lib/fluent/match.rb +178 -0
  109. data/lib/fluent/mixin.rb +31 -0
  110. data/lib/fluent/msgpack_factory.rb +62 -0
  111. data/lib/fluent/output.rb +29 -0
  112. data/lib/fluent/output_chain.rb +23 -0
  113. data/lib/fluent/parser.rb +23 -0
  114. data/lib/fluent/plugin.rb +183 -0
  115. data/lib/fluent/plugin/bare_output.rb +63 -0
  116. data/lib/fluent/plugin/base.rb +165 -0
  117. data/lib/fluent/plugin/buf_file.rb +184 -0
  118. data/lib/fluent/plugin/buf_memory.rb +34 -0
  119. data/lib/fluent/plugin/buffer.rb +617 -0
  120. data/lib/fluent/plugin/buffer/chunk.rb +221 -0
  121. data/lib/fluent/plugin/buffer/file_chunk.rb +364 -0
  122. data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
  123. data/lib/fluent/plugin/compressable.rb +92 -0
  124. data/lib/fluent/plugin/exec_util.rb +22 -0
  125. data/lib/fluent/plugin/file_util.rb +22 -0
  126. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  127. data/lib/fluent/plugin/filter.rb +93 -0
  128. data/lib/fluent/plugin/filter_grep.rb +75 -0
  129. data/lib/fluent/plugin/filter_parser.rb +119 -0
  130. data/lib/fluent/plugin/filter_record_transformer.rb +322 -0
  131. data/lib/fluent/plugin/filter_stdout.rb +53 -0
  132. data/lib/fluent/plugin/formatter.rb +50 -0
  133. data/lib/fluent/plugin/formatter_csv.rb +52 -0
  134. data/lib/fluent/plugin/formatter_hash.rb +33 -0
  135. data/lib/fluent/plugin/formatter_json.rb +55 -0
  136. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  137. data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
  138. data/lib/fluent/plugin/formatter_out_file.rb +51 -0
  139. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  140. data/lib/fluent/plugin/formatter_stdout.rb +75 -0
  141. data/lib/fluent/plugin/formatter_tsv.rb +34 -0
  142. data/lib/fluent/plugin/in_debug_agent.rb +64 -0
  143. data/lib/fluent/plugin/in_dummy.rb +139 -0
  144. data/lib/fluent/plugin/in_exec.rb +108 -0
  145. data/lib/fluent/plugin/in_forward.rb +455 -0
  146. data/lib/fluent/plugin/in_gc_stat.rb +56 -0
  147. data/lib/fluent/plugin/in_http.rb +433 -0
  148. data/lib/fluent/plugin/in_monitor_agent.rb +448 -0
  149. data/lib/fluent/plugin/in_object_space.rb +93 -0
  150. data/lib/fluent/plugin/in_syslog.rb +209 -0
  151. data/lib/fluent/plugin/in_tail.rb +905 -0
  152. data/lib/fluent/plugin/in_tcp.rb +85 -0
  153. data/lib/fluent/plugin/in_udp.rb +81 -0
  154. data/lib/fluent/plugin/in_unix.rb +201 -0
  155. data/lib/fluent/plugin/input.rb +37 -0
  156. data/lib/fluent/plugin/multi_output.rb +157 -0
  157. data/lib/fluent/plugin/out_copy.rb +46 -0
  158. data/lib/fluent/plugin/out_exec.rb +105 -0
  159. data/lib/fluent/plugin/out_exec_filter.rb +317 -0
  160. data/lib/fluent/plugin/out_file.rb +302 -0
  161. data/lib/fluent/plugin/out_forward.rb +912 -0
  162. data/lib/fluent/plugin/out_null.rb +74 -0
  163. data/lib/fluent/plugin/out_relabel.rb +32 -0
  164. data/lib/fluent/plugin/out_roundrobin.rb +84 -0
  165. data/lib/fluent/plugin/out_secondary_file.rb +133 -0
  166. data/lib/fluent/plugin/out_stdout.rb +75 -0
  167. data/lib/fluent/plugin/out_stream.rb +130 -0
  168. data/lib/fluent/plugin/output.rb +1291 -0
  169. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  170. data/lib/fluent/plugin/parser.rb +191 -0
  171. data/lib/fluent/plugin/parser_apache.rb +28 -0
  172. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  173. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  174. data/lib/fluent/plugin/parser_csv.rb +39 -0
  175. data/lib/fluent/plugin/parser_json.rb +81 -0
  176. data/lib/fluent/plugin/parser_ltsv.rb +42 -0
  177. data/lib/fluent/plugin/parser_msgpack.rb +50 -0
  178. data/lib/fluent/plugin/parser_multiline.rb +105 -0
  179. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  180. data/lib/fluent/plugin/parser_none.rb +36 -0
  181. data/lib/fluent/plugin/parser_regexp.rb +63 -0
  182. data/lib/fluent/plugin/parser_syslog.rb +121 -0
  183. data/lib/fluent/plugin/parser_tsv.rb +42 -0
  184. data/lib/fluent/plugin/socket_util.rb +22 -0
  185. data/lib/fluent/plugin/storage.rb +84 -0
  186. data/lib/fluent/plugin/storage_local.rb +159 -0
  187. data/lib/fluent/plugin/string_util.rb +22 -0
  188. data/lib/fluent/plugin_helper.rb +70 -0
  189. data/lib/fluent/plugin_helper/cert_option.rb +159 -0
  190. data/lib/fluent/plugin_helper/child_process.rb +364 -0
  191. data/lib/fluent/plugin_helper/compat_parameters.rb +331 -0
  192. data/lib/fluent/plugin_helper/event_emitter.rb +93 -0
  193. data/lib/fluent/plugin_helper/event_loop.rb +161 -0
  194. data/lib/fluent/plugin_helper/extract.rb +104 -0
  195. data/lib/fluent/plugin_helper/formatter.rb +147 -0
  196. data/lib/fluent/plugin_helper/inject.rb +151 -0
  197. data/lib/fluent/plugin_helper/parser.rb +147 -0
  198. data/lib/fluent/plugin_helper/retry_state.rb +201 -0
  199. data/lib/fluent/plugin_helper/server.rb +738 -0
  200. data/lib/fluent/plugin_helper/socket.rb +241 -0
  201. data/lib/fluent/plugin_helper/socket_option.rb +69 -0
  202. data/lib/fluent/plugin_helper/storage.rb +349 -0
  203. data/lib/fluent/plugin_helper/thread.rb +179 -0
  204. data/lib/fluent/plugin_helper/timer.rb +91 -0
  205. data/lib/fluent/plugin_id.rb +80 -0
  206. data/lib/fluent/process.rb +22 -0
  207. data/lib/fluent/registry.rb +116 -0
  208. data/lib/fluent/root_agent.rb +323 -0
  209. data/lib/fluent/rpc.rb +94 -0
  210. data/lib/fluent/supervisor.rb +741 -0
  211. data/lib/fluent/system_config.rb +159 -0
  212. data/lib/fluent/test.rb +58 -0
  213. data/lib/fluent/test/base.rb +78 -0
  214. data/lib/fluent/test/driver/base.rb +224 -0
  215. data/lib/fluent/test/driver/base_owned.rb +70 -0
  216. data/lib/fluent/test/driver/base_owner.rb +135 -0
  217. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  218. data/lib/fluent/test/driver/filter.rb +57 -0
  219. data/lib/fluent/test/driver/formatter.rb +30 -0
  220. data/lib/fluent/test/driver/input.rb +31 -0
  221. data/lib/fluent/test/driver/multi_output.rb +53 -0
  222. data/lib/fluent/test/driver/output.rb +102 -0
  223. data/lib/fluent/test/driver/parser.rb +30 -0
  224. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  225. data/lib/fluent/test/filter_test.rb +77 -0
  226. data/lib/fluent/test/formatter_test.rb +65 -0
  227. data/lib/fluent/test/helpers.rb +134 -0
  228. data/lib/fluent/test/input_test.rb +174 -0
  229. data/lib/fluent/test/log.rb +79 -0
  230. data/lib/fluent/test/output_test.rb +156 -0
  231. data/lib/fluent/test/parser_test.rb +70 -0
  232. data/lib/fluent/test/startup_shutdown.rb +46 -0
  233. data/lib/fluent/time.rb +412 -0
  234. data/lib/fluent/timezone.rb +133 -0
  235. data/lib/fluent/unique_id.rb +39 -0
  236. data/lib/fluent/version.rb +21 -0
  237. data/lib/fluent/winsvc.rb +71 -0
  238. data/templates/new_gem/Gemfile +3 -0
  239. data/templates/new_gem/README.md.erb +43 -0
  240. data/templates/new_gem/Rakefile +13 -0
  241. data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
  242. data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
  243. data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
  244. data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
  245. data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
  246. data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
  247. data/templates/new_gem/test/helper.rb.erb +8 -0
  248. data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
  249. data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
  250. data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
  251. data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
  252. data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
  253. data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
  254. data/templates/plugin_config_formatter/param.md.erb +34 -0
  255. data/templates/plugin_config_formatter/section.md.erb +12 -0
  256. data/test/command/test_binlog_reader.rb +346 -0
  257. data/test/command/test_fluentd.rb +618 -0
  258. data/test/command/test_plugin_config_formatter.rb +275 -0
  259. data/test/command/test_plugin_generator.rb +66 -0
  260. data/test/compat/test_calls_super.rb +166 -0
  261. data/test/compat/test_parser.rb +92 -0
  262. data/test/config/assertions.rb +42 -0
  263. data/test/config/test_config_parser.rb +513 -0
  264. data/test/config/test_configurable.rb +1587 -0
  265. data/test/config/test_configure_proxy.rb +566 -0
  266. data/test/config/test_dsl.rb +415 -0
  267. data/test/config/test_element.rb +403 -0
  268. data/test/config/test_literal_parser.rb +297 -0
  269. data/test/config/test_section.rb +184 -0
  270. data/test/config/test_system_config.rb +168 -0
  271. data/test/config/test_types.rb +191 -0
  272. data/test/helper.rb +153 -0
  273. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  274. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  275. data/test/plugin/data/2010/01/20100102.log +0 -0
  276. data/test/plugin/data/log/bar +0 -0
  277. data/test/plugin/data/log/foo/bar.log +0 -0
  278. data/test/plugin/data/log/foo/bar2 +0 -0
  279. data/test/plugin/data/log/test.log +0 -0
  280. data/test/plugin/test_bare_output.rb +118 -0
  281. data/test/plugin/test_base.rb +115 -0
  282. data/test/plugin/test_buf_file.rb +843 -0
  283. data/test/plugin/test_buf_memory.rb +42 -0
  284. data/test/plugin/test_buffer.rb +1220 -0
  285. data/test/plugin/test_buffer_chunk.rb +198 -0
  286. data/test/plugin/test_buffer_file_chunk.rb +844 -0
  287. data/test/plugin/test_buffer_memory_chunk.rb +338 -0
  288. data/test/plugin/test_compressable.rb +84 -0
  289. data/test/plugin/test_file_util.rb +96 -0
  290. data/test/plugin/test_filter.rb +357 -0
  291. data/test/plugin/test_filter_grep.rb +119 -0
  292. data/test/plugin/test_filter_parser.rb +700 -0
  293. data/test/plugin/test_filter_record_transformer.rb +556 -0
  294. data/test/plugin/test_filter_stdout.rb +202 -0
  295. data/test/plugin/test_formatter_csv.rb +111 -0
  296. data/test/plugin/test_formatter_hash.rb +35 -0
  297. data/test/plugin/test_formatter_json.rb +51 -0
  298. data/test/plugin/test_formatter_ltsv.rb +59 -0
  299. data/test/plugin/test_formatter_msgpack.rb +28 -0
  300. data/test/plugin/test_formatter_out_file.rb +95 -0
  301. data/test/plugin/test_formatter_single_value.rb +38 -0
  302. data/test/plugin/test_in_debug_agent.rb +28 -0
  303. data/test/plugin/test_in_dummy.rb +192 -0
  304. data/test/plugin/test_in_exec.rb +245 -0
  305. data/test/plugin/test_in_forward.rb +1120 -0
  306. data/test/plugin/test_in_gc_stat.rb +39 -0
  307. data/test/plugin/test_in_http.rb +588 -0
  308. data/test/plugin/test_in_monitor_agent.rb +516 -0
  309. data/test/plugin/test_in_object_space.rb +64 -0
  310. data/test/plugin/test_in_syslog.rb +271 -0
  311. data/test/plugin/test_in_tail.rb +1216 -0
  312. data/test/plugin/test_in_tcp.rb +118 -0
  313. data/test/plugin/test_in_udp.rb +152 -0
  314. data/test/plugin/test_in_unix.rb +126 -0
  315. data/test/plugin/test_input.rb +126 -0
  316. data/test/plugin/test_multi_output.rb +180 -0
  317. data/test/plugin/test_out_copy.rb +160 -0
  318. data/test/plugin/test_out_exec.rb +310 -0
  319. data/test/plugin/test_out_exec_filter.rb +613 -0
  320. data/test/plugin/test_out_file.rb +873 -0
  321. data/test/plugin/test_out_forward.rb +685 -0
  322. data/test/plugin/test_out_null.rb +105 -0
  323. data/test/plugin/test_out_relabel.rb +28 -0
  324. data/test/plugin/test_out_roundrobin.rb +146 -0
  325. data/test/plugin/test_out_secondary_file.rb +442 -0
  326. data/test/plugin/test_out_stdout.rb +170 -0
  327. data/test/plugin/test_out_stream.rb +93 -0
  328. data/test/plugin/test_output.rb +870 -0
  329. data/test/plugin/test_output_as_buffered.rb +1932 -0
  330. data/test/plugin/test_output_as_buffered_compress.rb +165 -0
  331. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  332. data/test/plugin/test_output_as_buffered_retries.rb +839 -0
  333. data/test/plugin/test_output_as_buffered_secondary.rb +877 -0
  334. data/test/plugin/test_output_as_standard.rb +374 -0
  335. data/test/plugin/test_owned_by.rb +35 -0
  336. data/test/plugin/test_parser.rb +359 -0
  337. data/test/plugin/test_parser_apache.rb +42 -0
  338. data/test/plugin/test_parser_apache2.rb +46 -0
  339. data/test/plugin/test_parser_apache_error.rb +45 -0
  340. data/test/plugin/test_parser_csv.rb +103 -0
  341. data/test/plugin/test_parser_json.rb +114 -0
  342. data/test/plugin/test_parser_labeled_tsv.rb +128 -0
  343. data/test/plugin/test_parser_multiline.rb +100 -0
  344. data/test/plugin/test_parser_nginx.rb +48 -0
  345. data/test/plugin/test_parser_none.rb +52 -0
  346. data/test/plugin/test_parser_regexp.rb +281 -0
  347. data/test/plugin/test_parser_syslog.rb +242 -0
  348. data/test/plugin/test_parser_tsv.rb +122 -0
  349. data/test/plugin/test_storage.rb +167 -0
  350. data/test/plugin/test_storage_local.rb +335 -0
  351. data/test/plugin/test_string_util.rb +26 -0
  352. data/test/plugin_helper/test_child_process.rb +794 -0
  353. data/test/plugin_helper/test_compat_parameters.rb +331 -0
  354. data/test/plugin_helper/test_event_emitter.rb +51 -0
  355. data/test/plugin_helper/test_event_loop.rb +52 -0
  356. data/test/plugin_helper/test_extract.rb +194 -0
  357. data/test/plugin_helper/test_formatter.rb +255 -0
  358. data/test/plugin_helper/test_inject.rb +519 -0
  359. data/test/plugin_helper/test_parser.rb +264 -0
  360. data/test/plugin_helper/test_retry_state.rb +422 -0
  361. data/test/plugin_helper/test_server.rb +1677 -0
  362. data/test/plugin_helper/test_storage.rb +542 -0
  363. data/test/plugin_helper/test_thread.rb +164 -0
  364. data/test/plugin_helper/test_timer.rb +132 -0
  365. data/test/scripts/exec_script.rb +32 -0
  366. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
  367. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
  368. data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
  369. data/test/scripts/fluent/plugin/out_test.rb +81 -0
  370. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  371. data/test/scripts/fluent/plugin/parser_known.rb +4 -0
  372. data/test/test_clock.rb +164 -0
  373. data/test/test_config.rb +179 -0
  374. data/test/test_configdsl.rb +148 -0
  375. data/test/test_event.rb +515 -0
  376. data/test/test_event_router.rb +331 -0
  377. data/test/test_event_time.rb +186 -0
  378. data/test/test_filter.rb +121 -0
  379. data/test/test_formatter.rb +312 -0
  380. data/test/test_input.rb +31 -0
  381. data/test/test_log.rb +828 -0
  382. data/test/test_match.rb +137 -0
  383. data/test/test_mixin.rb +351 -0
  384. data/test/test_output.rb +273 -0
  385. data/test/test_plugin.rb +251 -0
  386. data/test/test_plugin_classes.rb +253 -0
  387. data/test/test_plugin_helper.rb +81 -0
  388. data/test/test_plugin_id.rb +101 -0
  389. data/test/test_process.rb +14 -0
  390. data/test/test_root_agent.rb +611 -0
  391. data/test/test_supervisor.rb +373 -0
  392. data/test/test_test_drivers.rb +135 -0
  393. data/test/test_time_formatter.rb +282 -0
  394. data/test/test_time_parser.rb +211 -0
  395. data/test/test_unique_id.rb +47 -0
  396. metadata +898 -0
@@ -0,0 +1,1932 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/output'
3
+ require 'fluent/plugin/buffer'
4
+ require 'fluent/output'
5
+ require 'fluent/event'
6
+
7
+ require 'json'
8
+ require 'time'
9
+ require 'timeout'
10
+ require 'timecop'
11
+
12
+ module FluentPluginOutputAsBufferedTest
13
+ class DummyBareOutput < Fluent::Plugin::Output
14
+ def register(name, &block)
15
+ instance_variable_set("@#{name}", block)
16
+ end
17
+ end
18
+ class DummySyncOutput < DummyBareOutput
19
+ def initialize
20
+ super
21
+ @process = nil
22
+ end
23
+ def process(tag, es)
24
+ @process ? @process.call(tag, es) : nil
25
+ end
26
+ end
27
+ class DummyAsyncOutput < DummyBareOutput
28
+ def initialize
29
+ super
30
+ @format = nil
31
+ @write = nil
32
+ end
33
+ def format(tag, time, record)
34
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
35
+ end
36
+ def write(chunk)
37
+ @write ? @write.call(chunk) : nil
38
+ end
39
+ end
40
+ class DummyDelayedOutput < DummyBareOutput
41
+ def initialize
42
+ super
43
+ @format = nil
44
+ @try_write = nil
45
+ @shutdown_hook = nil
46
+ end
47
+ def format(tag, time, record)
48
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
49
+ end
50
+ def try_write(chunk)
51
+ @try_write ? @try_write.call(chunk) : nil
52
+ end
53
+ def shutdown
54
+ if @shutdown_hook
55
+ @shutdown_hook.call
56
+ end
57
+ super
58
+ end
59
+ end
60
+ class DummyStandardBufferedOutput < DummyBareOutput
61
+ def initialize
62
+ super
63
+ @prefer_delayed_commit = nil
64
+ @write = nil
65
+ @try_write = nil
66
+ end
67
+ def prefer_delayed_commit
68
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
69
+ end
70
+ def write(chunk)
71
+ @write ? @write.call(chunk) : nil
72
+ end
73
+ def try_write(chunk)
74
+ @try_write ? @try_write.call(chunk) : nil
75
+ end
76
+ end
77
+ class DummyCustomFormatBufferedOutput < DummyBareOutput
78
+ def initialize
79
+ super
80
+ @format_type_is_msgpack = nil
81
+ @prefer_delayed_commit = nil
82
+ @write = nil
83
+ @try_write = nil
84
+ end
85
+ def format(tag, time, record)
86
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
87
+ end
88
+ def formatted_to_msgpack_binary
89
+ @format_type_is_msgpack ? @format_type_is_msgpack.call : false
90
+ end
91
+ def prefer_delayed_commit
92
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
93
+ end
94
+ def write(chunk)
95
+ @write ? @write.call(chunk) : nil
96
+ end
97
+ def try_write(chunk)
98
+ @try_write ? @try_write.call(chunk) : nil
99
+ end
100
+ end
101
+ class DummyFullFeatureOutput < DummyBareOutput
102
+ def initialize
103
+ super
104
+ @prefer_buffered_processing = nil
105
+ @prefer_delayed_commit = nil
106
+ @process = nil
107
+ @format = nil
108
+ @write = nil
109
+ @try_write = nil
110
+ end
111
+ def prefer_buffered_processing
112
+ @prefer_buffered_processing ? @prefer_buffered_processing.call : false
113
+ end
114
+ def prefer_delayed_commit
115
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
116
+ end
117
+ def process(tag, es)
118
+ @process ? @process.call(tag, es) : nil
119
+ end
120
+ def format(tag, time, record)
121
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
122
+ end
123
+ def write(chunk)
124
+ @write ? @write.call(chunk) : nil
125
+ end
126
+ def try_write(chunk)
127
+ @try_write ? @try_write.call(chunk) : nil
128
+ end
129
+ end
130
+ module OldPluginMethodMixin
131
+ def initialize
132
+ super
133
+ @format = nil
134
+ @write = nil
135
+ end
136
+ def register(name, &block)
137
+ instance_variable_set("@#{name}", block)
138
+ end
139
+ def format(tag, time, record)
140
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
141
+ end
142
+ def write(chunk)
143
+ @write ? @write.call(chunk) : nil
144
+ end
145
+ end
146
+ class DummyOldBufferedOutput < Fluent::BufferedOutput
147
+ include OldPluginMethodMixin
148
+ end
149
+ class DummyOldObjectBufferedOutput < Fluent::ObjectBufferedOutput
150
+ include OldPluginMethodMixin
151
+ end
152
+ end
153
+
154
+ class BufferedOutputTest < Test::Unit::TestCase
155
+ def create_output(type=:full)
156
+ case type
157
+ when :bare then FluentPluginOutputAsBufferedTest::DummyBareOutput.new
158
+ when :sync then FluentPluginOutputAsBufferedTest::DummySyncOutput.new
159
+ when :buffered then FluentPluginOutputAsBufferedTest::DummyAsyncOutput.new
160
+ when :delayed then FluentPluginOutputAsBufferedTest::DummyDelayedOutput.new
161
+ when :standard then FluentPluginOutputAsBufferedTest::DummyStandardBufferedOutput.new
162
+ when :custom then FluentPluginOutputAsBufferedTest::DummyCustomFormatBufferedOutput.new
163
+ when :full then FluentPluginOutputAsBufferedTest::DummyFullFeatureOutput.new
164
+ when :old_buf then FluentPluginOutputAsBufferedTest::DummyOldBufferedOutput.new
165
+ when :old_obj then FluentPluginOutputAsBufferedTest::DummyOldObjectBufferedOutput.new
166
+ else
167
+ raise ArgumentError, "unknown type: #{type}"
168
+ end
169
+ end
170
+ def create_metadata(timekey: nil, tag: nil, variables: nil)
171
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
172
+ end
173
+ def waiting(seconds)
174
+ begin
175
+ Timeout.timeout(seconds) do
176
+ yield
177
+ end
178
+ rescue Timeout::Error
179
+ STDERR.print(*@i.log.out.logs)
180
+ raise
181
+ end
182
+ end
183
+
184
+ teardown do
185
+ if @i
186
+ @i.stop unless @i.stopped?
187
+ @i.before_shutdown unless @i.before_shutdown?
188
+ @i.shutdown unless @i.shutdown?
189
+ @i.after_shutdown unless @i.after_shutdown?
190
+ @i.close unless @i.closed?
191
+ @i.terminate unless @i.terminated?
192
+ end
193
+ Timecop.return
194
+ end
195
+
196
+ sub_test_case 'chunk feature in #write for output plugins' do
197
+ setup do
198
+ @stored_global_logger = $log
199
+ $log = Fluent::Test::TestLogger.new
200
+ @hash = {
201
+ 'flush_mode' => 'immediate',
202
+ 'flush_thread_interval' => '0.01',
203
+ 'flush_thread_burst_interval' => '0.01',
204
+ }
205
+ end
206
+
207
+ teardown do
208
+ $log = @stored_global_logger
209
+ end
210
+
211
+ test 'plugin using standard format can iterate chunk for time, record in #write' do
212
+ events_from_chunk = []
213
+ @i = create_output(:standard)
214
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
215
+ @i.register(:prefer_delayed_commit){ false }
216
+ @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:write, e] }
217
+ @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:try_write, e] }
218
+ @i.start
219
+ @i.after_start
220
+
221
+ events = [
222
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
223
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
224
+ ]
225
+
226
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
227
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
228
+
229
+ waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
230
+
231
+ assert_equal 2, events_from_chunk.size
232
+ 2.times.each do |i|
233
+ assert_equal :write, events_from_chunk[i][0]
234
+ assert_equal events, events_from_chunk[i][1]
235
+ end
236
+ end
237
+
238
+ test 'plugin using standard format can iterate chunk for time, record in #try_write' do
239
+ events_from_chunk = []
240
+ @i = create_output(:standard)
241
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
242
+ @i.register(:prefer_delayed_commit){ true }
243
+ @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:write, e] }
244
+ @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:try_write, e] }
245
+ @i.start
246
+ @i.after_start
247
+
248
+ events = [
249
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
250
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
251
+ ]
252
+
253
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
254
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
255
+
256
+ waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
257
+
258
+ assert_equal 2, events_from_chunk.size
259
+ 2.times.each do |i|
260
+ assert_equal :try_write, events_from_chunk[i][0]
261
+ assert_equal events, events_from_chunk[i][1]
262
+ end
263
+ end
264
+
265
+ test 'plugin using custom format cannot iterate chunk in #write' do
266
+ events_from_chunk = []
267
+ @i = create_output(:custom)
268
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
269
+ @i.register(:prefer_delayed_commit){ false }
270
+ @i.register(:format){ |tag, time, record| [tag,time,record].to_json }
271
+ @i.register(:format_type_is_msgpack){ false }
272
+ @i.register(:write){ |chunk| assert !(chunk.respond_to?(:each)) }
273
+ @i.register(:try_write){ |chunk| assert !(chunk.respond_to?(:each)) }
274
+ @i.start
275
+ @i.after_start
276
+
277
+ events = [
278
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
279
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
280
+ ]
281
+
282
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
283
+
284
+ assert_equal 0, events_from_chunk.size
285
+ end
286
+
287
+ test 'plugin using custom format cannot iterate chunk in #try_write' do
288
+ events_from_chunk = []
289
+ @i = create_output(:custom)
290
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
291
+ @i.register(:prefer_delayed_commit){ true }
292
+ @i.register(:format){ |tag, time, record| [tag,time,record].to_json }
293
+ @i.register(:format_type_is_msgpack){ false }
294
+ @i.register(:write){ |chunk| assert !(chunk.respond_to?(:each)) }
295
+ @i.register(:try_write){ |chunk| assert !(chunk.respond_to?(:each)) }
296
+ @i.start
297
+ @i.after_start
298
+
299
+ events = [
300
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
301
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
302
+ ]
303
+
304
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
305
+
306
+ assert_equal 0, events_from_chunk.size
307
+ end
308
+
309
test 'plugin using custom format can iterate chunk in #write if #format returns msgpack' do
  collected = []
  @i = create_output(:custom)
  @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', @hash)]))
  @i.register(:prefer_delayed_commit) { false }
  @i.register(:format) { |tag, time, record| [tag, time, record].to_msgpack }
  @i.register(:format_type_is_msgpack) { true }
  # When #format produces msgpack, the chunk handed to #write/#try_write is iterable.
  @i.register(:write) do |chunk|
    assert chunk.respond_to?(:each)
    received = []
    chunk.each { |tag, time, record| received << [tag, time, record] }
    collected << [:write, received]
  end
  @i.register(:try_write) do |chunk|
    assert chunk.respond_to?(:each)
    received = []
    chunk.each { |tag, time, record| received << [tag, time, record] }
    collected << [:try_write, received]
  end
  @i.start
  @i.after_start

  events = [
    [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
    [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
  ]

  2.times { @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events)) }

  waiting(5) { sleep 0.1 until collected.size == 2 }

  # prefer_delayed_commit is false, so both flushes must arrive via #write.
  assert_equal 2, collected.size
  2.times do |idx|
    method_called, received = collected[idx]
    assert_equal :write, method_called
    assert_equal 2, received.size
    assert_equal 'test.tag', received[0][0]
    assert_equal 'test.tag', received[1][0]
    assert_equal events, received.map { |_tag, time, record| [time, record] }
  end
end
341
+
342
data(:handle_stream_simple => '',
     :handle_stream_with_custom_format => 'tag,message')
test 'plugin using custom format can skip record chunk when format return nil' do |chunk_keys|
  collected = []
  @i = create_output(:custom)
  @i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_keys, @hash)]))
  @i.register(:prefer_delayed_commit) { false }
  # Returning nil from #format drops the record from the chunk entirely.
  @i.register(:format) do |tag, time, record|
    record['message'] == 'test1' ? nil : [tag, time, record].to_msgpack
  end
  @i.register(:format_type_is_msgpack) { true }
  @i.register(:write) do |chunk|
    received = []
    chunk.each { |tag, time, record| received << [tag, time, record] }
    collected << [:write, received]
  end
  @i.start
  @i.after_start

  events = [
    [event_time('2016-10-05 16:16:16 -0700'), {"message" => "test1"}],
    [event_time('2016-10-05 16:16:17 -0700'), {"message" => "test2"}],
  ]
  @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))

  waiting(5) { sleep 0.1 until collected.size == 1 }

  # Only the non-nil-formatted record ("test2") may reach the chunk.
  assert_equal 1, collected.size
  method_called, received = collected[0]
  assert_equal :write, method_called
  assert_equal 1, received.size
  assert_equal 'test.tag', received[0][0]
  assert_equal "test2", received[0][2]['message']
end
376
+
377
# Verifies that with prefer_delayed_commit = true, flushes are delivered via
# #try_write (never #write) and the msgpack-formatted chunk is iterable.
test 'plugin using custom format can iterate chunk in #try_write if #format returns msgpack' do
  events_from_chunk = []
  @i = create_output(:custom)
  @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', @hash)]))
  @i.register(:prefer_delayed_commit){ true }
  @i.register(:format){ |tag, time, record| [tag, time, record].to_msgpack }
  @i.register(:format_type_is_msgpack){ true }
  # FIX: this callback previously initialized `events_from_chunk = []` instead of
  # `e = []` (copy-paste slip — compare the #write-path sibling test), which would
  # clobber the shared accumulator and raise NameError on `e` if #write ever fired.
  @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta, t, r| e << [ta, t, r]}; events_from_chunk << [:write, e] }
  @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta, t, r| e << [ta, t, r]}; events_from_chunk << [:try_write, e] }
  @i.start
  @i.after_start

  events = [
    [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
    [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
  ]

  @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
  @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))

  waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }

  assert_equal 2, events_from_chunk.size
  2.times.each do |i|
    # delayed commit => both flushes must have come through #try_write
    assert_equal :try_write, events_from_chunk[i][0]
    each_pushed = events_from_chunk[i][1]
    assert_equal 2, each_pushed.size
    assert_equal 'test.tag', each_pushed[0][0]
    assert_equal 'test.tag', each_pushed[1][0]
    assert_equal events, each_pushed.map{|tag, time, record| [time, record]}
  end
end
409
+
410
data(:BufferedOutput => :old_buf,
     :ObjectBufferedOutput => :old_obj)
test 'old plugin types can iterate chunk by msgpack_each in #write' do |plugin_type|
  collected = []
  # The event_emitter helper requires Engine.root_agent for routing.
  root_agent = Fluent::RootAgent.new(log: $log)
  stub(Fluent::Engine).root_agent { root_agent }
  @i = create_output(plugin_type)
  @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', @hash)]))
  @i.register(:format) { |tag, time, record| [time, record].to_msgpack }
  @i.register(:write) do |chunk|
    received = []
    chunk.msgpack_each { |time, record| received << [time, record] }
    collected << [:write, received]
  end
  @i.start
  @i.after_start

  events = [
    [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
    [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
  ]

  2.times { @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events)) }

  waiting(5) { sleep 0.1 until collected.size == 2 }

  assert_equal 2, collected.size
  2.times do |idx|
    assert_equal :write, collected[idx][0]
    assert_equal events, collected[idx][1]
  end
end
440
+ end
441
+
442
sub_test_case 'buffered output configured with many chunk keys' do
  setup do
    # Swap in a capturing logger; restored in teardown.
    @stored_global_logger = $log
    $log = Fluent::Test::TestLogger.new
    @hash = {
      'flush_mode' => 'interval',
      'flush_thread_burst_interval' => 0.01,
      'chunk_limit_size' => 1024,
      'timekey' => 60,
    }
    @i = create_output(:buffered)
  end

  teardown do
    $log = @stored_global_logger
  end

  test 'nothing are warned with less chunk keys' do
    chunk_keys = 'time,key1,key2,key3'
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_keys, @hash)]))
    logs = @i.log.out.logs.dup
    @i.start
    @i.after_start
    assert{ logs.count{|line| line.include?('[warn]') } == 0 }
  end

  test 'a warning reported with 4 chunk keys' do
    chunk_keys = 'key1,key2,key3,key4'
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_keys, @hash)]))
    logs = @i.log.out.logs.dup

    @i.start # this calls `log.reset`... capturing logs about configure must be done before this line
    @i.after_start
    assert_equal ['key1', 'key2', 'key3', 'key4'], @i.chunk_keys

    assert{ logs.count{|line| line.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') } == 1 }
  end

  test 'a warning reported with 4 chunk keys including "tag"' do
    chunk_keys = 'tag,key1,key2,key3'
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_keys, @hash)]))
    logs = @i.log.out.logs.dup
    @i.start # this calls `log.reset`... capturing logs about configure must be done before this line
    @i.after_start
    assert{ logs.count{|line| line.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') } == 1 }
  end

  test 'time key is not included for warned chunk keys' do
    chunk_keys = 'time,key1,key2,key3'
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_keys, @hash)]))
    logs = @i.log.out.logs.dup
    @i.start
    @i.after_start
    assert{ logs.count{|line| line.include?('[warn]') } == 0 }
  end
end
496
+
497
sub_test_case 'buffered output feature without any buffer key, flush_mode: lazy' do
  setup do
    conf = {
      'flush_mode' => 'lazy',
      'flush_thread_burst_interval' => 0.01,
      'flush_thread_count' => 2,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', conf)]))
    @i.start
    @i.after_start
  end

  test '#start does not create enqueue thread, but creates flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:flush_thread_1)
    assert !@i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each events' do
    formatted = []
    @i.register(:format){|tag, time, record| formatted << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    4.times { @i.emit_events('tag.test', es) }

    assert_equal 8, formatted.size
    4.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], formatted[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], formatted[i*2 + 1]
    end
  end

  test '#write is called only when chunk bytes limit exceeded, and buffer chunk is purged' do
    written = []
    @i.register(:write){|chunk| written << chunk.read }

    tag = "test.tag"
    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    event_size = [tag, t, r].to_json.size # 195

    # Fill ~90% of the chunk: nothing should be flushed yet under lazy mode.
    (1024 * 0.9 / event_size).to_i.times do |i|
      @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
    end
    assert{ @i.buffer.queue.size == 0 && written.size == 0 }

    staged_chunk = @i.buffer.stage[@i.buffer.stage.keys.first]
    assert{ staged_chunk.size != 0 }

    # One more event exceeds chunk_limit_size and forces enqueue/flush.
    @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))

    assert{ @i.buffer.queue.size > 0 || @i.buffer.dequeued.size > 0 || written.size > 0 }

    waiting(10) do
      Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
      Thread.pass until staged_chunk.size == 0
    end

    assert_equal 1, written.size
    assert_equal [tag, t, r].to_json * (1024 / event_size), written.first
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    written = []
    @i.register(:write){|chunk| written << chunk.read }

    tag = "test.tag"
    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    event_size = [tag, t, r].to_json.size # 195

    (1024 * 0.9 / event_size).to_i.times do |i|
      @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
    end
    assert{ @i.buffer.queue.size == 0 && written.size == 0 }

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(10) do
      Thread.pass until written.size == 1
    end
    assert_equal [tag, t, r].to_json * (1024 * 0.9 / event_size), written.first
  end
end
598
+
599
sub_test_case 'buffered output feature without any buffer key, flush_mode: interval' do
  setup do
    conf = {
      'flush_mode' => 'interval',
      'flush_interval' => 1,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.01,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', conf)]))
    @i.start
    @i.after_start
  end

  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    formatted = []
    @i.register(:format){|tag, time, record| formatted << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    4.times { @i.emit_events('tag.test', es) }

    assert_equal 8, formatted.size
    4.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], formatted[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], formatted[i*2 + 1]
    end
  end

  test '#write is called per flush_interval, and buffer chunk is purged' do
    @i.thread_wait_until_start

    written = []
    @i.register(:format){|tag, time, record| [tag, time, record].to_json + "\n" }
    @i.register(:write) do |chunk|
      chunk.read.split("\n").reject{|l| l.empty? }.each{|data| written << data }
    end

    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end

    3.times do |i|
      rand_records = rand(1..4)
      es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
      assert_equal rand_records, es.size

      @i.interrupt_flushes

      assert{ @i.buffer.queue.size == 0 }

      @i.emit_events("test.tag", es)

      assert{ @i.buffer.queue.size == 0 }
      assert{ @i.buffer.stage.size == 1 }

      staged_chunk = @i.instance_eval{ @buffer.stage[@buffer.stage.keys.first] }
      assert{ staged_chunk.size != 0 }

      @i.enqueue_thread_wait

      waiting(10) do
        Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
        Thread.pass until staged_chunk.size == 0
      end

      assert_equal rand_records, written.size
      written.clear # reset for the next iteration
    end
  end
end
681
+
682
sub_test_case 'with much longer flush_interval' do
  setup do
    conf = {
      'flush_mode' => 'interval',
      'flush_interval' => 3000,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.01,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', conf)]))
    @i.start
    @i.after_start
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    written = []
    @i.register(:write){|chunk| written << chunk.read }

    tag = "test.tag"
    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    event_size = [tag, t, r].to_json.size # 195

    (1024 * 0.9 / event_size).to_i.times do |i|
      @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
    end
    queue_size = @i.buffer.queue.size
    # The 3000s flush_interval guarantees nothing was flushed before shutdown.
    assert{ queue_size == 0 && written.size == 0 }

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(10){ sleep 0.1 until written.size == 1 }
    assert_equal [tag, t, r].to_json * (1024 * 0.9 / event_size), written.first
  end
end
724
+
725
sub_test_case 'buffered output feature without any buffer key, flush_mode: immediate' do
  setup do
    conf = {
      'flush_mode' => 'immediate',
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.01,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', conf)]))
    @i.start
    @i.after_start
  end

  test '#start does not create enqueue thread, but creates flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert !@i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    formatted = []
    @i.register(:format){|tag, time, record| formatted << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    4.times { @i.emit_events('tag.test', es) }

    assert_equal 8, formatted.size
    4.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], formatted[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], formatted[i*2 + 1]
    end
  end

  test '#write is called every time for each emits, and buffer chunk is purged' do
    @i.thread_wait_until_start

    written = []
    @i.register(:format){|tag, time, record| [tag, time, record].to_json + "\n" }
    @i.register(:write) do |chunk|
      chunk.read.split("\n").reject{|l| l.empty? }.each{|data| written << data }
    end

    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end

    3.times do |i|
      rand_records = rand(1..5)
      es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
      assert_equal rand_records, es.size
      @i.emit_events("test.tag", es)

      waiting(10){ sleep 0.1 until @i.buffer.stage.size == 0 } # make sure that the emitted es is enqueued by "flush_mode immediate"
      waiting(10){ sleep 0.1 until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0 }
      waiting(10){ sleep 0.1 until written.size == rand_records }

      assert_equal rand_records, written.size
      written.clear # reset for the next iteration
    end
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    written = []
    @i.register(:write){|chunk| written << chunk.read }

    tag = "test.tag"
    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(10) do
      Thread.pass until written.size == 1
    end
    assert_equal [tag, t, r].to_json, written.first
  end
end
815
+
816
sub_test_case 'buffered output feature with timekey and range' do
  setup do
    chunk_key = 'time'
    conf = {
      'timekey' => 30, # per 30seconds
      'timekey_wait' => 5, # 5 second delay for flush
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.01,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_key, conf)]))
    @i.start
    @i.after_start
  end

  test '#configure raises config error if timekey is not specified' do
    plugin = create_output(:buffered)
    assert_raise Fluent::ConfigError do
      plugin.configure(config_element('ROOT', '', {}, [config_element('buffer', 'time')]))
    end
  end

  test 'default flush_mode is set to :lazy' do
    assert_equal :lazy, @i.instance_eval{ @flush_mode }
  end

  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    formatted = []
    @i.register(:format){|tag, time, record| formatted << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    5.times { @i.emit_events('tag.test', es) }

    assert_equal 10, formatted.size
    5.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], formatted[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], formatted[i*2 + 1]
    end
  end

  test '#write is called per time ranges after timekey_wait, and buffer chunk is purged' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )

    @i.thread_wait_until_start

    written = []
    metachecks = []

    @i.register(:format){|tag, time, record| [tag, time, record].to_json + "\n" }
    @i.register(:write) do |chunk|
      chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
        e = JSON.parse(data)
        written << e
        # every event must fall inside its chunk's 30s timekey range
        metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30)
      end
    end

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
    ]
    events = [
      ["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 0 }

    @i.enqueue_thread_wait

    waiting(4){ sleep 0.1 until @i.write_count > 0 }

    # the oldest range (up to 03:29) is past timekey + timekey_wait, so it flushes first
    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    waiting(4){ sleep 0.1 until written.size == 3 }

    assert_equal 3, written.size
    assert_equal 2, written.count{|e| e[0] == "test.tag.1" }
    assert_equal 1, written.count{|e| e[0] == "test.tag.2" }

    Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )

    @i.enqueue_thread_wait

    # still inside timekey_wait for the second range: nothing new flushed
    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:06 +0900') )

    @i.enqueue_thread_wait
    waiting(4){ sleep 0.1 until @i.write_count > 1 }

    assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }

    assert_equal 9, written.size
    assert_equal 7, written.count{|e| e[0] == "test.tag.1" }
    assert_equal 2, written.count{|e| e[0] == "test.tag.2" }

    assert metachecks.all?{|e| e }
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )

    @i.thread_wait_until_start

    written = []
    metachecks = []

    @i.register(:format){|tag, time, record| [tag, time, record].to_json + "\n" }
    @i.register(:write) do |chunk|
      chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
        e = JSON.parse(data)
        written << e
        metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30)
      end
    end

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
    ]
    events = [
      ["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 0 }

    @i.enqueue_thread_wait

    waiting(4){ sleep 0.1 until @i.write_count > 0 }

    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:06 +0900') )

    @i.enqueue_thread_wait
    waiting(4){ sleep 0.1 until @i.write_count > 1 }

    assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )

    waiting(4){ sleep 0.1 until written.size == 9 }
    assert_equal 9, written.size

    # shutdown must flush the remaining staged chunk regardless of timekey_wait
    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(4){ sleep 0.1 until @i.write_count > 2 && written.size == 11 }

    assert_equal 11, written.size
    assert metachecks.all?{|e| e }
  end
end
1030
+
1031
sub_test_case 'buffered output feature with tag key' do
  setup do
    chunk_key = 'tag'
    conf = {
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_key, conf)]))
    @i.start
    @i.after_start
  end

  test 'default flush_mode is set to :interval' do
    assert_equal :interval, @i.instance_eval{ @flush_mode }
  end

  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    formatted = []
    @i.register(:format){|tag, time, record| formatted << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    5.times { @i.emit_events('tag.test', es) }

    assert_equal 10, formatted.size
    5.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], formatted[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], formatted[i*2 + 1]
    end
  end

  test '#write is called per tags, per flush_interval & chunk sizes, and buffer chunk is purged' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    written = []
    metachecks = []

    @i.register(:format){|tag, time, record| [tag, time, record].to_json + "\n" }
    @i.register(:write) do |chunk|
      chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
        e = JSON.parse(data)
        written << e
        # each chunk must contain only events of its own tag
        metachecks << (chunk.metadata.tag == e[0])
      end
    end

    @i.thread_wait_until_start

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      event_time('2016-04-13 14:03:21 +0900'), event_time('2016-04-13 14:03:23 +0900'), event_time('2016-04-13 14:03:29 +0900'),
      event_time('2016-04-13 14:03:30 +0900'), event_time('2016-04-13 14:03:33 +0900'), event_time('2016-04-13 14:03:38 +0900'),
      event_time('2016-04-13 14:03:43 +0900'), event_time('2016-04-13 14:03:49 +0900'), event_time('2016-04-13 14:03:51 +0900'),
      event_time('2016-04-13 14:04:00 +0900'), event_time('2016-04-13 14:04:01 +0900'),
    ]
    # size of a event is 197
    events = [
      ["test.tag.1", ts[0], r],
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r],
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r],
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 2 } # test.tag.1 x1, test.tag.2 x1

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    assert{ @i.buffer.stage.size == 2 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, written.size
    assert_equal 5, written.count{|e| e[0] == "test.tag.1" }
    assert_equal 0, written.count{|e| e[0] == "test.tag.2" }

    Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 2 }

    # to trigger try_flush with flush_thread_burst_interval
    Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:15 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    assert{ @i.buffer.stage.size == 0 }

    waiting(4) do
      Thread.pass until @i.write_count > 2
    end

    assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }

    assert_equal 11, written.size
    assert_equal 8, written.count{|e| e[0] == "test.tag.1" }
    assert_equal 3, written.count{|e| e[0] == "test.tag.2" }

    assert metachecks.all?{|e| e }
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    written = []
    metachecks = []

    @i.register(:format){|tag, time, record| [tag, time, record].to_json + "\n" }
    @i.register(:write) do |chunk|
      chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
        e = JSON.parse(data)
        written << e
        metachecks << (chunk.metadata.tag == e[0])
      end
    end

    @i.thread_wait_until_start

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      event_time('2016-04-13 14:03:21 +0900'), event_time('2016-04-13 14:03:23 +0900'), event_time('2016-04-13 14:03:29 +0900'),
      event_time('2016-04-13 14:03:30 +0900'), event_time('2016-04-13 14:03:33 +0900'), event_time('2016-04-13 14:03:38 +0900'),
      event_time('2016-04-13 14:03:43 +0900'), event_time('2016-04-13 14:03:49 +0900'), event_time('2016-04-13 14:03:51 +0900'),
      event_time('2016-04-13 14:04:00 +0900'), event_time('2016-04-13 14:04:01 +0900'),
    ]
    # size of a event is 197
    events = [
      ["test.tag.1", ts[0], r],
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r],
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r],
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 2 } # test.tag.1 x1, test.tag.2 x1

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    assert{ @i.buffer.stage.size == 2 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, written.size
    assert_equal 5, written.count{|e| e[0] == "test.tag.1" }
    assert_equal 0, written.count{|e| e[0] == "test.tag.2" }

    # shutdown must flush both remaining staged chunks
    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(4) do
      Thread.pass until @i.write_count > 1
    end

    assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 3 }

    assert_equal 11, written.size
    assert_equal 8, written.count{|e| e[0] == "test.tag.1" }
    assert_equal 3, written.count{|e| e[0] == "test.tag.2" }

    assert metachecks.all?{|e| e }
  end
end
1247
+
1248
# Tests for the buffered output when chunks are keyed by record variables
# ('name' and 'service') rather than by tag: chunk metadata carries the
# variable values, and one chunk exists per distinct (name, service) pair.
sub_test_case 'buffered output feature with variables' do
  setup do
    # Chunk events by the values of the 'name' and 'service' record fields.
    chunk_key = 'name,service'
    hash = {
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'chunk_limit_size' => 1024, # small on purpose: a handful of ~195-byte events fills a chunk
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
    @i.start
    @i.after_start
  end

  test 'default flush_mode is set to :interval' do
    # chunk keys without 'time' default to interval-based flushing
    assert_equal :interval, @i.instance_eval{ @flush_mode }
  end

  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    # flush_thread_count is 1, so exactly :flush_thread_0 plus the enqueue thread
    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    ary = []
    # capture every (tag, time, record) passed to #format; return '' so nothing is buffered
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([
      [t, {"key" => "value1", "name" => "moris", "service" => "a"}],
      [t, {"key" => "value2", "name" => "moris", "service" => "b"}],
    ])

    5.times do
      @i.emit_events('tag.test', es)
    end

    # 5 emits x 2 events each; order within each stream is preserved
    assert_equal 10, ary.size
    5.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1", "name" => "moris", "service" => "a"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2", "name" => "moris", "service" => "b"}], ary[i*2+1]
    end
  end

  test '#write is called per value combination of variables, per flush_interval & chunk sizes, and buffer chunk is purged' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []        # all events observed by #write, as parsed [tag, time, record] triples
    metachecks = [] # per-event check that chunk metadata variables match the record fields

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (e[2]["name"] == chunk.metadata.variables[:name] && e[2]["service"] == chunk.metadata.variables[:service]) } }

    @i.thread_wait_until_start

    # size of an event is 195 bytes after #format, so 6 events overflow chunk_limit_size (1024)
    dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    # three (name, service) groups: xxx-a (6 events), yyy-a (3), xxx-b (2)
    events = [
      ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1) xxx-a (6 events)
      ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2) yyy-a (3 events)
      ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3) xxx-b (2 events)
      ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3)
      ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
      ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    # emission order must not matter: chunking is by (name, service), not arrival order
    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    # one staged chunk per variable combination: xxx-a, yyy-a, xxx-b
    assert{ @i.buffer.stage.size == 3 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    # only the overflowed xxx-a chunk was flushed; the 3 current staged chunks remain
    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }

    # events fulfills a chunk (and queued immediately): first 5 xxx-a events
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
    assert ary[0...5].all?{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }

    Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

    @i.enqueue_thread_wait

    # flush_interval (10s from 14:04:01) not yet reached; nothing enqueued
    assert{ @i.buffer.stage.size == 3 }

    # to trigger try_flush with flush_thread_burst_interval
    Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    assert{ @i.buffer.stage.size == 0 }

    waiting(4) do
      Thread.pass until @i.write_count > 1
    end

    # 4 writes total: 1 overflow chunk + 3 interval-flushed chunks (one per group)
    assert{ @i.buffer.stage.size == 0 && @i.write_count == 4 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
    assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
    assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
    assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size

    # every event was written from a chunk whose metadata matched its record fields
    assert metachecks.all?{|e| e }
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    # NOTE(review): flush_at_shutdown is not set in the buffer config here,
    # so this presumably exercises its default behavior — confirm against the plugin.
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []        # all events observed by #write
    metachecks = [] # chunk-metadata-vs-record consistency checks

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (e[2]["name"] == chunk.metadata.variables[:name] && e[2]["service"] == chunk.metadata.variables[:service]) } }

    @i.thread_wait_until_start

    # size of an event is 195 bytes after #format (same fixture as the previous test)
    dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    events = [
      ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1) xxx-a (6 events)
      ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2) yyy-a (3 events)
      ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3) xxx-b (2 events)
      ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3)
      ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
      ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    # one staged chunk per (name, service) combination
    assert{ @i.buffer.stage.size == 3 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    # only the overflowed xxx-a chunk has been written so far
    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

    # full shutdown sequence; remaining staged chunks must be flushed on the way down
    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(4) do
      Thread.pass until @i.write_count > 1
    end

    # buffer fully drained: 1 overflow write + 3 shutdown-flush writes
    assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 4 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
    assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
    assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
    assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size

    assert metachecks.all?{|e| e }
  end
end
1460
+
1461
# Flush-mode defaults when the buffer is chunked by several keys at once:
# the presence (or absence) of 'time' among the chunk keys decides the mode.
sub_test_case 'buffered output feature with many keys' do
  test 'default flush mode is set to :interval if keys does not include time' do
    buffer_conf = config_element(
      'buffer',
      'name,service,tag', # no 'time' key here
      {
        'flush_interval' => 10,
        'flush_thread_count' => 1,
        'flush_thread_burst_interval' => 0.1,
        'chunk_limit_size' => 1024,
      }
    )
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [buffer_conf]))
    @i.start
    @i.after_start

    # without a time key, flushing defaults to interval mode
    assert_equal :interval, @i.instance_eval{ @flush_mode }
  end

  test 'default flush mode is set to :lazy if keys includes time' do
    buffer_conf = config_element(
      'buffer',
      'name,service,tag,time', # 'time' key present, so timekey is required
      {
        'timekey' => 60,
        'flush_interval' => 10,
        'flush_thread_count' => 1,
        'flush_thread_burst_interval' => 0.1,
        'chunk_limit_size' => 1024,
      }
    )
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [buffer_conf]))
    @i.start
    @i.after_start

    # a time chunk key switches the default to lazy (timekey-driven) flushing
    assert_equal :lazy, @i.instance_eval{ @flush_mode }
  end
end
1495
+
1496
# Tests for the delayed-commit output: #try_write delivers a chunk but the
# chunk stays in buffer.dequeued until #commit_write acknowledges it (or it
# is rolled back, explicitly or via delayed_commit_timeout).
sub_test_case 'buffered output feature with delayed commit' do
  setup do
    chunk_key = 'tag'
    hash = {
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'delayed_commit_timeout' => 30, # unacknowledged chunks roll back after 30s
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:delayed)
    @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
    @i.start
    @i.after_start
    @i.log = Fluent::Test::TestLogger.new
  end

  test '#format is called for each event streams' do
    ary = []
    # record every (tag, time, record) passed to #format; return '' so nothing accumulates
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([
      [t, {"key" => "value1", "name" => "moris", "service" => "a"}],
      [t, {"key" => "value2", "name" => "moris", "service" => "b"}],
    ])

    5.times do
      @i.emit_events('tag.test', es)
    end

    # 5 emits x 2 events each, in emission order
    assert_equal 10, ary.size
    5.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1", "name" => "moris", "service" => "a"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2", "name" => "moris", "service" => "b"}], ary[i*2+1]
    end
  end

  test '#try_write is called per flush, buffer chunk is not purged until #commit_write is called' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []        # events observed by #try_write
    metachecks = [] # per-event check that the chunk's metadata.tag matches the event tag
    chunks = []     # every chunk handed to #try_write, kept for later commit

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:try_write) do |chunk|
      chunks << chunk
      chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
        e = JSON.parse(data)
        ary << e
        metachecks << (e[0] == chunk.metadata.tag)
      end
    end

    @i.thread_wait_until_start

    # size of an event is 195 bytes after #format, so 6 same-tag events overflow chunk_limit_size (1024)
    dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    events = [
      ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
      ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
      ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    # chunked by tag: one staged chunk each for test.tag.1 and test.tag.2
    assert{ @i.buffer.stage.size == 2 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    # the overflowed test.tag.1 chunk was delivered but NOT purged: it sits in dequeued
    assert{ @i.buffer.stage.size == 2 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }
    assert{ @i.buffer.dequeued.size == 1 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

    assert_equal 1, chunks.size
    assert !chunks.first.empty?

    Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

    @i.enqueue_thread_wait

    # flush_interval (10s) not yet elapsed; staged chunks untouched
    assert{ @i.buffer.stage.size == 2 }

    # to trigger try_flush with flush_thread_burst_interval
    Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    assert{ @i.buffer.stage.size == 0 }

    waiting(4) do
      Thread.pass until @i.write_count > 1
    end

    # all 3 chunks delivered, all still awaiting commit
    assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
    assert{ @i.buffer.dequeued.size == 3 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

    assert_equal 3, chunks.size
    assert chunks.all?{|c| !c.empty? }

    assert metachecks.all?{|e| e }

    # committing each chunk purges it and removes it from dequeued, one by one
    @i.commit_write(chunks[0].unique_id)
    assert{ @i.buffer.dequeued.size == 2 }
    assert chunks[0].empty?

    @i.commit_write(chunks[1].unique_id)
    assert{ @i.buffer.dequeued.size == 1 }
    assert chunks[1].empty?

    @i.commit_write(chunks[2].unique_id)
    assert{ @i.buffer.dequeued.size == 0 }
    assert chunks[2].empty?

    # no problem to commit chunks already committed
    assert_nothing_raised do
      @i.commit_write(chunks[2].unique_id)
    end
  end

  test '#rollback_write and #try_rollback_write can rollback buffer chunks for delayed commit after timeout, and then be able to write it again' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []        # events observed by #try_write
    metachecks = [] # chunk-metadata.tag vs event-tag checks
    chunks = []     # chunks handed to #try_write (includes re-deliveries after rollback)

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:try_write) do |chunk|
      chunks << chunk
      chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
        e = JSON.parse(data)
        ary << e
        metachecks << (e[0] == chunk.metadata.tag)
      end
    end

    @i.thread_wait_until_start

    # size of an event is 195 bytes after #format (same fixture as above)
    dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    events = [
      ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
      ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
      ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 2 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    # first (overflowed) chunk delivered, waiting for commit
    assert{ @i.buffer.stage.size == 2 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }
    assert{ @i.buffer.dequeued.size == 1 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

    assert_equal 1, chunks.size
    assert !chunks.first.empty?

    Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 2 }

    # to trigger try_flush with flush_thread_burst_interval
    Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    assert{ @i.buffer.stage.size == 0 }

    waiting(4) do
      Thread.pass until @i.write_count > 2
    end

    assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
    assert{ @i.buffer.dequeued.size == 3 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

    assert_equal 3, chunks.size
    assert chunks.all?{|c| !c.empty? }

    assert metachecks.all?{|e| e }

    @i.interrupt_flushes

    # explicit rollback returns the chunk from dequeued to the head of the queue
    @i.rollback_write(chunks[2].unique_id)

    assert{ @i.buffer.dequeued.size == 2 }
    assert{ @i.buffer.queue.size == 1 && @i.buffer.queue.first.unique_id == chunks[2].unique_id }

    Timecop.freeze( Time.parse('2016-04-13 14:04:15 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 3
    end

    # the rolled-back chunk is re-delivered: 4th write, same unique_id as before
    assert{ @i.write_count == 4 }
    assert{ @i.rollback_count == 1 }
    assert{ @i.instance_eval{ @dequeued_chunks.size } == 3 }
    assert{ @i.buffer.dequeued.size == 3 }
    assert{ @i.buffer.queue.size == 0 }

    assert_equal 4, chunks.size
    assert chunks[2].unique_id == chunks[3].unique_id

    # clear both arrays in place so the local names keep pointing at the same objects
    ary.reject!{|e| true }
    chunks.reject!{|e| true }

    # jump past delayed_commit_timeout (30s): all 3 uncommitted chunks must auto-rollback
    Timecop.freeze( Time.parse('2016-04-13 14:04:46 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.rollback_count == 4
    end

    assert{ chunks[0...3].all?{|c| !c.empty? } }

    # rollback is in progress, but some may be flushed again in retry state, after rollback
    # retry.next_time is 14:04:49
    Timecop.freeze( Time.parse('2016-04-13 14:04:51 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count == 7
    end

    # all 3 chunks re-delivered (writes 5..7) with identical contents
    assert{ @i.write_count == 7 }
    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
    assert{ chunks.size == 3 }
    assert{ chunks.all?{|c| !c.empty? } }

    # committing finally purges the chunks and empties dequeued
    chunks.each{|c| @i.commit_write(c.unique_id) }
    assert{ chunks.all?{|c| c.empty? } }

    assert{ @i.buffer.dequeued.size == 0 }
  end

  test '#try_rollback_all will be called for all waiting chunks after shutdown' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []        # events observed by #try_write
    metachecks = [] # chunk-metadata.tag vs event-tag checks
    chunks = []     # chunks handed to #try_write

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:try_write) do |chunk|
      chunks << chunk
      chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
        e = JSON.parse(data)
        ary << e
        metachecks << (e[0] == chunk.metadata.tag)
      end
    end

    @i.thread_wait_until_start

    # size of an event is 195 bytes after #format (same fixture as above)
    dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    events = [
      ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
      ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
      ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
      ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
      ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 2 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    # overflowed chunk delivered, awaiting commit
    assert{ @i.buffer.stage.size == 2 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }
    assert{ @i.buffer.dequeued.size == 1 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

    assert_equal 1, chunks.size
    assert !chunks.first.empty?

    Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 2 }

    # to trigger try_flush with flush_thread_burst_interval
    Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    assert{ @i.buffer.stage.size == 0 }

    waiting(4) do
      Thread.pass until @i.write_count > 2
    end

    # all 3 chunks delivered, none committed yet
    assert{ @i.buffer.stage.size == 0 }
    assert{ @i.buffer.queue.size == 0 }
    assert{ @i.buffer.dequeued.size == 3 }
    assert{ @i.write_count == 3 }
    assert{ @i.rollback_count == 0 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

    assert{ chunks.size == 3 }
    assert{ chunks.all?{|c| !c.empty? } }

    # commit exactly one chunk during shutdown, leaving the other two pending
    @i.register(:shutdown_hook){ @i.commit_write(chunks[1].unique_id) }

    @i.stop
    @i.before_shutdown
    @i.shutdown

    # only the hook-committed chunk is purged at this point
    assert{ @i.buffer.dequeued.size == 2 }
    assert{ !chunks[0].empty? }
    assert{ chunks[1].empty? }
    assert{ !chunks[2].empty? }

    @i.after_shutdown

    # after_shutdown rolls back every chunk still waiting for commit (the remaining 2)
    assert{ @i.rollback_count == 2 }
  end
end
1932
+ end