fluentd 0.12.40 → 1.6.2

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (428) hide show
  1. checksums.yaml +5 -5
  2. data/.github/ISSUE_TEMPLATE/bug_report.md +39 -0
  3. data/.github/ISSUE_TEMPLATE/feature_request.md +23 -0
  4. data/.github/ISSUE_TEMPLATE.md +17 -0
  5. data/.github/PULL_REQUEST_TEMPLATE.md +13 -0
  6. data/.gitignore +5 -0
  7. data/.gitlab/cicd-template.yaml +10 -0
  8. data/.gitlab-ci.yml +147 -0
  9. data/.travis.yml +56 -20
  10. data/ADOPTERS.md +5 -0
  11. data/CHANGELOG.md +1369 -0
  12. data/CONTRIBUTING.md +16 -5
  13. data/GOVERNANCE.md +55 -0
  14. data/Gemfile +5 -0
  15. data/GithubWorkflow.md +78 -0
  16. data/LICENSE +202 -0
  17. data/MAINTAINERS.md +7 -0
  18. data/README.md +23 -11
  19. data/Rakefile +48 -2
  20. data/Vagrantfile +17 -0
  21. data/appveyor.yml +37 -0
  22. data/bin/fluent-binlog-reader +7 -0
  23. data/bin/fluent-ca-generate +6 -0
  24. data/bin/fluent-plugin-config-format +5 -0
  25. data/bin/fluent-plugin-generate +5 -0
  26. data/bin/fluentd +3 -0
  27. data/code-of-conduct.md +3 -0
  28. data/example/copy_roundrobin.conf +39 -0
  29. data/example/counter.conf +18 -0
  30. data/example/in_dummy_blocks.conf +17 -0
  31. data/example/in_dummy_with_compression.conf +23 -0
  32. data/example/in_forward.conf +7 -0
  33. data/example/in_forward_client.conf +37 -0
  34. data/example/in_forward_shared_key.conf +15 -0
  35. data/example/in_forward_tls.conf +14 -0
  36. data/example/in_forward_users.conf +24 -0
  37. data/example/in_forward_workers.conf +21 -0
  38. data/example/in_http.conf +3 -1
  39. data/example/in_out_forward.conf +17 -0
  40. data/example/logevents.conf +25 -0
  41. data/example/multi_filters.conf +61 -0
  42. data/example/out_exec_filter.conf +42 -0
  43. data/example/out_forward.conf +13 -13
  44. data/example/out_forward_buf_file.conf +23 -0
  45. data/example/out_forward_client.conf +109 -0
  46. data/example/out_forward_heartbeat_none.conf +16 -0
  47. data/example/out_forward_shared_key.conf +36 -0
  48. data/example/out_forward_tls.conf +18 -0
  49. data/example/out_forward_users.conf +65 -0
  50. data/example/out_null.conf +36 -0
  51. data/example/secondary_file.conf +42 -0
  52. data/example/suppress_config_dump.conf +7 -0
  53. data/example/worker_section.conf +36 -0
  54. data/fluent.conf +29 -0
  55. data/fluentd.gemspec +21 -11
  56. data/lib/fluent/agent.rb +67 -90
  57. data/lib/fluent/clock.rb +62 -0
  58. data/lib/fluent/command/binlog_reader.rb +244 -0
  59. data/lib/fluent/command/ca_generate.rb +181 -0
  60. data/lib/fluent/command/cat.rb +42 -18
  61. data/lib/fluent/command/debug.rb +12 -10
  62. data/lib/fluent/command/fluentd.rb +153 -5
  63. data/lib/fluent/command/plugin_config_formatter.rb +292 -0
  64. data/lib/fluent/command/plugin_generator.rb +324 -0
  65. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  66. data/lib/fluent/compat/detach_process_mixin.rb +33 -0
  67. data/lib/fluent/compat/exec_util.rb +129 -0
  68. data/lib/fluent/compat/file_util.rb +54 -0
  69. data/lib/fluent/compat/filter.rb +68 -0
  70. data/lib/fluent/compat/formatter.rb +111 -0
  71. data/lib/fluent/compat/formatter_utils.rb +85 -0
  72. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  73. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  74. data/lib/fluent/compat/input.rb +49 -0
  75. data/lib/fluent/compat/output.rb +718 -0
  76. data/lib/fluent/compat/output_chain.rb +60 -0
  77. data/lib/fluent/compat/parser.rb +310 -0
  78. data/lib/fluent/compat/parser_utils.rb +40 -0
  79. data/lib/fluent/compat/propagate_default.rb +62 -0
  80. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  81. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  82. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  83. data/lib/fluent/compat/socket_util.rb +165 -0
  84. data/lib/fluent/compat/string_util.rb +34 -0
  85. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  86. data/lib/fluent/compat/type_converter.rb +90 -0
  87. data/lib/fluent/config/configure_proxy.rb +210 -62
  88. data/lib/fluent/config/dsl.rb +12 -5
  89. data/lib/fluent/config/element.rb +107 -9
  90. data/lib/fluent/config/literal_parser.rb +9 -3
  91. data/lib/fluent/config/parser.rb +4 -4
  92. data/lib/fluent/config/section.rb +51 -14
  93. data/lib/fluent/config/types.rb +28 -13
  94. data/lib/fluent/config/v1_parser.rb +3 -5
  95. data/lib/fluent/config.rb +23 -20
  96. data/lib/fluent/configurable.rb +79 -21
  97. data/lib/fluent/counter/base_socket.rb +46 -0
  98. data/lib/fluent/counter/client.rb +297 -0
  99. data/lib/fluent/counter/error.rb +86 -0
  100. data/lib/fluent/counter/mutex_hash.rb +163 -0
  101. data/lib/fluent/counter/server.rb +273 -0
  102. data/lib/fluent/counter/store.rb +205 -0
  103. data/lib/fluent/counter/validator.rb +145 -0
  104. data/lib/fluent/counter.rb +23 -0
  105. data/lib/fluent/daemon.rb +15 -0
  106. data/lib/fluent/engine.rb +102 -65
  107. data/lib/fluent/env.rb +7 -3
  108. data/lib/fluent/error.rb +30 -0
  109. data/lib/fluent/event.rb +197 -21
  110. data/lib/fluent/event_router.rb +93 -10
  111. data/lib/fluent/filter.rb +2 -50
  112. data/lib/fluent/formatter.rb +4 -293
  113. data/lib/fluent/input.rb +2 -32
  114. data/lib/fluent/label.rb +10 -2
  115. data/lib/fluent/load.rb +3 -3
  116. data/lib/fluent/log.rb +348 -81
  117. data/lib/fluent/match.rb +37 -36
  118. data/lib/fluent/mixin.rb +12 -176
  119. data/lib/fluent/msgpack_factory.rb +62 -0
  120. data/lib/fluent/output.rb +10 -612
  121. data/lib/fluent/output_chain.rb +23 -0
  122. data/lib/fluent/parser.rb +4 -800
  123. data/lib/fluent/plugin/bare_output.rb +63 -0
  124. data/lib/fluent/plugin/base.rb +192 -0
  125. data/lib/fluent/plugin/buf_file.rb +128 -174
  126. data/lib/fluent/plugin/buf_memory.rb +9 -92
  127. data/lib/fluent/plugin/buffer/chunk.rb +221 -0
  128. data/lib/fluent/plugin/buffer/file_chunk.rb +383 -0
  129. data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
  130. data/lib/fluent/plugin/buffer.rb +779 -0
  131. data/lib/fluent/plugin/compressable.rb +92 -0
  132. data/lib/fluent/plugin/exec_util.rb +3 -108
  133. data/lib/fluent/plugin/file_util.rb +4 -34
  134. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  135. data/lib/fluent/plugin/filter.rb +93 -0
  136. data/lib/fluent/plugin/filter_grep.rb +117 -34
  137. data/lib/fluent/plugin/filter_parser.rb +85 -62
  138. data/lib/fluent/plugin/filter_record_transformer.rb +27 -39
  139. data/lib/fluent/plugin/filter_stdout.rb +15 -12
  140. data/lib/fluent/plugin/formatter.rb +50 -0
  141. data/lib/fluent/plugin/formatter_csv.rb +52 -0
  142. data/lib/fluent/plugin/formatter_hash.rb +33 -0
  143. data/lib/fluent/plugin/formatter_json.rb +55 -0
  144. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  145. data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
  146. data/lib/fluent/plugin/formatter_out_file.rb +51 -0
  147. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  148. data/lib/fluent/plugin/formatter_stdout.rb +76 -0
  149. data/lib/fluent/plugin/formatter_tsv.rb +38 -0
  150. data/lib/fluent/plugin/in_debug_agent.rb +17 -6
  151. data/lib/fluent/plugin/in_dummy.rb +47 -20
  152. data/lib/fluent/plugin/in_exec.rb +55 -123
  153. data/lib/fluent/plugin/in_forward.rb +299 -216
  154. data/lib/fluent/plugin/in_gc_stat.rb +14 -36
  155. data/lib/fluent/plugin/in_http.rb +204 -91
  156. data/lib/fluent/plugin/in_monitor_agent.rb +186 -258
  157. data/lib/fluent/plugin/in_object_space.rb +13 -41
  158. data/lib/fluent/plugin/in_syslog.rb +112 -134
  159. data/lib/fluent/plugin/in_tail.rb +408 -745
  160. data/lib/fluent/plugin/in_tcp.rb +66 -9
  161. data/lib/fluent/plugin/in_udp.rb +60 -11
  162. data/lib/fluent/plugin/{in_stream.rb → in_unix.rb} +8 -4
  163. data/lib/fluent/plugin/input.rb +37 -0
  164. data/lib/fluent/plugin/multi_output.rb +158 -0
  165. data/lib/fluent/plugin/out_copy.rb +23 -35
  166. data/lib/fluent/plugin/out_exec.rb +67 -70
  167. data/lib/fluent/plugin/out_exec_filter.rb +204 -271
  168. data/lib/fluent/plugin/out_file.rb +267 -73
  169. data/lib/fluent/plugin/out_forward.rb +854 -325
  170. data/lib/fluent/plugin/out_null.rb +42 -9
  171. data/lib/fluent/plugin/out_relabel.rb +9 -5
  172. data/lib/fluent/plugin/out_roundrobin.rb +18 -37
  173. data/lib/fluent/plugin/out_secondary_file.rb +133 -0
  174. data/lib/fluent/plugin/out_stdout.rb +43 -10
  175. data/lib/fluent/plugin/out_stream.rb +7 -2
  176. data/lib/fluent/plugin/output.rb +1498 -0
  177. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  178. data/lib/fluent/plugin/parser.rb +191 -0
  179. data/lib/fluent/plugin/parser_apache.rb +28 -0
  180. data/lib/fluent/plugin/parser_apache2.rb +88 -0
  181. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  182. data/lib/fluent/plugin/parser_csv.rb +39 -0
  183. data/lib/fluent/plugin/parser_json.rb +94 -0
  184. data/lib/fluent/plugin/parser_ltsv.rb +49 -0
  185. data/lib/fluent/plugin/parser_msgpack.rb +50 -0
  186. data/lib/fluent/plugin/parser_multiline.rb +106 -0
  187. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  188. data/lib/fluent/plugin/parser_none.rb +36 -0
  189. data/lib/fluent/plugin/parser_regexp.rb +68 -0
  190. data/lib/fluent/plugin/parser_syslog.rb +142 -0
  191. data/lib/fluent/plugin/parser_tsv.rb +42 -0
  192. data/lib/fluent/plugin/socket_util.rb +3 -143
  193. data/lib/fluent/plugin/storage.rb +84 -0
  194. data/lib/fluent/plugin/storage_local.rb +164 -0
  195. data/lib/fluent/plugin/string_util.rb +3 -15
  196. data/lib/fluent/plugin.rb +122 -121
  197. data/lib/fluent/plugin_helper/cert_option.rb +178 -0
  198. data/lib/fluent/plugin_helper/child_process.rb +364 -0
  199. data/lib/fluent/plugin_helper/compat_parameters.rb +333 -0
  200. data/lib/fluent/plugin_helper/counter.rb +51 -0
  201. data/lib/fluent/plugin_helper/event_emitter.rb +93 -0
  202. data/lib/fluent/plugin_helper/event_loop.rb +170 -0
  203. data/lib/fluent/plugin_helper/extract.rb +104 -0
  204. data/lib/fluent/plugin_helper/formatter.rb +147 -0
  205. data/lib/fluent/plugin_helper/http_server/app.rb +79 -0
  206. data/lib/fluent/plugin_helper/http_server/compat/server.rb +81 -0
  207. data/lib/fluent/plugin_helper/http_server/compat/webrick_handler.rb +58 -0
  208. data/lib/fluent/plugin_helper/http_server/methods.rb +35 -0
  209. data/lib/fluent/plugin_helper/http_server/request.rb +42 -0
  210. data/lib/fluent/plugin_helper/http_server/router.rb +54 -0
  211. data/lib/fluent/plugin_helper/http_server/server.rb +87 -0
  212. data/lib/fluent/plugin_helper/http_server.rb +76 -0
  213. data/lib/fluent/plugin_helper/inject.rb +151 -0
  214. data/lib/fluent/plugin_helper/parser.rb +147 -0
  215. data/lib/fluent/plugin_helper/record_accessor.rb +210 -0
  216. data/lib/fluent/plugin_helper/retry_state.rb +205 -0
  217. data/lib/fluent/plugin_helper/server.rb +807 -0
  218. data/lib/fluent/plugin_helper/socket.rb +250 -0
  219. data/lib/fluent/plugin_helper/socket_option.rb +80 -0
  220. data/lib/fluent/plugin_helper/storage.rb +349 -0
  221. data/lib/fluent/plugin_helper/thread.rb +179 -0
  222. data/lib/fluent/plugin_helper/timer.rb +92 -0
  223. data/lib/fluent/plugin_helper.rb +73 -0
  224. data/lib/fluent/plugin_id.rb +80 -0
  225. data/lib/fluent/process.rb +3 -489
  226. data/lib/fluent/registry.rb +52 -10
  227. data/lib/fluent/root_agent.rb +204 -42
  228. data/lib/fluent/supervisor.rb +597 -359
  229. data/lib/fluent/system_config.rb +131 -42
  230. data/lib/fluent/test/base.rb +6 -54
  231. data/lib/fluent/test/driver/base.rb +224 -0
  232. data/lib/fluent/test/driver/base_owned.rb +70 -0
  233. data/lib/fluent/test/driver/base_owner.rb +135 -0
  234. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  235. data/lib/fluent/test/driver/filter.rb +57 -0
  236. data/lib/fluent/test/driver/formatter.rb +30 -0
  237. data/lib/fluent/test/driver/input.rb +31 -0
  238. data/lib/fluent/test/driver/multi_output.rb +53 -0
  239. data/lib/fluent/test/driver/output.rb +102 -0
  240. data/lib/fluent/test/driver/parser.rb +30 -0
  241. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  242. data/lib/fluent/test/filter_test.rb +0 -1
  243. data/lib/fluent/test/formatter_test.rb +4 -1
  244. data/lib/fluent/test/helpers.rb +58 -10
  245. data/lib/fluent/test/input_test.rb +27 -19
  246. data/lib/fluent/test/log.rb +79 -0
  247. data/lib/fluent/test/output_test.rb +28 -39
  248. data/lib/fluent/test/parser_test.rb +3 -1
  249. data/lib/fluent/test/startup_shutdown.rb +46 -0
  250. data/lib/fluent/test.rb +33 -1
  251. data/lib/fluent/time.rb +450 -1
  252. data/lib/fluent/timezone.rb +27 -3
  253. data/lib/fluent/{status.rb → unique_id.rb} +15 -24
  254. data/lib/fluent/version.rb +1 -1
  255. data/lib/fluent/winsvc.rb +85 -0
  256. data/templates/new_gem/Gemfile +3 -0
  257. data/templates/new_gem/README.md.erb +43 -0
  258. data/templates/new_gem/Rakefile +13 -0
  259. data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
  260. data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
  261. data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
  262. data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
  263. data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
  264. data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
  265. data/templates/new_gem/test/helper.rb.erb +8 -0
  266. data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
  267. data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
  268. data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
  269. data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
  270. data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
  271. data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
  272. data/templates/plugin_config_formatter/param.md.erb +34 -0
  273. data/templates/plugin_config_formatter/section.md.erb +12 -0
  274. data/test/command/test_binlog_reader.rb +346 -0
  275. data/test/command/test_ca_generate.rb +70 -0
  276. data/test/command/test_fluentd.rb +901 -0
  277. data/test/command/test_plugin_config_formatter.rb +276 -0
  278. data/test/command/test_plugin_generator.rb +92 -0
  279. data/test/compat/test_calls_super.rb +166 -0
  280. data/test/compat/test_parser.rb +92 -0
  281. data/test/config/test_config_parser.rb +126 -2
  282. data/test/config/test_configurable.rb +946 -187
  283. data/test/config/test_configure_proxy.rb +424 -74
  284. data/test/config/test_dsl.rb +11 -11
  285. data/test/config/test_element.rb +500 -0
  286. data/test/config/test_literal_parser.rb +8 -0
  287. data/test/config/test_plugin_configuration.rb +56 -0
  288. data/test/config/test_section.rb +79 -7
  289. data/test/config/test_system_config.rb +122 -35
  290. data/test/config/test_types.rb +38 -0
  291. data/test/counter/test_client.rb +559 -0
  292. data/test/counter/test_error.rb +44 -0
  293. data/test/counter/test_mutex_hash.rb +179 -0
  294. data/test/counter/test_server.rb +589 -0
  295. data/test/counter/test_store.rb +258 -0
  296. data/test/counter/test_validator.rb +137 -0
  297. data/test/helper.rb +89 -6
  298. data/test/helpers/fuzzy_assert.rb +89 -0
  299. data/test/plugin/test_bare_output.rb +118 -0
  300. data/test/plugin/test_base.rb +115 -0
  301. data/test/plugin/test_buf_file.rb +823 -460
  302. data/test/plugin/test_buf_memory.rb +32 -194
  303. data/test/plugin/test_buffer.rb +1233 -0
  304. data/test/plugin/test_buffer_chunk.rb +198 -0
  305. data/test/plugin/test_buffer_file_chunk.rb +844 -0
  306. data/test/plugin/test_buffer_memory_chunk.rb +338 -0
  307. data/test/plugin/test_compressable.rb +84 -0
  308. data/test/plugin/test_filter.rb +357 -0
  309. data/test/plugin/test_filter_grep.rb +540 -29
  310. data/test/plugin/test_filter_parser.rb +439 -452
  311. data/test/plugin/test_filter_record_transformer.rb +123 -166
  312. data/test/plugin/test_filter_stdout.rb +160 -72
  313. data/test/plugin/test_formatter_csv.rb +111 -0
  314. data/test/plugin/test_formatter_hash.rb +35 -0
  315. data/test/plugin/test_formatter_json.rb +51 -0
  316. data/test/plugin/test_formatter_ltsv.rb +62 -0
  317. data/test/plugin/test_formatter_msgpack.rb +28 -0
  318. data/test/plugin/test_formatter_out_file.rb +95 -0
  319. data/test/plugin/test_formatter_single_value.rb +38 -0
  320. data/test/plugin/test_formatter_tsv.rb +68 -0
  321. data/test/plugin/test_in_debug_agent.rb +24 -1
  322. data/test/plugin/test_in_dummy.rb +111 -18
  323. data/test/plugin/test_in_exec.rb +200 -113
  324. data/test/plugin/test_in_forward.rb +990 -387
  325. data/test/plugin/test_in_gc_stat.rb +10 -8
  326. data/test/plugin/test_in_http.rb +600 -224
  327. data/test/plugin/test_in_monitor_agent.rb +690 -0
  328. data/test/plugin/test_in_object_space.rb +24 -8
  329. data/test/plugin/test_in_syslog.rb +154 -215
  330. data/test/plugin/test_in_tail.rb +1006 -707
  331. data/test/plugin/test_in_tcp.rb +125 -48
  332. data/test/plugin/test_in_udp.rb +204 -63
  333. data/test/plugin/{test_in_stream.rb → test_in_unix.rb} +14 -13
  334. data/test/plugin/test_input.rb +126 -0
  335. data/test/plugin/test_metadata.rb +89 -0
  336. data/test/plugin/test_multi_output.rb +180 -0
  337. data/test/plugin/test_out_copy.rb +117 -112
  338. data/test/plugin/test_out_exec.rb +258 -53
  339. data/test/plugin/test_out_exec_filter.rb +538 -115
  340. data/test/plugin/test_out_file.rb +865 -178
  341. data/test/plugin/test_out_forward.rb +998 -210
  342. data/test/plugin/test_out_null.rb +105 -0
  343. data/test/plugin/test_out_relabel.rb +28 -0
  344. data/test/plugin/test_out_roundrobin.rb +36 -29
  345. data/test/plugin/test_out_secondary_file.rb +458 -0
  346. data/test/plugin/test_out_stdout.rb +135 -37
  347. data/test/plugin/test_out_stream.rb +18 -0
  348. data/test/plugin/test_output.rb +984 -0
  349. data/test/plugin/test_output_as_buffered.rb +2021 -0
  350. data/test/plugin/test_output_as_buffered_backup.rb +312 -0
  351. data/test/plugin/test_output_as_buffered_compress.rb +165 -0
  352. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  353. data/test/plugin/test_output_as_buffered_retries.rb +911 -0
  354. data/test/plugin/test_output_as_buffered_secondary.rb +874 -0
  355. data/test/plugin/test_output_as_standard.rb +374 -0
  356. data/test/plugin/test_owned_by.rb +35 -0
  357. data/test/plugin/test_parser.rb +359 -0
  358. data/test/plugin/test_parser_apache.rb +42 -0
  359. data/test/plugin/test_parser_apache2.rb +47 -0
  360. data/test/plugin/test_parser_apache_error.rb +45 -0
  361. data/test/plugin/test_parser_csv.rb +103 -0
  362. data/test/plugin/test_parser_json.rb +138 -0
  363. data/test/plugin/test_parser_labeled_tsv.rb +145 -0
  364. data/test/plugin/test_parser_multiline.rb +100 -0
  365. data/test/plugin/test_parser_nginx.rb +88 -0
  366. data/test/plugin/test_parser_none.rb +52 -0
  367. data/test/plugin/test_parser_regexp.rb +289 -0
  368. data/test/plugin/test_parser_syslog.rb +441 -0
  369. data/test/plugin/test_parser_tsv.rb +122 -0
  370. data/test/plugin/test_storage.rb +167 -0
  371. data/test/plugin/test_storage_local.rb +335 -0
  372. data/test/plugin_helper/data/cert/cert-key.pem +27 -0
  373. data/test/plugin_helper/data/cert/cert-with-no-newline.pem +19 -0
  374. data/test/plugin_helper/data/cert/cert.pem +19 -0
  375. data/test/plugin_helper/http_server/test_app.rb +65 -0
  376. data/test/plugin_helper/http_server/test_route.rb +32 -0
  377. data/test/plugin_helper/test_cert_option.rb +16 -0
  378. data/test/plugin_helper/test_child_process.rb +794 -0
  379. data/test/plugin_helper/test_compat_parameters.rb +353 -0
  380. data/test/plugin_helper/test_event_emitter.rb +51 -0
  381. data/test/plugin_helper/test_event_loop.rb +52 -0
  382. data/test/plugin_helper/test_extract.rb +194 -0
  383. data/test/plugin_helper/test_formatter.rb +255 -0
  384. data/test/plugin_helper/test_http_server_helper.rb +205 -0
  385. data/test/plugin_helper/test_inject.rb +519 -0
  386. data/test/plugin_helper/test_parser.rb +264 -0
  387. data/test/plugin_helper/test_record_accessor.rb +197 -0
  388. data/test/plugin_helper/test_retry_state.rb +442 -0
  389. data/test/plugin_helper/test_server.rb +1714 -0
  390. data/test/plugin_helper/test_storage.rb +542 -0
  391. data/test/plugin_helper/test_thread.rb +164 -0
  392. data/test/plugin_helper/test_timer.rb +132 -0
  393. data/test/scripts/exec_script.rb +0 -6
  394. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
  395. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
  396. data/test/scripts/fluent/plugin/out_test.rb +23 -15
  397. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  398. data/test/test_clock.rb +164 -0
  399. data/test/test_config.rb +16 -7
  400. data/test/test_configdsl.rb +2 -2
  401. data/test/test_event.rb +360 -13
  402. data/test/test_event_router.rb +108 -11
  403. data/test/test_event_time.rb +199 -0
  404. data/test/test_filter.rb +48 -6
  405. data/test/test_formatter.rb +11 -391
  406. data/test/test_input.rb +1 -1
  407. data/test/test_log.rb +591 -31
  408. data/test/test_mixin.rb +1 -1
  409. data/test/test_output.rb +121 -185
  410. data/test/test_plugin.rb +251 -0
  411. data/test/test_plugin_classes.rb +177 -10
  412. data/test/test_plugin_helper.rb +81 -0
  413. data/test/test_plugin_id.rb +101 -0
  414. data/test/test_process.rb +8 -42
  415. data/test/test_root_agent.rb +766 -21
  416. data/test/test_supervisor.rb +481 -0
  417. data/test/test_test_drivers.rb +135 -0
  418. data/test/test_time_formatter.rb +282 -0
  419. data/test/test_time_parser.rb +231 -0
  420. data/test/test_unique_id.rb +47 -0
  421. metadata +454 -60
  422. data/COPYING +0 -14
  423. data/ChangeLog +0 -666
  424. data/lib/fluent/buffer.rb +0 -365
  425. data/lib/fluent/plugin/in_status.rb +0 -76
  426. data/test/plugin/test_in_status.rb +0 -38
  427. data/test/test_buffer.rb +0 -624
  428. data/test/test_parser.rb +0 -1305
@@ -0,0 +1,2021 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/output'
3
+ require 'fluent/plugin/buffer'
4
+ require 'fluent/output'
5
+ require 'fluent/event'
6
+
7
+ require 'json'
8
+ require 'time'
9
+ require 'timeout'
10
+ require 'timecop'
11
+
12
+ module FluentPluginOutputAsBufferedTest
13
+ class DummyBareOutput < Fluent::Plugin::Output
14
+ def register(name, &block)
15
+ instance_variable_set("@#{name}", block)
16
+ end
17
+ end
18
+ class DummySyncOutput < DummyBareOutput
19
+ def initialize
20
+ super
21
+ @process = nil
22
+ end
23
+ def process(tag, es)
24
+ @process ? @process.call(tag, es) : nil
25
+ end
26
+ end
27
+ class DummyAsyncOutput < DummyBareOutput
28
+ def initialize
29
+ super
30
+ @format = nil
31
+ @write = nil
32
+ end
33
+ def format(tag, time, record)
34
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
35
+ end
36
+ def write(chunk)
37
+ @write ? @write.call(chunk) : nil
38
+ end
39
+ end
40
+ class DummyDelayedOutput < DummyBareOutput
41
+ def initialize
42
+ super
43
+ @format = nil
44
+ @try_write = nil
45
+ @shutdown_hook = nil
46
+ end
47
+ def format(tag, time, record)
48
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
49
+ end
50
+ def try_write(chunk)
51
+ @try_write ? @try_write.call(chunk) : nil
52
+ end
53
+ def shutdown
54
+ if @shutdown_hook
55
+ @shutdown_hook.call
56
+ end
57
+ super
58
+ end
59
+ end
60
+ class DummyStandardBufferedOutput < DummyBareOutput
61
+ def initialize
62
+ super
63
+ @prefer_delayed_commit = nil
64
+ @write = nil
65
+ @try_write = nil
66
+ end
67
+ def prefer_delayed_commit
68
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
69
+ end
70
+ def write(chunk)
71
+ @write ? @write.call(chunk) : nil
72
+ end
73
+ def try_write(chunk)
74
+ @try_write ? @try_write.call(chunk) : nil
75
+ end
76
+ end
77
+ class DummyCustomFormatBufferedOutput < DummyBareOutput
78
+ def initialize
79
+ super
80
+ @format_type_is_msgpack = nil
81
+ @prefer_delayed_commit = nil
82
+ @write = nil
83
+ @try_write = nil
84
+ end
85
+ def format(tag, time, record)
86
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
87
+ end
88
+ def formatted_to_msgpack_binary?
89
+ @format_type_is_msgpack ? @format_type_is_msgpack.call : false
90
+ end
91
+ def prefer_delayed_commit
92
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
93
+ end
94
+ def write(chunk)
95
+ @write ? @write.call(chunk) : nil
96
+ end
97
+ def try_write(chunk)
98
+ @try_write ? @try_write.call(chunk) : nil
99
+ end
100
+ end
101
+ # check for formatted_to_msgpack_binary compatibility
102
+ class DummyOldCustomFormatBufferedOutput < DummyBareOutput
103
+ def initialize
104
+ super
105
+ @format_type_is_msgpack = nil
106
+ @prefer_delayed_commit = nil
107
+ @write = nil
108
+ @try_write = nil
109
+ end
110
+ def format(tag, time, record)
111
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
112
+ end
113
+ def formatted_to_msgpack_binary
114
+ @format_type_is_msgpack ? @format_type_is_msgpack.call : false
115
+ end
116
+ def prefer_delayed_commit
117
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
118
+ end
119
+ def write(chunk)
120
+ @write ? @write.call(chunk) : nil
121
+ end
122
+ def try_write(chunk)
123
+ @try_write ? @try_write.call(chunk) : nil
124
+ end
125
+ end
126
+ class DummyFullFeatureOutput < DummyBareOutput
127
+ def initialize
128
+ super
129
+ @prefer_buffered_processing = nil
130
+ @prefer_delayed_commit = nil
131
+ @process = nil
132
+ @format = nil
133
+ @write = nil
134
+ @try_write = nil
135
+ end
136
+ def prefer_buffered_processing
137
+ @prefer_buffered_processing ? @prefer_buffered_processing.call : false
138
+ end
139
+ def prefer_delayed_commit
140
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
141
+ end
142
+ def process(tag, es)
143
+ @process ? @process.call(tag, es) : nil
144
+ end
145
+ def format(tag, time, record)
146
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
147
+ end
148
+ def write(chunk)
149
+ @write ? @write.call(chunk) : nil
150
+ end
151
+ def try_write(chunk)
152
+ @try_write ? @try_write.call(chunk) : nil
153
+ end
154
+ end
155
+ module OldPluginMethodMixin
156
+ def initialize
157
+ super
158
+ @format = nil
159
+ @write = nil
160
+ end
161
+ def register(name, &block)
162
+ instance_variable_set("@#{name}", block)
163
+ end
164
+ def format(tag, time, record)
165
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
166
+ end
167
+ def write(chunk)
168
+ @write ? @write.call(chunk) : nil
169
+ end
170
+ end
171
+ class DummyOldBufferedOutput < Fluent::BufferedOutput
172
+ include OldPluginMethodMixin
173
+ end
174
+ class DummyOldObjectBufferedOutput < Fluent::ObjectBufferedOutput
175
+ include OldPluginMethodMixin
176
+ end
177
+ end
178
+
179
+ class BufferedOutputTest < Test::Unit::TestCase
180
+ def create_output(type=:full)
181
+ case type
182
+ when :bare then FluentPluginOutputAsBufferedTest::DummyBareOutput.new
183
+ when :sync then FluentPluginOutputAsBufferedTest::DummySyncOutput.new
184
+ when :buffered then FluentPluginOutputAsBufferedTest::DummyAsyncOutput.new
185
+ when :delayed then FluentPluginOutputAsBufferedTest::DummyDelayedOutput.new
186
+ when :standard then FluentPluginOutputAsBufferedTest::DummyStandardBufferedOutput.new
187
+ when :custom then FluentPluginOutputAsBufferedTest::DummyCustomFormatBufferedOutput.new
188
+ when :full then FluentPluginOutputAsBufferedTest::DummyFullFeatureOutput.new
189
+ when :old_buf then FluentPluginOutputAsBufferedTest::DummyOldBufferedOutput.new
190
+ when :old_obj then FluentPluginOutputAsBufferedTest::DummyOldObjectBufferedOutput.new
191
+ when :old_custom then FluentPluginOutputAsBufferedTest::DummyOldCustomFormatBufferedOutput.new
192
+ else
193
+ raise ArgumentError, "unknown type: #{type}"
194
+ end
195
+ end
196
+ def create_metadata(timekey: nil, tag: nil, variables: nil)
197
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
198
+ end
199
+ def waiting(seconds)
200
+ begin
201
+ Timeout.timeout(seconds) do
202
+ yield
203
+ end
204
+ rescue Timeout::Error
205
+ STDERR.print(*@i.log.out.logs)
206
+ raise
207
+ end
208
+ end
209
+
210
+ setup do
211
+ @i = nil
212
+ end
213
+
214
+ teardown do
215
+ if @i
216
+ @i.stop unless @i.stopped?
217
+ @i.before_shutdown unless @i.before_shutdown?
218
+ @i.shutdown unless @i.shutdown?
219
+ @i.after_shutdown unless @i.after_shutdown?
220
+ @i.close unless @i.closed?
221
+ @i.terminate unless @i.terminated?
222
+ end
223
+ Timecop.return
224
+ end
225
+
226
+ test 'queued_chunks_limit_size is same as flush_thread_count by default' do
227
+ hash = {'flush_thread_count' => 4}
228
+ i = create_output
229
+ i.register(:prefer_buffered_processing) { true }
230
+ i.configure(config_element('ROOT', '', {}, [config_element('buffer','tag',hash)]))
231
+
232
+ assert_equal 4, i.buffer.queued_chunks_limit_size
233
+ end
234
+
235
+ test 'prefer queued_chunks_limit_size parameter than flush_thread_count' do
236
+ hash = {'flush_thread_count' => 4, 'queued_chunks_limit_size' => 2}
237
+ i = create_output
238
+ i.register(:prefer_buffered_processing) { true }
239
+ i.configure(config_element('ROOT', '', {}, [config_element('buffer','tag',hash)]))
240
+
241
+ assert_equal 2, i.buffer.queued_chunks_limit_size
242
+ end
243
+
244
+ sub_test_case 'chunk feature in #write for output plugins' do
245
+ setup do
246
+ @stored_global_logger = $log
247
+ $log = Fluent::Test::TestLogger.new
248
+ @hash = {
249
+ 'flush_mode' => 'immediate',
250
+ 'flush_thread_interval' => '0.01',
251
+ 'flush_thread_burst_interval' => '0.01',
252
+ }
253
+ end
254
+
255
+ teardown do
256
+ $log = @stored_global_logger
257
+ end
258
+
259
+ test 'plugin using standard format can iterate chunk for time, record in #write' do
260
+ events_from_chunk = []
261
+ @i = create_output(:standard)
262
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
263
+ @i.register(:prefer_delayed_commit){ false }
264
+ @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:write, e] }
265
+ @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:try_write, e] }
266
+ @i.start
267
+ @i.after_start
268
+
269
+ events = [
270
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
271
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
272
+ ]
273
+
274
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
275
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
276
+
277
+ waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
278
+
279
+ assert_equal 2, events_from_chunk.size
280
+ 2.times.each do |i|
281
+ assert_equal :write, events_from_chunk[i][0]
282
+ assert_equal events, events_from_chunk[i][1]
283
+ end
284
+ end
285
+
286
+ test 'plugin using standard format can iterate chunk for time, record in #try_write' do
287
+ events_from_chunk = []
288
+ @i = create_output(:standard)
289
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
290
+ @i.register(:prefer_delayed_commit){ true }
291
+ @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:write, e] }
292
+ @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|t,r| e << [t,r]}; events_from_chunk << [:try_write, e] }
293
+ @i.start
294
+ @i.after_start
295
+
296
+ events = [
297
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
298
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
299
+ ]
300
+
301
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
302
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
303
+
304
+ waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
305
+
306
+ assert_equal 2, events_from_chunk.size
307
+ 2.times.each do |i|
308
+ assert_equal :try_write, events_from_chunk[i][0]
309
+ assert_equal events, events_from_chunk[i][1]
310
+ end
311
+ end
312
+
313
+ test 'plugin using custom format cannot iterate chunk in #write' do
314
+ events_from_chunk = []
315
+ @i = create_output(:custom)
316
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
317
+ @i.register(:prefer_delayed_commit){ false }
318
+ @i.register(:format){ |tag, time, record| [tag,time,record].to_json }
319
+ @i.register(:format_type_is_msgpack){ false }
320
+ @i.register(:write){ |chunk| assert !(chunk.respond_to?(:each)) }
321
+ @i.register(:try_write){ |chunk| assert !(chunk.respond_to?(:each)) }
322
+ @i.start
323
+ @i.after_start
324
+
325
+ events = [
326
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
327
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
328
+ ]
329
+
330
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
331
+
332
+ assert_equal 0, events_from_chunk.size
333
+ end
334
+
335
+ test 'plugin using custom format cannot iterate chunk in #try_write' do
336
+ events_from_chunk = []
337
+ @i = create_output(:custom)
338
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
339
+ @i.register(:prefer_delayed_commit){ true }
340
+ @i.register(:format){ |tag, time, record| [tag,time,record].to_json }
341
+ @i.register(:format_type_is_msgpack){ false }
342
+ @i.register(:write){ |chunk| assert !(chunk.respond_to?(:each)) }
343
+ @i.register(:try_write){ |chunk| assert !(chunk.respond_to?(:each)) }
344
+ @i.start
345
+ @i.after_start
346
+
347
+ events = [
348
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
349
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
350
+ ]
351
+
352
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
353
+
354
+ assert_equal 0, events_from_chunk.size
355
+ end
356
+
357
+ data('formatted_to_msgpack_binary?' => :custom,
358
+ 'formatted_to_msgpack_binary' => :old_custom)
359
+ test 'plugin using custom format can iterate chunk in #write if #format returns msgpack' do |out_type|
360
+ events_from_chunk = []
361
+ @i = create_output(out_type)
362
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
363
+ @i.register(:prefer_delayed_commit){ false }
364
+ @i.register(:format){ |tag, time, record| [tag,time,record].to_msgpack }
365
+ @i.register(:format_type_is_msgpack){ true }
366
+ @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:write, e] }
367
+ @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:try_write, e] }
368
+ @i.start
369
+ @i.after_start
370
+
371
+ events = [
372
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
373
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
374
+ ]
375
+
376
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
377
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
378
+
379
+ waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
380
+
381
+ assert_equal 2, events_from_chunk.size
382
+ 2.times.each do |i|
383
+ assert_equal :write, events_from_chunk[i][0]
384
+ each_pushed = events_from_chunk[i][1]
385
+ assert_equal 2, each_pushed.size
386
+ assert_equal 'test.tag', each_pushed[0][0]
387
+ assert_equal 'test.tag', each_pushed[1][0]
388
+ assert_equal events, each_pushed.map{|tag,time,record| [time,record]}
389
+ end
390
+ end
391
+
392
+ data(:handle_stream_simple => '',
393
+ :handle_stream_with_custom_format => 'tag,message')
394
+ test 'plugin using custom format can skip record chunk when #format returns nil' do |chunk_keys|
395
+ events_from_chunk = []
396
+ @i = create_output(:custom)
397
+ @i.configure(config_element('ROOT', '', {}, [config_element('buffer', chunk_keys, @hash)]))
398
+ @i.register(:prefer_delayed_commit) { false }
399
+ @i.register(:format) { |tag, time, record|
400
+ if record['message'] == 'test1'
401
+ nil
402
+ else
403
+ [tag,time,record].to_msgpack
404
+ end
405
+ }
406
+ @i.register(:format_type_is_msgpack) { true }
407
+ @i.register(:write){ |chunk| e = []; chunk.each { |ta, t, r| e << [ta, t, r] }; events_from_chunk << [:write, e] }
408
+ @i.start
409
+ @i.after_start
410
+
411
+ events = [
412
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "test1"}],
413
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "test2"}],
414
+ ]
415
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
416
+
417
+ waiting(5) { sleep 0.1 until events_from_chunk.size == 1 }
418
+
419
+ assert_equal 1, events_from_chunk.size
420
+ assert_equal :write, events_from_chunk[0][0]
421
+ each_pushed = events_from_chunk[0][1]
422
+ assert_equal 1, each_pushed.size
423
+ assert_equal 'test.tag', each_pushed[0][0]
424
+ assert_equal "test2", each_pushed[0][2]['message']
425
+ end
426
+
427
+ test 'plugin using custom format can iterate chunk in #try_write if #format returns msgpack' do
428
+ events_from_chunk = []
429
+ @i = create_output(:custom)
430
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',@hash)]))
431
+ @i.register(:prefer_delayed_commit){ true }
432
+ @i.register(:format){ |tag, time, record| [tag,time,record].to_msgpack }
433
+ @i.register(:format_type_is_msgpack){ true }
434
+ @i.register(:write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:write, e] }
435
+ @i.register(:try_write){ |chunk| e = []; assert chunk.respond_to?(:each); chunk.each{|ta,t,r| e << [ta,t,r]}; events_from_chunk << [:try_write, e] }
436
+ @i.start
437
+ @i.after_start
438
+
439
+ events = [
440
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
441
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
442
+ ]
443
+
444
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
445
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
446
+
447
+ waiting(5){ sleep 0.1 until events_from_chunk.size == 2 }
448
+
449
+ assert_equal 2, events_from_chunk.size
450
+ 2.times.each do |i|
451
+ assert_equal :try_write, events_from_chunk[i][0]
452
+ each_pushed = events_from_chunk[i][1]
453
+ assert_equal 2, each_pushed.size
454
+ assert_equal 'test.tag', each_pushed[0][0]
455
+ assert_equal 'test.tag', each_pushed[1][0]
456
+ assert_equal events, each_pushed.map{|tag,time,record| [time,record]}
457
+ end
458
+ end
459
+
460
+ data(:BufferedOutput => :old_buf,
461
+ :ObjectBufferedOutput => :old_obj)
462
+ test 'old plugin types can iterate chunk by msgpack_each in #write' do |plugin_type|
463
+ events_from_chunk = []
464
+ # event_emitter helper requires Engine.root_agent for routing
465
+ ra = Fluent::RootAgent.new(log: $log)
466
+ stub(Fluent::Engine).root_agent { ra }
467
+ @i = create_output(plugin_type)
468
+ @i.configure(config_element('ROOT', '', {}, [config_element('buffer', '', @hash)]))
469
+ @i.register(:format) { |tag, time, record| [time, record].to_msgpack }
470
+ @i.register(:write) { |chunk| e = []; chunk.msgpack_each { |t, r| e << [t, r] }; events_from_chunk << [:write, e]; }
471
+ @i.start
472
+ @i.after_start
473
+
474
+ events = [
475
+ [event_time('2016-10-05 16:16:16 -0700'), {"message" => "yaaaaaaaaay!"}],
476
+ [event_time('2016-10-05 16:16:17 -0700'), {"message" => "yoooooooooy!"}],
477
+ ]
478
+
479
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
480
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
481
+
482
+ waiting(5) { sleep 0.1 until events_from_chunk.size == 2 }
483
+
484
+ assert_equal 2, events_from_chunk.size
485
+ 2.times.each do |i|
486
+ assert_equal :write, events_from_chunk[i][0]
487
+ assert_equal events, events_from_chunk[i][1]
488
+ end
489
+ end
490
+ end
491
+
492
+ sub_test_case 'buffered output configured with many chunk keys' do
493
+ setup do
494
+ @stored_global_logger = $log
495
+ $log = Fluent::Test::TestLogger.new
496
+ @hash = {
497
+ 'flush_mode' => 'interval',
498
+ 'flush_thread_burst_interval' => 0.01,
499
+ 'chunk_limit_size' => 1024,
500
+ 'timekey' => 60,
501
+ }
502
+ @i = create_output(:buffered)
503
+ end
504
+ teardown do
505
+ $log = @stored_global_logger
506
+ end
507
+ test 'nothing is warned with fewer chunk keys' do
508
+ chunk_keys = 'time,key1,key2,key3'
509
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
510
+ logs = @i.log.out.logs.dup
511
+ @i.start
512
+ @i.after_start
513
+ assert{ logs.select{|log| log.include?('[warn]') }.size == 0 }
514
+ end
515
+
516
+ test 'a warning reported with 4 chunk keys' do
517
+ chunk_keys = 'key1,key2,key3,key4'
518
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
519
+ logs = @i.log.out.logs.dup
520
+
521
+ @i.start # this calls `log.reset`... capturing logs about configure must be done before this line
522
+ @i.after_start
523
+ assert_equal ['key1', 'key2', 'key3', 'key4'], @i.chunk_keys
524
+
525
+ assert{ logs.select{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') }.size == 1 }
526
+ end
527
+
528
+ test 'a warning reported with 4 chunk keys including "tag"' do
529
+ chunk_keys = 'tag,key1,key2,key3'
530
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
531
+ logs = @i.log.out.logs.dup
532
+ @i.start # this calls `log.reset`... capturing logs about configure must be done before this line
533
+ @i.after_start
534
+ assert{ logs.select{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') }.size == 1 }
535
+ end
536
+
537
+ test 'time key is not included for warned chunk keys' do
538
+ chunk_keys = 'time,key1,key2,key3'
539
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
540
+ logs = @i.log.out.logs.dup
541
+ @i.start
542
+ @i.after_start
543
+ assert{ logs.select{|log| log.include?('[warn]') }.size == 0 }
544
+ end
545
+ end
546
+
547
+ sub_test_case 'buffered output feature without any buffer key, flush_mode: lazy' do
548
+ setup do
549
+ hash = {
550
+ 'flush_mode' => 'lazy',
551
+ 'flush_thread_burst_interval' => 0.01,
552
+ 'flush_thread_count' => 2,
553
+ 'chunk_limit_size' => 1024,
554
+ }
555
+ @i = create_output(:buffered)
556
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
557
+ @i.start
558
+ @i.after_start
559
+ end
560
+
561
+ test '#start does not create enqueue thread, but creates flush threads' do
562
+ @i.thread_wait_until_start
563
+
564
+ assert @i.thread_exist?(:flush_thread_0)
565
+ assert @i.thread_exist?(:flush_thread_1)
566
+ assert !@i.thread_exist?(:enqueue_thread)
567
+ end
568
+
569
+ test '#format is called for each events' do
570
+ ary = []
571
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
572
+
573
+ t = event_time()
574
+ es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
575
+
576
+ 4.times do
577
+ @i.emit_events('tag.test', es)
578
+ end
579
+
580
+ assert_equal 8, ary.size
581
+ 4.times do |i|
582
+ assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
583
+ assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
584
+ end
585
+ end
586
+
587
+ test '#write is called only when chunk bytes limit exceeded, and buffer chunk is purged' do
588
+ ary = []
589
+ @i.register(:write){|chunk| ary << chunk.read }
590
+
591
+ tag = "test.tag"
592
+ t = event_time()
593
+ r = {}
594
+ (0...10).each do |i|
595
+ r["key#{i}"] = "value #{i}"
596
+ end
597
+ event_size = [tag, t, r].to_json.size # 195
598
+
599
+ (1024 * 0.9 / event_size).to_i.times do |i|
600
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
601
+ end
602
+ assert{ @i.buffer.queue.size == 0 && ary.size == 0 }
603
+
604
+ staged_chunk = @i.buffer.stage[@i.buffer.stage.keys.first]
605
+ assert{ staged_chunk.size != 0 }
606
+
607
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
608
+
609
+ assert{ @i.buffer.queue.size > 0 || @i.buffer.dequeued.size > 0 || ary.size > 0 }
610
+
611
+ waiting(10) do
612
+ Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
613
+ Thread.pass until staged_chunk.size == 0
614
+ end
615
+
616
+ assert_equal 1, ary.size
617
+ assert_equal [tag,t,r].to_json * (1024 / event_size), ary.first
618
+ end
619
+
620
+ test 'flush_at_shutdown work well when plugin is shutdown' do
621
+ ary = []
622
+ @i.register(:write){|chunk| ary << chunk.read }
623
+
624
+ tag = "test.tag"
625
+ t = event_time()
626
+ r = {}
627
+ (0...10).each do |i|
628
+ r["key#{i}"] = "value #{i}"
629
+ end
630
+ event_size = [tag, t, r].to_json.size # 195
631
+
632
+ (1024 * 0.9 / event_size).to_i.times do |i|
633
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
634
+ end
635
+ assert{ @i.buffer.queue.size == 0 && ary.size == 0 }
636
+
637
+ @i.stop
638
+ @i.before_shutdown
639
+ @i.shutdown
640
+ @i.after_shutdown
641
+
642
+ waiting(10) do
643
+ Thread.pass until ary.size == 1
644
+ end
645
+ assert_equal [tag,t,r].to_json * (1024 * 0.9 / event_size), ary.first
646
+ end
647
+ end
648
+
649
+ sub_test_case 'buffered output feature without any buffer key, flush_mode: interval' do
650
+ setup do
651
+ hash = {
652
+ 'flush_mode' => 'interval',
653
+ 'flush_interval' => 1,
654
+ 'flush_thread_count' => 1,
655
+ 'flush_thread_burst_interval' => 0.01,
656
+ 'chunk_limit_size' => 1024,
657
+ }
658
+ @i = create_output(:buffered)
659
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
660
+ @i.start
661
+ @i.after_start
662
+ end
663
+
664
+ test '#start creates enqueue thread and flush threads' do
665
+ @i.thread_wait_until_start
666
+
667
+ assert @i.thread_exist?(:flush_thread_0)
668
+ assert @i.thread_exist?(:enqueue_thread)
669
+ end
670
+
671
+ test '#format is called for each event streams' do
672
+ ary = []
673
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
674
+
675
+ t = event_time()
676
+ es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
677
+
678
+ 4.times do
679
+ @i.emit_events('tag.test', es)
680
+ end
681
+
682
+ assert_equal 8, ary.size
683
+ 4.times do |i|
684
+ assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
685
+ assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
686
+ end
687
+ end
688
+
689
+ test '#write is called per flush_interval, and buffer chunk is purged' do
690
+ @i.thread_wait_until_start
691
+
692
+ ary = []
693
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
694
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| ary << data } }
695
+
696
+ t = event_time()
697
+ r = {}
698
+ (0...10).each do |i|
699
+ r["key#{i}"] = "value #{i}"
700
+ end
701
+
702
+ 3.times do |i|
703
+ rand_records = rand(1..4)
704
+ es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
705
+ assert_equal rand_records, es.size
706
+
707
+ @i.interrupt_flushes
708
+
709
+ assert{ @i.buffer.queue.size == 0 }
710
+
711
+ @i.emit_events("test.tag", es)
712
+
713
+ assert{ @i.buffer.queue.size == 0 }
714
+ assert{ @i.buffer.stage.size == 1 }
715
+
716
+ staged_chunk = @i.instance_eval{ @buffer.stage[@buffer.stage.keys.first] }
717
+ assert{ staged_chunk.size != 0 }
718
+
719
+ @i.enqueue_thread_wait
720
+
721
+ waiting(10) do
722
+ Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
723
+ Thread.pass until staged_chunk.size == 0
724
+ end
725
+
726
+ assert_equal rand_records, ary.size
727
+ ary.reject!{|e| true }
728
+ end
729
+ end
730
+ end
731
+
732
+ sub_test_case 'with much longer flush_interval' do
733
+ setup do
734
+ hash = {
735
+ 'flush_mode' => 'interval',
736
+ 'flush_interval' => 3000,
737
+ 'flush_thread_count' => 1,
738
+ 'flush_thread_burst_interval' => 0.01,
739
+ 'chunk_limit_size' => 1024,
740
+ }
741
+ @i = create_output(:buffered)
742
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
743
+ @i.start
744
+ @i.after_start
745
+ end
746
+
747
+ test 'flush_at_shutdown work well when plugin is shutdown' do
748
+ ary = []
749
+ @i.register(:write){|chunk| ary << chunk.read }
750
+
751
+ tag = "test.tag"
752
+ t = event_time()
753
+ r = {}
754
+ (0...10).each do |i|
755
+ r["key#{i}"] = "value #{i}"
756
+ end
757
+ event_size = [tag, t, r].to_json.size # 195
758
+
759
+ (1024 * 0.9 / event_size).to_i.times do |i|
760
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
761
+ end
762
+ queue_size = @i.buffer.queue.size
763
+ assert{ queue_size == 0 && ary.size == 0 }
764
+
765
+ @i.stop
766
+ @i.before_shutdown
767
+ @i.shutdown
768
+ @i.after_shutdown
769
+
770
+ waiting(10){ sleep 0.1 until ary.size == 1 }
771
+ assert_equal [tag,t,r].to_json * (1024 * 0.9 / event_size), ary.first
772
+ end
773
+ end
774
+
775
+ sub_test_case 'buffered output feature without any buffer key, flush_mode: immediate' do
776
+ setup do
777
+ hash = {
778
+ 'flush_mode' => 'immediate',
779
+ 'flush_thread_count' => 1,
780
+ 'flush_thread_burst_interval' => 0.01,
781
+ 'chunk_limit_size' => 1024,
782
+ }
783
+ @i = create_output(:buffered)
784
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
785
+ @i.start
786
+ @i.after_start
787
+ end
788
+
789
+ test '#start does not create enqueue thread, but creates flush threads' do
790
+ @i.thread_wait_until_start
791
+
792
+ assert @i.thread_exist?(:flush_thread_0)
793
+ assert !@i.thread_exist?(:enqueue_thread)
794
+ end
795
+
796
+ test '#format is called for each event streams' do
797
+ ary = []
798
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
799
+
800
+ t = event_time()
801
+ es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
802
+
803
+ 4.times do
804
+ @i.emit_events('tag.test', es)
805
+ end
806
+
807
+ assert_equal 8, ary.size
808
+ 4.times do |i|
809
+ assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
810
+ assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
811
+ end
812
+ end
813
+
814
+ test '#write is called every time for each emits, and buffer chunk is purged' do
815
+ @i.thread_wait_until_start
816
+
817
+ ary = []
818
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
819
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| ary << data } }
820
+
821
+ t = event_time()
822
+ r = {}
823
+ (0...10).each do |i|
824
+ r["key#{i}"] = "value #{i}"
825
+ end
826
+
827
+ 3.times do |i|
828
+ rand_records = rand(1..5)
829
+ es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
830
+ assert_equal rand_records, es.size
831
+ @i.emit_events("test.tag", es)
832
+
833
+ waiting(10){ sleep 0.1 until @i.buffer.stage.size == 0 } # make sure that the emitted es is enqueued by "flush_mode immediate"
834
+ waiting(10){ sleep 0.1 until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0 }
835
+ waiting(10){ sleep 0.1 until ary.size == rand_records }
836
+
837
+ assert_equal rand_records, ary.size
838
+ ary.reject!{|e| true }
839
+ end
840
+ end
841
+
842
+ test 'flush_at_shutdown work well when plugin is shutdown' do
843
+ ary = []
844
+ @i.register(:write){|chunk| ary << chunk.read }
845
+
846
+ tag = "test.tag"
847
+ t = event_time()
848
+ r = {}
849
+ (0...10).each do |i|
850
+ r["key#{i}"] = "value #{i}"
851
+ end
852
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
853
+
854
+ @i.stop
855
+ @i.before_shutdown
856
+ @i.shutdown
857
+ @i.after_shutdown
858
+
859
+ waiting(10) do
860
+ Thread.pass until ary.size == 1
861
+ end
862
+ assert_equal [tag,t,r].to_json, ary.first
863
+ end
864
+ end
865
+
866
+ sub_test_case 'buffered output feature with timekey and range' do
867
+ setup do
868
+ chunk_key = 'time'
869
+ hash = {
870
+ 'timekey' => 30, # per 30seconds
871
+ 'timekey_wait' => 5, # 5 second delay for flush
872
+ 'flush_thread_count' => 1,
873
+ 'flush_thread_burst_interval' => 0.01,
874
+ }
875
+ @i = create_output(:buffered)
876
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
877
+ @i.start
878
+ @i.after_start
879
+ end
880
+
881
+ test '#configure raises config error if timekey is not specified' do
882
+ i = create_output(:buffered)
883
+ assert_raise Fluent::ConfigError do
884
+ i.configure(config_element('ROOT','',{},[config_element('buffer','time',)]))
885
+ end
886
+ end
887
+
888
+ test 'default flush_mode is set to :lazy' do
889
+ assert_equal :lazy, @i.instance_eval{ @flush_mode }
890
+ end
891
+
892
+ test '#start creates enqueue thread and flush threads' do
893
+ @i.thread_wait_until_start
894
+
895
+ assert @i.thread_exist?(:flush_thread_0)
896
+ assert @i.thread_exist?(:enqueue_thread)
897
+ end
898
+
899
+ test '#format is called for each event streams' do
900
+ ary = []
901
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
902
+
903
+ t = event_time()
904
+ es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
905
+
906
+ 5.times do
907
+ @i.emit_events('tag.test', es)
908
+ end
909
+
910
+ assert_equal 10, ary.size
911
+ 5.times do |i|
912
+ assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
913
+ assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
914
+ end
915
+ end
916
+
917
+ test '#write is called per time ranges after timekey_wait, and buffer chunk is purged' do
918
+ Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )
919
+
920
+ @i.thread_wait_until_start
921
+
922
+ ary = []
923
+ metachecks = []
924
+
925
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
926
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30) } }
927
+
928
+ r = {}
929
+ (0...10).each do |i|
930
+ r["key#{i}"] = "value #{i}"
931
+ end
932
+ ts = [
933
+ Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
934
+ Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
935
+ Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
936
+ Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
937
+ ]
938
+ events = [
939
+ ["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
940
+ ["test.tag.2", ts[1], r],
941
+ ["test.tag.1", ts[2], r],
942
+ ["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
943
+ ["test.tag.1", ts[4], r],
944
+ ["test.tag.1", ts[5], r],
945
+ ["test.tag.1", ts[6], r],
946
+ ["test.tag.1", ts[7], r],
947
+ ["test.tag.2", ts[8], r],
948
+ ["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
949
+ ["test.tag.2", ts[10], r],
950
+ ]
951
+
952
+ assert_equal 0, @i.write_count
953
+
954
+ @i.interrupt_flushes
955
+
956
+ events.shuffle.each do |tag, time, record|
957
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
958
+ end
959
+ assert{ @i.buffer.stage.size == 3 }
960
+ assert{ @i.write_count == 0 }
961
+
962
+ @i.enqueue_thread_wait
963
+
964
+ waiting(4){ sleep 0.1 until @i.write_count > 0 }
965
+
966
+ assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }
967
+
968
+ waiting(4){ sleep 0.1 until ary.size == 3 }
969
+
970
+ assert_equal 3, ary.size
971
+ assert_equal 2, ary.select{|e| e[0] == "test.tag.1" }.size
972
+ assert_equal 1, ary.select{|e| e[0] == "test.tag.2" }.size
973
+
974
+ Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )
975
+
976
+ @i.enqueue_thread_wait
977
+
978
+ assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }
979
+
980
+ Timecop.freeze( Time.parse('2016-04-13 14:04:06 +0900') )
981
+
982
+ @i.enqueue_thread_wait
983
+ waiting(4){ sleep 0.1 until @i.write_count > 1 }
984
+
985
+ assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }
986
+
987
+ assert_equal 9, ary.size
988
+ assert_equal 7, ary.select{|e| e[0] == "test.tag.1" }.size
989
+ assert_equal 2, ary.select{|e| e[0] == "test.tag.2" }.size
990
+
991
+ assert metachecks.all?{|e| e }
992
+ end
993
+
994
+ test 'flush_at_shutdown work well when plugin is shutdown' do
995
+ Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )
996
+
997
+ @i.thread_wait_until_start
998
+
999
+ ary = []
1000
+ metachecks = []
1001
+
1002
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1003
+ @i.register(:write){|chunk|
1004
+ chunk.read.split("\n").reject{|l| l.empty? }.each{|data|
1005
+ e = JSON.parse(data)
1006
+ ary << e
1007
+ metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30)
1008
+ }
1009
+ }
1010
+
1011
+ r = {}
1012
+ (0...10).each do |i|
1013
+ r["key#{i}"] = "value #{i}"
1014
+ end
1015
+ ts = [
1016
+ Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
1017
+ Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
1018
+ Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
1019
+ Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
1020
+ ]
1021
+ events = [
1022
+ ["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
1023
+ ["test.tag.2", ts[1], r],
1024
+ ["test.tag.1", ts[2], r],
1025
+ ["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
1026
+ ["test.tag.1", ts[4], r],
1027
+ ["test.tag.1", ts[5], r],
1028
+ ["test.tag.1", ts[6], r],
1029
+ ["test.tag.1", ts[7], r],
1030
+ ["test.tag.2", ts[8], r],
1031
+ ["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
1032
+ ["test.tag.2", ts[10], r],
1033
+ ]
1034
+
1035
+ assert_equal 0, @i.write_count
1036
+
1037
+ @i.interrupt_flushes
1038
+
1039
+ events.shuffle.each do |tag, time, record|
1040
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1041
+ end
1042
+ assert{ @i.buffer.stage.size == 3 }
1043
+ assert{ @i.write_count == 0 }
1044
+
1045
+ @i.enqueue_thread_wait
1046
+
1047
+ waiting(4){ sleep 0.1 until @i.write_count > 0 }
1048
+
1049
+ assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }
1050
+
1051
+ Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )
1052
+
1053
+ @i.enqueue_thread_wait
1054
+
1055
+ assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }
1056
+
1057
+ Timecop.freeze( Time.parse('2016-04-13 14:04:06 +0900') )
1058
+
1059
+ @i.enqueue_thread_wait
1060
+ waiting(4){ sleep 0.1 until @i.write_count > 1 }
1061
+
1062
+ assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }
1063
+
1064
+ Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
1065
+
1066
+ waiting(4){ sleep 0.1 until ary.size == 9 }
1067
+ assert_equal 9, ary.size
1068
+
1069
+ @i.stop
1070
+ @i.before_shutdown
1071
+ @i.shutdown
1072
+ @i.after_shutdown
1073
+
1074
+ waiting(4){ sleep 0.1 until @i.write_count > 2 && ary.size == 11 }
1075
+
1076
+ assert_equal 11, ary.size
1077
+ assert metachecks.all?{|e| e }
1078
+ end
1079
+ end
1080
+
1081
+ sub_test_case 'buffered output with large timekey and small timekey_wait' do
1082
+ setup do
1083
+ chunk_key = 'time'
1084
+ hash = {
1085
+ 'timekey' => 86400, # per 1 day
1086
+ 'timekey_wait' => 10, # 10 seconds delay for flush
1087
+ 'flush_thread_count' => 1,
1088
+ 'flush_thread_burst_interval' => 0.01,
1089
+ }
1090
+ @i = create_output(:buffered)
1091
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
1092
+ @i.start
1093
+ @i.after_start
1094
+ end
1095
+
1096
+ test '#write is deferred until timekey + timekey_wait passes even with large timekey' do
1097
+ Timecop.freeze( Time.parse('2019-02-08 00:01:00 +0900') )
1098
+ ary = []
1099
+ @i.register(:write){|chunk| ary << chunk.read }
1100
+ @i.thread_wait_until_start
1101
+ events = [
1102
+ [event_time('2019-02-08 00:02:00 +0900'), {"message" => "foobar"}]
1103
+ ]
1104
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new(events))
1105
+ @i.enqueue_thread_wait
1106
+ assert{ @i.write_count == 0 }
1107
+
1108
+ Timecop.freeze( Time.parse('2019-02-09 00:00:08 +0900') )
1109
+ @i.enqueue_thread_wait
1110
+ assert{ @i.write_count == 0 }
1111
+
1112
+ Timecop.freeze( Time.parse('2019-02-09 00:00:12 +0900') )
1113
+ # write should be called within a few seconds since
1114
+ # the running interval of the enqueue thread is timekey_wait / 11.0.
1115
+ waiting(5){ sleep 0.1 until @i.write_count == 1 }
1116
+ end
1117
+ end
1118
+
1119
+ sub_test_case 'buffered output feature with tag key' do
1120
+ setup do
1121
+ chunk_key = 'tag'
1122
+ hash = {
1123
+ 'flush_interval' => 10,
1124
+ 'flush_thread_count' => 1,
1125
+ 'flush_thread_burst_interval' => 0.1,
1126
+ 'chunk_limit_size' => 1024,
1127
+ 'queued_chunks_limit_size' => 100
1128
+ }
1129
+ @i = create_output(:buffered)
1130
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
1131
+ @i.start
1132
+ @i.after_start
1133
+ end
1134
+
1135
+ test 'default flush_mode is set to :interval' do
1136
+ assert_equal :interval, @i.instance_eval{ @flush_mode }
1137
+ end
1138
+
1139
+ test '#start creates enqueue thread and flush threads' do
1140
+ @i.thread_wait_until_start
1141
+
1142
+ assert @i.thread_exist?(:flush_thread_0)
1143
+ assert @i.thread_exist?(:enqueue_thread)
1144
+ end
1145
+
1146
+ test '#format is called for each event streams' do
1147
+ ary = []
1148
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
1149
+
1150
+ t = event_time()
1151
+ es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
1152
+
1153
+ 5.times do
1154
+ @i.emit_events('tag.test', es)
1155
+ end
1156
+
1157
+ assert_equal 10, ary.size
1158
+ 5.times do |i|
1159
+ assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
1160
+ assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
1161
+ end
1162
+ end
1163
+
1164
+ test '#write is called per tags, per flush_interval & chunk sizes, and buffer chunk is purged' do
1165
+ Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )
1166
+
1167
+ ary = []
1168
+ metachecks = []
1169
+
1170
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1171
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.tag == e[0]) } }
1172
+
1173
+ @i.thread_wait_until_start
1174
+
1175
+ r = {}
1176
+ (0...10).each do |i|
1177
+ r["key#{i}"] = "value #{i}"
1178
+ end
1179
+ ts = [
1180
+ event_time('2016-04-13 14:03:21 +0900'), event_time('2016-04-13 14:03:23 +0900'), event_time('2016-04-13 14:03:29 +0900'),
1181
+ event_time('2016-04-13 14:03:30 +0900'), event_time('2016-04-13 14:03:33 +0900'), event_time('2016-04-13 14:03:38 +0900'),
1182
+ event_time('2016-04-13 14:03:43 +0900'), event_time('2016-04-13 14:03:49 +0900'), event_time('2016-04-13 14:03:51 +0900'),
1183
+ event_time('2016-04-13 14:04:00 +0900'), event_time('2016-04-13 14:04:01 +0900'),
1184
+ ]
1185
+ # size of a event is 197
1186
+ events = [
1187
+ ["test.tag.1", ts[0], r],
1188
+ ["test.tag.2", ts[1], r],
1189
+ ["test.tag.1", ts[2], r],
1190
+ ["test.tag.1", ts[3], r],
1191
+ ["test.tag.1", ts[4], r],
1192
+ ["test.tag.1", ts[5], r],
1193
+ ["test.tag.1", ts[6], r],
1194
+ ["test.tag.1", ts[7], r],
1195
+ ["test.tag.2", ts[8], r],
1196
+ ["test.tag.1", ts[9], r],
1197
+ ["test.tag.2", ts[10], r],
1198
+ ]
1199
+
1200
+ assert_equal 0, @i.write_count
1201
+
1202
+ @i.interrupt_flushes
1203
+
1204
+ events.shuffle.each do |tag, time, record|
1205
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1206
+ end
1207
+ assert{ @i.buffer.stage.size == 2 } # test.tag.1 x1, test.tag.2 x1
1208
+
1209
+ Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )
1210
+
1211
+ @i.enqueue_thread_wait
1212
+ @i.flush_thread_wakeup
1213
+
1214
+ waiting(4) do
1215
+ Thread.pass until @i.write_count > 0
1216
+ end
1217
+
1218
+ assert{ @i.buffer.stage.size == 2 }
1219
+ assert{ @i.write_count == 1 }
1220
+ assert{ @i.buffer.queue.size == 0 }
1221
+
1222
+ # events fulfills a chunk (and queued immediately)
1223
+ assert_equal 5, ary.size
1224
+ assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
1225
+ assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
1226
+
1227
+ Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )
1228
+
1229
+ @i.enqueue_thread_wait
1230
+
1231
+ assert{ @i.buffer.stage.size == 2 }
1232
+
1233
+ # to trigger try_flush with flush_thread_burst_interval
1234
+ Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
1235
+ @i.enqueue_thread_wait
1236
+ Timecop.freeze( Time.parse('2016-04-13 14:04:15 +0900') )
1237
+ @i.enqueue_thread_wait
1238
+ @i.flush_thread_wakeup
1239
+
1240
+ assert{ @i.buffer.stage.size == 0 }
1241
+
1242
+ waiting(4) do
1243
+ Thread.pass until @i.write_count > 2
1244
+ end
1245
+
1246
+ assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
1247
+
1248
+ assert_equal 11, ary.size
1249
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
1250
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
1251
+
1252
+ assert metachecks.all?{|e| e }
1253
+ end
1254
+
1255
+ test 'flush_at_shutdown work well when plugin is shutdown' do
1256
+ Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )
1257
+
1258
+ ary = []
1259
+ metachecks = []
1260
+
1261
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1262
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.tag == e[0]) } }
1263
+
1264
+ @i.thread_wait_until_start
1265
+
1266
+ r = {}
1267
+ (0...10).each do |i|
1268
+ r["key#{i}"] = "value #{i}"
1269
+ end
1270
+ ts = [
1271
+ event_time('2016-04-13 14:03:21 +0900'), event_time('2016-04-13 14:03:23 +0900'), event_time('2016-04-13 14:03:29 +0900'),
1272
+ event_time('2016-04-13 14:03:30 +0900'), event_time('2016-04-13 14:03:33 +0900'), event_time('2016-04-13 14:03:38 +0900'),
1273
+ event_time('2016-04-13 14:03:43 +0900'), event_time('2016-04-13 14:03:49 +0900'), event_time('2016-04-13 14:03:51 +0900'),
1274
+ event_time('2016-04-13 14:04:00 +0900'), event_time('2016-04-13 14:04:01 +0900'),
1275
+ ]
1276
+ # size of a event is 197
1277
+ events = [
1278
+ ["test.tag.1", ts[0], r],
1279
+ ["test.tag.2", ts[1], r],
1280
+ ["test.tag.1", ts[2], r],
1281
+ ["test.tag.1", ts[3], r],
1282
+ ["test.tag.1", ts[4], r],
1283
+ ["test.tag.1", ts[5], r],
1284
+ ["test.tag.1", ts[6], r],
1285
+ ["test.tag.1", ts[7], r],
1286
+ ["test.tag.2", ts[8], r],
1287
+ ["test.tag.1", ts[9], r],
1288
+ ["test.tag.2", ts[10], r],
1289
+ ]
1290
+
1291
+ assert_equal 0, @i.write_count
1292
+
1293
+ @i.interrupt_flushes
1294
+
1295
+ events.shuffle.each do |tag, time, record|
1296
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1297
+ end
1298
+ assert{ @i.buffer.stage.size == 2 } # test.tag.1 x1, test.tag.2 x1
1299
+
1300
+ Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )
1301
+
1302
+ @i.enqueue_thread_wait
1303
+ @i.flush_thread_wakeup
1304
+
1305
+ waiting(4) do
1306
+ Thread.pass until @i.write_count > 0
1307
+ end
1308
+
1309
+ assert{ @i.buffer.stage.size == 2 }
1310
+ assert{ @i.write_count == 1 }
1311
+ assert{ @i.buffer.queue.size == 0 }
1312
+
1313
+ # events fulfills a chunk (and queued immediately)
1314
+ assert_equal 5, ary.size
1315
+ assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
1316
+ assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
1317
+
1318
+ @i.stop
1319
+ @i.before_shutdown
1320
+ @i.shutdown
1321
+ @i.after_shutdown
1322
+
1323
+ waiting(4) do
1324
+ Thread.pass until @i.write_count > 1
1325
+ end
1326
+
1327
+ assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 3 }
1328
+
1329
+ assert_equal 11, ary.size
1330
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
1331
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
1332
+
1333
+ assert metachecks.all?{|e| e }
1334
+ end
1335
+ end
1336
+
1337
+ sub_test_case 'buffered output feature with variables' do
1338
+ setup do
1339
+ chunk_key = 'name,service'
1340
+ hash = {
1341
+ 'flush_interval' => 10,
1342
+ 'flush_thread_count' => 1,
1343
+ 'flush_thread_burst_interval' => 0.1,
1344
+ 'chunk_limit_size' => 1024,
1345
+ }
1346
+ @i = create_output(:buffered)
1347
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
1348
+ @i.start
1349
+ @i.after_start
1350
+ end
1351
+
1352
+ test 'default flush_mode is set to :interval' do
1353
+ assert_equal :interval, @i.instance_eval{ @flush_mode }
1354
+ end
1355
+
1356
+ test '#start creates enqueue thread and flush threads' do
1357
+ @i.thread_wait_until_start
1358
+
1359
+ assert @i.thread_exist?(:flush_thread_0)
1360
+ assert @i.thread_exist?(:enqueue_thread)
1361
+ end
1362
+
1363
+ test '#format is called for each event streams' do
1364
+ ary = []
1365
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
1366
+
1367
+ t = event_time()
1368
+ es = Fluent::ArrayEventStream.new([
1369
+ [t, {"key" => "value1", "name" => "moris", "service" => "a"}],
1370
+ [t, {"key" => "value2", "name" => "moris", "service" => "b"}],
1371
+ ])
1372
+
1373
+ 5.times do
1374
+ @i.emit_events('tag.test', es)
1375
+ end
1376
+
1377
+ assert_equal 10, ary.size
1378
+ 5.times do |i|
1379
+ assert_equal ["tag.test", t, {"key" => "value1", "name" => "moris", "service" => "a"}], ary[i*2]
1380
+ assert_equal ["tag.test", t, {"key" => "value2", "name" => "moris", "service" => "b"}], ary[i*2+1]
1381
+ end
1382
+ end
1383
+
1384
+ test '#write is called per value combination of variables, per flush_interval & chunk sizes, and buffer chunk is purged' do
1385
+ Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )
1386
+
1387
+ ary = []
1388
+ metachecks = []
1389
+
1390
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1391
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (e[2]["name"] == chunk.metadata.variables[:name] && e[2]["service"] == chunk.metadata.variables[:service]) } }
1392
+
1393
+ @i.thread_wait_until_start
1394
+
1395
+ # size of a event is 195
1396
+ dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
1397
+ events = [
1398
+ ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1) xxx-a (6 events)
1399
+ ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2) yyy-a (3 events)
1400
+ ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1401
+ ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1402
+ ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1403
+ ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3) xxx-b (2 events)
1404
+ ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1405
+ ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3)
1406
+ ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
1407
+ ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1408
+ ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
1409
+ ]
1410
+
1411
+ assert_equal 0, @i.write_count
1412
+
1413
+ @i.interrupt_flushes
1414
+
1415
+ events.shuffle.each do |tag, time, record|
1416
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1417
+ end
1418
+ assert{ @i.buffer.stage.size == 3 }
1419
+
1420
+ Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )
1421
+
1422
+ @i.enqueue_thread_wait
1423
+ @i.flush_thread_wakeup
1424
+
1425
+ waiting(4) do
1426
+ Thread.pass until @i.write_count > 0
1427
+ end
1428
+
1429
+ assert{ @i.buffer.stage.size == 3 }
1430
+ assert{ @i.write_count == 1 }
1431
+ assert{ @i.buffer.queue.size == 0 }
1432
+
1433
+ # events fulfills a chunk (and queued immediately)
1434
+ assert_equal 5, ary.size
1435
+ assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
1436
+ assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
1437
+ assert ary[0...5].all?{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }
1438
+
1439
+ Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )
1440
+
1441
+ @i.enqueue_thread_wait
1442
+
1443
+ assert{ @i.buffer.stage.size == 3 }
1444
+
1445
+ # to trigger try_flush with flush_thread_burst_interval
1446
+ Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
1447
+ @i.enqueue_thread_wait
1448
+ Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
1449
+ @i.enqueue_thread_wait
1450
+ Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
1451
+ @i.enqueue_thread_wait
1452
+ Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
1453
+ @i.enqueue_thread_wait
1454
+ @i.flush_thread_wakeup
1455
+
1456
+ assert{ @i.buffer.stage.size == 0 }
1457
+
1458
+ waiting(4) do
1459
+ Thread.pass until @i.write_count > 1
1460
+ end
1461
+
1462
+ assert{ @i.buffer.stage.size == 0 && @i.write_count == 4 }
1463
+
1464
+ assert_equal 11, ary.size
1465
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
1466
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
1467
+ assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
1468
+ assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
1469
+ assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size
1470
+
1471
+ assert metachecks.all?{|e| e }
1472
+ end
1473
+
1474
+ test 'flush_at_shutdown work well when plugin is shutdown' do
1475
+ Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )
1476
+
1477
+ ary = []
1478
+ metachecks = []
1479
+
1480
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1481
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (e[2]["name"] == chunk.metadata.variables[:name] && e[2]["service"] == chunk.metadata.variables[:service]) } }
1482
+
1483
+ @i.thread_wait_until_start
1484
+
1485
+ # size of a event is 195
1486
+ dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
1487
+ events = [
1488
+ ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1) xxx-a (6 events)
1489
+ ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2) yyy-a (3 events)
1490
+ ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1491
+ ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1492
+ ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1493
+ ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3) xxx-b (2 events)
1494
+ ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1495
+ ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3)
1496
+ ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
1497
+ ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
1498
+ ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
1499
+ ]
1500
+
1501
+ assert_equal 0, @i.write_count
1502
+
1503
+ @i.interrupt_flushes
1504
+
1505
+ events.shuffle.each do |tag, time, record|
1506
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1507
+ end
1508
+ assert{ @i.buffer.stage.size == 3 }
1509
+
1510
+ Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )
1511
+
1512
+ @i.enqueue_thread_wait
1513
+ @i.flush_thread_wakeup
1514
+
1515
+ waiting(4) do
1516
+ Thread.pass until @i.write_count > 0
1517
+ end
1518
+
1519
+ assert{ @i.buffer.stage.size == 3 }
1520
+ assert{ @i.write_count == 1 }
1521
+ assert{ @i.buffer.queue.size == 0 }
1522
+
1523
+ # events fulfills a chunk (and queued immediately)
1524
+ assert_equal 5, ary.size
1525
+ assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
1526
+ assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
1527
+
1528
+ @i.stop
1529
+ @i.before_shutdown
1530
+ @i.shutdown
1531
+ @i.after_shutdown
1532
+
1533
+ waiting(4) do
1534
+ Thread.pass until @i.write_count > 1
1535
+ end
1536
+
1537
+ assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 4 }
1538
+
1539
+ assert_equal 11, ary.size
1540
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
1541
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
1542
+ assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
1543
+ assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
1544
+ assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size
1545
+
1546
+ assert metachecks.all?{|e| e }
1547
+ end
1548
+ end
1549
+
1550
+ sub_test_case 'buffered output feature with many keys' do
1551
+ test 'default flush mode is set to :interval if keys does not include time' do
1552
+ chunk_key = 'name,service,tag'
1553
+ hash = {
1554
+ 'flush_interval' => 10,
1555
+ 'flush_thread_count' => 1,
1556
+ 'flush_thread_burst_interval' => 0.1,
1557
+ 'chunk_limit_size' => 1024,
1558
+ }
1559
+ @i = create_output(:buffered)
1560
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
1561
+ @i.start
1562
+ @i.after_start
1563
+
1564
+ assert_equal :interval, @i.instance_eval{ @flush_mode }
1565
+ end
1566
+
1567
+ test 'default flush mode is set to :lazy if keys includes time' do
1568
+ chunk_key = 'name,service,tag,time'
1569
+ hash = {
1570
+ 'timekey' => 60,
1571
+ 'flush_interval' => 10,
1572
+ 'flush_thread_count' => 1,
1573
+ 'flush_thread_burst_interval' => 0.1,
1574
+ 'chunk_limit_size' => 1024,
1575
+ }
1576
+ @i = create_output(:buffered)
1577
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
1578
+ @i.start
1579
+ @i.after_start
1580
+
1581
+ assert_equal :lazy, @i.instance_eval{ @flush_mode }
1582
+ end
1583
+ end
1584
+
1585
+ sub_test_case 'buffered output feature with delayed commit' do
1586
+ setup do
1587
+ chunk_key = 'tag'
1588
+ hash = {
1589
+ 'flush_interval' => 10,
1590
+ 'flush_thread_count' => 1,
1591
+ 'flush_thread_burst_interval' => 0.1,
1592
+ 'delayed_commit_timeout' => 30,
1593
+ 'chunk_limit_size' => 1024,
1594
+ }
1595
+ @i = create_output(:delayed)
1596
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
1597
+ @i.start
1598
+ @i.after_start
1599
+ @i.log = Fluent::Test::TestLogger.new
1600
+ end
1601
+
1602
+ test '#format is called for each event streams' do
1603
+ ary = []
1604
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
1605
+
1606
+ t = event_time()
1607
+ es = Fluent::ArrayEventStream.new([
1608
+ [t, {"key" => "value1", "name" => "moris", "service" => "a"}],
1609
+ [t, {"key" => "value2", "name" => "moris", "service" => "b"}],
1610
+ ])
1611
+
1612
+ 5.times do
1613
+ @i.emit_events('tag.test', es)
1614
+ end
1615
+
1616
+ assert_equal 10, ary.size
1617
+ 5.times do |i|
1618
+ assert_equal ["tag.test", t, {"key" => "value1", "name" => "moris", "service" => "a"}], ary[i*2]
1619
+ assert_equal ["tag.test", t, {"key" => "value2", "name" => "moris", "service" => "b"}], ary[i*2+1]
1620
+ end
1621
+ end
1622
+
1623
+ test '#try_write is called per flush, buffer chunk is not purged until #commit_write is called' do
1624
+ Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )
1625
+
1626
+ ary = []
1627
+ metachecks = []
1628
+ chunks = []
1629
+
1630
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1631
+ @i.register(:try_write) do |chunk|
1632
+ chunks << chunk
1633
+ chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
1634
+ e = JSON.parse(data)
1635
+ ary << e
1636
+ metachecks << (e[0] == chunk.metadata.tag)
1637
+ end
1638
+ end
1639
+
1640
+ @i.thread_wait_until_start
1641
+
1642
+ # size of a event is 195
1643
+ dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
1644
+ events = [
1645
+ ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1646
+ ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1647
+ ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1648
+ ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1649
+ ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1650
+ ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
1651
+ ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1652
+ ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
1653
+ ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1654
+ ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1655
+ ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1656
+ ]
1657
+
1658
+ assert_equal 0, @i.write_count
1659
+
1660
+ @i.interrupt_flushes
1661
+
1662
+ events.shuffle.each do |tag, time, record|
1663
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1664
+ end
1665
+ assert{ @i.buffer.stage.size == 2 }
1666
+
1667
+ Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )
1668
+
1669
+ @i.enqueue_thread_wait
1670
+ @i.flush_thread_wakeup
1671
+
1672
+ waiting(4) do
1673
+ Thread.pass until @i.write_count > 0
1674
+ end
1675
+
1676
+ assert{ @i.buffer.stage.size == 2 }
1677
+ assert{ @i.write_count == 1 }
1678
+ assert{ @i.buffer.queue.size == 0 }
1679
+ assert{ @i.buffer.dequeued.size == 1 }
1680
+
1681
+ # events fulfills a chunk (and queued immediately)
1682
+ assert_equal 5, ary.size
1683
+ assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
1684
+ assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
1685
+
1686
+ assert_equal 1, chunks.size
1687
+ assert !chunks.first.empty?
1688
+
1689
+ Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )
1690
+
1691
+ @i.enqueue_thread_wait
1692
+
1693
+ assert{ @i.buffer.stage.size == 2 }
1694
+
1695
+ # to trigger try_flush with flush_thread_burst_interval
1696
+ Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
1697
+ @i.enqueue_thread_wait
1698
+ Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
1699
+ @i.enqueue_thread_wait
1700
+ Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
1701
+ @i.enqueue_thread_wait
1702
+ Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
1703
+ @i.enqueue_thread_wait
1704
+ @i.flush_thread_wakeup
1705
+
1706
+ assert{ @i.buffer.stage.size == 0 }
1707
+
1708
+ waiting(4) do
1709
+ Thread.pass until @i.write_count > 1
1710
+ end
1711
+
1712
+ assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
1713
+ assert{ @i.buffer.dequeued.size == 3 }
1714
+
1715
+ assert_equal 11, ary.size
1716
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
1717
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
1718
+
1719
+ assert_equal 3, chunks.size
1720
+ assert chunks.all?{|c| !c.empty? }
1721
+
1722
+ assert metachecks.all?{|e| e }
1723
+
1724
+ @i.commit_write(chunks[0].unique_id)
1725
+ assert{ @i.buffer.dequeued.size == 2 }
1726
+ assert chunks[0].empty?
1727
+
1728
+ @i.commit_write(chunks[1].unique_id)
1729
+ assert{ @i.buffer.dequeued.size == 1 }
1730
+ assert chunks[1].empty?
1731
+
1732
+ @i.commit_write(chunks[2].unique_id)
1733
+ assert{ @i.buffer.dequeued.size == 0 }
1734
+ assert chunks[2].empty?
1735
+
1736
+ # no problem to commit chunks already committed
1737
+ assert_nothing_raised do
1738
+ @i.commit_write(chunks[2].unique_id)
1739
+ end
1740
+ end
1741
+
1742
+ test '#rollback_write and #try_rollback_write can rollback buffer chunks for delayed commit after timeout, and then be able to write it again' do
1743
+ Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )
1744
+
1745
+ ary = []
1746
+ metachecks = []
1747
+ chunks = []
1748
+
1749
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1750
+ @i.register(:try_write) do |chunk|
1751
+ chunks << chunk
1752
+ chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
1753
+ e = JSON.parse(data)
1754
+ ary << e
1755
+ metachecks << (e[0] == chunk.metadata.tag)
1756
+ end
1757
+ end
1758
+
1759
+ @i.thread_wait_until_start
1760
+
1761
+ # size of a event is 195
1762
+ dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
1763
+ events = [
1764
+ ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1765
+ ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1766
+ ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1767
+ ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1768
+ ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1769
+ ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
1770
+ ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1771
+ ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
1772
+ ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1773
+ ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1774
+ ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1775
+ ]
1776
+
1777
+ assert_equal 0, @i.write_count
1778
+
1779
+ @i.interrupt_flushes
1780
+
1781
+ events.shuffle.each do |tag, time, record|
1782
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1783
+ end
1784
+ assert{ @i.buffer.stage.size == 2 }
1785
+
1786
+ Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )
1787
+
1788
+ @i.enqueue_thread_wait
1789
+ @i.flush_thread_wakeup
1790
+
1791
+ waiting(4) do
1792
+ Thread.pass until @i.write_count > 0
1793
+ end
1794
+
1795
+ assert{ @i.buffer.stage.size == 2 }
1796
+ assert{ @i.write_count == 1 }
1797
+ assert{ @i.buffer.queue.size == 0 }
1798
+ assert{ @i.buffer.dequeued.size == 1 }
1799
+
1800
+ # events fulfills a chunk (and queued immediately)
1801
+ assert_equal 5, ary.size
1802
+ assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
1803
+ assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
1804
+
1805
+ assert_equal 1, chunks.size
1806
+ assert !chunks.first.empty?
1807
+
1808
+ Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )
1809
+
1810
+ @i.enqueue_thread_wait
1811
+
1812
+ assert{ @i.buffer.stage.size == 2 }
1813
+
1814
+ # to trigger try_flush with flush_thread_burst_interval
1815
+ Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
1816
+ @i.enqueue_thread_wait
1817
+ Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
1818
+ @i.enqueue_thread_wait
1819
+ Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
1820
+ @i.enqueue_thread_wait
1821
+ Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
1822
+ @i.enqueue_thread_wait
1823
+ @i.flush_thread_wakeup
1824
+
1825
+ assert{ @i.buffer.stage.size == 0 }
1826
+
1827
+ waiting(4) do
1828
+ Thread.pass until @i.write_count > 2
1829
+ end
1830
+
1831
+ assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
1832
+ assert{ @i.buffer.dequeued.size == 3 }
1833
+
1834
+ assert_equal 11, ary.size
1835
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
1836
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
1837
+
1838
+ assert_equal 3, chunks.size
1839
+ assert chunks.all?{|c| !c.empty? }
1840
+
1841
+ assert metachecks.all?{|e| e }
1842
+
1843
+ @i.interrupt_flushes
1844
+
1845
+ @i.rollback_write(chunks[2].unique_id)
1846
+
1847
+ assert{ @i.buffer.dequeued.size == 2 }
1848
+ assert{ @i.buffer.queue.size == 1 && @i.buffer.queue.first.unique_id == chunks[2].unique_id }
1849
+
1850
+ Timecop.freeze( Time.parse('2016-04-13 14:04:15 +0900') )
1851
+ @i.enqueue_thread_wait
1852
+ @i.flush_thread_wakeup
1853
+
1854
+ waiting(4) do
1855
+ Thread.pass until @i.write_count > 3
1856
+ end
1857
+
1858
+ assert{ @i.write_count == 4 }
1859
+ assert{ @i.rollback_count == 1 }
1860
+ assert{ @i.instance_eval{ @dequeued_chunks.size } == 3 }
1861
+ assert{ @i.buffer.dequeued.size == 3 }
1862
+ assert{ @i.buffer.queue.size == 0 }
1863
+
1864
+ assert_equal 4, chunks.size
1865
+ assert chunks[2].unique_id == chunks[3].unique_id
1866
+
1867
+ ary.reject!{|e| true }
1868
+ chunks.reject!{|e| true }
1869
+
1870
+ Timecop.freeze( Time.parse('2016-04-13 14:04:46 +0900') )
1871
+ @i.enqueue_thread_wait
1872
+ @i.flush_thread_wakeup
1873
+
1874
+ waiting(4) do
1875
+ Thread.pass until @i.rollback_count == 4
1876
+ end
1877
+
1878
+ assert{ chunks[0...3].all?{|c| !c.empty? } }
1879
+
1880
+ # rollback is in progress, but some may be flushed again in retry state, after rollback
1881
+ # retry.next_time is 14:04:49
1882
+ Timecop.freeze( Time.parse('2016-04-13 14:04:51 +0900') )
1883
+ @i.enqueue_thread_wait
1884
+ @i.flush_thread_wakeup
1885
+
1886
+ waiting(4) do
1887
+ Thread.pass until @i.write_count == 7
1888
+ end
1889
+
1890
+ assert{ @i.write_count == 7 }
1891
+ assert_equal 11, ary.size
1892
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
1893
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
1894
+ assert{ chunks.size == 3 }
1895
+ assert{ chunks.all?{|c| !c.empty? } }
1896
+
1897
+ chunks.each{|c| @i.commit_write(c.unique_id) }
1898
+ assert{ chunks.all?{|c| c.empty? } }
1899
+
1900
+ assert{ @i.buffer.dequeued.size == 0 }
1901
+ end
1902
+
1903
+ test '#try_rollback_all will be called for all waiting chunks after shutdown' do
1904
+ Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )
1905
+
1906
+ ary = []
1907
+ metachecks = []
1908
+ chunks = []
1909
+
1910
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1911
+ @i.register(:try_write) do |chunk|
1912
+ chunks << chunk
1913
+ chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
1914
+ e = JSON.parse(data)
1915
+ ary << e
1916
+ metachecks << (e[0] == chunk.metadata.tag)
1917
+ end
1918
+ end
1919
+
1920
+ @i.thread_wait_until_start
1921
+
1922
+ # size of a event is 195
1923
+ dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
1924
+ events = [
1925
+ ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1926
+ ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1927
+ ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1928
+ ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1929
+ ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1930
+ ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
1931
+ ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1932
+ ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
1933
+ ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1934
+ ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1935
+ ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1936
+ ]
1937
+
1938
+ assert_equal 0, @i.write_count
1939
+
1940
+ @i.interrupt_flushes
1941
+
1942
+ events.shuffle.each do |tag, time, record|
1943
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1944
+ end
1945
+ assert{ @i.buffer.stage.size == 2 }
1946
+
1947
+ Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )
1948
+
1949
+ @i.enqueue_thread_wait
1950
+ @i.flush_thread_wakeup
1951
+
1952
+ waiting(4) do
1953
+ Thread.pass until @i.write_count > 0
1954
+ end
1955
+
1956
+ assert{ @i.buffer.stage.size == 2 }
1957
+ assert{ @i.write_count == 1 }
1958
+ assert{ @i.buffer.queue.size == 0 }
1959
+ assert{ @i.buffer.dequeued.size == 1 }
1960
+
1961
+ # events fulfills a chunk (and queued immediately)
1962
+ assert_equal 5, ary.size
1963
+ assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
1964
+ assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
1965
+
1966
+ assert_equal 1, chunks.size
1967
+ assert !chunks.first.empty?
1968
+
1969
+ Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )
1970
+
1971
+ @i.enqueue_thread_wait
1972
+
1973
+ assert{ @i.buffer.stage.size == 2 }
1974
+
1975
+ # to trigger try_flush with flush_thread_burst_interval
1976
+ Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
1977
+ @i.enqueue_thread_wait
1978
+ Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
1979
+ @i.enqueue_thread_wait
1980
+ Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
1981
+ @i.enqueue_thread_wait
1982
+ Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
1983
+ @i.enqueue_thread_wait
1984
+ @i.flush_thread_wakeup
1985
+
1986
+ assert{ @i.buffer.stage.size == 0 }
1987
+
1988
+ waiting(4) do
1989
+ Thread.pass until @i.write_count > 2
1990
+ end
1991
+
1992
+ assert{ @i.buffer.stage.size == 0 }
1993
+ assert{ @i.buffer.queue.size == 0 }
1994
+ assert{ @i.buffer.dequeued.size == 3 }
1995
+ assert{ @i.write_count == 3 }
1996
+ assert{ @i.rollback_count == 0 }
1997
+
1998
+ assert_equal 11, ary.size
1999
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
2000
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
2001
+
2002
+ assert{ chunks.size == 3 }
2003
+ assert{ chunks.all?{|c| !c.empty? } }
2004
+
2005
+ @i.register(:shutdown_hook){ @i.commit_write(chunks[1].unique_id) }
2006
+
2007
+ @i.stop
2008
+ @i.before_shutdown
2009
+ @i.shutdown
2010
+
2011
+ assert{ @i.buffer.dequeued.size == 2 }
2012
+ assert{ !chunks[0].empty? }
2013
+ assert{ chunks[1].empty? }
2014
+ assert{ !chunks[2].empty? }
2015
+
2016
+ @i.after_shutdown
2017
+
2018
+ assert{ @i.rollback_count == 2 }
2019
+ end
2020
+ end
2021
+ end