fluentd-hubspot 0.14.14.1

Files changed (396)
  1. data/.github/ISSUE_TEMPLATE.md +6 -0
  2. data/.gitignore +28 -0
  3. data/.travis.yml +51 -0
  4. data/AUTHORS +2 -0
  5. data/CONTRIBUTING.md +42 -0
  6. data/COPYING +14 -0
  7. data/ChangeLog +593 -0
  8. data/Gemfile +9 -0
  9. data/README.md +76 -0
  10. data/Rakefile +74 -0
  11. data/Vagrantfile +17 -0
  12. data/appveyor.yml +43 -0
  13. data/bin/fluent-binlog-reader +7 -0
  14. data/bin/fluent-debug +5 -0
  15. data/bin/fluent-plugin-config-format +5 -0
  16. data/bin/fluent-plugin-generate +5 -0
  17. data/code-of-conduct.md +3 -0
  18. data/example/copy_roundrobin.conf +39 -0
  19. data/example/filter_stdout.conf +22 -0
  20. data/example/in_dummy_blocks.conf +17 -0
  21. data/example/in_dummy_with_compression.conf +23 -0
  22. data/example/in_forward.conf +14 -0
  23. data/example/in_forward_client.conf +37 -0
  24. data/example/in_forward_shared_key.conf +15 -0
  25. data/example/in_forward_tls.conf +14 -0
  26. data/example/in_forward_users.conf +24 -0
  27. data/example/in_forward_workers.conf +21 -0
  28. data/example/in_http.conf +14 -0
  29. data/example/in_out_forward.conf +17 -0
  30. data/example/in_syslog.conf +15 -0
  31. data/example/in_tail.conf +14 -0
  32. data/example/in_tcp.conf +13 -0
  33. data/example/in_udp.conf +13 -0
  34. data/example/logevents.conf +25 -0
  35. data/example/multi_filters.conf +61 -0
  36. data/example/out_copy.conf +20 -0
  37. data/example/out_exec_filter.conf +42 -0
  38. data/example/out_file.conf +13 -0
  39. data/example/out_forward.conf +35 -0
  40. data/example/out_forward_buf_file.conf +23 -0
  41. data/example/out_forward_client.conf +109 -0
  42. data/example/out_forward_heartbeat_none.conf +16 -0
  43. data/example/out_forward_shared_key.conf +36 -0
  44. data/example/out_forward_tls.conf +18 -0
  45. data/example/out_forward_users.conf +65 -0
  46. data/example/out_null.conf +36 -0
  47. data/example/secondary_file.conf +41 -0
  48. data/example/suppress_config_dump.conf +7 -0
  49. data/example/v0_12_filter.conf +78 -0
  50. data/example/v1_literal_example.conf +36 -0
  51. data/fluent.conf +139 -0
  52. data/fluentd.gemspec +51 -0
  53. data/lib/fluent/agent.rb +163 -0
  54. data/lib/fluent/clock.rb +62 -0
  55. data/lib/fluent/command/binlog_reader.rb +234 -0
  56. data/lib/fluent/command/bundler_injection.rb +45 -0
  57. data/lib/fluent/command/cat.rb +330 -0
  58. data/lib/fluent/command/debug.rb +102 -0
  59. data/lib/fluent/command/fluentd.rb +301 -0
  60. data/lib/fluent/command/plugin_config_formatter.rb +258 -0
  61. data/lib/fluent/command/plugin_generator.rb +301 -0
  62. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  63. data/lib/fluent/compat/detach_process_mixin.rb +25 -0
  64. data/lib/fluent/compat/exec_util.rb +129 -0
  65. data/lib/fluent/compat/file_util.rb +54 -0
  66. data/lib/fluent/compat/filter.rb +68 -0
  67. data/lib/fluent/compat/formatter.rb +111 -0
  68. data/lib/fluent/compat/formatter_utils.rb +85 -0
  69. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  70. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  71. data/lib/fluent/compat/input.rb +59 -0
  72. data/lib/fluent/compat/output.rb +728 -0
  73. data/lib/fluent/compat/output_chain.rb +60 -0
  74. data/lib/fluent/compat/parser.rb +310 -0
  75. data/lib/fluent/compat/parser_utils.rb +40 -0
  76. data/lib/fluent/compat/propagate_default.rb +62 -0
  77. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  78. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  79. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  80. data/lib/fluent/compat/socket_util.rb +165 -0
  81. data/lib/fluent/compat/string_util.rb +34 -0
  82. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  83. data/lib/fluent/compat/type_converter.rb +90 -0
  84. data/lib/fluent/config.rb +56 -0
  85. data/lib/fluent/config/basic_parser.rb +123 -0
  86. data/lib/fluent/config/configure_proxy.rb +418 -0
  87. data/lib/fluent/config/dsl.rb +149 -0
  88. data/lib/fluent/config/element.rb +218 -0
  89. data/lib/fluent/config/error.rb +26 -0
  90. data/lib/fluent/config/literal_parser.rb +251 -0
  91. data/lib/fluent/config/parser.rb +107 -0
  92. data/lib/fluent/config/section.rb +223 -0
  93. data/lib/fluent/config/types.rb +136 -0
  94. data/lib/fluent/config/v1_parser.rb +190 -0
  95. data/lib/fluent/configurable.rb +200 -0
  96. data/lib/fluent/daemon.rb +15 -0
  97. data/lib/fluent/engine.rb +266 -0
  98. data/lib/fluent/env.rb +28 -0
  99. data/lib/fluent/error.rb +30 -0
  100. data/lib/fluent/event.rb +334 -0
  101. data/lib/fluent/event_router.rb +269 -0
  102. data/lib/fluent/filter.rb +21 -0
  103. data/lib/fluent/formatter.rb +23 -0
  104. data/lib/fluent/input.rb +21 -0
  105. data/lib/fluent/label.rb +46 -0
  106. data/lib/fluent/load.rb +35 -0
  107. data/lib/fluent/log.rb +546 -0
  108. data/lib/fluent/match.rb +178 -0
  109. data/lib/fluent/mixin.rb +31 -0
  110. data/lib/fluent/msgpack_factory.rb +62 -0
  111. data/lib/fluent/output.rb +29 -0
  112. data/lib/fluent/output_chain.rb +23 -0
  113. data/lib/fluent/parser.rb +23 -0
  114. data/lib/fluent/plugin.rb +183 -0
  115. data/lib/fluent/plugin/bare_output.rb +63 -0
  116. data/lib/fluent/plugin/base.rb +165 -0
  117. data/lib/fluent/plugin/buf_file.rb +184 -0
  118. data/lib/fluent/plugin/buf_memory.rb +34 -0
  119. data/lib/fluent/plugin/buffer.rb +617 -0
  120. data/lib/fluent/plugin/buffer/chunk.rb +221 -0
  121. data/lib/fluent/plugin/buffer/file_chunk.rb +364 -0
  122. data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
  123. data/lib/fluent/plugin/compressable.rb +92 -0
  124. data/lib/fluent/plugin/exec_util.rb +22 -0
  125. data/lib/fluent/plugin/file_util.rb +22 -0
  126. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  127. data/lib/fluent/plugin/filter.rb +93 -0
  128. data/lib/fluent/plugin/filter_grep.rb +75 -0
  129. data/lib/fluent/plugin/filter_parser.rb +119 -0
  130. data/lib/fluent/plugin/filter_record_transformer.rb +322 -0
  131. data/lib/fluent/plugin/filter_stdout.rb +53 -0
  132. data/lib/fluent/plugin/formatter.rb +50 -0
  133. data/lib/fluent/plugin/formatter_csv.rb +52 -0
  134. data/lib/fluent/plugin/formatter_hash.rb +33 -0
  135. data/lib/fluent/plugin/formatter_json.rb +55 -0
  136. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  137. data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
  138. data/lib/fluent/plugin/formatter_out_file.rb +51 -0
  139. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  140. data/lib/fluent/plugin/formatter_stdout.rb +75 -0
  141. data/lib/fluent/plugin/formatter_tsv.rb +34 -0
  142. data/lib/fluent/plugin/in_debug_agent.rb +64 -0
  143. data/lib/fluent/plugin/in_dummy.rb +139 -0
  144. data/lib/fluent/plugin/in_exec.rb +108 -0
  145. data/lib/fluent/plugin/in_forward.rb +455 -0
  146. data/lib/fluent/plugin/in_gc_stat.rb +56 -0
  147. data/lib/fluent/plugin/in_http.rb +433 -0
  148. data/lib/fluent/plugin/in_monitor_agent.rb +448 -0
  149. data/lib/fluent/plugin/in_object_space.rb +93 -0
  150. data/lib/fluent/plugin/in_syslog.rb +209 -0
  151. data/lib/fluent/plugin/in_tail.rb +905 -0
  152. data/lib/fluent/plugin/in_tcp.rb +85 -0
  153. data/lib/fluent/plugin/in_udp.rb +81 -0
  154. data/lib/fluent/plugin/in_unix.rb +201 -0
  155. data/lib/fluent/plugin/input.rb +37 -0
  156. data/lib/fluent/plugin/multi_output.rb +157 -0
  157. data/lib/fluent/plugin/out_copy.rb +46 -0
  158. data/lib/fluent/plugin/out_exec.rb +105 -0
  159. data/lib/fluent/plugin/out_exec_filter.rb +317 -0
  160. data/lib/fluent/plugin/out_file.rb +302 -0
  161. data/lib/fluent/plugin/out_forward.rb +912 -0
  162. data/lib/fluent/plugin/out_null.rb +74 -0
  163. data/lib/fluent/plugin/out_relabel.rb +32 -0
  164. data/lib/fluent/plugin/out_roundrobin.rb +84 -0
  165. data/lib/fluent/plugin/out_secondary_file.rb +133 -0
  166. data/lib/fluent/plugin/out_stdout.rb +75 -0
  167. data/lib/fluent/plugin/out_stream.rb +130 -0
  168. data/lib/fluent/plugin/output.rb +1291 -0
  169. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  170. data/lib/fluent/plugin/parser.rb +191 -0
  171. data/lib/fluent/plugin/parser_apache.rb +28 -0
  172. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  173. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  174. data/lib/fluent/plugin/parser_csv.rb +39 -0
  175. data/lib/fluent/plugin/parser_json.rb +81 -0
  176. data/lib/fluent/plugin/parser_ltsv.rb +42 -0
  177. data/lib/fluent/plugin/parser_msgpack.rb +50 -0
  178. data/lib/fluent/plugin/parser_multiline.rb +105 -0
  179. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  180. data/lib/fluent/plugin/parser_none.rb +36 -0
  181. data/lib/fluent/plugin/parser_regexp.rb +63 -0
  182. data/lib/fluent/plugin/parser_syslog.rb +121 -0
  183. data/lib/fluent/plugin/parser_tsv.rb +42 -0
  184. data/lib/fluent/plugin/socket_util.rb +22 -0
  185. data/lib/fluent/plugin/storage.rb +84 -0
  186. data/lib/fluent/plugin/storage_local.rb +159 -0
  187. data/lib/fluent/plugin/string_util.rb +22 -0
  188. data/lib/fluent/plugin_helper.rb +70 -0
  189. data/lib/fluent/plugin_helper/cert_option.rb +159 -0
  190. data/lib/fluent/plugin_helper/child_process.rb +364 -0
  191. data/lib/fluent/plugin_helper/compat_parameters.rb +331 -0
  192. data/lib/fluent/plugin_helper/event_emitter.rb +93 -0
  193. data/lib/fluent/plugin_helper/event_loop.rb +161 -0
  194. data/lib/fluent/plugin_helper/extract.rb +104 -0
  195. data/lib/fluent/plugin_helper/formatter.rb +147 -0
  196. data/lib/fluent/plugin_helper/inject.rb +151 -0
  197. data/lib/fluent/plugin_helper/parser.rb +147 -0
  198. data/lib/fluent/plugin_helper/retry_state.rb +201 -0
  199. data/lib/fluent/plugin_helper/server.rb +738 -0
  200. data/lib/fluent/plugin_helper/socket.rb +241 -0
  201. data/lib/fluent/plugin_helper/socket_option.rb +69 -0
  202. data/lib/fluent/plugin_helper/storage.rb +349 -0
  203. data/lib/fluent/plugin_helper/thread.rb +179 -0
  204. data/lib/fluent/plugin_helper/timer.rb +91 -0
  205. data/lib/fluent/plugin_id.rb +80 -0
  206. data/lib/fluent/process.rb +22 -0
  207. data/lib/fluent/registry.rb +116 -0
  208. data/lib/fluent/root_agent.rb +323 -0
  209. data/lib/fluent/rpc.rb +94 -0
  210. data/lib/fluent/supervisor.rb +741 -0
  211. data/lib/fluent/system_config.rb +159 -0
  212. data/lib/fluent/test.rb +58 -0
  213. data/lib/fluent/test/base.rb +78 -0
  214. data/lib/fluent/test/driver/base.rb +224 -0
  215. data/lib/fluent/test/driver/base_owned.rb +70 -0
  216. data/lib/fluent/test/driver/base_owner.rb +135 -0
  217. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  218. data/lib/fluent/test/driver/filter.rb +57 -0
  219. data/lib/fluent/test/driver/formatter.rb +30 -0
  220. data/lib/fluent/test/driver/input.rb +31 -0
  221. data/lib/fluent/test/driver/multi_output.rb +53 -0
  222. data/lib/fluent/test/driver/output.rb +102 -0
  223. data/lib/fluent/test/driver/parser.rb +30 -0
  224. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  225. data/lib/fluent/test/filter_test.rb +77 -0
  226. data/lib/fluent/test/formatter_test.rb +65 -0
  227. data/lib/fluent/test/helpers.rb +134 -0
  228. data/lib/fluent/test/input_test.rb +174 -0
  229. data/lib/fluent/test/log.rb +79 -0
  230. data/lib/fluent/test/output_test.rb +156 -0
  231. data/lib/fluent/test/parser_test.rb +70 -0
  232. data/lib/fluent/test/startup_shutdown.rb +46 -0
  233. data/lib/fluent/time.rb +412 -0
  234. data/lib/fluent/timezone.rb +133 -0
  235. data/lib/fluent/unique_id.rb +39 -0
  236. data/lib/fluent/version.rb +21 -0
  237. data/lib/fluent/winsvc.rb +71 -0
  238. data/templates/new_gem/Gemfile +3 -0
  239. data/templates/new_gem/README.md.erb +43 -0
  240. data/templates/new_gem/Rakefile +13 -0
  241. data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
  242. data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
  243. data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
  244. data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
  245. data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
  246. data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
  247. data/templates/new_gem/test/helper.rb.erb +8 -0
  248. data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
  249. data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
  250. data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
  251. data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
  252. data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
  253. data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
  254. data/templates/plugin_config_formatter/param.md.erb +34 -0
  255. data/templates/plugin_config_formatter/section.md.erb +12 -0
  256. data/test/command/test_binlog_reader.rb +346 -0
  257. data/test/command/test_fluentd.rb +618 -0
  258. data/test/command/test_plugin_config_formatter.rb +275 -0
  259. data/test/command/test_plugin_generator.rb +66 -0
  260. data/test/compat/test_calls_super.rb +166 -0
  261. data/test/compat/test_parser.rb +92 -0
  262. data/test/config/assertions.rb +42 -0
  263. data/test/config/test_config_parser.rb +513 -0
  264. data/test/config/test_configurable.rb +1587 -0
  265. data/test/config/test_configure_proxy.rb +566 -0
  266. data/test/config/test_dsl.rb +415 -0
  267. data/test/config/test_element.rb +403 -0
  268. data/test/config/test_literal_parser.rb +297 -0
  269. data/test/config/test_section.rb +184 -0
  270. data/test/config/test_system_config.rb +168 -0
  271. data/test/config/test_types.rb +191 -0
  272. data/test/helper.rb +153 -0
  273. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  274. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  275. data/test/plugin/data/2010/01/20100102.log +0 -0
  276. data/test/plugin/data/log/bar +0 -0
  277. data/test/plugin/data/log/foo/bar.log +0 -0
  278. data/test/plugin/data/log/foo/bar2 +0 -0
  279. data/test/plugin/data/log/test.log +0 -0
  280. data/test/plugin/test_bare_output.rb +118 -0
  281. data/test/plugin/test_base.rb +115 -0
  282. data/test/plugin/test_buf_file.rb +843 -0
  283. data/test/plugin/test_buf_memory.rb +42 -0
  284. data/test/plugin/test_buffer.rb +1220 -0
  285. data/test/plugin/test_buffer_chunk.rb +198 -0
  286. data/test/plugin/test_buffer_file_chunk.rb +844 -0
  287. data/test/plugin/test_buffer_memory_chunk.rb +338 -0
  288. data/test/plugin/test_compressable.rb +84 -0
  289. data/test/plugin/test_file_util.rb +96 -0
  290. data/test/plugin/test_filter.rb +357 -0
  291. data/test/plugin/test_filter_grep.rb +119 -0
  292. data/test/plugin/test_filter_parser.rb +700 -0
  293. data/test/plugin/test_filter_record_transformer.rb +556 -0
  294. data/test/plugin/test_filter_stdout.rb +202 -0
  295. data/test/plugin/test_formatter_csv.rb +111 -0
  296. data/test/plugin/test_formatter_hash.rb +35 -0
  297. data/test/plugin/test_formatter_json.rb +51 -0
  298. data/test/plugin/test_formatter_ltsv.rb +59 -0
  299. data/test/plugin/test_formatter_msgpack.rb +28 -0
  300. data/test/plugin/test_formatter_out_file.rb +95 -0
  301. data/test/plugin/test_formatter_single_value.rb +38 -0
  302. data/test/plugin/test_in_debug_agent.rb +28 -0
  303. data/test/plugin/test_in_dummy.rb +192 -0
  304. data/test/plugin/test_in_exec.rb +245 -0
  305. data/test/plugin/test_in_forward.rb +1120 -0
  306. data/test/plugin/test_in_gc_stat.rb +39 -0
  307. data/test/plugin/test_in_http.rb +588 -0
  308. data/test/plugin/test_in_monitor_agent.rb +516 -0
  309. data/test/plugin/test_in_object_space.rb +64 -0
  310. data/test/plugin/test_in_syslog.rb +271 -0
  311. data/test/plugin/test_in_tail.rb +1216 -0
  312. data/test/plugin/test_in_tcp.rb +118 -0
  313. data/test/plugin/test_in_udp.rb +152 -0
  314. data/test/plugin/test_in_unix.rb +126 -0
  315. data/test/plugin/test_input.rb +126 -0
  316. data/test/plugin/test_multi_output.rb +180 -0
  317. data/test/plugin/test_out_copy.rb +160 -0
  318. data/test/plugin/test_out_exec.rb +310 -0
  319. data/test/plugin/test_out_exec_filter.rb +613 -0
  320. data/test/plugin/test_out_file.rb +873 -0
  321. data/test/plugin/test_out_forward.rb +685 -0
  322. data/test/plugin/test_out_null.rb +105 -0
  323. data/test/plugin/test_out_relabel.rb +28 -0
  324. data/test/plugin/test_out_roundrobin.rb +146 -0
  325. data/test/plugin/test_out_secondary_file.rb +442 -0
  326. data/test/plugin/test_out_stdout.rb +170 -0
  327. data/test/plugin/test_out_stream.rb +93 -0
  328. data/test/plugin/test_output.rb +870 -0
  329. data/test/plugin/test_output_as_buffered.rb +1932 -0
  330. data/test/plugin/test_output_as_buffered_compress.rb +165 -0
  331. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  332. data/test/plugin/test_output_as_buffered_retries.rb +839 -0
  333. data/test/plugin/test_output_as_buffered_secondary.rb +877 -0
  334. data/test/plugin/test_output_as_standard.rb +374 -0
  335. data/test/plugin/test_owned_by.rb +35 -0
  336. data/test/plugin/test_parser.rb +359 -0
  337. data/test/plugin/test_parser_apache.rb +42 -0
  338. data/test/plugin/test_parser_apache2.rb +46 -0
  339. data/test/plugin/test_parser_apache_error.rb +45 -0
  340. data/test/plugin/test_parser_csv.rb +103 -0
  341. data/test/plugin/test_parser_json.rb +114 -0
  342. data/test/plugin/test_parser_labeled_tsv.rb +128 -0
  343. data/test/plugin/test_parser_multiline.rb +100 -0
  344. data/test/plugin/test_parser_nginx.rb +48 -0
  345. data/test/plugin/test_parser_none.rb +52 -0
  346. data/test/plugin/test_parser_regexp.rb +281 -0
  347. data/test/plugin/test_parser_syslog.rb +242 -0
  348. data/test/plugin/test_parser_tsv.rb +122 -0
  349. data/test/plugin/test_storage.rb +167 -0
  350. data/test/plugin/test_storage_local.rb +335 -0
  351. data/test/plugin/test_string_util.rb +26 -0
  352. data/test/plugin_helper/test_child_process.rb +794 -0
  353. data/test/plugin_helper/test_compat_parameters.rb +331 -0
  354. data/test/plugin_helper/test_event_emitter.rb +51 -0
  355. data/test/plugin_helper/test_event_loop.rb +52 -0
  356. data/test/plugin_helper/test_extract.rb +194 -0
  357. data/test/plugin_helper/test_formatter.rb +255 -0
  358. data/test/plugin_helper/test_inject.rb +519 -0
  359. data/test/plugin_helper/test_parser.rb +264 -0
  360. data/test/plugin_helper/test_retry_state.rb +422 -0
  361. data/test/plugin_helper/test_server.rb +1677 -0
  362. data/test/plugin_helper/test_storage.rb +542 -0
  363. data/test/plugin_helper/test_thread.rb +164 -0
  364. data/test/plugin_helper/test_timer.rb +132 -0
  365. data/test/scripts/exec_script.rb +32 -0
  366. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
  367. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
  368. data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
  369. data/test/scripts/fluent/plugin/out_test.rb +81 -0
  370. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  371. data/test/scripts/fluent/plugin/parser_known.rb +4 -0
  372. data/test/test_clock.rb +164 -0
  373. data/test/test_config.rb +179 -0
  374. data/test/test_configdsl.rb +148 -0
  375. data/test/test_event.rb +515 -0
  376. data/test/test_event_router.rb +331 -0
  377. data/test/test_event_time.rb +186 -0
  378. data/test/test_filter.rb +121 -0
  379. data/test/test_formatter.rb +312 -0
  380. data/test/test_input.rb +31 -0
  381. data/test/test_log.rb +828 -0
  382. data/test/test_match.rb +137 -0
  383. data/test/test_mixin.rb +351 -0
  384. data/test/test_output.rb +273 -0
  385. data/test/test_plugin.rb +251 -0
  386. data/test/test_plugin_classes.rb +253 -0
  387. data/test/test_plugin_helper.rb +81 -0
  388. data/test/test_plugin_id.rb +101 -0
  389. data/test/test_process.rb +14 -0
  390. data/test/test_root_agent.rb +611 -0
  391. data/test/test_supervisor.rb +373 -0
  392. data/test/test_test_drivers.rb +135 -0
  393. data/test/test_time_formatter.rb +282 -0
  394. data/test/test_time_parser.rb +211 -0
  395. data/test/test_unique_id.rb +47 -0
  396. metadata +898 -0
data/test/plugin/test_output_as_buffered_compress.rb
@@ -0,0 +1,165 @@
+ require_relative '../helper'
+ require 'fluent/plugin/output'
+ require 'fluent/plugin/buffer'
+ require 'fluent/plugin/compressable'
+ require 'fluent/event'
+
+ require 'timeout'
+
+ module FluentPluginOutputAsBufferedCompressTest
+ class DummyBareOutput < Fluent::Plugin::Output
+ def register(name, &block)
+ instance_variable_set("@#{name}", block)
+ end
+ end
+
+ class DummyAsyncOutput < DummyBareOutput
+ def initialize
+ super
+ @format = @write = nil
+ end
+ def write(chunk)
+ @write ? @write.call(chunk) : nil
+ end
+ end
+
+ class DummyAsyncOutputWithFormat < DummyBareOutput
+ def initialize
+ super
+ @format = nil
+ end
+ def write(chunk)
+ @write ? @write.call(chunk) : nil
+ end
+ def format(tag, time, record)
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
+ end
+ end
+ end
+
+ class BufferedOutputCompressTest < Test::Unit::TestCase
+ include Fluent::Plugin::Compressable
+
+ def create_output(type=:async)
+ case type
+ when :async then FluentPluginOutputAsBufferedCompressTest::DummyAsyncOutput.new
+ when :async_with_format then FluentPluginOutputAsBufferedCompressTest::DummyAsyncOutputWithFormat.new
+ else
+ raise ArgumentError, "unknown type: #{type}"
+ end
+ end
+
+ def waiting(seconds)
+ begin
+ Timeout.timeout(seconds) do
+ yield
+ end
+ rescue Timeout::Error
+ STDERR.print(*@i.log.out.logs)
+ raise
+ end
+ end
+
+ def dummy_event_stream
+ Fluent::ArrayEventStream.new(
+ [
+ [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
+ [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
+ [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
+ ]
+ )
+ end
+
+ TMP_DIR = File.expand_path('../../tmp/test_output_as_buffered_compress', __FILE__)
+
+ setup do
+ FileUtils.rm_r TMP_DIR rescue nil
+ FileUtils.mkdir_p TMP_DIR
+ end
+
+ teardown do
+ if @i
+ @i.stop unless @i.stopped?
+ @i.before_shutdown unless @i.before_shutdown?
+ @i.shutdown unless @i.shutdown?
+ @i.after_shutdown unless @i.after_shutdown?
+ @i.close unless @i.closed?
+ @i.terminate unless @i.terminated?
+ end
+ end
+
+ data(
+ handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+ handle_stream_with_standard_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+ handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ handle_stream_with_standard_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ )
+ test 'call a standard format when output plugin adds data to chunk' do |buffer_config|
+ @i = create_output(:async)
+ @i.configure(config_element('ROOT','', {}, [buffer_config]))
+ @i.start
+ @i.after_start
+
+ io = StringIO.new
+ es = dummy_event_stream
+ expected = es.map { |e| e }
+ compressed_data = ''
+
+ assert_equal :gzip, @i.buffer.compress
+
+ @i.register(:write) do |c|
+ compressed_data = c.instance_variable_get(:@chunk)
+ if compressed_data.is_a?(File)
+ compressed_data.seek(0, IO::SEEK_SET)
+ compressed_data = compressed_data.read
+ end
+ c.write_to(io)
+ end
+
+ @i.emit_events('tag', es)
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4) { Thread.pass until io.size > 0 }
+
+ assert_equal expected, Fluent::MessagePackEventStream.new(decompress(compressed_data)).map { |t, r| [t, r] }
+ assert_equal expected, Fluent::MessagePackEventStream.new(io.string).map { |t, r| [t, r] }
+ end
+
+ data(
+ handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+ handle_stream_with_custom_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+ handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ handle_stream_with_custom_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ )
+ test 'call a custom format when output plugin adds data to chunk' do |buffer_config|
+ @i = create_output(:async_with_format)
+ @i.configure(config_element('ROOT','', {}, [buffer_config]))
+ @i.start
+ @i.after_start
+
+ io = StringIO.new
+ es = dummy_event_stream
+ expected = es.map { |e| "#{e[1]}\n" }.join # e[1] is record
+ compressed_data = ''
+
+ assert_equal :gzip, @i.buffer.compress
+
+ @i.register(:format) { |tag, time, record| "#{record}\n" }
+ @i.register(:write) { |c|
+ compressed_data = c.instance_variable_get(:@chunk)
+ if compressed_data.is_a?(File)
+ compressed_data.seek(0, IO::SEEK_SET)
+ compressed_data = compressed_data.read
+ end
+ c.write_to(io)
+ }
+
+ @i.emit_events('tag', es)
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4) { sleep 0.1 until io.size > 0 }
+
+ assert_equal expected, decompress(compressed_data)
+ assert_equal expected, io.string
+ end
+ end
data/test/plugin/test_output_as_buffered_overflow.rb
@@ -0,0 +1,250 @@
+ require_relative '../helper'
+ require 'fluent/plugin/output'
+ require 'fluent/plugin/buffer'
+ require 'fluent/event'
+
+ require 'json'
+ require 'time'
+ require 'timeout'
+ require 'timecop'
+
+ module FluentPluginOutputAsBufferedOverflowTest
+ class DummyBareOutput < Fluent::Plugin::Output
+ def register(name, &block)
+ instance_variable_set("@#{name}", block)
+ end
+ end
+ class DummyAsyncOutput < DummyBareOutput
+ def initialize
+ super
+ @format = @write = nil
+ end
+ def format(tag, time, record)
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
+ end
+ def write(chunk)
+ @write ? @write.call(chunk) : nil
+ end
+ end
+ end
+
+ class BufferedOutputOverflowTest < Test::Unit::TestCase
+ def create_output
+ FluentPluginOutputAsBufferedOverflowTest::DummyAsyncOutput.new
+ end
+ def create_metadata(timekey: nil, tag: nil, variables: nil)
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
+ end
+ def waiting(seconds)
+ begin
+ Timeout.timeout(seconds) do
+ yield
+ end
+ rescue Timeout::Error
+ logs = @i.log.out.logs
+ STDERR.print(*logs)
+ raise
+ end
+ end
+
+ teardown do
+ if @i
+ @i.stop unless @i.stopped?
+ @i.before_shutdown unless @i.before_shutdown?
+ @i.shutdown unless @i.shutdown?
+ @i.after_shutdown unless @i.after_shutdown?
+ @i.close unless @i.closed?
+ @i.terminate unless @i.terminated?
+ end
+ Timecop.return
+ end
+
+ sub_test_case 'buffered output with default configuration (throws exception for buffer overflow)' do
+ setup do
+ hash = {
+ 'flush_mode' => 'lazy',
+ 'flush_thread_burst_interval' => 0.01,
+ 'chunk_limit_size' => 1024,
+ 'total_limit_size' => 4096,
+ }
+ @i = create_output()
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','tag',hash)]))
+ @i.start
+ @i.after_start
+ end
+
+ test '#emit_events raises error when buffer is full' do
+ @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
+
+ es = Fluent::ArrayEventStream.new([
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ ])
+
+ 8.times do |i|
+ @i.emit_events("tag#{i}", es)
+ end
+
+ assert !@i.buffer.storable?
+
+ assert_raise(Fluent::Plugin::Buffer::BufferOverflowError) do
+ @i.emit_events("tag9", es)
+ end
+ logs = @i.log.out.logs
+ assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
+ end
+ end
+
+ sub_test_case 'buffered output configured with "overflow_action block"' do
+ setup do
+ hash = {
+ 'flush_mode' => 'lazy',
+ 'flush_thread_burst_interval' => 0.01,
+ 'chunk_limit_size' => 1024,
+ 'total_limit_size' => 4096,
+ 'overflow_action' => "block",
+ }
+ @i = create_output()
+ @i.configure(config_element('ROOT','',{'log_level' => 'debug'},[config_element('buffer','tag',hash)]))
+ @i.start
+ @i.after_start
+ end
+
+ test '#emit_events blocks until any queues are flushed' do
+ failing = true
+ flushed_chunks = []
+ @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
+ @i.register(:write) do |chunk|
+ if failing
+ raise "blocking"
+ end
+ flushed_chunks << chunk
+ end
+
+ es = Fluent::ArrayEventStream.new([
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ ])
+
+ 4.times do |i|
+ @i.emit_events("tag#{i}", es)
+ end
+
+ assert !@i.buffer.storable?
+
+ Thread.new do
+ sleep 3
+ failing = false
+ end
+
+ assert_nothing_raised do
+ @i.emit_events("tag9", es)
+ end
+
+ assert !failing
+ assert{ flushed_chunks.size > 0 }
+
+ logs = @i.log.out.logs
+ assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
+ assert{ logs.any?{|line| line.include?("buffer.write is now blocking") } }
+ assert{ logs.any?{|line| line.include?("retrying buffer.write after blocked operation") } }
+ end
+ end
+
+ sub_test_case 'buffered output configured with "overflow_action drop_oldest_chunk"' do
+ setup do
+ hash = {
+ 'flush_mode' => 'lazy',
+ 'flush_thread_burst_interval' => 0.01,
+ 'chunk_limit_size' => 1024,
+ 'total_limit_size' => 4096,
+ 'overflow_action' => "drop_oldest_chunk",
+ }
+ @i = create_output()
+ @i.configure(config_element('ROOT','',{'log_level' => 'debug'},[config_element('buffer','tag',hash)]))
+ @i.start
+ @i.after_start
+ end
+
+ test '#emit_events will success by dropping oldest chunk' do
+ failing = true
+ flushed_chunks = []
+ @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
+ @i.register(:write) do |chunk|
+ if failing
+ raise "blocking"
+ end
+ flushed_chunks << chunk
+ end
+
+ es = Fluent::ArrayEventStream.new([
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ ])
+
+ 4.times do |i|
+ @i.emit_events("tag#{i}", es)
+ end
+
+ assert !@i.buffer.storable?
+
+ assert{ @i.buffer.queue[0].metadata.tag == "tag0" }
+ assert{ @i.buffer.queue[1].metadata.tag == "tag1" }
+
+ assert_nothing_raised do
+ @i.emit_events("tag9", es)
+ end
+
+ assert failing
+ assert{ flushed_chunks.size == 0 }
+
+ assert{ @i.buffer.queue[0].metadata.tag == "tag1" }
+
+ logs = @i.log.out.logs
+ assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
+ assert{ logs.any?{|line| line.include?("dropping oldest chunk to make space after buffer overflow") } }
+ end
+
+ test '#emit_events raises OverflowError if all buffer spaces are used by staged chunks' do
+ @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
+
+ es = Fluent::ArrayEventStream.new([
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ [event_time(), {"message" => "test"}],
+ ])
+
+ 8.times do |i|
+ @i.emit_events("tag#{i}", es)
+ end
+
+ assert !@i.buffer.storable?
+
+ assert{ @i.buffer.queue.size == 0 }
+ assert{ @i.buffer.stage.size == 8 }
+
+ assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
+ @i.emit_events("tag9", es)
+ end
+
+ logs = @i.log.out.logs
+ assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
+ assert{ logs.any?{|line| line.include?("no queued chunks to be dropped for drop_oldest_chunk") } }
+ end
+ end
+ end
data/test/plugin/test_output_as_buffered_retries.rb
@@ -0,0 +1,839 @@
+ require_relative '../helper'
+ require 'fluent/plugin/output'
+ require 'fluent/plugin/buffer'
+ require 'fluent/event'
+
+ require 'json'
+ require 'time'
+ require 'timeout'
+ require 'timecop'
+
+ module FluentPluginOutputAsBufferedRetryTest
+ class DummyBareOutput < Fluent::Plugin::Output
+ def register(name, &block)
+ instance_variable_set("@#{name}", block)
+ end
+ end
+ class DummySyncOutput < DummyBareOutput
+ def initialize
+ super
+ @process = nil
+ end
+ def process(tag, es)
+ @process ? @process.call(tag, es) : nil
+ end
+ end
+ class DummyFullFeatureOutput < DummyBareOutput
+ def initialize
+ super
+ @prefer_buffered_processing = nil
+ @prefer_delayed_commit = nil
+ @process = nil
+ @format = nil
+ @write = nil
+ @try_write = nil
+ end
+ def prefer_buffered_processing
+ @prefer_buffered_processing ? @prefer_buffered_processing.call : false
+ end
+ def prefer_delayed_commit
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
+ end
+ def process(tag, es)
+ @process ? @process.call(tag, es) : nil
+ end
+ def format(tag, time, record)
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
+ end
+ def write(chunk)
+ @write ? @write.call(chunk) : nil
+ end
+ def try_write(chunk)
+ @try_write ? @try_write.call(chunk) : nil
+ end
+ end
+ class DummyFullFeatureOutput2 < DummyFullFeatureOutput
+ def prefer_buffered_processing; true; end
+ def prefer_delayed_commit; super; end
+ def format(tag, time, record); super; end
+ def write(chunk); super; end
+ def try_write(chunk); super; end
+ end
+ end
+
+ class BufferedOutputRetryTest < Test::Unit::TestCase
+ def create_output(type=:full)
+ case type
+ when :bare then FluentPluginOutputAsBufferedRetryTest::DummyBareOutput.new
+ when :sync then FluentPluginOutputAsBufferedRetryTest::DummySyncOutput.new
+ when :full then FluentPluginOutputAsBufferedRetryTest::DummyFullFeatureOutput.new
+ else
+ raise ArgumentError, "unknown type: #{type}"
+ end
+ end
+ def create_metadata(timekey: nil, tag: nil, variables: nil)
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
+ end
+ def waiting(seconds)
+ begin
+ Timeout.timeout(seconds) do
+ yield
+ end
+ rescue Timeout::Error
+ STDERR.print(*@i.log.out.logs)
+ raise
+ end
+ end
+ def dummy_event_stream
+ Fluent::ArrayEventStream.new([
+ [ event_time('2016-04-13 18:33:00'), {"name" => "moris", "age" => 36, "message" => "data1"} ],
+ [ event_time('2016-04-13 18:33:13'), {"name" => "moris", "age" => 36, "message" => "data2"} ],
+ [ event_time('2016-04-13 18:33:32'), {"name" => "moris", "age" => 36, "message" => "data3"} ],
+ ])
+ end
+ def get_log_time(msg, logs)
+ log_time = nil
+ log = logs.select{|l| l.include?(msg) }.first
+ if log && /^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4}) \[error\]/ =~ log
+ log_time = Time.parse($1)
+ end
+ log_time
+ end
+
+ setup do
+ @i = create_output
+ end
+
+ teardown do
+ if @i
+ @i.stop unless @i.stopped?
+ @i.before_shutdown unless @i.before_shutdown?
+ @i.shutdown unless @i.shutdown?
+ @i.after_shutdown unless @i.after_shutdown?
+ @i.close unless @i.closed?
+ @i.terminate unless @i.terminated?
+ end
+ Timecop.return
+ end
+
+ sub_test_case 'buffered output for retries with exponential backoff' do
+ test 'exponential backoff is default strategy for retries' do
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_randomize' => false,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.start
+ @i.after_start
+
+ assert_equal :exponential_backoff, @i.buffer_config.retry_type
+ assert_equal 1, @i.buffer_config.retry_wait
+ assert_equal 2.0, @i.buffer_config.retry_exponential_backoff_base
+ assert !@i.buffer_config.retry_randomize
+
+ now = Time.parse('2016-04-13 18:17:00 -0700')
+ Timecop.freeze( now )
+
+ retry_state = @i.retry_state( @i.buffer_config.retry_randomize )
+ retry_state.step
+ assert_equal 1, (retry_state.next_time - now)
+ retry_state.step
+ assert_equal (1 * (2 ** 1)), (retry_state.next_time - now)
+ retry_state.step
+ assert_equal (1 * (2 ** 2)), (retry_state.next_time - now)
+ retry_state.step
+ assert_equal (1 * (2 ** 3)), (retry_state.next_time - now)
+ end
+
+ test 'does retries correctly when #write fails' do
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_randomize' => false,
+ 'retry_max_interval' => 60 * 60,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:32 -0700')
+ Timecop.freeze( now )
+
+ @i.enqueue_thread_wait
+
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 0 }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ now = @i.next_flush_time
+ Timecop.freeze( now )
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 1 }
+
+ assert{ @i.write_count > 1 }
+ assert{ @i.num_errors > 1 }
+ end
+
+ test 'max retry interval is limited by retry_max_interval' do
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_randomize' => false,
+ 'retry_max_interval' => 60,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:32 -0700')
+ Timecop.freeze( now )
+
+ @i.enqueue_thread_wait
+
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ 10.times do
+ now = @i.next_flush_time
+ Timecop.freeze( now )
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+ assert{ @i.write_count > prev_write_count }
+ assert{ @i.num_errors > prev_num_errors }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+ end
+ # exponential backoff interval: 1 * 2 ** 10 == 1024
+ # but it should be limited by retry_max_interval=60
+ assert_equal 60, (@i.next_flush_time - now)
+ end
+
+ test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
+ written_tags = []
+
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_randomize' => false,
+ 'retry_timeout' => 3600,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:31 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.2", dummy_event_stream())
+
+ assert_equal 0, @i.write_count
+ assert_equal 0, @i.num_errors
+
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+ assert{ @i.buffer.queue.size > 0 }
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ first_failure = @i.retry.start
+
+ 15.times do |i| # large enough
+ now = @i.next_flush_time
+ # p({i: i, now: now, diff: (now - Time.now)})
+ # * if loop count is 12:
+ # {:i=>0, :now=>2016-04-13 18:33:32 -0700, :diff=>1.0}
+ # {:i=>1, :now=>2016-04-13 18:33:34 -0700, :diff=>2.0}
+ # {:i=>2, :now=>2016-04-13 18:33:38 -0700, :diff=>4.0}
+ # {:i=>3, :now=>2016-04-13 18:33:46 -0700, :diff=>8.0}
+ # {:i=>4, :now=>2016-04-13 18:34:02 -0700, :diff=>16.0}
+ # {:i=>5, :now=>2016-04-13 18:34:34 -0700, :diff=>32.0}
+ # {:i=>6, :now=>2016-04-13 18:35:38 -0700, :diff=>64.0}
+ # {:i=>7, :now=>2016-04-13 18:37:46 -0700, :diff=>128.0}
+ # {:i=>8, :now=>2016-04-13 18:42:02 -0700, :diff=>256.0}
+ # {:i=>9, :now=>2016-04-13 18:50:34 -0700, :diff=>512.0}
+ # {:i=>10, :now=>2016-04-13 19:07:38 -0700, :diff=>1024.0}
+ # {:i=>11, :now=>2016-04-13 19:33:31 -0700, :diff=>1553.0} # clear_queue!
+
+ Timecop.freeze( now )
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+ assert{ @i.write_count > prev_write_count }
+ assert{ @i.num_errors > prev_num_errors }
+
+ break if @i.buffer.queue.size == 0
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+ end
+ assert{ now >= first_failure + 3600 }
+
+ assert{ @i.buffer.stage.size == 0 }
+ assert{ written_tags.all?{|t| t == 'test.tag.1' } }
+
+ @i.emit_events("test.tag.3", dummy_event_stream())
+
+ logs = @i.log.out.logs
+ assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") } }
+ end
+
+ test 'output plugin give retries up by retry_max_times, and clear queue in buffer' do
+ written_tags = []
+
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_randomize' => false,
+ 'retry_max_times' => 10,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:31 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.2", dummy_event_stream())
+
+ assert_equal 0, @i.write_count
+ assert_equal 0, @i.num_errors
+
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+ assert{ @i.buffer.queue.size > 0 }
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ _first_failure = @i.retry.start
+
+ chunks = @i.buffer.queue.dup
+
+ 20.times do |i| # large times enough
+ now = @i.next_flush_time
+
+ Timecop.freeze( now )
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+ assert{ @i.write_count > prev_write_count }
+ assert{ @i.num_errors > prev_num_errors }
+
+ break if @i.buffer.queue.size == 0
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+ end
+ assert{ @i.buffer.stage.size == 0 }
+ assert{ written_tags.all?{|t| t == 'test.tag.1' } }
+
+ @i.emit_events("test.tag.3", dummy_event_stream())
+
+ logs = @i.log.out.logs
+ assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }
+
+ assert{ @i.buffer.queue.size == 0 }
+ assert{ @i.buffer.stage.size == 1 }
+ assert{ chunks.all?{|c| c.empty? } }
+ end
+ end
+
+ sub_test_case 'bufferd output for retries with periodical retry' do
+ test 'periodical retries should retry to write in failing status per retry_wait' do
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_type' => :periodic,
+ 'retry_wait' => 3,
+ 'retry_randomize' => false,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:32 -0700')
+ Timecop.freeze( now )
+
+ @i.enqueue_thread_wait
+
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 0 }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ now = @i.next_flush_time
+ Timecop.freeze( now )
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 1 }
+
+ assert{ @i.write_count > 1 }
+ assert{ @i.num_errors > 1 }
+ end
+
+ test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
+ written_tags = []
+
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_type' => :periodic,
+ 'retry_wait' => 30,
+ 'retry_randomize' => false,
+ 'retry_timeout' => 120,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:31 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.2", dummy_event_stream())
+
+ assert_equal 0, @i.write_count
+ assert_equal 0, @i.num_errors
+
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+ assert{ @i.buffer.queue.size > 0 }
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ first_failure = @i.retry.start
+
+ 3.times do |i|
+ now = @i.next_flush_time
+
+ Timecop.freeze( now )
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+ assert{ @i.write_count > prev_write_count }
+ assert{ @i.num_errors > prev_num_errors }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+ end
+
+ assert{ @i.next_flush_time >= first_failure + 120 }
+
+ assert{ @i.buffer.queue.size == 2 }
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+ assert{ @i.buffer.stage.size == 0 }
+
+ assert{ written_tags.all?{|t| t == 'test.tag.1' } }
+
+ chunks = @i.buffer.queue.dup
+
+ @i.emit_events("test.tag.3", dummy_event_stream())
+
+ now = @i.next_flush_time
+ Timecop.freeze( now )
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+ logs = @i.log.out.logs
+
+ target_time = Time.parse("2016-04-13 18:35:31 -0700")
+ target_msg = "[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue."
+ assert{ logs.any?{|l| l.include?(target_msg) } }
+
+ log_time = get_log_time(target_msg, logs)
+ assert_equal target_time.localtime, log_time.localtime
+
+ assert{ @i.buffer.queue.size == 0 }
+ assert{ @i.buffer.stage.size == 1 }
+ assert{ chunks.all?{|c| c.empty? } }
+ end
+
+ test 'retry_max_times can limit maximum times for retries' do
+ written_tags = []
+
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_type' => :periodic,
+ 'retry_wait' => 3,
+ 'retry_randomize' => false,
+ 'retry_max_times' => 10,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:31 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.2", dummy_event_stream())
+
+ assert_equal 0, @i.write_count
+ assert_equal 0, @i.num_errors
+
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+ assert{ @i.buffer.queue.size > 0 }
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ _first_failure = @i.retry.start
+
+ chunks = @i.buffer.queue.dup
+
+ 20.times do |i|
+ now = @i.next_flush_time
+
+ Timecop.freeze( now )
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+ assert{ @i.write_count > prev_write_count }
+ assert{ @i.num_errors > prev_num_errors }
+
+ break if @i.buffer.queue.size == 0
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+ end
+ assert{ @i.buffer.stage.size == 0 }
+ assert{ written_tags.all?{|t| t == 'test.tag.1' } }
+
+
+ @i.emit_events("test.tag.3", dummy_event_stream())
+
+ logs = @i.log.out.logs
+ assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }
+
+ assert{ @i.buffer.queue.size == 0 }
+ assert{ @i.buffer.stage.size == 1 }
+ assert{ chunks.all?{|c| c.empty? } }
+ end
+ end
+
+ sub_test_case 'buffered output configured as retry_forever' do
+ test 'configuration error will be raised if secondary section is configured' do
+ chunk_key = 'tag'
+ hash = {
+ 'retry_forever' => true,
+ 'retry_randomize' => false,
+ }
+ i = create_output()
+ assert_raise Fluent::ConfigError do
+ i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash),config_element('secondary','')]))
+ end
+ end
+
+ test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for exponential backoff' do
+ written_tags = []
+
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_type' => :exponential_backoff,
+ 'retry_forever' => true,
+ 'retry_randomize' => false,
+ 'retry_timeout' => 3600,
+ 'retry_max_times' => 10,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:31 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.2", dummy_event_stream())
+
+ assert_equal 0, @i.write_count
+ assert_equal 0, @i.num_errors
+
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
+
+ assert{ @i.buffer.queue.size > 0 }
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ first_failure = @i.retry.start
+
+ 15.times do |i|
+ now = @i.next_flush_time
+
+ Timecop.freeze( now + 1 )
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ sleep 0.1 until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+ assert{ @i.write_count > prev_write_count }
+ assert{ @i.num_errors > prev_num_errors }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+ end
+
+ assert{ @i.buffer.queue.size == 2 }
+ assert{ @i.retry.steps > 10 }
+ assert{ now > first_failure + 3600 }
+ end
+
+ test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for periodical retries' do
+ written_tags = []
+
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_type' => :periodic,
+ 'retry_forever' => true,
+ 'retry_randomize' => false,
+ 'retry_wait' => 30,
+ 'retry_timeout' => 360,
+ 'retry_max_times' => 10,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:31 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.2", dummy_event_stream())
+
+ assert_equal 0, @i.write_count
+ assert_equal 0, @i.num_errors
+
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ sleep 0.1 until @i.write_count > 0 && @i.num_errors > 0 }
+
+ assert{ @i.buffer.queue.size > 0 }
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+
+ first_failure = @i.retry.start
+
+ 15.times do |i|
+ now = @i.next_flush_time
+
+ Timecop.freeze( now + 1 )
+ @i.enqueue_thread_wait
+ @i.flush_thread_wakeup
+ waiting(4){ sleep 0.1 until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+ assert{ @i.write_count > prev_write_count }
+ assert{ @i.num_errors > prev_num_errors }
+
+ prev_write_count = @i.write_count
+ prev_num_errors = @i.num_errors
+ end
+
+ assert{ @i.buffer.queue.size == 2 }
+ assert{ @i.retry.steps > 10 }
+ assert{ now > first_failure + 360 }
+ end
+ end
+
+ sub_test_case 'buffered output with delayed commit' do
+ test 'does retries correctly when #try_write fails' do
+ chunk_key = 'tag'
+ hash = {
+ 'flush_interval' => 1,
+ 'flush_thread_burst_interval' => 0.1,
+ 'retry_randomize' => false,
+ 'retry_max_interval' => 60 * 60,
+ }
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+ @i.register(:prefer_buffered_processing){ true }
+ @i.register(:prefer_delayed_commit){ true }
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+ @i.register(:try_write){|chunk| raise "yay, your #write must fail" }
+ @i.start
+ @i.after_start
+
+ @i.interrupt_flushes
+
+ now = Time.parse('2016-04-13 18:33:30 -0700')
+ Timecop.freeze( now )
+
+ @i.emit_events("test.tag.1", dummy_event_stream())
+
+ now = Time.parse('2016-04-13 18:33:32 -0700')
+ Timecop.freeze( now )
+
+ @i.enqueue_thread_wait
+
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 0 }
+
+ assert{ @i.write_count > 0 }
+ assert{ @i.num_errors > 0 }
+
+ now = @i.next_flush_time
+ Timecop.freeze( now )
+ @i.flush_thread_wakeup
+ waiting(4){ Thread.pass until @i.write_count > 1 }
+
+ assert{ @i.write_count > 1 }
+ assert{ @i.num_errors > 1 }
+ end
+ end
+ end