fluentd-hubspot 0.14.14.1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (396) hide show
  1. data/.github/ISSUE_TEMPLATE.md +6 -0
  2. data/.gitignore +28 -0
  3. data/.travis.yml +51 -0
  4. data/AUTHORS +2 -0
  5. data/CONTRIBUTING.md +42 -0
  6. data/COPYING +14 -0
  7. data/ChangeLog +593 -0
  8. data/Gemfile +9 -0
  9. data/README.md +76 -0
  10. data/Rakefile +74 -0
  11. data/Vagrantfile +17 -0
  12. data/appveyor.yml +43 -0
  13. data/bin/fluent-binlog-reader +7 -0
  14. data/bin/fluent-debug +5 -0
  15. data/bin/fluent-plugin-config-format +5 -0
  16. data/bin/fluent-plugin-generate +5 -0
  17. data/code-of-conduct.md +3 -0
  18. data/example/copy_roundrobin.conf +39 -0
  19. data/example/filter_stdout.conf +22 -0
  20. data/example/in_dummy_blocks.conf +17 -0
  21. data/example/in_dummy_with_compression.conf +23 -0
  22. data/example/in_forward.conf +14 -0
  23. data/example/in_forward_client.conf +37 -0
  24. data/example/in_forward_shared_key.conf +15 -0
  25. data/example/in_forward_tls.conf +14 -0
  26. data/example/in_forward_users.conf +24 -0
  27. data/example/in_forward_workers.conf +21 -0
  28. data/example/in_http.conf +14 -0
  29. data/example/in_out_forward.conf +17 -0
  30. data/example/in_syslog.conf +15 -0
  31. data/example/in_tail.conf +14 -0
  32. data/example/in_tcp.conf +13 -0
  33. data/example/in_udp.conf +13 -0
  34. data/example/logevents.conf +25 -0
  35. data/example/multi_filters.conf +61 -0
  36. data/example/out_copy.conf +20 -0
  37. data/example/out_exec_filter.conf +42 -0
  38. data/example/out_file.conf +13 -0
  39. data/example/out_forward.conf +35 -0
  40. data/example/out_forward_buf_file.conf +23 -0
  41. data/example/out_forward_client.conf +109 -0
  42. data/example/out_forward_heartbeat_none.conf +16 -0
  43. data/example/out_forward_shared_key.conf +36 -0
  44. data/example/out_forward_tls.conf +18 -0
  45. data/example/out_forward_users.conf +65 -0
  46. data/example/out_null.conf +36 -0
  47. data/example/secondary_file.conf +41 -0
  48. data/example/suppress_config_dump.conf +7 -0
  49. data/example/v0_12_filter.conf +78 -0
  50. data/example/v1_literal_example.conf +36 -0
  51. data/fluent.conf +139 -0
  52. data/fluentd.gemspec +51 -0
  53. data/lib/fluent/agent.rb +163 -0
  54. data/lib/fluent/clock.rb +62 -0
  55. data/lib/fluent/command/binlog_reader.rb +234 -0
  56. data/lib/fluent/command/bundler_injection.rb +45 -0
  57. data/lib/fluent/command/cat.rb +330 -0
  58. data/lib/fluent/command/debug.rb +102 -0
  59. data/lib/fluent/command/fluentd.rb +301 -0
  60. data/lib/fluent/command/plugin_config_formatter.rb +258 -0
  61. data/lib/fluent/command/plugin_generator.rb +301 -0
  62. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  63. data/lib/fluent/compat/detach_process_mixin.rb +25 -0
  64. data/lib/fluent/compat/exec_util.rb +129 -0
  65. data/lib/fluent/compat/file_util.rb +54 -0
  66. data/lib/fluent/compat/filter.rb +68 -0
  67. data/lib/fluent/compat/formatter.rb +111 -0
  68. data/lib/fluent/compat/formatter_utils.rb +85 -0
  69. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  70. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  71. data/lib/fluent/compat/input.rb +59 -0
  72. data/lib/fluent/compat/output.rb +728 -0
  73. data/lib/fluent/compat/output_chain.rb +60 -0
  74. data/lib/fluent/compat/parser.rb +310 -0
  75. data/lib/fluent/compat/parser_utils.rb +40 -0
  76. data/lib/fluent/compat/propagate_default.rb +62 -0
  77. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  78. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  79. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  80. data/lib/fluent/compat/socket_util.rb +165 -0
  81. data/lib/fluent/compat/string_util.rb +34 -0
  82. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  83. data/lib/fluent/compat/type_converter.rb +90 -0
  84. data/lib/fluent/config.rb +56 -0
  85. data/lib/fluent/config/basic_parser.rb +123 -0
  86. data/lib/fluent/config/configure_proxy.rb +418 -0
  87. data/lib/fluent/config/dsl.rb +149 -0
  88. data/lib/fluent/config/element.rb +218 -0
  89. data/lib/fluent/config/error.rb +26 -0
  90. data/lib/fluent/config/literal_parser.rb +251 -0
  91. data/lib/fluent/config/parser.rb +107 -0
  92. data/lib/fluent/config/section.rb +223 -0
  93. data/lib/fluent/config/types.rb +136 -0
  94. data/lib/fluent/config/v1_parser.rb +190 -0
  95. data/lib/fluent/configurable.rb +200 -0
  96. data/lib/fluent/daemon.rb +15 -0
  97. data/lib/fluent/engine.rb +266 -0
  98. data/lib/fluent/env.rb +28 -0
  99. data/lib/fluent/error.rb +30 -0
  100. data/lib/fluent/event.rb +334 -0
  101. data/lib/fluent/event_router.rb +269 -0
  102. data/lib/fluent/filter.rb +21 -0
  103. data/lib/fluent/formatter.rb +23 -0
  104. data/lib/fluent/input.rb +21 -0
  105. data/lib/fluent/label.rb +46 -0
  106. data/lib/fluent/load.rb +35 -0
  107. data/lib/fluent/log.rb +546 -0
  108. data/lib/fluent/match.rb +178 -0
  109. data/lib/fluent/mixin.rb +31 -0
  110. data/lib/fluent/msgpack_factory.rb +62 -0
  111. data/lib/fluent/output.rb +29 -0
  112. data/lib/fluent/output_chain.rb +23 -0
  113. data/lib/fluent/parser.rb +23 -0
  114. data/lib/fluent/plugin.rb +183 -0
  115. data/lib/fluent/plugin/bare_output.rb +63 -0
  116. data/lib/fluent/plugin/base.rb +165 -0
  117. data/lib/fluent/plugin/buf_file.rb +184 -0
  118. data/lib/fluent/plugin/buf_memory.rb +34 -0
  119. data/lib/fluent/plugin/buffer.rb +617 -0
  120. data/lib/fluent/plugin/buffer/chunk.rb +221 -0
  121. data/lib/fluent/plugin/buffer/file_chunk.rb +364 -0
  122. data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
  123. data/lib/fluent/plugin/compressable.rb +92 -0
  124. data/lib/fluent/plugin/exec_util.rb +22 -0
  125. data/lib/fluent/plugin/file_util.rb +22 -0
  126. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  127. data/lib/fluent/plugin/filter.rb +93 -0
  128. data/lib/fluent/plugin/filter_grep.rb +75 -0
  129. data/lib/fluent/plugin/filter_parser.rb +119 -0
  130. data/lib/fluent/plugin/filter_record_transformer.rb +322 -0
  131. data/lib/fluent/plugin/filter_stdout.rb +53 -0
  132. data/lib/fluent/plugin/formatter.rb +50 -0
  133. data/lib/fluent/plugin/formatter_csv.rb +52 -0
  134. data/lib/fluent/plugin/formatter_hash.rb +33 -0
  135. data/lib/fluent/plugin/formatter_json.rb +55 -0
  136. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  137. data/lib/fluent/plugin/formatter_msgpack.rb +33 -0
  138. data/lib/fluent/plugin/formatter_out_file.rb +51 -0
  139. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  140. data/lib/fluent/plugin/formatter_stdout.rb +75 -0
  141. data/lib/fluent/plugin/formatter_tsv.rb +34 -0
  142. data/lib/fluent/plugin/in_debug_agent.rb +64 -0
  143. data/lib/fluent/plugin/in_dummy.rb +139 -0
  144. data/lib/fluent/plugin/in_exec.rb +108 -0
  145. data/lib/fluent/plugin/in_forward.rb +455 -0
  146. data/lib/fluent/plugin/in_gc_stat.rb +56 -0
  147. data/lib/fluent/plugin/in_http.rb +433 -0
  148. data/lib/fluent/plugin/in_monitor_agent.rb +448 -0
  149. data/lib/fluent/plugin/in_object_space.rb +93 -0
  150. data/lib/fluent/plugin/in_syslog.rb +209 -0
  151. data/lib/fluent/plugin/in_tail.rb +905 -0
  152. data/lib/fluent/plugin/in_tcp.rb +85 -0
  153. data/lib/fluent/plugin/in_udp.rb +81 -0
  154. data/lib/fluent/plugin/in_unix.rb +201 -0
  155. data/lib/fluent/plugin/input.rb +37 -0
  156. data/lib/fluent/plugin/multi_output.rb +157 -0
  157. data/lib/fluent/plugin/out_copy.rb +46 -0
  158. data/lib/fluent/plugin/out_exec.rb +105 -0
  159. data/lib/fluent/plugin/out_exec_filter.rb +317 -0
  160. data/lib/fluent/plugin/out_file.rb +302 -0
  161. data/lib/fluent/plugin/out_forward.rb +912 -0
  162. data/lib/fluent/plugin/out_null.rb +74 -0
  163. data/lib/fluent/plugin/out_relabel.rb +32 -0
  164. data/lib/fluent/plugin/out_roundrobin.rb +84 -0
  165. data/lib/fluent/plugin/out_secondary_file.rb +133 -0
  166. data/lib/fluent/plugin/out_stdout.rb +75 -0
  167. data/lib/fluent/plugin/out_stream.rb +130 -0
  168. data/lib/fluent/plugin/output.rb +1291 -0
  169. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  170. data/lib/fluent/plugin/parser.rb +191 -0
  171. data/lib/fluent/plugin/parser_apache.rb +28 -0
  172. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  173. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  174. data/lib/fluent/plugin/parser_csv.rb +39 -0
  175. data/lib/fluent/plugin/parser_json.rb +81 -0
  176. data/lib/fluent/plugin/parser_ltsv.rb +42 -0
  177. data/lib/fluent/plugin/parser_msgpack.rb +50 -0
  178. data/lib/fluent/plugin/parser_multiline.rb +105 -0
  179. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  180. data/lib/fluent/plugin/parser_none.rb +36 -0
  181. data/lib/fluent/plugin/parser_regexp.rb +63 -0
  182. data/lib/fluent/plugin/parser_syslog.rb +121 -0
  183. data/lib/fluent/plugin/parser_tsv.rb +42 -0
  184. data/lib/fluent/plugin/socket_util.rb +22 -0
  185. data/lib/fluent/plugin/storage.rb +84 -0
  186. data/lib/fluent/plugin/storage_local.rb +159 -0
  187. data/lib/fluent/plugin/string_util.rb +22 -0
  188. data/lib/fluent/plugin_helper.rb +70 -0
  189. data/lib/fluent/plugin_helper/cert_option.rb +159 -0
  190. data/lib/fluent/plugin_helper/child_process.rb +364 -0
  191. data/lib/fluent/plugin_helper/compat_parameters.rb +331 -0
  192. data/lib/fluent/plugin_helper/event_emitter.rb +93 -0
  193. data/lib/fluent/plugin_helper/event_loop.rb +161 -0
  194. data/lib/fluent/plugin_helper/extract.rb +104 -0
  195. data/lib/fluent/plugin_helper/formatter.rb +147 -0
  196. data/lib/fluent/plugin_helper/inject.rb +151 -0
  197. data/lib/fluent/plugin_helper/parser.rb +147 -0
  198. data/lib/fluent/plugin_helper/retry_state.rb +201 -0
  199. data/lib/fluent/plugin_helper/server.rb +738 -0
  200. data/lib/fluent/plugin_helper/socket.rb +241 -0
  201. data/lib/fluent/plugin_helper/socket_option.rb +69 -0
  202. data/lib/fluent/plugin_helper/storage.rb +349 -0
  203. data/lib/fluent/plugin_helper/thread.rb +179 -0
  204. data/lib/fluent/plugin_helper/timer.rb +91 -0
  205. data/lib/fluent/plugin_id.rb +80 -0
  206. data/lib/fluent/process.rb +22 -0
  207. data/lib/fluent/registry.rb +116 -0
  208. data/lib/fluent/root_agent.rb +323 -0
  209. data/lib/fluent/rpc.rb +94 -0
  210. data/lib/fluent/supervisor.rb +741 -0
  211. data/lib/fluent/system_config.rb +159 -0
  212. data/lib/fluent/test.rb +58 -0
  213. data/lib/fluent/test/base.rb +78 -0
  214. data/lib/fluent/test/driver/base.rb +224 -0
  215. data/lib/fluent/test/driver/base_owned.rb +70 -0
  216. data/lib/fluent/test/driver/base_owner.rb +135 -0
  217. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  218. data/lib/fluent/test/driver/filter.rb +57 -0
  219. data/lib/fluent/test/driver/formatter.rb +30 -0
  220. data/lib/fluent/test/driver/input.rb +31 -0
  221. data/lib/fluent/test/driver/multi_output.rb +53 -0
  222. data/lib/fluent/test/driver/output.rb +102 -0
  223. data/lib/fluent/test/driver/parser.rb +30 -0
  224. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  225. data/lib/fluent/test/filter_test.rb +77 -0
  226. data/lib/fluent/test/formatter_test.rb +65 -0
  227. data/lib/fluent/test/helpers.rb +134 -0
  228. data/lib/fluent/test/input_test.rb +174 -0
  229. data/lib/fluent/test/log.rb +79 -0
  230. data/lib/fluent/test/output_test.rb +156 -0
  231. data/lib/fluent/test/parser_test.rb +70 -0
  232. data/lib/fluent/test/startup_shutdown.rb +46 -0
  233. data/lib/fluent/time.rb +412 -0
  234. data/lib/fluent/timezone.rb +133 -0
  235. data/lib/fluent/unique_id.rb +39 -0
  236. data/lib/fluent/version.rb +21 -0
  237. data/lib/fluent/winsvc.rb +71 -0
  238. data/templates/new_gem/Gemfile +3 -0
  239. data/templates/new_gem/README.md.erb +43 -0
  240. data/templates/new_gem/Rakefile +13 -0
  241. data/templates/new_gem/fluent-plugin.gemspec.erb +27 -0
  242. data/templates/new_gem/lib/fluent/plugin/filter.rb.erb +14 -0
  243. data/templates/new_gem/lib/fluent/plugin/formatter.rb.erb +14 -0
  244. data/templates/new_gem/lib/fluent/plugin/input.rb.erb +11 -0
  245. data/templates/new_gem/lib/fluent/plugin/output.rb.erb +11 -0
  246. data/templates/new_gem/lib/fluent/plugin/parser.rb.erb +15 -0
  247. data/templates/new_gem/test/helper.rb.erb +8 -0
  248. data/templates/new_gem/test/plugin/test_filter.rb.erb +18 -0
  249. data/templates/new_gem/test/plugin/test_formatter.rb.erb +18 -0
  250. data/templates/new_gem/test/plugin/test_input.rb.erb +18 -0
  251. data/templates/new_gem/test/plugin/test_output.rb.erb +18 -0
  252. data/templates/new_gem/test/plugin/test_parser.rb.erb +18 -0
  253. data/templates/plugin_config_formatter/param.md-compact.erb +25 -0
  254. data/templates/plugin_config_formatter/param.md.erb +34 -0
  255. data/templates/plugin_config_formatter/section.md.erb +12 -0
  256. data/test/command/test_binlog_reader.rb +346 -0
  257. data/test/command/test_fluentd.rb +618 -0
  258. data/test/command/test_plugin_config_formatter.rb +275 -0
  259. data/test/command/test_plugin_generator.rb +66 -0
  260. data/test/compat/test_calls_super.rb +166 -0
  261. data/test/compat/test_parser.rb +92 -0
  262. data/test/config/assertions.rb +42 -0
  263. data/test/config/test_config_parser.rb +513 -0
  264. data/test/config/test_configurable.rb +1587 -0
  265. data/test/config/test_configure_proxy.rb +566 -0
  266. data/test/config/test_dsl.rb +415 -0
  267. data/test/config/test_element.rb +403 -0
  268. data/test/config/test_literal_parser.rb +297 -0
  269. data/test/config/test_section.rb +184 -0
  270. data/test/config/test_system_config.rb +168 -0
  271. data/test/config/test_types.rb +191 -0
  272. data/test/helper.rb +153 -0
  273. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  274. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  275. data/test/plugin/data/2010/01/20100102.log +0 -0
  276. data/test/plugin/data/log/bar +0 -0
  277. data/test/plugin/data/log/foo/bar.log +0 -0
  278. data/test/plugin/data/log/foo/bar2 +0 -0
  279. data/test/plugin/data/log/test.log +0 -0
  280. data/test/plugin/test_bare_output.rb +118 -0
  281. data/test/plugin/test_base.rb +115 -0
  282. data/test/plugin/test_buf_file.rb +843 -0
  283. data/test/plugin/test_buf_memory.rb +42 -0
  284. data/test/plugin/test_buffer.rb +1220 -0
  285. data/test/plugin/test_buffer_chunk.rb +198 -0
  286. data/test/plugin/test_buffer_file_chunk.rb +844 -0
  287. data/test/plugin/test_buffer_memory_chunk.rb +338 -0
  288. data/test/plugin/test_compressable.rb +84 -0
  289. data/test/plugin/test_file_util.rb +96 -0
  290. data/test/plugin/test_filter.rb +357 -0
  291. data/test/plugin/test_filter_grep.rb +119 -0
  292. data/test/plugin/test_filter_parser.rb +700 -0
  293. data/test/plugin/test_filter_record_transformer.rb +556 -0
  294. data/test/plugin/test_filter_stdout.rb +202 -0
  295. data/test/plugin/test_formatter_csv.rb +111 -0
  296. data/test/plugin/test_formatter_hash.rb +35 -0
  297. data/test/plugin/test_formatter_json.rb +51 -0
  298. data/test/plugin/test_formatter_ltsv.rb +59 -0
  299. data/test/plugin/test_formatter_msgpack.rb +28 -0
  300. data/test/plugin/test_formatter_out_file.rb +95 -0
  301. data/test/plugin/test_formatter_single_value.rb +38 -0
  302. data/test/plugin/test_in_debug_agent.rb +28 -0
  303. data/test/plugin/test_in_dummy.rb +192 -0
  304. data/test/plugin/test_in_exec.rb +245 -0
  305. data/test/plugin/test_in_forward.rb +1120 -0
  306. data/test/plugin/test_in_gc_stat.rb +39 -0
  307. data/test/plugin/test_in_http.rb +588 -0
  308. data/test/plugin/test_in_monitor_agent.rb +516 -0
  309. data/test/plugin/test_in_object_space.rb +64 -0
  310. data/test/plugin/test_in_syslog.rb +271 -0
  311. data/test/plugin/test_in_tail.rb +1216 -0
  312. data/test/plugin/test_in_tcp.rb +118 -0
  313. data/test/plugin/test_in_udp.rb +152 -0
  314. data/test/plugin/test_in_unix.rb +126 -0
  315. data/test/plugin/test_input.rb +126 -0
  316. data/test/plugin/test_multi_output.rb +180 -0
  317. data/test/plugin/test_out_copy.rb +160 -0
  318. data/test/plugin/test_out_exec.rb +310 -0
  319. data/test/plugin/test_out_exec_filter.rb +613 -0
  320. data/test/plugin/test_out_file.rb +873 -0
  321. data/test/plugin/test_out_forward.rb +685 -0
  322. data/test/plugin/test_out_null.rb +105 -0
  323. data/test/plugin/test_out_relabel.rb +28 -0
  324. data/test/plugin/test_out_roundrobin.rb +146 -0
  325. data/test/plugin/test_out_secondary_file.rb +442 -0
  326. data/test/plugin/test_out_stdout.rb +170 -0
  327. data/test/plugin/test_out_stream.rb +93 -0
  328. data/test/plugin/test_output.rb +870 -0
  329. data/test/plugin/test_output_as_buffered.rb +1932 -0
  330. data/test/plugin/test_output_as_buffered_compress.rb +165 -0
  331. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  332. data/test/plugin/test_output_as_buffered_retries.rb +839 -0
  333. data/test/plugin/test_output_as_buffered_secondary.rb +877 -0
  334. data/test/plugin/test_output_as_standard.rb +374 -0
  335. data/test/plugin/test_owned_by.rb +35 -0
  336. data/test/plugin/test_parser.rb +359 -0
  337. data/test/plugin/test_parser_apache.rb +42 -0
  338. data/test/plugin/test_parser_apache2.rb +46 -0
  339. data/test/plugin/test_parser_apache_error.rb +45 -0
  340. data/test/plugin/test_parser_csv.rb +103 -0
  341. data/test/plugin/test_parser_json.rb +114 -0
  342. data/test/plugin/test_parser_labeled_tsv.rb +128 -0
  343. data/test/plugin/test_parser_multiline.rb +100 -0
  344. data/test/plugin/test_parser_nginx.rb +48 -0
  345. data/test/plugin/test_parser_none.rb +52 -0
  346. data/test/plugin/test_parser_regexp.rb +281 -0
  347. data/test/plugin/test_parser_syslog.rb +242 -0
  348. data/test/plugin/test_parser_tsv.rb +122 -0
  349. data/test/plugin/test_storage.rb +167 -0
  350. data/test/plugin/test_storage_local.rb +335 -0
  351. data/test/plugin/test_string_util.rb +26 -0
  352. data/test/plugin_helper/test_child_process.rb +794 -0
  353. data/test/plugin_helper/test_compat_parameters.rb +331 -0
  354. data/test/plugin_helper/test_event_emitter.rb +51 -0
  355. data/test/plugin_helper/test_event_loop.rb +52 -0
  356. data/test/plugin_helper/test_extract.rb +194 -0
  357. data/test/plugin_helper/test_formatter.rb +255 -0
  358. data/test/plugin_helper/test_inject.rb +519 -0
  359. data/test/plugin_helper/test_parser.rb +264 -0
  360. data/test/plugin_helper/test_retry_state.rb +422 -0
  361. data/test/plugin_helper/test_server.rb +1677 -0
  362. data/test/plugin_helper/test_storage.rb +542 -0
  363. data/test/plugin_helper/test_thread.rb +164 -0
  364. data/test/plugin_helper/test_timer.rb +132 -0
  365. data/test/scripts/exec_script.rb +32 -0
  366. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +7 -0
  367. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +7 -0
  368. data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
  369. data/test/scripts/fluent/plugin/out_test.rb +81 -0
  370. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  371. data/test/scripts/fluent/plugin/parser_known.rb +4 -0
  372. data/test/test_clock.rb +164 -0
  373. data/test/test_config.rb +179 -0
  374. data/test/test_configdsl.rb +148 -0
  375. data/test/test_event.rb +515 -0
  376. data/test/test_event_router.rb +331 -0
  377. data/test/test_event_time.rb +186 -0
  378. data/test/test_filter.rb +121 -0
  379. data/test/test_formatter.rb +312 -0
  380. data/test/test_input.rb +31 -0
  381. data/test/test_log.rb +828 -0
  382. data/test/test_match.rb +137 -0
  383. data/test/test_mixin.rb +351 -0
  384. data/test/test_output.rb +273 -0
  385. data/test/test_plugin.rb +251 -0
  386. data/test/test_plugin_classes.rb +253 -0
  387. data/test/test_plugin_helper.rb +81 -0
  388. data/test/test_plugin_id.rb +101 -0
  389. data/test/test_process.rb +14 -0
  390. data/test/test_root_agent.rb +611 -0
  391. data/test/test_supervisor.rb +373 -0
  392. data/test/test_test_drivers.rb +135 -0
  393. data/test/test_time_formatter.rb +282 -0
  394. data/test/test_time_parser.rb +211 -0
  395. data/test/test_unique_id.rb +47 -0
  396. metadata +898 -0
@@ -0,0 +1,198 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/buffer/chunk'
3
+
4
+ class BufferChunkTest < Test::Unit::TestCase
5
+ sub_test_case 'blank buffer chunk' do
6
+ test 'has generated unique id, given metadata, created_at and modified_at' do
7
+ meta = Object.new
8
+ chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
9
+ assert{ chunk.unique_id.bytesize == 16 }
10
+ assert{ chunk.metadata.object_id == meta.object_id }
11
+ assert{ chunk.created_at.is_a? Time }
12
+ assert{ chunk.modified_at.is_a? Time }
13
+ assert chunk.unstaged?
14
+ assert !chunk.staged?
15
+ assert !chunk.queued?
16
+ assert !chunk.closed?
17
+ end
18
+
19
+ test 'has many methods for chunks, but not implemented' do
20
+ meta = Object.new
21
+ chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
22
+
23
+ assert chunk.respond_to?(:append)
24
+ assert chunk.respond_to?(:concat)
25
+ assert chunk.respond_to?(:commit)
26
+ assert chunk.respond_to?(:rollback)
27
+ assert chunk.respond_to?(:bytesize)
28
+ assert chunk.respond_to?(:size)
29
+ assert chunk.respond_to?(:length)
30
+ assert chunk.respond_to?(:empty?)
31
+ assert chunk.respond_to?(:read)
32
+ assert chunk.respond_to?(:open)
33
+ assert chunk.respond_to?(:write_to)
34
+ assert_raise(NotImplementedError){ chunk.append([]) }
35
+ assert_raise(NotImplementedError){ chunk.concat(nil, 0) }
36
+ assert_raise(NotImplementedError){ chunk.commit }
37
+ assert_raise(NotImplementedError){ chunk.rollback }
38
+ assert_raise(NotImplementedError){ chunk.bytesize }
39
+ assert_raise(NotImplementedError){ chunk.size }
40
+ assert_raise(NotImplementedError){ chunk.length }
41
+ assert_raise(NotImplementedError){ chunk.empty? }
42
+ assert_raise(NotImplementedError){ chunk.read }
43
+ assert_raise(NotImplementedError){ chunk.open(){} }
44
+ assert_raise(NotImplementedError){ chunk.write_to(nil) }
45
+ assert !chunk.respond_to?(:msgpack_each)
46
+ end
47
+
48
+ test 'has method #each and #msgpack_each only when extended by ChunkMessagePackEventStreamer' do
49
+ meta = Object.new
50
+ chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
51
+
52
+ assert !chunk.respond_to?(:each)
53
+ assert !chunk.respond_to?(:msgpack_each)
54
+
55
+ chunk.extend Fluent::ChunkMessagePackEventStreamer
56
+ assert chunk.respond_to?(:each)
57
+ assert chunk.respond_to?(:msgpack_each)
58
+ end
59
+
60
+ test 'some methods raise ArgumentError with an option of `compressed: :gzip` and without extending Compressble`' do
61
+ meta = Object.new
62
+ chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
63
+
64
+ assert_raise(ArgumentError){ chunk.read(compressed: :gzip) }
65
+ assert_raise(ArgumentError){ chunk.open(compressed: :gzip){} }
66
+ assert_raise(ArgumentError){ chunk.write_to(nil, compressed: :gzip) }
67
+ assert_raise(ArgumentError){ chunk.append(nil, compress: :gzip) }
68
+ end
69
+ end
70
+
71
+ class TestChunk < Fluent::Plugin::Buffer::Chunk
72
+ attr_accessor :data
73
+ def initialize(meta)
74
+ super
75
+ @data = ''
76
+ end
77
+ def size
78
+ @data.size
79
+ end
80
+ def open(**kwargs)
81
+ require 'stringio'
82
+ io = StringIO.new(@data)
83
+ yield io
84
+ end
85
+ end
86
+
87
+ sub_test_case 'minimum chunk implements #size and #open' do
88
+ test 'chunk lifecycle' do
89
+ c = TestChunk.new(Object.new)
90
+ assert c.unstaged?
91
+ assert !c.staged?
92
+ assert !c.queued?
93
+ assert !c.closed?
94
+ assert c.writable?
95
+
96
+ c.staged!
97
+
98
+ assert !c.unstaged?
99
+ assert c.staged?
100
+ assert !c.queued?
101
+ assert !c.closed?
102
+ assert c.writable?
103
+
104
+ c.enqueued!
105
+
106
+ assert !c.unstaged?
107
+ assert !c.staged?
108
+ assert c.queued?
109
+ assert !c.closed?
110
+ assert !c.writable?
111
+
112
+ c.close
113
+
114
+ assert !c.unstaged?
115
+ assert !c.staged?
116
+ assert !c.queued?
117
+ assert c.closed?
118
+ assert !c.writable?
119
+ end
120
+
121
+ test 'chunk can be unstaged' do
122
+ c = TestChunk.new(Object.new)
123
+ assert c.unstaged?
124
+ assert !c.staged?
125
+ assert !c.queued?
126
+ assert !c.closed?
127
+ assert c.writable?
128
+
129
+ c.staged!
130
+
131
+ assert !c.unstaged?
132
+ assert c.staged?
133
+ assert !c.queued?
134
+ assert !c.closed?
135
+ assert c.writable?
136
+
137
+ c.unstaged!
138
+
139
+ assert c.unstaged?
140
+ assert !c.staged?
141
+ assert !c.queued?
142
+ assert !c.closed?
143
+ assert c.writable?
144
+
145
+ c.enqueued!
146
+
147
+ assert !c.unstaged?
148
+ assert !c.staged?
149
+ assert c.queued?
150
+ assert !c.closed?
151
+ assert !c.writable?
152
+
153
+ c.close
154
+
155
+ assert !c.unstaged?
156
+ assert !c.staged?
157
+ assert !c.queued?
158
+ assert c.closed?
159
+ assert !c.writable?
160
+ end
161
+
162
+ test 'can respond to #empty? correctly' do
163
+ c = TestChunk.new(Object.new)
164
+ assert_equal 0, c.size
165
+ assert c.empty?
166
+ end
167
+
168
+ test 'can write its contents to io object' do
169
+ c = TestChunk.new(Object.new)
170
+ c.data << "my data\nyour data\n"
171
+ io = StringIO.new
172
+ c.write_to(io)
173
+ assert "my data\nyour data\n", io.to_s
174
+ end
175
+
176
+ test 'can feed objects into blocks with unpacking msgpack if ChunkMessagePackEventStreamer is included' do
177
+ require 'msgpack'
178
+ c = TestChunk.new(Object.new)
179
+ c.extend Fluent::ChunkMessagePackEventStreamer
180
+ c.data << MessagePack.pack(['my data', 1])
181
+ c.data << MessagePack.pack(['your data', 2])
182
+ ary = []
183
+ c.msgpack_each do |obj|
184
+ ary << obj
185
+ end
186
+ assert_equal ['my data', 1], ary[0]
187
+ assert_equal ['your data', 2], ary[1]
188
+ end
189
+ end
190
+
191
+ sub_test_case 'when compress is gzip' do
192
+ test 'create decompressable chunk' do
193
+ meta = Object.new
194
+ chunk = Fluent::Plugin::Buffer::Chunk.new(meta, compress: :gzip)
195
+ assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::Decompressable)
196
+ end
197
+ end
198
+ end
@@ -0,0 +1,844 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/buffer/file_chunk'
3
+ require 'fluent/plugin/compressable'
4
+ require 'fluent/unique_id'
5
+
6
+ require 'fileutils'
7
+ require 'msgpack'
8
+ require 'time'
9
+ require 'timecop'
10
+
11
+ class BufferFileChunkTest < Test::Unit::TestCase
12
+ include Fluent::Plugin::Compressable
13
+
14
+ setup do
15
+ @klass = Fluent::Plugin::Buffer::FileChunk
16
+ @chunkdir = File.expand_path('../../tmp/buffer_file_chunk', __FILE__)
17
+ FileUtils.rm_r @chunkdir rescue nil
18
+ FileUtils.mkdir_p @chunkdir
19
+ end
20
+ teardown do
21
+ Timecop.return
22
+ end
23
+
24
+ Metadata = Struct.new(:timekey, :tag, :variables)
25
+ def gen_metadata(timekey: nil, tag: nil, variables: nil)
26
+ Metadata.new(timekey, tag, variables)
27
+ end
28
+
29
+ def read_metadata_file(path)
30
+ File.open(path, 'rb'){|f| MessagePack.unpack(f.read, symbolize_keys: true) }
31
+ end
32
+
33
+ def gen_path(path)
34
+ File.join(@chunkdir, path)
35
+ end
36
+
37
+ def gen_test_chunk_id
38
+ require 'time'
39
+ now = Time.parse('2016-04-07 14:31:33 +0900')
40
+ u1 = ((now.to_i * 1000 * 1000 + now.usec) << 12 | 1725) # 1725 is one of `rand(0xfff)`
41
+ u3 = 2979763054 # one of rand(0xffffffff)
42
+ u4 = 438020492 # ditto
43
+ [u1 >> 32, u1 & 0xffffffff, u3, u4].pack('NNNN')
44
+ # unique_id.unpack('N*').map{|n| n.to_s(16)}.join => "52fde6425d7406bdb19b936e1a1ba98c"
45
+ end
46
+
47
+ def hex_id(id)
48
+ id.unpack('N*').map{|n| n.to_s(16)}.join
49
+ end
50
+
51
+ sub_test_case 'classmethods' do
52
+ data(
53
+ correct_staged: ['/mydir/mypath/myfile.b00ff.log', :staged],
54
+ correct_queued: ['/mydir/mypath/myfile.q00ff.log', :queued],
55
+ incorrect_staged: ['/mydir/mypath/myfile.b00ff.log/unknown', :unknown],
56
+ incorrect_queued: ['/mydir/mypath/myfile.q00ff.log/unknown', :unknown],
57
+ output_file: ['/mydir/mypath/myfile.20160716.log', :unknown],
58
+ )
59
+ test 'can .assume_chunk_state' do |data|
60
+ path, expected = data
61
+ assert_equal expected, @klass.assume_chunk_state(path)
62
+ end
63
+
64
+ test '.generate_stage_chunk_path generates path with staged mark & chunk unique_id' do
65
+ assert_equal gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), @klass.generate_stage_chunk_path(gen_path("mychunk.*.log"), gen_test_chunk_id)
66
+ assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
67
+ @klass.generate_stage_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
68
+ end
69
+ assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
70
+ @klass.generate_stage_chunk_path(gen_path("mychunk.*"), gen_test_chunk_id)
71
+ end
72
+ assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
73
+ @klass.generate_stage_chunk_path(gen_path("*.log"), gen_test_chunk_id)
74
+ end
75
+ end
76
+
77
+ test '.generate_queued_chunk_path generates path with enqueued mark for staged chunk path' do
78
+ assert_equal(
79
+ gen_path("mychunk.q52fde6425d7406bdb19b936e1a1ba98c.log"),
80
+ @klass.generate_queued_chunk_path(gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), gen_test_chunk_id)
81
+ )
82
+ end
83
+
84
+ test '.generate_queued_chunk_path generates special path with chunk unique_id for non staged chunk path' do
85
+ assert_equal(
86
+ gen_path("mychunk.log.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
87
+ @klass.generate_queued_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
88
+ )
89
+ assert_equal(
90
+ gen_path("mychunk.q55555555555555555555555555555555.log.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
91
+ @klass.generate_queued_chunk_path(gen_path("mychunk.q55555555555555555555555555555555.log"), gen_test_chunk_id)
92
+ )
93
+ end
94
+
95
+ test '.unique_id_from_path recreates unique_id from file path to assume unique_id for v0.12 chunks' do
96
+ assert_equal gen_test_chunk_id, @klass.unique_id_from_path(gen_path("mychunk.q52fde6425d7406bdb19b936e1a1ba98c.log"))
97
+ end
98
+ end
99
+
100
sub_test_case 'newly created chunk' do
  setup do
    @chunk_path = File.join(@chunkdir, 'test.*.log')
    @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :create)
  end

  # Concrete on-disk path for a chunk file: prefix is 'b' (staged) or 'q' (queued),
  # unique_id is the chunk's binary id rendered as hex.
  def gen_chunk_path(prefix, unique_id)
    File.join(@chunkdir, "test.#{prefix}#{Fluent::UniqueId.hex(unique_id)}.log")
  end

  teardown do
    if @c
      @c.purge rescue nil
    end
    File.unlink(@chunk_path) if File.exist?(@chunk_path)
  end

  test 'creates new files for chunk and metadata with specified path & permission' do
    assert{ @c.unique_id.size == 16 }
    chunk_file = gen_chunk_path('b', @c.unique_id)
    meta_file = chunk_file + '.meta'
    assert_equal chunk_file, @c.path

    expected_perm = @klass.const_get('FILE_PERMISSION').to_s(8)
    assert File.exist?(chunk_file)
    assert{ File.stat(chunk_file).mode.to_s(8).end_with?(expected_perm) }

    assert File.exist?(meta_file)
    assert{ File.stat(meta_file).mode.to_s(8).end_with?(expected_perm) }

    assert_equal :unstaged, @c.state
    assert @c.empty?
  end

  test 'can #append, #commit and #read it' do
    assert @c.empty?

    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @c.append([row1, row2].map{|r| r.to_json + "\n" })
    @c.commit

    lines = @c.read.split("\n").reject(&:empty?)
    assert_equal 2, lines.size
    assert_equal row1, JSON.parse(lines[0])
    assert_equal row2, JSON.parse(lines[1])

    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @c.append([row3, row4].map{|r| r.to_json + "\n" })
    @c.commit

    lines = @c.read.split("\n").reject(&:empty?)
    assert_equal 4, lines.size
    [row1, row2, row3, row4].each_with_index do |row, i|
      assert_equal row, JSON.parse(lines[i])
    end
  end

  test 'can #concat, #commit and #read it' do
    assert @c.empty?

    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @c.concat([row1, row2].map{|r| r.to_json + "\n" }.join, 2)
    @c.commit

    lines = @c.read.split("\n").reject(&:empty?)
    assert_equal 2, lines.size
    assert_equal row1, JSON.parse(lines[0])
    assert_equal row2, JSON.parse(lines[1])

    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @c.concat([row3, row4].map{|r| r.to_json + "\n" }.join, 2)
    @c.commit

    lines = @c.read.split("\n").reject(&:empty?)
    assert_equal 4, lines.size
    [row1, row2, row3, row4].each_with_index do |row, i|
      assert_equal row, JSON.parse(lines[i])
    end
  end

  test 'has its contents in binary (ascii-8bit)' do
    @c.append(["aaa bbb ccc".force_encoding('utf-8')])
    @c.commit
    # the underlying File IO must be opened in binary mode
    assert_equal Encoding::ASCII_8BIT, @c.instance_eval{ @chunk.external_encoding }

    assert_equal Encoding::ASCII_8BIT, @c.read.encoding
  end

  test 'has #bytesize and #size' do
    assert @c.empty?

    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    first_payload = [row1, row2].map{|r| r.to_json + "\n" }
    @c.append(first_payload)

    assert_equal first_payload.join.bytesize, @c.bytesize
    assert_equal 2, @c.size

    @c.commit

    # commit must not change the counters
    assert_equal first_payload.join.bytesize, @c.bytesize
    assert_equal 2, @c.size

    committed_bytesize = @c.bytesize

    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    second_payload = [row3, row4].map{|r| r.to_json + "\n" }
    @c.append(second_payload)

    assert_equal committed_bytesize + second_payload.join.bytesize, @c.bytesize
    assert_equal 4, @c.size

    @c.commit

    assert_equal committed_bytesize + second_payload.join.bytesize, @c.bytesize
    assert_equal 4, @c.size
  end

  test 'can #rollback to revert non-committed data' do
    assert @c.empty?

    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    first_payload = [row1, row2].map{|r| r.to_json + "\n" }
    @c.append(first_payload)

    assert_equal first_payload.join.bytesize, @c.bytesize
    assert_equal 2, @c.size

    @c.rollback

    assert @c.empty?
    assert_equal '', File.open(@c.path, 'rb'){|f| f.read }

    # append the same records again, this time committing them
    @c.append(first_payload)
    @c.commit

    assert_equal first_payload.join.bytesize, @c.bytesize
    assert_equal 2, @c.size

    committed_bytesize = @c.bytesize

    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    second_payload = [row3, row4].map{|r| r.to_json + "\n" }
    @c.append(second_payload)

    assert_equal committed_bytesize + second_payload.join.bytesize, @c.bytesize
    assert_equal 4, @c.size

    @c.rollback

    # rollback must restore counters and file content to the committed state
    assert_equal committed_bytesize, @c.bytesize
    assert_equal 2, @c.size

    assert_equal first_payload.join, File.open(@c.path, 'rb'){|f| f.read }
  end

  test 'can #rollback to revert non-committed data from #concat' do
    assert @c.empty?

    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    first_payload = [row1, row2].map{|r| r.to_json + "\n" }
    @c.concat(first_payload.join, 2)

    assert_equal first_payload.join.bytesize, @c.bytesize
    assert_equal 2, @c.size

    @c.rollback

    assert @c.empty?
    assert_equal '', File.open(@c.path, 'rb'){|f| f.read }

    # append the same records again, this time committing them
    @c.append(first_payload)
    @c.commit

    assert_equal first_payload.join.bytesize, @c.bytesize
    assert_equal 2, @c.size

    committed_bytesize = @c.bytesize

    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    second_payload = [row3, row4].map{|r| r.to_json + "\n" }
    @c.concat(second_payload.join, 2)

    assert_equal committed_bytesize + second_payload.join.bytesize, @c.bytesize
    assert_equal 4, @c.size

    @c.rollback

    assert_equal committed_bytesize, @c.bytesize
    assert_equal 2, @c.size

    assert_equal first_payload.join, File.open(@c.path, 'rb'){|f| f.read }
  end

  test 'can store its data by #close' do
    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @c.append([row1, row2].map{|r| r.to_json + "\n" })
    @c.commit
    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @c.append([row3, row4].map{|r| r.to_json + "\n" })
    @c.commit

    content = @c.read

    # snapshot the metadata before close, then verify it was persisted
    expected_meta = {
      timekey: nil, tag: nil, variables: nil,
      id: @c.unique_id,
      s: @c.size,
      c: @c.created_at.to_i,
      m: @c.modified_at.to_i,
    }

    @c.close

    assert_equal content, File.open(@c.path, 'rb'){|f| f.read }
    assert_equal expected_meta, read_metadata_file(@c.path + '.meta')
  end

  test 'deletes all data by #purge' do
    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @c.append([row1, row2].map{|r| r.to_json + "\n" })
    @c.commit
    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @c.append([row3, row4].map{|r| r.to_json + "\n" })
    @c.commit

    @c.purge

    assert @c.empty?
    assert_equal 0, @c.bytesize
    assert_equal 0, @c.size

    # both the chunk file and its metadata file must be gone
    assert !File.exist?(@c.path)
    assert !File.exist?(@c.path + '.meta')
  end

  test 'can #open its contents as io' do
    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @c.append([row1, row2].map{|r| r.to_json + "\n" })
    @c.commit
    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @c.append([row3, row4].map{|r| r.to_json + "\n" })
    @c.commit

    lines = []
    @c.open do |io|
      assert io
      io.readlines.each{|l| lines << l }
    end

    [row1, row2, row3, row4].each_with_index do |row, i|
      assert_equal row.to_json + "\n", lines[i]
    end
  end

  test 'can refer system config for file permission' do
    omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?

    chunk_path = File.join(@chunkdir, 'testperm.*.log')
    Fluent::SystemConfig.overwrite_system_config("file_permission" => "600") do
      c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, chunk_path, :create)
      assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
      assert{ File.stat(c.path + '.meta').mode.to_s(8).end_with?('600') }
    end
  end

  test '#write_metadata tries to store metadata on file' do
    row1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    row2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @c.append([row1, row2].map{|r| r.to_json + "\n" })
    @c.commit

    expected = {
      timekey: nil, tag: nil, variables: nil,
      id: @c.unique_id,
      s: @c.size,
      c: @c.created_at.to_i,
      m: @c.modified_at.to_i,
    }
    assert_equal expected, read_metadata_file(@c.path + '.meta')

    row3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
    row4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @c.append([row3, row4].map{|r| r.to_json + "\n" })
    # append does write_metadata

    # NOTE(review): time stays frozen here without an explicit Timecop.return —
    # presumably a suite-level teardown resets it; confirm.
    frozen_now = Time.parse('2016-04-07 16:59:59 +0900')
    Timecop.freeze(frozen_now)
    @c.write_metadata

    expected = {
      timekey: nil, tag: nil, variables: nil,
      id: @c.unique_id,
      s: @c.size,
      c: @c.created_at.to_i,
      m: frozen_now.to_i,
    }
    assert_equal expected, read_metadata_file(@c.path + '.meta')

    @c.commit

    expected = {
      timekey: nil, tag: nil, variables: nil,
      id: @c.unique_id,
      s: @c.size,
      c: @c.created_at.to_i,
      m: @c.modified_at.to_i,
    }
    assert_equal expected, read_metadata_file(@c.path + '.meta')

    content = @c.read

    # snapshot metadata before close, then verify #close persisted it
    expected_after_close = {
      timekey: nil, tag: nil, variables: nil,
      id: @c.unique_id,
      s: @c.size,
      c: @c.created_at.to_i,
      m: @c.modified_at.to_i,
    }

    @c.close

    assert_equal content, File.open(@c.path, 'rb'){|f| f.read }
    assert_equal expected_after_close, read_metadata_file(@c.path + '.meta')
  end
end
479
+
480
sub_test_case 'chunk with file for staged chunk' do
  setup do
    @chunk_id = gen_test_chunk_id
    @chunk_path = File.join(@chunkdir, "test_staged.b#{hex_id(@chunk_id)}.log")
    @enqueued_path = File.join(@chunkdir, "test_staged.q#{hex_id(@chunk_id)}.log")

    # pre-existing chunk content: four JSON lines sharing the variable key "k"
    @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
    @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @d = [@d1, @d2, @d3, @d4].map{|d| d.to_json + "\n" }.join
    File.open(@chunk_path, 'wb'){|f| f.write @d }

    # matching metadata file, msgpack-encoded, as FileChunk persists it
    @metadata = {
      timekey: nil, tag: 'testing', variables: {k: "x"},
      id: @chunk_id,
      s: 4,
      c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
      m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
    }
    File.open(@chunk_path + '.meta', 'wb'){|f| f.write @metadata.to_msgpack }

    @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :staged)
  end

  teardown do
    if @c
      @c.purge rescue nil
    end
    [@chunk_path, @chunk_path + '.meta', @enqueued_path, @enqueued_path + '.meta'].each do |path|
      File.unlink(path) if File.exist?(path)
    end
  end

  test 'can load as staged chunk from file with metadata' do
    assert_equal @chunk_path, @c.path
    assert_equal :staged, @c.state

    assert_nil @c.metadata.timekey
    assert_equal 'testing', @c.metadata.tag
    assert_equal({k: "x"}, @c.metadata.variables)

    assert_equal 4, @c.size
    assert_equal Time.parse('2016-04-07 17:44:00 +0900'), @c.created_at
    assert_equal Time.parse('2016-04-07 17:44:13 +0900'), @c.modified_at

    assert_equal @d, @c.read
  end

  test 'can be enqueued' do
    staged_path = @c.path
    queued_path = @enqueued_path
    assert File.exist?(staged_path)
    assert File.exist?(staged_path + '.meta')
    assert !File.exist?(queued_path)
    assert !File.exist?(queued_path + '.meta')

    @c.enqueued!

    # enqueueing renames b-prefixed files to q-prefixed ones
    assert_equal queued_path, @c.path

    assert !File.exist?(staged_path)
    assert !File.exist?(staged_path + '.meta')
    assert File.exist?(queued_path)
    assert File.exist?(queued_path + '.meta')

    assert_nil @c.metadata.timekey
    assert_equal 'testing', @c.metadata.tag
    assert_equal({k: "x"}, @c.metadata.variables)

    assert_equal 4, @c.size
    assert_equal Time.parse('2016-04-07 17:44:00 +0900'), @c.created_at
    assert_equal Time.parse('2016-04-07 17:44:13 +0900'), @c.modified_at

    assert_equal @d, File.open(@c.path, 'rb'){|f| f.read }
    assert_equal @metadata, read_metadata_file(@c.path + '.meta')
  end

  test '#write_metadata tries to store metadata on file with non-committed data' do
    extra_row = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @c.append([extra_row.to_json + "\n"])

    # on-disk metadata is still the one written during setup (s: 4)
    assert_equal @metadata, read_metadata_file(@c.path + '.meta')

    @c.write_metadata

    # after write_metadata the non-committed record is counted (s: 5)
    expected = {
      timekey: nil, tag: 'testing', variables: {k: "x"},
      id: @chunk_id,
      s: 5,
      c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
      m: Time.parse('2016-04-07 17:44:38 +0900').to_i,
    }

    frozen_now = Time.parse('2016-04-07 17:44:38 +0900')
    Timecop.freeze(frozen_now)
    @c.write_metadata

    assert_equal expected, read_metadata_file(@c.path + '.meta')
  end

  test '#file_rename can rename chunk files even in windows, and call callback with file size' do
    payload = "aaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccccccccc"

    src_path = gen_path('rename1.test')
    dst_path = gen_path('rename2.test')
    f = File.open(src_path, 'wb', @c.permission)
    f.set_encoding(Encoding::ASCII_8BIT)
    f.sync = true
    f.binmode
    f.write payload
    pos = f.pos

    assert f.binmode?
    assert f.sync
    assert_equal payload.bytesize, f.size

    io = nil
    @c.file_rename(f, src_path, dst_path, ->(new_io){ io = new_io })
    assert io
    if Fluent.windows?
      # on Windows the file must be reopened, so a different IO is handed back
      assert{ f != io }
    else
      assert_equal f, io
    end
    assert_equal Encoding::ASCII_8BIT, io.external_encoding
    assert io.sync
    assert io.binmode?
    assert_equal payload.bytesize, io.size

    # position is preserved across the rename, so reading yields nothing...
    assert_equal pos, io.pos
    assert_equal '', io.read

    # ...until rewound
    io.rewind
    assert_equal payload, io.read
  end
end
631
+
632
sub_test_case 'chunk with file for enqueued chunk' do
  setup do
    @chunk_id = gen_test_chunk_id
    @enqueued_path = File.join(@chunkdir, "test_staged.q#{hex_id(@chunk_id)}.log")

    # pre-existing queued chunk content: four JSON lines
    @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
    @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @d = [@d1, @d2, @d3, @d4].map{|d| d.to_json + "\n" }.join
    File.open(@enqueued_path, 'wb'){|f| f.write @d }

    @dummy_timekey = Time.parse('2016-04-07 17:40:00 +0900').to_i

    @metadata = {
      timekey: @dummy_timekey, tag: 'testing', variables: {k: "x"},
      id: @chunk_id,
      s: 4,
      c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
      m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
    }
    File.open(@enqueued_path + '.meta', 'wb'){|f| f.write @metadata.to_msgpack }

    @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @enqueued_path, :queued)
  end

  teardown do
    if @c
      @c.purge rescue nil
    end
    [@enqueued_path, @enqueued_path + '.meta'].each do |path|
      File.unlink(path) if File.exist?(path)
    end
  end

  test 'can load as queued chunk (read only) with metadata' do
    assert @c
    assert_equal @chunk_id, @c.unique_id
    assert_equal :queued, @c.state
    assert_equal gen_metadata(timekey: @dummy_timekey, tag: 'testing', variables: {k: "x"}), @c.metadata
    assert_equal Time.at(@metadata[:c]), @c.created_at
    assert_equal Time.at(@metadata[:m]), @c.modified_at
    assert_equal @metadata[:s], @c.size
    assert_equal @d.bytesize, @c.bytesize
    assert_equal @d, @c.read

    # queued chunks reject writes at both the chunk API and IO level
    assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
      @c.append(["queued chunk is read only"])
    end
    assert_raise IOError do
      @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
    end
  end
end
690
+
691
sub_test_case 'chunk with queued chunk file of v0.12, without metadata' do
  setup do
    @chunk_id = gen_test_chunk_id
    # v0.12-era path layout; note there is deliberately no .meta companion file
    @chunk_path = File.join(@chunkdir, "test_v12.2016040811.q#{hex_id(@chunk_id)}.log")

    @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
    @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @d = [@d1, @d2, @d3, @d4].map{|d| d.to_json + "\n" }.join
    File.open(@chunk_path, 'wb'){|f| f.write @d }

    @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :queued)
  end

  teardown do
    if @c
      @c.purge rescue nil
    end
    File.unlink(@chunk_path) if File.exist?(@chunk_path)
  end

  test 'can load as queued chunk from file without metadata' do
    assert @c
    assert_equal :queued, @c.state
    assert_equal @chunk_id, @c.unique_id
    assert_equal gen_metadata, @c.metadata
    assert_equal @d.bytesize, @c.bytesize
    # without a metadata file the record count is unknown, so size is 0
    assert_equal 0, @c.size
    assert_equal @d, @c.read

    # queued chunks reject writes at both the chunk API and IO level
    assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
      @c.append(["queued chunk is read only"])
    end
    assert_raise IOError do
      @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
    end
  end
end
732
+
733
sub_test_case 'chunk with staged chunk file of v0.12, without metadata' do
  setup do
    @chunk_id = gen_test_chunk_id
    # v0.12-era staged ('b' prefix) path; deliberately no .meta companion file
    @chunk_path = File.join(@chunkdir, "test_v12.2016040811.b#{hex_id(@chunk_id)}.log")

    @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
    @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
    @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
    @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
    @d = [@d1, @d2, @d3, @d4].map{|d| d.to_json + "\n" }.join
    File.open(@chunk_path, 'wb'){|f| f.write @d }

    @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :staged)
  end

  teardown do
    if @c
      @c.purge rescue nil
    end
    File.unlink(@chunk_path) if File.exist?(@chunk_path)
  end

  test 'can load as queued chunk from file without metadata even if it was loaded as staged chunk' do
    assert @c
    # requested :staged, but without metadata the chunk is demoted to :queued
    assert_equal :queued, @c.state
    assert_equal @chunk_id, @c.unique_id
    assert_equal gen_metadata, @c.metadata
    assert_equal @d.bytesize, @c.bytesize
    assert_equal 0, @c.size
    assert_equal @d, @c.read

    assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
      @c.append(["queued chunk is read only"])
    end
    assert_raise IOError do
      @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
    end
  end
end
774
+
775
# Tests for chunks created with compress: :gzip — on-disk bytes are gzipped,
# while #read/#open/#write_to decompress transparently unless told otherwise.
sub_test_case 'compressed buffer' do
  setup do
    @src = 'text data for compressing' * 5
    @gzipped_src = compress(@src)
  end

  teardown do
    # Fix: chunks created by these tests were never cleaned up, leaking
    # 'test.*.log' chunk and .meta files into @chunkdir across tests.
    if @c
      @c.purge rescue nil
    end
    @c = nil
  end

  # Creates a fresh gzip-compressing chunk and remembers it for teardown.
  def create_gzip_chunk
    @c = @klass.new(gen_metadata, File.join(@chunkdir, 'test.*.log'), :create, compress: :gzip)
  end

  test '#append with compress option writes compressed data to chunk when compress is gzip' do
    c = create_gzip_chunk
    c.append([@src, @src], compress: :gzip)
    c.commit

    # the still-compressed content must be smaller than the raw payload
    assert c.read(compressed: :gzip).size < [@src, @src].join("").size

    # plain #read transparently decompresses
    assert_equal @src + @src, c.read
  end

  test '#open passes io object having decompressed data to a block when compress is gzip' do
    c = create_gzip_chunk
    c.concat(@gzipped_src, @src.size)
    c.commit

    # fixed typo: was 'decomressed_data'
    decompressed_data = c.open do |io|
      v = io.read
      assert_equal @src, v
      v
    end
    assert_equal @src, decompressed_data
  end

  test '#open with compressed option passes io object having decompressed data to a block when compress is gzip' do
    # NOTE(review): despite the (pre-existing) test name, compressed: :gzip
    # yields the raw gzipped bytes to the block, as asserted below.
    c = create_gzip_chunk
    c.concat(@gzipped_src, @src.size)
    c.commit

    # fixed typo: was 'comressed_data'
    compressed_data = c.open(compressed: :gzip) do |io|
      v = io.read
      assert_equal @gzipped_src, v
      v
    end
    assert_equal @gzipped_src, compressed_data
  end

  test '#write_to writes decompressed data when compress is gzip' do
    c = create_gzip_chunk
    c.concat(@gzipped_src, @src.size)
    c.commit

    assert_equal @src, c.read
    assert_equal @gzipped_src, c.read(compressed: :gzip)

    io = StringIO.new
    c.write_to(io)
    assert_equal @src, io.string
  end

  test '#write_to with compressed option writes compressed data when compress is gzip' do
    c = create_gzip_chunk
    c.concat(@gzipped_src, @src.size)
    c.commit

    assert_equal @src, c.read
    assert_equal @gzipped_src, c.read(compressed: :gzip)

    io = StringIO.new
    c.write_to(io, compressed: :gzip)
    assert_equal @gzipped_src, io.string
  end
end
844
+ end