fluentd 0.14.4-x64-mingw32

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (328) hide show
  1. checksums.yaml +7 -0
  2. data/.github/ISSUE_TEMPLATE.md +6 -0
  3. data/.gitignore +26 -0
  4. data/.travis.yml +45 -0
  5. data/AUTHORS +2 -0
  6. data/CONTRIBUTING.md +35 -0
  7. data/COPYING +14 -0
  8. data/ChangeLog +276 -0
  9. data/Gemfile +9 -0
  10. data/README.md +51 -0
  11. data/Rakefile +53 -0
  12. data/Vagrantfile +17 -0
  13. data/appveyor.yml +41 -0
  14. data/bin/fluent-debug +5 -0
  15. data/example/copy_roundrobin.conf +39 -0
  16. data/example/filter_stdout.conf +22 -0
  17. data/example/in_forward.conf +11 -0
  18. data/example/in_http.conf +14 -0
  19. data/example/in_out_forward.conf +17 -0
  20. data/example/in_syslog.conf +15 -0
  21. data/example/in_tail.conf +14 -0
  22. data/example/in_tcp.conf +13 -0
  23. data/example/in_udp.conf +13 -0
  24. data/example/multi_filters.conf +61 -0
  25. data/example/out_buffered_null.conf +32 -0
  26. data/example/out_copy.conf +20 -0
  27. data/example/out_file.conf +13 -0
  28. data/example/out_forward.conf +35 -0
  29. data/example/out_forward_buf_file.conf +23 -0
  30. data/example/v0_12_filter.conf +78 -0
  31. data/example/v1_literal_example.conf +36 -0
  32. data/fluent.conf +139 -0
  33. data/fluentd.gemspec +51 -0
  34. data/lib/fluent/agent.rb +194 -0
  35. data/lib/fluent/command/bundler_injection.rb +45 -0
  36. data/lib/fluent/command/cat.rb +319 -0
  37. data/lib/fluent/command/debug.rb +102 -0
  38. data/lib/fluent/command/fluentd.rb +273 -0
  39. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  40. data/lib/fluent/compat/exec_util.rb +129 -0
  41. data/lib/fluent/compat/file_util.rb +54 -0
  42. data/lib/fluent/compat/filter.rb +68 -0
  43. data/lib/fluent/compat/formatter.rb +111 -0
  44. data/lib/fluent/compat/formatter_utils.rb +85 -0
  45. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  46. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  47. data/lib/fluent/compat/input.rb +49 -0
  48. data/lib/fluent/compat/output.rb +677 -0
  49. data/lib/fluent/compat/output_chain.rb +60 -0
  50. data/lib/fluent/compat/parser.rb +180 -0
  51. data/lib/fluent/compat/parser_utils.rb +40 -0
  52. data/lib/fluent/compat/propagate_default.rb +62 -0
  53. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  54. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  55. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  56. data/lib/fluent/compat/socket_util.rb +165 -0
  57. data/lib/fluent/compat/string_util.rb +34 -0
  58. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  59. data/lib/fluent/compat/type_converter.rb +90 -0
  60. data/lib/fluent/config.rb +56 -0
  61. data/lib/fluent/config/basic_parser.rb +123 -0
  62. data/lib/fluent/config/configure_proxy.rb +366 -0
  63. data/lib/fluent/config/dsl.rb +149 -0
  64. data/lib/fluent/config/element.rb +218 -0
  65. data/lib/fluent/config/error.rb +26 -0
  66. data/lib/fluent/config/literal_parser.rb +251 -0
  67. data/lib/fluent/config/parser.rb +107 -0
  68. data/lib/fluent/config/section.rb +212 -0
  69. data/lib/fluent/config/types.rb +136 -0
  70. data/lib/fluent/config/v1_parser.rb +190 -0
  71. data/lib/fluent/configurable.rb +176 -0
  72. data/lib/fluent/daemon.rb +15 -0
  73. data/lib/fluent/engine.rb +220 -0
  74. data/lib/fluent/env.rb +27 -0
  75. data/lib/fluent/event.rb +287 -0
  76. data/lib/fluent/event_router.rb +259 -0
  77. data/lib/fluent/filter.rb +21 -0
  78. data/lib/fluent/formatter.rb +23 -0
  79. data/lib/fluent/input.rb +21 -0
  80. data/lib/fluent/label.rb +38 -0
  81. data/lib/fluent/load.rb +36 -0
  82. data/lib/fluent/log.rb +445 -0
  83. data/lib/fluent/match.rb +141 -0
  84. data/lib/fluent/mixin.rb +31 -0
  85. data/lib/fluent/msgpack_factory.rb +62 -0
  86. data/lib/fluent/output.rb +26 -0
  87. data/lib/fluent/output_chain.rb +23 -0
  88. data/lib/fluent/parser.rb +23 -0
  89. data/lib/fluent/plugin.rb +161 -0
  90. data/lib/fluent/plugin/bare_output.rb +63 -0
  91. data/lib/fluent/plugin/base.rb +130 -0
  92. data/lib/fluent/plugin/buf_file.rb +154 -0
  93. data/lib/fluent/plugin/buf_memory.rb +34 -0
  94. data/lib/fluent/plugin/buffer.rb +603 -0
  95. data/lib/fluent/plugin/buffer/chunk.rb +160 -0
  96. data/lib/fluent/plugin/buffer/file_chunk.rb +323 -0
  97. data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
  98. data/lib/fluent/plugin/exec_util.rb +22 -0
  99. data/lib/fluent/plugin/file_util.rb +22 -0
  100. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  101. data/lib/fluent/plugin/filter.rb +93 -0
  102. data/lib/fluent/plugin/filter_grep.rb +75 -0
  103. data/lib/fluent/plugin/filter_record_transformer.rb +342 -0
  104. data/lib/fluent/plugin/filter_stdout.rb +53 -0
  105. data/lib/fluent/plugin/formatter.rb +45 -0
  106. data/lib/fluent/plugin/formatter_csv.rb +47 -0
  107. data/lib/fluent/plugin/formatter_hash.rb +29 -0
  108. data/lib/fluent/plugin/formatter_json.rb +44 -0
  109. data/lib/fluent/plugin/formatter_ltsv.rb +41 -0
  110. data/lib/fluent/plugin/formatter_msgpack.rb +29 -0
  111. data/lib/fluent/plugin/formatter_out_file.rb +78 -0
  112. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  113. data/lib/fluent/plugin/formatter_stdout.rb +74 -0
  114. data/lib/fluent/plugin/in_debug_agent.rb +64 -0
  115. data/lib/fluent/plugin/in_dummy.rb +135 -0
  116. data/lib/fluent/plugin/in_exec.rb +149 -0
  117. data/lib/fluent/plugin/in_forward.rb +366 -0
  118. data/lib/fluent/plugin/in_gc_stat.rb +52 -0
  119. data/lib/fluent/plugin/in_http.rb +422 -0
  120. data/lib/fluent/plugin/in_monitor_agent.rb +401 -0
  121. data/lib/fluent/plugin/in_object_space.rb +90 -0
  122. data/lib/fluent/plugin/in_syslog.rb +204 -0
  123. data/lib/fluent/plugin/in_tail.rb +838 -0
  124. data/lib/fluent/plugin/in_tcp.rb +41 -0
  125. data/lib/fluent/plugin/in_udp.rb +37 -0
  126. data/lib/fluent/plugin/in_unix.rb +201 -0
  127. data/lib/fluent/plugin/input.rb +33 -0
  128. data/lib/fluent/plugin/multi_output.rb +95 -0
  129. data/lib/fluent/plugin/out_buffered_null.rb +59 -0
  130. data/lib/fluent/plugin/out_buffered_stdout.rb +70 -0
  131. data/lib/fluent/plugin/out_copy.rb +42 -0
  132. data/lib/fluent/plugin/out_exec.rb +114 -0
  133. data/lib/fluent/plugin/out_exec_filter.rb +393 -0
  134. data/lib/fluent/plugin/out_file.rb +167 -0
  135. data/lib/fluent/plugin/out_forward.rb +646 -0
  136. data/lib/fluent/plugin/out_null.rb +27 -0
  137. data/lib/fluent/plugin/out_relabel.rb +28 -0
  138. data/lib/fluent/plugin/out_roundrobin.rb +80 -0
  139. data/lib/fluent/plugin/out_stdout.rb +48 -0
  140. data/lib/fluent/plugin/out_stream.rb +130 -0
  141. data/lib/fluent/plugin/output.rb +1020 -0
  142. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  143. data/lib/fluent/plugin/parser.rb +175 -0
  144. data/lib/fluent/plugin/parser_apache.rb +28 -0
  145. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  146. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  147. data/lib/fluent/plugin/parser_csv.rb +33 -0
  148. data/lib/fluent/plugin/parser_json.rb +79 -0
  149. data/lib/fluent/plugin/parser_ltsv.rb +50 -0
  150. data/lib/fluent/plugin/parser_multiline.rb +104 -0
  151. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  152. data/lib/fluent/plugin/parser_none.rb +36 -0
  153. data/lib/fluent/plugin/parser_regexp.rb +73 -0
  154. data/lib/fluent/plugin/parser_syslog.rb +82 -0
  155. data/lib/fluent/plugin/parser_tsv.rb +37 -0
  156. data/lib/fluent/plugin/socket_util.rb +22 -0
  157. data/lib/fluent/plugin/storage.rb +84 -0
  158. data/lib/fluent/plugin/storage_local.rb +132 -0
  159. data/lib/fluent/plugin/string_util.rb +22 -0
  160. data/lib/fluent/plugin_helper.rb +42 -0
  161. data/lib/fluent/plugin_helper/child_process.rb +298 -0
  162. data/lib/fluent/plugin_helper/compat_parameters.rb +224 -0
  163. data/lib/fluent/plugin_helper/event_emitter.rb +80 -0
  164. data/lib/fluent/plugin_helper/event_loop.rb +118 -0
  165. data/lib/fluent/plugin_helper/formatter.rb +149 -0
  166. data/lib/fluent/plugin_helper/inject.rb +125 -0
  167. data/lib/fluent/plugin_helper/parser.rb +147 -0
  168. data/lib/fluent/plugin_helper/retry_state.rb +177 -0
  169. data/lib/fluent/plugin_helper/storage.rb +331 -0
  170. data/lib/fluent/plugin_helper/thread.rb +147 -0
  171. data/lib/fluent/plugin_helper/timer.rb +90 -0
  172. data/lib/fluent/plugin_id.rb +63 -0
  173. data/lib/fluent/process.rb +504 -0
  174. data/lib/fluent/registry.rb +99 -0
  175. data/lib/fluent/root_agent.rb +314 -0
  176. data/lib/fluent/rpc.rb +94 -0
  177. data/lib/fluent/supervisor.rb +680 -0
  178. data/lib/fluent/system_config.rb +122 -0
  179. data/lib/fluent/test.rb +56 -0
  180. data/lib/fluent/test/base.rb +85 -0
  181. data/lib/fluent/test/driver/base.rb +179 -0
  182. data/lib/fluent/test/driver/base_owned.rb +70 -0
  183. data/lib/fluent/test/driver/base_owner.rb +125 -0
  184. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  185. data/lib/fluent/test/driver/filter.rb +57 -0
  186. data/lib/fluent/test/driver/formatter.rb +30 -0
  187. data/lib/fluent/test/driver/input.rb +31 -0
  188. data/lib/fluent/test/driver/multi_output.rb +52 -0
  189. data/lib/fluent/test/driver/output.rb +76 -0
  190. data/lib/fluent/test/driver/parser.rb +30 -0
  191. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  192. data/lib/fluent/test/filter_test.rb +77 -0
  193. data/lib/fluent/test/formatter_test.rb +65 -0
  194. data/lib/fluent/test/helpers.rb +79 -0
  195. data/lib/fluent/test/input_test.rb +172 -0
  196. data/lib/fluent/test/log.rb +73 -0
  197. data/lib/fluent/test/output_test.rb +156 -0
  198. data/lib/fluent/test/parser_test.rb +70 -0
  199. data/lib/fluent/time.rb +175 -0
  200. data/lib/fluent/timezone.rb +133 -0
  201. data/lib/fluent/unique_id.rb +39 -0
  202. data/lib/fluent/version.rb +21 -0
  203. data/lib/fluent/winsvc.rb +71 -0
  204. data/test/compat/test_calls_super.rb +166 -0
  205. data/test/compat/test_parser.rb +82 -0
  206. data/test/config/assertions.rb +42 -0
  207. data/test/config/test_config_parser.rb +507 -0
  208. data/test/config/test_configurable.rb +1194 -0
  209. data/test/config/test_configure_proxy.rb +386 -0
  210. data/test/config/test_dsl.rb +415 -0
  211. data/test/config/test_element.rb +403 -0
  212. data/test/config/test_literal_parser.rb +297 -0
  213. data/test/config/test_section.rb +184 -0
  214. data/test/config/test_system_config.rb +120 -0
  215. data/test/config/test_types.rb +171 -0
  216. data/test/helper.rb +119 -0
  217. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  218. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  219. data/test/plugin/data/2010/01/20100102.log +0 -0
  220. data/test/plugin/data/log/bar +0 -0
  221. data/test/plugin/data/log/foo/bar.log +0 -0
  222. data/test/plugin/data/log/test.log +0 -0
  223. data/test/plugin/test_bare_output.rb +118 -0
  224. data/test/plugin/test_base.rb +75 -0
  225. data/test/plugin/test_buf_file.rb +571 -0
  226. data/test/plugin/test_buf_memory.rb +42 -0
  227. data/test/plugin/test_buffer.rb +1200 -0
  228. data/test/plugin/test_buffer_chunk.rb +168 -0
  229. data/test/plugin/test_buffer_file_chunk.rb +771 -0
  230. data/test/plugin/test_buffer_memory_chunk.rb +265 -0
  231. data/test/plugin/test_file_util.rb +96 -0
  232. data/test/plugin/test_filter.rb +353 -0
  233. data/test/plugin/test_filter_grep.rb +119 -0
  234. data/test/plugin/test_filter_record_transformer.rb +600 -0
  235. data/test/plugin/test_filter_stdout.rb +211 -0
  236. data/test/plugin/test_formatter_csv.rb +94 -0
  237. data/test/plugin/test_formatter_json.rb +30 -0
  238. data/test/plugin/test_formatter_ltsv.rb +52 -0
  239. data/test/plugin/test_formatter_msgpack.rb +28 -0
  240. data/test/plugin/test_formatter_out_file.rb +95 -0
  241. data/test/plugin/test_formatter_single_value.rb +38 -0
  242. data/test/plugin/test_in_debug_agent.rb +28 -0
  243. data/test/plugin/test_in_dummy.rb +188 -0
  244. data/test/plugin/test_in_exec.rb +133 -0
  245. data/test/plugin/test_in_forward.rb +635 -0
  246. data/test/plugin/test_in_gc_stat.rb +39 -0
  247. data/test/plugin/test_in_http.rb +442 -0
  248. data/test/plugin/test_in_monitor_agent.rb +329 -0
  249. data/test/plugin/test_in_object_space.rb +64 -0
  250. data/test/plugin/test_in_syslog.rb +205 -0
  251. data/test/plugin/test_in_tail.rb +1001 -0
  252. data/test/plugin/test_in_tcp.rb +102 -0
  253. data/test/plugin/test_in_udp.rb +121 -0
  254. data/test/plugin/test_in_unix.rb +126 -0
  255. data/test/plugin/test_input.rb +122 -0
  256. data/test/plugin/test_multi_output.rb +180 -0
  257. data/test/plugin/test_out_buffered_null.rb +79 -0
  258. data/test/plugin/test_out_buffered_stdout.rb +122 -0
  259. data/test/plugin/test_out_copy.rb +160 -0
  260. data/test/plugin/test_out_exec.rb +155 -0
  261. data/test/plugin/test_out_exec_filter.rb +262 -0
  262. data/test/plugin/test_out_file.rb +383 -0
  263. data/test/plugin/test_out_forward.rb +590 -0
  264. data/test/plugin/test_out_null.rb +29 -0
  265. data/test/plugin/test_out_relabel.rb +28 -0
  266. data/test/plugin/test_out_roundrobin.rb +146 -0
  267. data/test/plugin/test_out_stdout.rb +92 -0
  268. data/test/plugin/test_out_stream.rb +93 -0
  269. data/test/plugin/test_output.rb +568 -0
  270. data/test/plugin/test_output_as_buffered.rb +1604 -0
  271. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  272. data/test/plugin/test_output_as_buffered_retries.rb +839 -0
  273. data/test/plugin/test_output_as_buffered_secondary.rb +817 -0
  274. data/test/plugin/test_output_as_standard.rb +374 -0
  275. data/test/plugin/test_owned_by.rb +35 -0
  276. data/test/plugin/test_parser_apache.rb +42 -0
  277. data/test/plugin/test_parser_apache2.rb +38 -0
  278. data/test/plugin/test_parser_apache_error.rb +45 -0
  279. data/test/plugin/test_parser_base.rb +32 -0
  280. data/test/plugin/test_parser_csv.rb +104 -0
  281. data/test/plugin/test_parser_json.rb +107 -0
  282. data/test/plugin/test_parser_labeled_tsv.rb +129 -0
  283. data/test/plugin/test_parser_multiline.rb +100 -0
  284. data/test/plugin/test_parser_nginx.rb +48 -0
  285. data/test/plugin/test_parser_none.rb +53 -0
  286. data/test/plugin/test_parser_regexp.rb +277 -0
  287. data/test/plugin/test_parser_syslog.rb +66 -0
  288. data/test/plugin/test_parser_time.rb +46 -0
  289. data/test/plugin/test_parser_tsv.rb +121 -0
  290. data/test/plugin/test_storage.rb +167 -0
  291. data/test/plugin/test_storage_local.rb +8 -0
  292. data/test/plugin/test_string_util.rb +26 -0
  293. data/test/plugin_helper/test_child_process.rb +608 -0
  294. data/test/plugin_helper/test_compat_parameters.rb +242 -0
  295. data/test/plugin_helper/test_event_emitter.rb +51 -0
  296. data/test/plugin_helper/test_event_loop.rb +52 -0
  297. data/test/plugin_helper/test_formatter.rb +252 -0
  298. data/test/plugin_helper/test_inject.rb +487 -0
  299. data/test/plugin_helper/test_parser.rb +263 -0
  300. data/test/plugin_helper/test_retry_state.rb +399 -0
  301. data/test/plugin_helper/test_storage.rb +521 -0
  302. data/test/plugin_helper/test_thread.rb +164 -0
  303. data/test/plugin_helper/test_timer.rb +131 -0
  304. data/test/scripts/exec_script.rb +32 -0
  305. data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
  306. data/test/scripts/fluent/plugin/out_test.rb +81 -0
  307. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  308. data/test/scripts/fluent/plugin/parser_known.rb +4 -0
  309. data/test/test_config.rb +179 -0
  310. data/test/test_configdsl.rb +148 -0
  311. data/test/test_event.rb +329 -0
  312. data/test/test_event_router.rb +331 -0
  313. data/test/test_event_time.rb +184 -0
  314. data/test/test_filter.rb +121 -0
  315. data/test/test_formatter.rb +319 -0
  316. data/test/test_input.rb +31 -0
  317. data/test/test_log.rb +572 -0
  318. data/test/test_match.rb +137 -0
  319. data/test/test_mixin.rb +351 -0
  320. data/test/test_output.rb +214 -0
  321. data/test/test_plugin_classes.rb +136 -0
  322. data/test/test_plugin_helper.rb +81 -0
  323. data/test/test_process.rb +48 -0
  324. data/test/test_root_agent.rb +278 -0
  325. data/test/test_supervisor.rb +339 -0
  326. data/test/test_time_formatter.rb +186 -0
  327. data/test/test_unique_id.rb +47 -0
  328. metadata +823 -0
@@ -0,0 +1,1604 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/output'
3
+ require 'fluent/plugin/buffer'
4
+ require 'fluent/event'
5
+
6
+ require 'json'
7
+ require 'time'
8
+ require 'timeout'
9
+ require 'timecop'
10
+
11
+ module FluentPluginOutputAsBufferedTest
12
+ class DummyBareOutput < Fluent::Plugin::Output
13
+ def register(name, &block)
14
+ instance_variable_set("@#{name}", block)
15
+ end
16
+ end
17
+ class DummySyncOutput < DummyBareOutput
18
+ def initialize
19
+ super
20
+ @process = nil
21
+ end
22
+ def process(tag, es)
23
+ @process ? @process.call(tag, es) : nil
24
+ end
25
+ end
26
+ class DummyAsyncOutput < DummyBareOutput
27
+ def initialize
28
+ super
29
+ @format = nil
30
+ @write = nil
31
+ end
32
+ def format(tag, time, record)
33
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
34
+ end
35
+ def write(chunk)
36
+ @write ? @write.call(chunk) : nil
37
+ end
38
+ end
39
+ class DummyDelayedOutput < DummyBareOutput
40
+ def initialize
41
+ super
42
+ @format = nil
43
+ @try_write = nil
44
+ @shutdown_hook = nil
45
+ end
46
+ def format(tag, time, record)
47
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
48
+ end
49
+ def try_write(chunk)
50
+ @try_write ? @try_write.call(chunk) : nil
51
+ end
52
+ def shutdown
53
+ if @shutdown_hook
54
+ @shutdown_hook.call
55
+ end
56
+ super
57
+ end
58
+ end
59
+ class DummyFullFeatureOutput < DummyBareOutput
60
+ def initialize
61
+ super
62
+ @prefer_buffered_processing = nil
63
+ @prefer_delayed_commit = nil
64
+ @process = nil
65
+ @format = nil
66
+ @write = nil
67
+ @try_write = nil
68
+ end
69
+ def prefer_buffered_processing
70
+ @prefer_buffered_processing ? @prefer_buffered_processing.call : false
71
+ end
72
+ def prefer_delayed_commit
73
+ @prefer_delayed_commit ? @prefer_delayed_commit.call : false
74
+ end
75
+ def process(tag, es)
76
+ @process ? @process.call(tag, es) : nil
77
+ end
78
+ def format(tag, time, record)
79
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
80
+ end
81
+ def write(chunk)
82
+ @write ? @write.call(chunk) : nil
83
+ end
84
+ def try_write(chunk)
85
+ @try_write ? @try_write.call(chunk) : nil
86
+ end
87
+ end
88
+ end
89
+
90
+ class BufferedOutputTest < Test::Unit::TestCase
91
+ def create_output(type=:full)
92
+ case type
93
+ when :bare then FluentPluginOutputAsBufferedTest::DummyBareOutput.new
94
+ when :sync then FluentPluginOutputAsBufferedTest::DummySyncOutput.new
95
+ when :buffered then FluentPluginOutputAsBufferedTest::DummyAsyncOutput.new
96
+ when :delayed then FluentPluginOutputAsBufferedTest::DummyDelayedOutput.new
97
+ when :full then FluentPluginOutputAsBufferedTest::DummyFullFeatureOutput.new
98
+ else
99
+ raise ArgumentError, "unknown type: #{type}"
100
+ end
101
+ end
102
+ def create_metadata(timekey: nil, tag: nil, variables: nil)
103
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
104
+ end
105
+ def waiting(seconds)
106
+ begin
107
+ Timeout.timeout(seconds) do
108
+ yield
109
+ end
110
+ rescue Timeout::Error
111
+ STDERR.print(*@i.log.out.logs)
112
+ raise
113
+ end
114
+ end
115
+
116
+ teardown do
117
+ if @i
118
+ @i.stop unless @i.stopped?
119
+ @i.before_shutdown unless @i.before_shutdown?
120
+ @i.shutdown unless @i.shutdown?
121
+ @i.after_shutdown unless @i.after_shutdown?
122
+ @i.close unless @i.closed?
123
+ @i.terminate unless @i.terminated?
124
+ end
125
+ Timecop.return
126
+ end
127
+
128
+ sub_test_case 'buffered output configured with many chunk keys' do
129
+ setup do
130
+ @stored_global_logger = $log
131
+ $log = Fluent::Test::TestLogger.new
132
+ @hash = {
133
+ 'flush_mode' => 'interval',
134
+ 'flush_thread_burst_interval' => 0.01,
135
+ 'chunk_limit_size' => 1024,
136
+ 'timekey' => 60,
137
+ }
138
+ @i = create_output(:buffered)
139
+ end
140
+ teardown do
141
+ $log = @stored_global_logger
142
+ end
143
+ test 'nothing are warned with less chunk keys' do
144
+ chunk_keys = 'time,key1,key2,key3'
145
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
146
+ logs = @i.log.out.logs.dup
147
+ @i.start
148
+ @i.after_start
149
+ assert{ logs.select{|log| log.include?('[warn]') }.size == 0 }
150
+ end
151
+
152
+ test 'a warning reported with 4 chunk keys' do
153
+ chunk_keys = 'key1,key2,key3,key4'
154
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
155
+ logs = @i.log.out.logs.dup
156
+
157
+ @i.start # this calls `log.reset`... capturing logs about configure must be done before this line
158
+ @i.after_start
159
+ assert_equal ['key1', 'key2', 'key3', 'key4'], @i.chunk_keys
160
+
161
+ assert{ logs.select{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') }.size == 1 }
162
+ end
163
+
164
+ test 'a warning reported with 4 chunk keys including "tag"' do
165
+ chunk_keys = 'tag,key1,key2,key3'
166
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
167
+ logs = @i.log.out.logs.dup
168
+ @i.start # this calls `log.reset`... capturing logs about configure must be done before this line
169
+ @i.after_start
170
+ assert{ logs.select{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') }.size == 1 }
171
+ end
172
+
173
+ test 'time key is not included for warned chunk keys' do
174
+ chunk_keys = 'time,key1,key2,key3'
175
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_keys,@hash)]))
176
+ logs = @i.log.out.logs.dup
177
+ @i.start
178
+ @i.after_start
179
+ assert{ logs.select{|log| log.include?('[warn]') }.size == 0 }
180
+ end
181
+ end
182
+
183
+ sub_test_case 'buffered output feature without any buffer key, flush_mode: lazy' do
184
+ setup do
185
+ hash = {
186
+ 'flush_mode' => 'lazy',
187
+ 'flush_thread_burst_interval' => 0.01,
188
+ 'flush_thread_count' => 2,
189
+ 'chunk_limit_size' => 1024,
190
+ }
191
+ @i = create_output(:buffered)
192
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
193
+ @i.start
194
+ @i.after_start
195
+ end
196
+
197
+ test '#start does not create enqueue thread, but creates flush threads' do
198
+ @i.thread_wait_until_start
199
+
200
+ assert @i.thread_exist?(:flush_thread_0)
201
+ assert @i.thread_exist?(:flush_thread_1)
202
+ assert !@i.thread_exist?(:enqueue_thread)
203
+ end
204
+
205
+ test '#format is called for each events' do
206
+ ary = []
207
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
208
+
209
+ t = event_time()
210
+ es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
211
+
212
+ 4.times do
213
+ @i.emit_events('tag.test', es)
214
+ end
215
+
216
+ assert_equal 8, ary.size
217
+ 4.times do |i|
218
+ assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
219
+ assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
220
+ end
221
+ end
222
+
223
+ test '#write is called only when chunk bytes limit exceeded, and buffer chunk is purged' do
224
+ ary = []
225
+ @i.register(:write){|chunk| ary << chunk.read }
226
+
227
+ tag = "test.tag"
228
+ t = event_time()
229
+ r = {}
230
+ (0...10).each do |i|
231
+ r["key#{i}"] = "value #{i}"
232
+ end
233
+ event_size = [tag, t, r].to_json.size # 195
234
+
235
+ (1024 * 0.9 / event_size).to_i.times do |i|
236
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
237
+ end
238
+ assert{ @i.buffer.queue.size == 0 && ary.size == 0 }
239
+
240
+ staged_chunk = @i.buffer.stage[@i.buffer.stage.keys.first]
241
+ assert{ staged_chunk.size != 0 }
242
+
243
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
244
+
245
+ assert{ @i.buffer.queue.size > 0 || @i.buffer.dequeued.size > 0 || ary.size > 0 }
246
+
247
+ waiting(10) do
248
+ Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
249
+ Thread.pass until staged_chunk.size == 0
250
+ end
251
+
252
+ assert_equal 1, ary.size
253
+ assert_equal [tag,t,r].to_json * (1024 / event_size), ary.first
254
+ end
255
+
256
+ test 'flush_at_shutdown work well when plugin is shutdown' do
257
+ ary = []
258
+ @i.register(:write){|chunk| ary << chunk.read }
259
+
260
+ tag = "test.tag"
261
+ t = event_time()
262
+ r = {}
263
+ (0...10).each do |i|
264
+ r["key#{i}"] = "value #{i}"
265
+ end
266
+ event_size = [tag, t, r].to_json.size # 195
267
+
268
+ (1024 * 0.9 / event_size).to_i.times do |i|
269
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
270
+ end
271
+ assert{ @i.buffer.queue.size == 0 && ary.size == 0 }
272
+
273
+ @i.stop
274
+ @i.before_shutdown
275
+ @i.shutdown
276
+ @i.after_shutdown
277
+
278
+ waiting(10) do
279
+ Thread.pass until ary.size == 1
280
+ end
281
+ assert_equal [tag,t,r].to_json * (1024 * 0.9 / event_size), ary.first
282
+ end
283
+ end
284
+
285
+ sub_test_case 'buffered output feature without any buffer key, flush_mode: interval' do
286
+ setup do
287
+ hash = {
288
+ 'flush_mode' => 'interval',
289
+ 'flush_interval' => 1,
290
+ 'flush_thread_count' => 1,
291
+ 'flush_thread_burst_interval' => 0.01,
292
+ 'chunk_limit_size' => 1024,
293
+ }
294
+ @i = create_output(:buffered)
295
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
296
+ @i.start
297
+ @i.after_start
298
+ end
299
+
300
+ test '#start creates enqueue thread and flush threads' do
301
+ @i.thread_wait_until_start
302
+
303
+ assert @i.thread_exist?(:flush_thread_0)
304
+ assert @i.thread_exist?(:enqueue_thread)
305
+ end
306
+
307
+ test '#format is called for each event streams' do
308
+ ary = []
309
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
310
+
311
+ t = event_time()
312
+ es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
313
+
314
+ 4.times do
315
+ @i.emit_events('tag.test', es)
316
+ end
317
+
318
+ assert_equal 8, ary.size
319
+ 4.times do |i|
320
+ assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
321
+ assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
322
+ end
323
+ end
324
+
325
+ test '#write is called per flush_interval, and buffer chunk is purged' do
326
+ @i.thread_wait_until_start
327
+
328
+ ary = []
329
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
330
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| ary << data } }
331
+
332
+ t = event_time()
333
+ r = {}
334
+ (0...10).each do |i|
335
+ r["key#{i}"] = "value #{i}"
336
+ end
337
+
338
+ 3.times do |i|
339
+ rand_records = rand(1..4)
340
+ es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
341
+ assert_equal rand_records, es.size
342
+
343
+ @i.interrupt_flushes
344
+
345
+ assert{ @i.buffer.queue.size == 0 }
346
+
347
+ @i.emit_events("test.tag", es)
348
+
349
+ assert{ @i.buffer.queue.size == 0 }
350
+ assert{ @i.buffer.stage.size == 1 }
351
+
352
+ staged_chunk = @i.instance_eval{ @buffer.stage[@buffer.stage.keys.first] }
353
+ assert{ staged_chunk.size != 0 }
354
+
355
+ @i.enqueue_thread_wait
356
+
357
+ waiting(10) do
358
+ Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
359
+ Thread.pass until staged_chunk.size == 0
360
+ end
361
+
362
+ assert_equal rand_records, ary.size
363
+ ary.reject!{|e| true }
364
+ end
365
+ end
366
+
367
+ test 'flush_at_shutdown work well when plugin is shutdown' do
368
+ ary = []
369
+ @i.register(:write){|chunk| ary << chunk.read }
370
+
371
+ tag = "test.tag"
372
+ t = event_time()
373
+ r = {}
374
+ (0...10).each do |i|
375
+ r["key#{i}"] = "value #{i}"
376
+ end
377
+ event_size = [tag, t, r].to_json.size # 195
378
+
379
+ (1024 * 0.9 / event_size).to_i.times do |i|
380
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
381
+ end
382
+ assert{ @i.buffer.queue.size == 0 && ary.size == 0 }
383
+
384
+ @i.stop
385
+ @i.before_shutdown
386
+ @i.shutdown
387
+ @i.after_shutdown
388
+
389
+ waiting(10) do
390
+ Thread.pass until ary.size == 1
391
+ end
392
+ assert_equal [tag,t,r].to_json * (1024 * 0.9 / event_size), ary.first
393
+ end
394
+ end
395
+
396
+ sub_test_case 'buffered output feature without any buffer key, flush_mode: immediate' do
397
+ setup do
398
+ hash = {
399
+ 'flush_mode' => 'immediate',
400
+ 'flush_thread_count' => 1,
401
+ 'flush_thread_burst_interval' => 0.01,
402
+ 'chunk_limit_size' => 1024,
403
+ }
404
+ @i = create_output(:buffered)
405
+ @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
406
+ @i.start
407
+ @i.after_start
408
+ end
409
+
410
+ test '#start does not create enqueue thread, but creates flush threads' do
411
+ @i.thread_wait_until_start
412
+
413
+ assert @i.thread_exist?(:flush_thread_0)
414
+ assert !@i.thread_exist?(:enqueue_thread)
415
+ end
416
+
417
+ test '#format is called for each event streams' do
418
+ ary = []
419
+ @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }
420
+
421
+ t = event_time()
422
+ es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])
423
+
424
+ 4.times do
425
+ @i.emit_events('tag.test', es)
426
+ end
427
+
428
+ assert_equal 8, ary.size
429
+ 4.times do |i|
430
+ assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
431
+ assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
432
+ end
433
+ end
434
+
435
+ test '#write is called every time for each emits, and buffer chunk is purged' do
436
+ @i.thread_wait_until_start
437
+
438
+ ary = []
439
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
440
+ @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| ary << data } }
441
+
442
+ t = event_time()
443
+ r = {}
444
+ (0...10).each do |i|
445
+ r["key#{i}"] = "value #{i}"
446
+ end
447
+
448
+ 3.times do |i|
449
+ rand_records = rand(1..5)
450
+ es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
451
+ assert_equal rand_records, es.size
452
+ @i.emit_events("test.tag", es)
453
+
454
+ assert{ @i.buffer.stage.size == 0 && (@i.buffer.queue.size == 1 || @i.buffer.dequeued.size == 1 || ary.size > 0) }
455
+
456
+ waiting(10) do
457
+ Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
458
+ end
459
+
460
+ assert_equal rand_records, ary.size
461
+ ary.reject!{|e| true }
462
+ end
463
+ end
464
+
465
+ test 'flush_at_shutdown work well when plugin is shutdown' do
466
+ ary = []
467
+ @i.register(:write){|chunk| ary << chunk.read }
468
+
469
+ tag = "test.tag"
470
+ t = event_time()
471
+ r = {}
472
+ (0...10).each do |i|
473
+ r["key#{i}"] = "value #{i}"
474
+ end
475
+ @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
476
+
477
+ @i.stop
478
+ @i.before_shutdown
479
+ @i.shutdown
480
+ @i.after_shutdown
481
+
482
+ waiting(10) do
483
+ Thread.pass until ary.size == 1
484
+ end
485
+ assert_equal [tag,t,r].to_json, ary.first
486
+ end
487
+ end
488
+
489
# Exercises chunking by event time: events are grouped into 30-second timekey
# ranges and flushed `timekey_wait` seconds after each range closes.
sub_test_case 'buffered output feature with timekey and range' do
  setup do
    chunk_key = 'time'
    hash = {
      'timekey' => 30, # per 30seconds
      'timekey_wait' => 5, # 5 second delay for flush
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.01,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
    @i.start
    @i.after_start
  end

  test '#configure raises config error if timekey is not specified' do
    i = create_output(:buffered)
    assert_raise Fluent::ConfigError do
      # NOTE(review): removed a stray trailing comma in the argument list.
      i.configure(config_element('ROOT','',{},[config_element('buffer','time')]))
    end
  end

  test 'default flush_mode is set to :lazy' do
    assert_equal :lazy, @i.instance_eval{ @flush_mode }
  end

  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    ary = []
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    5.times do
      @i.emit_events('tag.test', es)
    end

    assert_equal 10, ary.size
    5.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
    end
  end

  test '#write is called per time ranges after timekey_wait, and buffer chunk is purged' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )

    @i.thread_wait_until_start

    ary = []
    # Records whether each event landed in the chunk of its own timekey range.
    metachecks = []

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30) } }

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
    ]
    events = [
      ["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    # Emit in random order: grouping must depend on timekey, not arrival order.
    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 0 }

    @i.enqueue_thread_wait

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    # Only the first range (14:03:00-03:29) is older than timekey + timekey_wait.
    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    assert_equal 3, ary.size
    assert_equal 2, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 1, ary.select{|e| e[0] == "test.tag.2" }.size

    Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )

    @i.enqueue_thread_wait

    # 14:04:04 is still within timekey_wait of the 14:03:30 range: no new flush.
    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:06 +0900') )

    @i.enqueue_thread_wait
    waiting(4) do
      Thread.pass until @i.write_count > 1
    end

    assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }

    assert_equal 9, ary.size
    assert_equal 7, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 2, ary.select{|e| e[0] == "test.tag.2" }.size

    assert metachecks.all?{|e| e }
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )

    @i.thread_wait_until_start

    ary = []
    metachecks = []

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30) } }

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
    ]
    events = [
      ["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 0 }

    @i.enqueue_thread_wait

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:06 +0900') )

    @i.enqueue_thread_wait
    waiting(4) do
      Thread.pass until @i.write_count > 1
    end

    assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )

    assert_equal 9, ary.size

    # The last range (14:04:00-04:29) is still staged; shutdown must flush it.
    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(4) do
      Thread.pass until @i.write_count > 2
    end

    assert_equal 11, ary.size
    assert metachecks.all?{|e| e }
  end
end
704
+
705
# Exercises chunking by tag: one buffer chunk per tag, flushed either when the
# chunk reaches chunk_limit_size (1024 bytes) or when flush_interval expires.
sub_test_case 'buffered output feature with tag key' do
  setup do
    chunk_key = 'tag'
    hash = {
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
    @i.start
    @i.after_start
  end

  test 'default flush_mode is set to :interval' do
    assert_equal :interval, @i.instance_eval{ @flush_mode }
  end

  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    ary = []
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    5.times do
      @i.emit_events('tag.test', es)
    end

    assert_equal 10, ary.size
    5.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
    end
  end

  test '#write is called per tags, per flush_interval & chunk sizes, and buffer chunk is purged' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []
    # Records whether each event was written from the chunk of its own tag.
    metachecks = []

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.tag == e[0]) } }

    @i.thread_wait_until_start

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      event_time('2016-04-13 14:03:21 +0900'), event_time('2016-04-13 14:03:23 +0900'), event_time('2016-04-13 14:03:29 +0900'),
      event_time('2016-04-13 14:03:30 +0900'), event_time('2016-04-13 14:03:33 +0900'), event_time('2016-04-13 14:03:38 +0900'),
      event_time('2016-04-13 14:03:43 +0900'), event_time('2016-04-13 14:03:49 +0900'), event_time('2016-04-13 14:03:51 +0900'),
      event_time('2016-04-13 14:04:00 +0900'), event_time('2016-04-13 14:04:01 +0900'),
    ]
    # size of a event is 197
    events = [
      ["test.tag.1", ts[0], r],
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r],
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r],
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 2 } # test.tag.1 x1, test.tag.2 x1

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    assert{ @i.buffer.stage.size == 2 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

    Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 2 }

    # to trigger try_flush with flush_thread_burst_interval
    Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:15 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    assert{ @i.buffer.stage.size == 0 }

    waiting(4) do
      Thread.pass until @i.write_count > 2
    end

    assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

    assert metachecks.all?{|e| e }
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []
    metachecks = []

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.tag == e[0]) } }

    @i.thread_wait_until_start

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      event_time('2016-04-13 14:03:21 +0900'), event_time('2016-04-13 14:03:23 +0900'), event_time('2016-04-13 14:03:29 +0900'),
      event_time('2016-04-13 14:03:30 +0900'), event_time('2016-04-13 14:03:33 +0900'), event_time('2016-04-13 14:03:38 +0900'),
      event_time('2016-04-13 14:03:43 +0900'), event_time('2016-04-13 14:03:49 +0900'), event_time('2016-04-13 14:03:51 +0900'),
      event_time('2016-04-13 14:04:00 +0900'), event_time('2016-04-13 14:04:01 +0900'),
    ]
    # size of a event is 197
    events = [
      ["test.tag.1", ts[0], r],
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r],
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r],
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 2 } # test.tag.1 x1, test.tag.2 x1

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    assert{ @i.buffer.stage.size == 2 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    # NOTE(review): was `until @i.write_count > 1` — the assertion below
    # requires write_count == 3 (two more chunks flushed at shutdown), so wait
    # for both writes to avoid an intermittent failure.
    waiting(4) do
      Thread.pass until @i.write_count > 2
    end

    assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 3 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

    assert metachecks.all?{|e| e }
  end
end
921
+
922
# Exercises chunking by record fields (`name`,`service`): one chunk per value
# combination, flushed by chunk_limit_size or flush_interval.
sub_test_case 'buffered output feature with variables' do
  setup do
    chunk_key = 'name,service'
    hash = {
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
    @i.start
    @i.after_start
  end

  test 'default flush_mode is set to :interval' do
    assert_equal :interval, @i.instance_eval{ @flush_mode }
  end

  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    ary = []
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([
      [t, {"key" => "value1", "name" => "moris", "service" => "a"}],
      [t, {"key" => "value2", "name" => "moris", "service" => "b"}],
    ])

    5.times do
      @i.emit_events('tag.test', es)
    end

    assert_equal 10, ary.size
    5.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1", "name" => "moris", "service" => "a"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2", "name" => "moris", "service" => "b"}], ary[i*2+1]
    end
  end

  test '#write is called per value combination of variables, per flush_interval & chunk sizes, and buffer chunk is purged' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []
    # Records whether each event was written from the chunk of its own
    # name/service combination.
    metachecks = []

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (e[2]["name"] == chunk.metadata.variables[:name] && e[2]["service"] == chunk.metadata.variables[:service]) } }

    @i.thread_wait_until_start

    # size of a event is 195
    dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    events = [
      ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1) xxx-a (6 events)
      ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2) yyy-a (3 events)
      ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3) xxx-b (2 events)
      ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3)
      ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
      ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 3 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
    assert ary[0...5].all?{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }

    Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 3 }

    # to trigger try_flush with flush_thread_burst_interval
    Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
    @i.enqueue_thread_wait
    Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    assert{ @i.buffer.stage.size == 0 }

    # NOTE(review): was `until @i.write_count > 1` — the assertion below
    # requires write_count == 4, so wait for all three remaining writes to
    # avoid an intermittent failure.
    waiting(4) do
      Thread.pass until @i.write_count > 3
    end

    assert{ @i.buffer.stage.size == 0 && @i.write_count == 4 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
    assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
    assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
    assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size

    assert metachecks.all?{|e| e }
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

    ary = []
    metachecks = []

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (e[2]["name"] == chunk.metadata.variables[:name] && e[2]["service"] == chunk.metadata.variables[:service]) } }

    @i.thread_wait_until_start

    # size of a event is 195
    dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    events = [
      ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1) xxx-a (6 events)
      ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2) yyy-a (3 events)
      ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3) xxx-b (2 events)
      ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3)
      ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
      ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
      ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 3 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

    @i.enqueue_thread_wait
    @i.flush_thread_wakeup

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 1 }
    assert{ @i.buffer.queue.size == 0 }

    # events fulfills a chunk (and queued immediately)
    assert_equal 5, ary.size
    assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    # NOTE(review): was `until @i.write_count > 1` — the assertion below
    # requires write_count == 4 (three more chunks flushed at shutdown), so
    # wait for all of them to avoid an intermittent failure.
    waiting(4) do
      Thread.pass until @i.write_count > 3
    end

    assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 4 }

    assert_equal 11, ary.size
    assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
    assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
    assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
    assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size

    assert metachecks.all?{|e| e }
  end
end
1134
+
1135
# Default flush_mode selection when multiple chunk keys are configured:
# :interval unless the keys include `time`, :lazy when they do.
sub_test_case 'buffered output feature with many keys' do
  test 'default flush mode is set to :interval if keys does not include time' do
    buffer_opts = {
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'chunk_limit_size' => 1024,
    }
    buffer_conf = config_element('buffer', 'name,service,tag', buffer_opts)
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [buffer_conf]))
    @i.start
    @i.after_start

    assert_equal :interval, @i.instance_eval{ @flush_mode }
  end

  test 'default flush mode is set to :lazy if keys includes time' do
    buffer_opts = {
      'timekey' => 60,
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'chunk_limit_size' => 1024,
    }
    buffer_conf = config_element('buffer', 'name,service,tag,time', buffer_opts)
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT', '', {}, [buffer_conf]))
    @i.start
    @i.after_start

    assert_equal :lazy, @i.instance_eval{ @flush_mode }
  end
end
1169
+
1170
+ sub_test_case 'buffered output feature with delayed commit' do
1171
setup do
  # Delayed-commit plugin: chunks stay dequeued until #commit_write, with a
  # 30 second timeout before automatic rollback.
  buffer_opts = {
    'flush_interval' => 10,
    'flush_thread_count' => 1,
    'flush_thread_burst_interval' => 0.1,
    'delayed_commit_timeout' => 30,
    'chunk_limit_size' => 1024,
  }
  @i = create_output(:delayed)
  @i.configure(config_element('ROOT', '', {}, [config_element('buffer', 'tag', buffer_opts)]))
  @i.start
  @i.after_start
end
1185
+
1186
test '#format is called for each event streams' do
  # Capture every (tag, time, record) triple passed to #format.
  formatted = []
  @i.register(:format){|tag, time, record| formatted << [tag, time, record]; '' }

  t = event_time()
  es = Fluent::ArrayEventStream.new([
    [t, {"key" => "value1", "name" => "moris", "service" => "a"}],
    [t, {"key" => "value2", "name" => "moris", "service" => "b"}],
  ])

  5.times { @i.emit_events('tag.test', es) }

  # 5 emits x 2 events per stream, in emit order.
  assert_equal 10, formatted.size
  formatted.each_slice(2) do |first, second|
    assert_equal ["tag.test", t, {"key" => "value1", "name" => "moris", "service" => "a"}], first
    assert_equal ["tag.test", t, {"key" => "value2", "name" => "moris", "service" => "b"}], second
  end
end
1206
+
1207
test '#try_write is called per flush, buffer chunk is not purged until #commit_write is called' do
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  ary = []
  # Records whether each event was written from the chunk of its own tag.
  metachecks = []
  # Keep every chunk passed to #try_write so we can commit them later.
  chunks = []

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  @i.register(:try_write) do |chunk|
    chunks << chunk
    chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
      e = JSON.parse(data)
      ary << e
      metachecks << (e[0] == chunk.metadata.tag)
    end
  end

  @i.thread_wait_until_start

  # size of a event is 195
  dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  events = [
    ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
    ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
    ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
  ]

  assert_equal 0, @i.write_count

  @i.interrupt_flushes

  events.shuffle.each do |tag, time, record|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
  end
  assert{ @i.buffer.stage.size == 2 }

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count > 0
  end

  assert{ @i.buffer.stage.size == 2 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }
  # Delayed commit: the flushed chunk stays in `dequeued`, not purged.
  assert{ @i.buffer.dequeued.size == 1 }

  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
  assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

  assert_equal 1, chunks.size
  assert !chunks.first.empty?

  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

  @i.enqueue_thread_wait

  assert{ @i.buffer.stage.size == 2 }

  # to trigger try_flush with flush_thread_burst_interval
  Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  assert{ @i.buffer.stage.size == 0 }

  # NOTE(review): was `until @i.write_count > 1` — the assertion below
  # requires write_count == 3, so wait for both remaining writes to avoid an
  # intermittent failure.
  waiting(4) do
    Thread.pass until @i.write_count > 2
  end

  assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
  assert{ @i.buffer.dequeued.size == 3 }

  assert_equal 11, ary.size
  assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

  assert_equal 3, chunks.size
  assert chunks.all?{|c| !c.empty? }

  assert metachecks.all?{|e| e }

  # Committing each chunk purges it and removes it from `dequeued`.
  @i.commit_write(chunks[0].unique_id)
  assert{ @i.buffer.dequeued.size == 2 }
  assert chunks[0].empty?

  @i.commit_write(chunks[1].unique_id)
  assert{ @i.buffer.dequeued.size == 1 }
  assert chunks[1].empty?

  @i.commit_write(chunks[2].unique_id)
  assert{ @i.buffer.dequeued.size == 0 }
  assert chunks[2].empty?

  # no problem to commit chunks already committed
  assert_nothing_raised do
    @i.commit_write(chunks[2].unique_id)
  end
end
1325
+
1326
test '#rollback_write and #try_rollback_write can rollback buffer chunks for delayed commit after timeout, and then be able to write it again' do
  # Pin the clock so chunk staging/enqueue decisions are deterministic.
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  ary = []        # every event observed by try_write, as [tag, time, record]
  metachecks = [] # per-event check: event tag matches its chunk's metadata.tag
  chunks = []     # every chunk handed to try_write (delayed commit: not auto-committed)

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  # Delayed-commit write path: record chunk and decode its payload; commit is
  # performed explicitly later via @i.commit_write / rolled back via rollback_write.
  @i.register(:try_write) do |chunk|
    chunks << chunk
    chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
      e = JSON.parse(data)
      ary << e
      metachecks << (e[0] == chunk.metadata.tag)
    end
  end

  @i.thread_wait_until_start

  # size of an event is 195
  dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  # 8 events for test.tag.1 and 3 for test.tag.2 — enough for tag.1 to fill
  # one chunk (which gets enqueued immediately) while the rest stay staged.
  events = [
    ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
    ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
    ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
  ]

  assert_equal 0, @i.write_count

  # Suspend automatic flushing while events are emitted, so the first flush
  # happens only at the explicit wakeup below.
  @i.interrupt_flushes

  # Shuffle to make the test independent of emit order.
  events.shuffle.each do |tag, time, record|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
  end
  assert{ @i.buffer.stage.size == 2 }

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count > 0
  end

  # Exactly one (full) chunk was flushed; it stays in `dequeued` awaiting commit.
  assert{ @i.buffer.stage.size == 2 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }
  assert{ @i.buffer.dequeued.size == 1 }

  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
  assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

  assert_equal 1, chunks.size
  assert !chunks.first.empty?

  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

  @i.enqueue_thread_wait

  assert{ @i.buffer.stage.size == 2 }

  # to trigger try_flush with flush_thread_burst_interval
  Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  assert{ @i.buffer.stage.size == 0 }

  waiting(4) do
    Thread.pass until @i.write_count > 3
  end

  # All three chunks written once each; none committed yet.
  assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
  assert{ @i.buffer.dequeued.size == 3 }

  assert_equal 11, ary.size
  assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

  assert_equal 3, chunks.size
  assert chunks.all?{|c| !c.empty? }

  assert metachecks.all?{|e| e }

  @i.interrupt_flushes

  # Explicit rollback: the chunk goes back from `dequeued` to the queue head.
  @i.rollback_write(chunks[2].unique_id)

  assert{ @i.buffer.dequeued.size == 2 }
  assert{ @i.buffer.queue.size == 1 && @i.buffer.queue.first.unique_id == chunks[2].unique_id }

  Timecop.freeze( Time.parse('2016-04-13 14:04:15 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count > 3
  end

  # The rolled-back chunk was flushed again: same unique_id, one more write.
  assert{ @i.write_count == 4 }
  assert{ @i.rollback_count == 1 }
  assert{ @i.instance_eval{ @dequeued_chunks.size } == 3 }
  assert{ @i.buffer.dequeued.size == 3 }
  assert{ @i.buffer.queue.size == 0 }

  assert_equal 4, chunks.size
  assert chunks[2].unique_id == chunks[3].unique_id

  # Clear observation arrays (in place) before the timeout-driven rollback phase.
  ary.reject!{|e| true }
  chunks.reject!{|e| true }

  # Jump past the delayed-commit timeout so try_rollback_write fires for all
  # three outstanding chunks.
  Timecop.freeze( Time.parse('2016-04-13 14:04:46 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.rollback_count == 4
  end

  assert{ chunks[0...3].all?{|c| !c.empty? } }

  # rollback is in progress, but some may be flushed again after rollback
  Timecop.freeze( Time.parse('2016-04-13 14:04:46 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count == 7
  end

  # All 11 events re-delivered via the re-flushed chunks.
  assert{ @i.write_count == 7 }
  assert_equal 11, ary.size
  assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
  assert{ chunks.size == 3 }
  assert{ chunks.all?{|c| !c.empty? } }

  # Committing purges chunk contents and releases them from `dequeued`.
  chunks.each{|c| @i.commit_write(c.unique_id) }
  assert{ chunks.all?{|c| c.empty? } }

  assert{ @i.buffer.dequeued.size == 0 }
end
1485
+
1486
test '#try_rollback_all will be called for all waiting chunks after shutdown' do
  # Pin the clock so chunk staging/enqueue decisions are deterministic.
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  ary = []        # every event observed by try_write, as [tag, time, record]
  metachecks = [] # per-event check: event tag matches its chunk's metadata.tag
  chunks = []     # every chunk handed to try_write (delayed commit: not auto-committed)

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  # Delayed-commit write path: record chunk and decode its payload; nothing is
  # committed here, so all chunks remain waiting until shutdown.
  @i.register(:try_write) do |chunk|
    chunks << chunk
    chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
      e = JSON.parse(data)
      ary << e
      metachecks << (e[0] == chunk.metadata.tag)
    end
  end

  @i.thread_wait_until_start

  # size of an event is 195
  dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  # 8 events for test.tag.1 and 3 for test.tag.2 — tag.1 fills one chunk
  # (enqueued immediately); the rest stay staged until burst flushing below.
  events = [
    ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
    ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
    ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
  ]

  assert_equal 0, @i.write_count

  # Suspend automatic flushing while events are emitted.
  @i.interrupt_flushes

  # Shuffle to make the test independent of emit order.
  events.shuffle.each do |tag, time, record|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
  end
  assert{ @i.buffer.stage.size == 2 }

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count > 0
  end

  # One full chunk flushed; it stays in `dequeued` awaiting commit.
  assert{ @i.buffer.stage.size == 2 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }
  assert{ @i.buffer.dequeued.size == 1 }

  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
  assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

  assert_equal 1, chunks.size
  assert !chunks.first.empty?

  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

  @i.enqueue_thread_wait

  assert{ @i.buffer.stage.size == 2 }

  # to trigger try_flush with flush_thread_burst_interval
  Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  assert{ @i.buffer.stage.size == 0 }

  waiting(4) do
    Thread.pass until @i.write_count > 2
  end

  # All three chunks written; all remain uncommitted in `dequeued`.
  assert{ @i.buffer.stage.size == 0 }
  assert{ @i.buffer.queue.size == 0 }
  assert{ @i.buffer.dequeued.size == 3 }
  assert{ @i.write_count == 3 }
  assert{ @i.rollback_count == 0 }

  assert_equal 11, ary.size
  assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

  assert{ chunks.size == 3 }
  assert{ chunks.all?{|c| !c.empty? } }

  # Simulate a commit that arrives during shutdown: chunks[1] gets committed
  # by the shutdown hook, so only the other two are still waiting afterwards.
  @i.register(:shutdown_hook){ @i.commit_write(chunks[1].unique_id) }

  @i.stop
  @i.before_shutdown
  @i.shutdown

  # After shutdown: the hook-committed chunk is purged; two remain dequeued.
  assert{ @i.buffer.dequeued.size == 2 }
  assert{ !chunks[0].empty? }
  assert{ chunks[1].empty? }
  assert{ !chunks[2].empty? }

  # after_shutdown triggers try_rollback_all for every still-waiting chunk.
  @i.after_shutdown

  assert{ @i.rollback_count == 2 }
end
1603
+ end
1604
+ end