fluentd 0.14.4-x86-mingw32

Potentially problematic release: this version of fluentd might be problematic.

Files changed (328)
  1. checksums.yaml +7 -0
  2. data/.github/ISSUE_TEMPLATE.md +6 -0
  3. data/.gitignore +26 -0
  4. data/.travis.yml +45 -0
  5. data/AUTHORS +2 -0
  6. data/CONTRIBUTING.md +35 -0
  7. data/COPYING +14 -0
  8. data/ChangeLog +276 -0
  9. data/Gemfile +9 -0
  10. data/README.md +51 -0
  11. data/Rakefile +53 -0
  12. data/Vagrantfile +17 -0
  13. data/appveyor.yml +41 -0
  14. data/bin/fluent-debug +5 -0
  15. data/example/copy_roundrobin.conf +39 -0
  16. data/example/filter_stdout.conf +22 -0
  17. data/example/in_forward.conf +11 -0
  18. data/example/in_http.conf +14 -0
  19. data/example/in_out_forward.conf +17 -0
  20. data/example/in_syslog.conf +15 -0
  21. data/example/in_tail.conf +14 -0
  22. data/example/in_tcp.conf +13 -0
  23. data/example/in_udp.conf +13 -0
  24. data/example/multi_filters.conf +61 -0
  25. data/example/out_buffered_null.conf +32 -0
  26. data/example/out_copy.conf +20 -0
  27. data/example/out_file.conf +13 -0
  28. data/example/out_forward.conf +35 -0
  29. data/example/out_forward_buf_file.conf +23 -0
  30. data/example/v0_12_filter.conf +78 -0
  31. data/example/v1_literal_example.conf +36 -0
  32. data/fluent.conf +139 -0
  33. data/fluentd.gemspec +51 -0
  34. data/lib/fluent/agent.rb +194 -0
  35. data/lib/fluent/command/bundler_injection.rb +45 -0
  36. data/lib/fluent/command/cat.rb +319 -0
  37. data/lib/fluent/command/debug.rb +102 -0
  38. data/lib/fluent/command/fluentd.rb +273 -0
  39. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  40. data/lib/fluent/compat/exec_util.rb +129 -0
  41. data/lib/fluent/compat/file_util.rb +54 -0
  42. data/lib/fluent/compat/filter.rb +68 -0
  43. data/lib/fluent/compat/formatter.rb +111 -0
  44. data/lib/fluent/compat/formatter_utils.rb +85 -0
  45. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +62 -0
  46. data/lib/fluent/compat/handle_tag_name_mixin.rb +53 -0
  47. data/lib/fluent/compat/input.rb +49 -0
  48. data/lib/fluent/compat/output.rb +677 -0
  49. data/lib/fluent/compat/output_chain.rb +60 -0
  50. data/lib/fluent/compat/parser.rb +180 -0
  51. data/lib/fluent/compat/parser_utils.rb +40 -0
  52. data/lib/fluent/compat/propagate_default.rb +62 -0
  53. data/lib/fluent/compat/record_filter_mixin.rb +34 -0
  54. data/lib/fluent/compat/set_tag_key_mixin.rb +50 -0
  55. data/lib/fluent/compat/set_time_key_mixin.rb +69 -0
  56. data/lib/fluent/compat/socket_util.rb +165 -0
  57. data/lib/fluent/compat/string_util.rb +34 -0
  58. data/lib/fluent/compat/structured_format_mixin.rb +26 -0
  59. data/lib/fluent/compat/type_converter.rb +90 -0
  60. data/lib/fluent/config.rb +56 -0
  61. data/lib/fluent/config/basic_parser.rb +123 -0
  62. data/lib/fluent/config/configure_proxy.rb +366 -0
  63. data/lib/fluent/config/dsl.rb +149 -0
  64. data/lib/fluent/config/element.rb +218 -0
  65. data/lib/fluent/config/error.rb +26 -0
  66. data/lib/fluent/config/literal_parser.rb +251 -0
  67. data/lib/fluent/config/parser.rb +107 -0
  68. data/lib/fluent/config/section.rb +212 -0
  69. data/lib/fluent/config/types.rb +136 -0
  70. data/lib/fluent/config/v1_parser.rb +190 -0
  71. data/lib/fluent/configurable.rb +176 -0
  72. data/lib/fluent/daemon.rb +15 -0
  73. data/lib/fluent/engine.rb +220 -0
  74. data/lib/fluent/env.rb +27 -0
  75. data/lib/fluent/event.rb +287 -0
  76. data/lib/fluent/event_router.rb +259 -0
  77. data/lib/fluent/filter.rb +21 -0
  78. data/lib/fluent/formatter.rb +23 -0
  79. data/lib/fluent/input.rb +21 -0
  80. data/lib/fluent/label.rb +38 -0
  81. data/lib/fluent/load.rb +36 -0
  82. data/lib/fluent/log.rb +445 -0
  83. data/lib/fluent/match.rb +141 -0
  84. data/lib/fluent/mixin.rb +31 -0
  85. data/lib/fluent/msgpack_factory.rb +62 -0
  86. data/lib/fluent/output.rb +26 -0
  87. data/lib/fluent/output_chain.rb +23 -0
  88. data/lib/fluent/parser.rb +23 -0
  89. data/lib/fluent/plugin.rb +161 -0
  90. data/lib/fluent/plugin/bare_output.rb +63 -0
  91. data/lib/fluent/plugin/base.rb +130 -0
  92. data/lib/fluent/plugin/buf_file.rb +154 -0
  93. data/lib/fluent/plugin/buf_memory.rb +34 -0
  94. data/lib/fluent/plugin/buffer.rb +603 -0
  95. data/lib/fluent/plugin/buffer/chunk.rb +160 -0
  96. data/lib/fluent/plugin/buffer/file_chunk.rb +323 -0
  97. data/lib/fluent/plugin/buffer/memory_chunk.rb +90 -0
  98. data/lib/fluent/plugin/exec_util.rb +22 -0
  99. data/lib/fluent/plugin/file_util.rb +22 -0
  100. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  101. data/lib/fluent/plugin/filter.rb +93 -0
  102. data/lib/fluent/plugin/filter_grep.rb +75 -0
  103. data/lib/fluent/plugin/filter_record_transformer.rb +342 -0
  104. data/lib/fluent/plugin/filter_stdout.rb +53 -0
  105. data/lib/fluent/plugin/formatter.rb +45 -0
  106. data/lib/fluent/plugin/formatter_csv.rb +47 -0
  107. data/lib/fluent/plugin/formatter_hash.rb +29 -0
  108. data/lib/fluent/plugin/formatter_json.rb +44 -0
  109. data/lib/fluent/plugin/formatter_ltsv.rb +41 -0
  110. data/lib/fluent/plugin/formatter_msgpack.rb +29 -0
  111. data/lib/fluent/plugin/formatter_out_file.rb +78 -0
  112. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  113. data/lib/fluent/plugin/formatter_stdout.rb +74 -0
  114. data/lib/fluent/plugin/in_debug_agent.rb +64 -0
  115. data/lib/fluent/plugin/in_dummy.rb +135 -0
  116. data/lib/fluent/plugin/in_exec.rb +149 -0
  117. data/lib/fluent/plugin/in_forward.rb +366 -0
  118. data/lib/fluent/plugin/in_gc_stat.rb +52 -0
  119. data/lib/fluent/plugin/in_http.rb +422 -0
  120. data/lib/fluent/plugin/in_monitor_agent.rb +401 -0
  121. data/lib/fluent/plugin/in_object_space.rb +90 -0
  122. data/lib/fluent/plugin/in_syslog.rb +204 -0
  123. data/lib/fluent/plugin/in_tail.rb +838 -0
  124. data/lib/fluent/plugin/in_tcp.rb +41 -0
  125. data/lib/fluent/plugin/in_udp.rb +37 -0
  126. data/lib/fluent/plugin/in_unix.rb +201 -0
  127. data/lib/fluent/plugin/input.rb +33 -0
  128. data/lib/fluent/plugin/multi_output.rb +95 -0
  129. data/lib/fluent/plugin/out_buffered_null.rb +59 -0
  130. data/lib/fluent/plugin/out_buffered_stdout.rb +70 -0
  131. data/lib/fluent/plugin/out_copy.rb +42 -0
  132. data/lib/fluent/plugin/out_exec.rb +114 -0
  133. data/lib/fluent/plugin/out_exec_filter.rb +393 -0
  134. data/lib/fluent/plugin/out_file.rb +167 -0
  135. data/lib/fluent/plugin/out_forward.rb +646 -0
  136. data/lib/fluent/plugin/out_null.rb +27 -0
  137. data/lib/fluent/plugin/out_relabel.rb +28 -0
  138. data/lib/fluent/plugin/out_roundrobin.rb +80 -0
  139. data/lib/fluent/plugin/out_stdout.rb +48 -0
  140. data/lib/fluent/plugin/out_stream.rb +130 -0
  141. data/lib/fluent/plugin/output.rb +1020 -0
  142. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  143. data/lib/fluent/plugin/parser.rb +175 -0
  144. data/lib/fluent/plugin/parser_apache.rb +28 -0
  145. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  146. data/lib/fluent/plugin/parser_apache_error.rb +26 -0
  147. data/lib/fluent/plugin/parser_csv.rb +33 -0
  148. data/lib/fluent/plugin/parser_json.rb +79 -0
  149. data/lib/fluent/plugin/parser_ltsv.rb +50 -0
  150. data/lib/fluent/plugin/parser_multiline.rb +104 -0
  151. data/lib/fluent/plugin/parser_nginx.rb +28 -0
  152. data/lib/fluent/plugin/parser_none.rb +36 -0
  153. data/lib/fluent/plugin/parser_regexp.rb +73 -0
  154. data/lib/fluent/plugin/parser_syslog.rb +82 -0
  155. data/lib/fluent/plugin/parser_tsv.rb +37 -0
  156. data/lib/fluent/plugin/socket_util.rb +22 -0
  157. data/lib/fluent/plugin/storage.rb +84 -0
  158. data/lib/fluent/plugin/storage_local.rb +132 -0
  159. data/lib/fluent/plugin/string_util.rb +22 -0
  160. data/lib/fluent/plugin_helper.rb +42 -0
  161. data/lib/fluent/plugin_helper/child_process.rb +298 -0
  162. data/lib/fluent/plugin_helper/compat_parameters.rb +224 -0
  163. data/lib/fluent/plugin_helper/event_emitter.rb +80 -0
  164. data/lib/fluent/plugin_helper/event_loop.rb +118 -0
  165. data/lib/fluent/plugin_helper/formatter.rb +149 -0
  166. data/lib/fluent/plugin_helper/inject.rb +125 -0
  167. data/lib/fluent/plugin_helper/parser.rb +147 -0
  168. data/lib/fluent/plugin_helper/retry_state.rb +177 -0
  169. data/lib/fluent/plugin_helper/storage.rb +331 -0
  170. data/lib/fluent/plugin_helper/thread.rb +147 -0
  171. data/lib/fluent/plugin_helper/timer.rb +90 -0
  172. data/lib/fluent/plugin_id.rb +63 -0
  173. data/lib/fluent/process.rb +504 -0
  174. data/lib/fluent/registry.rb +99 -0
  175. data/lib/fluent/root_agent.rb +314 -0
  176. data/lib/fluent/rpc.rb +94 -0
  177. data/lib/fluent/supervisor.rb +680 -0
  178. data/lib/fluent/system_config.rb +122 -0
  179. data/lib/fluent/test.rb +56 -0
  180. data/lib/fluent/test/base.rb +85 -0
  181. data/lib/fluent/test/driver/base.rb +179 -0
  182. data/lib/fluent/test/driver/base_owned.rb +70 -0
  183. data/lib/fluent/test/driver/base_owner.rb +125 -0
  184. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  185. data/lib/fluent/test/driver/filter.rb +57 -0
  186. data/lib/fluent/test/driver/formatter.rb +30 -0
  187. data/lib/fluent/test/driver/input.rb +31 -0
  188. data/lib/fluent/test/driver/multi_output.rb +52 -0
  189. data/lib/fluent/test/driver/output.rb +76 -0
  190. data/lib/fluent/test/driver/parser.rb +30 -0
  191. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  192. data/lib/fluent/test/filter_test.rb +77 -0
  193. data/lib/fluent/test/formatter_test.rb +65 -0
  194. data/lib/fluent/test/helpers.rb +79 -0
  195. data/lib/fluent/test/input_test.rb +172 -0
  196. data/lib/fluent/test/log.rb +73 -0
  197. data/lib/fluent/test/output_test.rb +156 -0
  198. data/lib/fluent/test/parser_test.rb +70 -0
  199. data/lib/fluent/time.rb +175 -0
  200. data/lib/fluent/timezone.rb +133 -0
  201. data/lib/fluent/unique_id.rb +39 -0
  202. data/lib/fluent/version.rb +21 -0
  203. data/lib/fluent/winsvc.rb +71 -0
  204. data/test/compat/test_calls_super.rb +166 -0
  205. data/test/compat/test_parser.rb +82 -0
  206. data/test/config/assertions.rb +42 -0
  207. data/test/config/test_config_parser.rb +507 -0
  208. data/test/config/test_configurable.rb +1194 -0
  209. data/test/config/test_configure_proxy.rb +386 -0
  210. data/test/config/test_dsl.rb +415 -0
  211. data/test/config/test_element.rb +403 -0
  212. data/test/config/test_literal_parser.rb +297 -0
  213. data/test/config/test_section.rb +184 -0
  214. data/test/config/test_system_config.rb +120 -0
  215. data/test/config/test_types.rb +171 -0
  216. data/test/helper.rb +119 -0
  217. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  218. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  219. data/test/plugin/data/2010/01/20100102.log +0 -0
  220. data/test/plugin/data/log/bar +0 -0
  221. data/test/plugin/data/log/foo/bar.log +0 -0
  222. data/test/plugin/data/log/test.log +0 -0
  223. data/test/plugin/test_bare_output.rb +118 -0
  224. data/test/plugin/test_base.rb +75 -0
  225. data/test/plugin/test_buf_file.rb +571 -0
  226. data/test/plugin/test_buf_memory.rb +42 -0
  227. data/test/plugin/test_buffer.rb +1200 -0
  228. data/test/plugin/test_buffer_chunk.rb +168 -0
  229. data/test/plugin/test_buffer_file_chunk.rb +771 -0
  230. data/test/plugin/test_buffer_memory_chunk.rb +265 -0
  231. data/test/plugin/test_file_util.rb +96 -0
  232. data/test/plugin/test_filter.rb +353 -0
  233. data/test/plugin/test_filter_grep.rb +119 -0
  234. data/test/plugin/test_filter_record_transformer.rb +600 -0
  235. data/test/plugin/test_filter_stdout.rb +211 -0
  236. data/test/plugin/test_formatter_csv.rb +94 -0
  237. data/test/plugin/test_formatter_json.rb +30 -0
  238. data/test/plugin/test_formatter_ltsv.rb +52 -0
  239. data/test/plugin/test_formatter_msgpack.rb +28 -0
  240. data/test/plugin/test_formatter_out_file.rb +95 -0
  241. data/test/plugin/test_formatter_single_value.rb +38 -0
  242. data/test/plugin/test_in_debug_agent.rb +28 -0
  243. data/test/plugin/test_in_dummy.rb +188 -0
  244. data/test/plugin/test_in_exec.rb +133 -0
  245. data/test/plugin/test_in_forward.rb +635 -0
  246. data/test/plugin/test_in_gc_stat.rb +39 -0
  247. data/test/plugin/test_in_http.rb +442 -0
  248. data/test/plugin/test_in_monitor_agent.rb +329 -0
  249. data/test/plugin/test_in_object_space.rb +64 -0
  250. data/test/plugin/test_in_syslog.rb +205 -0
  251. data/test/plugin/test_in_tail.rb +1001 -0
  252. data/test/plugin/test_in_tcp.rb +102 -0
  253. data/test/plugin/test_in_udp.rb +121 -0
  254. data/test/plugin/test_in_unix.rb +126 -0
  255. data/test/plugin/test_input.rb +122 -0
  256. data/test/plugin/test_multi_output.rb +180 -0
  257. data/test/plugin/test_out_buffered_null.rb +79 -0
  258. data/test/plugin/test_out_buffered_stdout.rb +122 -0
  259. data/test/plugin/test_out_copy.rb +160 -0
  260. data/test/plugin/test_out_exec.rb +155 -0
  261. data/test/plugin/test_out_exec_filter.rb +262 -0
  262. data/test/plugin/test_out_file.rb +383 -0
  263. data/test/plugin/test_out_forward.rb +590 -0
  264. data/test/plugin/test_out_null.rb +29 -0
  265. data/test/plugin/test_out_relabel.rb +28 -0
  266. data/test/plugin/test_out_roundrobin.rb +146 -0
  267. data/test/plugin/test_out_stdout.rb +92 -0
  268. data/test/plugin/test_out_stream.rb +93 -0
  269. data/test/plugin/test_output.rb +568 -0
  270. data/test/plugin/test_output_as_buffered.rb +1604 -0
  271. data/test/plugin/test_output_as_buffered_overflow.rb +250 -0
  272. data/test/plugin/test_output_as_buffered_retries.rb +839 -0
  273. data/test/plugin/test_output_as_buffered_secondary.rb +817 -0
  274. data/test/plugin/test_output_as_standard.rb +374 -0
  275. data/test/plugin/test_owned_by.rb +35 -0
  276. data/test/plugin/test_parser_apache.rb +42 -0
  277. data/test/plugin/test_parser_apache2.rb +38 -0
  278. data/test/plugin/test_parser_apache_error.rb +45 -0
  279. data/test/plugin/test_parser_base.rb +32 -0
  280. data/test/plugin/test_parser_csv.rb +104 -0
  281. data/test/plugin/test_parser_json.rb +107 -0
  282. data/test/plugin/test_parser_labeled_tsv.rb +129 -0
  283. data/test/plugin/test_parser_multiline.rb +100 -0
  284. data/test/plugin/test_parser_nginx.rb +48 -0
  285. data/test/plugin/test_parser_none.rb +53 -0
  286. data/test/plugin/test_parser_regexp.rb +277 -0
  287. data/test/plugin/test_parser_syslog.rb +66 -0
  288. data/test/plugin/test_parser_time.rb +46 -0
  289. data/test/plugin/test_parser_tsv.rb +121 -0
  290. data/test/plugin/test_storage.rb +167 -0
  291. data/test/plugin/test_storage_local.rb +8 -0
  292. data/test/plugin/test_string_util.rb +26 -0
  293. data/test/plugin_helper/test_child_process.rb +608 -0
  294. data/test/plugin_helper/test_compat_parameters.rb +242 -0
  295. data/test/plugin_helper/test_event_emitter.rb +51 -0
  296. data/test/plugin_helper/test_event_loop.rb +52 -0
  297. data/test/plugin_helper/test_formatter.rb +252 -0
  298. data/test/plugin_helper/test_inject.rb +487 -0
  299. data/test/plugin_helper/test_parser.rb +263 -0
  300. data/test/plugin_helper/test_retry_state.rb +399 -0
  301. data/test/plugin_helper/test_storage.rb +521 -0
  302. data/test/plugin_helper/test_thread.rb +164 -0
  303. data/test/plugin_helper/test_timer.rb +131 -0
  304. data/test/scripts/exec_script.rb +32 -0
  305. data/test/scripts/fluent/plugin/formatter_known.rb +8 -0
  306. data/test/scripts/fluent/plugin/out_test.rb +81 -0
  307. data/test/scripts/fluent/plugin/out_test2.rb +80 -0
  308. data/test/scripts/fluent/plugin/parser_known.rb +4 -0
  309. data/test/test_config.rb +179 -0
  310. data/test/test_configdsl.rb +148 -0
  311. data/test/test_event.rb +329 -0
  312. data/test/test_event_router.rb +331 -0
  313. data/test/test_event_time.rb +184 -0
  314. data/test/test_filter.rb +121 -0
  315. data/test/test_formatter.rb +319 -0
  316. data/test/test_input.rb +31 -0
  317. data/test/test_log.rb +572 -0
  318. data/test/test_match.rb +137 -0
  319. data/test/test_mixin.rb +351 -0
  320. data/test/test_output.rb +214 -0
  321. data/test/test_plugin_classes.rb +136 -0
  322. data/test/test_plugin_helper.rb +81 -0
  323. data/test/test_process.rb +48 -0
  324. data/test/test_root_agent.rb +278 -0
  325. data/test/test_supervisor.rb +339 -0
  326. data/test/test_time_formatter.rb +186 -0
  327. data/test/test_unique_id.rb +47 -0
  328. metadata +823 -0
data/test/plugin/test_output_as_buffered_overflow.rb
@@ -0,0 +1,250 @@
+ require_relative '../helper'
+ require 'fluent/plugin/output'
+ require 'fluent/plugin/buffer'
+ require 'fluent/event'
+
+ require 'json'
+ require 'time'
+ require 'timeout'
+ require 'timecop'
+
+ module FluentPluginOutputAsBufferedOverflowTest
+   class DummyBareOutput < Fluent::Plugin::Output
+     def register(name, &block)
+       instance_variable_set("@#{name}", block)
+     end
+   end
+   class DummyAsyncOutput < DummyBareOutput
+     def initialize
+       super
+       @format = @write = nil
+     end
+     def format(tag, time, record)
+       @format ? @format.call(tag, time, record) : [tag, time, record].to_json
+     end
+     def write(chunk)
+       @write ? @write.call(chunk) : nil
+     end
+   end
+ end
+
+ class BufferedOutputOverflowTest < Test::Unit::TestCase
+   def create_output
+     FluentPluginOutputAsBufferedOverflowTest::DummyAsyncOutput.new
+   end
+   def create_metadata(timekey: nil, tag: nil, variables: nil)
+     Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
+   end
+   def waiting(seconds)
+     begin
+       Timeout.timeout(seconds) do
+         yield
+       end
+     rescue Timeout::Error
+       logs = @i.log.out.logs
+       STDERR.print(*logs)
+       raise
+     end
+   end
+
+   teardown do
+     if @i
+       @i.stop unless @i.stopped?
+       @i.before_shutdown unless @i.before_shutdown?
+       @i.shutdown unless @i.shutdown?
+       @i.after_shutdown unless @i.after_shutdown?
+       @i.close unless @i.closed?
+       @i.terminate unless @i.terminated?
+     end
+     Timecop.return
+   end
+
+   sub_test_case 'buffered output with default configuration (throws exception for buffer overflow)' do
+     setup do
+       hash = {
+         'flush_mode' => 'lazy',
+         'flush_thread_burst_interval' => 0.01,
+         'chunk_limit_size' => 1024,
+         'total_limit_size' => 4096,
+       }
+       @i = create_output()
+       @i.configure(config_element('ROOT','',{},[config_element('buffer','tag',hash)]))
+       @i.start
+       @i.after_start
+     end
+
+     test '#emit_events raises error when buffer is full' do
+       @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
+
+       es = Fluent::ArrayEventStream.new([
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+       ])
+
+       8.times do |i|
+         @i.emit_events("tag#{i}", es)
+       end
+
+       assert !@i.buffer.storable?
+
+       assert_raise(Fluent::Plugin::Buffer::BufferOverflowError) do
+         @i.emit_events("tag9", es)
+       end
+       logs = @i.log.out.logs
+       assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
+     end
+   end
+
+   sub_test_case 'buffered output configured with "overflow_action block"' do
+     setup do
+       hash = {
+         'flush_mode' => 'lazy',
+         'flush_thread_burst_interval' => 0.01,
+         'chunk_limit_size' => 1024,
+         'total_limit_size' => 4096,
+         'overflow_action' => "block",
+       }
+       @i = create_output()
+       @i.configure(config_element('ROOT','',{'log_level' => 'debug'},[config_element('buffer','tag',hash)]))
+       @i.start
+       @i.after_start
+     end
+
+     test '#emit_events blocks until any queues are flushed' do
+       failing = true
+       flushed_chunks = []
+       @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
+       @i.register(:write) do |chunk|
+         if failing
+           raise "blocking"
+         end
+         flushed_chunks << chunk
+       end
+
+       es = Fluent::ArrayEventStream.new([
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+       ])
+
+       4.times do |i|
+         @i.emit_events("tag#{i}", es)
+       end
+
+       assert !@i.buffer.storable?
+
+       Thread.new do
+         sleep 3
+         failing = false
+       end
+
+       assert_nothing_raised do
+         @i.emit_events("tag9", es)
+       end
+
+       assert !failing
+       assert{ flushed_chunks.size > 0 }
+
+       logs = @i.log.out.logs
+       assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
+       assert{ logs.any?{|line| line.include?("buffer.write is now blocking") } }
+       assert{ logs.any?{|line| line.include?("retrying buffer.write after blocked operation") } }
+     end
+   end
+
+   sub_test_case 'buffered output configured with "overflow_action drop_oldest_chunk"' do
+     setup do
+       hash = {
+         'flush_mode' => 'lazy',
+         'flush_thread_burst_interval' => 0.01,
+         'chunk_limit_size' => 1024,
+         'total_limit_size' => 4096,
+         'overflow_action' => "drop_oldest_chunk",
+       }
+       @i = create_output()
+       @i.configure(config_element('ROOT','',{'log_level' => 'debug'},[config_element('buffer','tag',hash)]))
+       @i.start
+       @i.after_start
+     end
+
+     test '#emit_events will success by dropping oldest chunk' do
+       failing = true
+       flushed_chunks = []
+       @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
+       @i.register(:write) do |chunk|
+         if failing
+           raise "blocking"
+         end
+         flushed_chunks << chunk
+       end
+
+       es = Fluent::ArrayEventStream.new([
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+       ])
+
+       4.times do |i|
+         @i.emit_events("tag#{i}", es)
+       end
+
+       assert !@i.buffer.storable?
+
+       assert{ @i.buffer.queue[0].metadata.tag == "tag0" }
+       assert{ @i.buffer.queue[1].metadata.tag == "tag1" }
+
+       assert_nothing_raised do
+         @i.emit_events("tag9", es)
+       end
+
+       assert failing
+       assert{ flushed_chunks.size == 0 }
+
+       assert{ @i.buffer.queue[0].metadata.tag == "tag1" }
+
+       logs = @i.log.out.logs
+       assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
+       assert{ logs.any?{|line| line.include?("dropping oldest chunk to make space after buffer overflow") } }
+     end
+
+     test '#emit_events raises OverflowError if all buffer spaces are used by staged chunks' do
+       @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
+
+       es = Fluent::ArrayEventStream.new([
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+         [event_time(), {"message" => "test"}],
+       ])
+
+       8.times do |i|
+         @i.emit_events("tag#{i}", es)
+       end
+
+       assert !@i.buffer.storable?
+
+       assert{ @i.buffer.queue.size == 0 }
+       assert{ @i.buffer.stage.size == 8 }
+
+       assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
+         @i.emit_events("tag9", es)
+       end
+
+       logs = @i.log.out.logs
+       assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
+       assert{ logs.any?{|line| line.include?("no queued chunks to be dropped for drop_oldest_chunk") } }
+     end
+   end
+ end
data/test/plugin/test_output_as_buffered_retries.rb
@@ -0,0 +1,839 @@
+ require_relative '../helper'
+ require 'fluent/plugin/output'
+ require 'fluent/plugin/buffer'
+ require 'fluent/event'
+
+ require 'json'
+ require 'time'
+ require 'timeout'
+ require 'timecop'
+
+ module FluentPluginOutputAsBufferedRetryTest
+   class DummyBareOutput < Fluent::Plugin::Output
+     def register(name, &block)
+       instance_variable_set("@#{name}", block)
+     end
+   end
+   class DummySyncOutput < DummyBareOutput
+     def initialize
+       super
+       @process = nil
+     end
+     def process(tag, es)
+       @process ? @process.call(tag, es) : nil
+     end
+   end
+   class DummyFullFeatureOutput < DummyBareOutput
+     def initialize
+       super
+       @prefer_buffered_processing = nil
+       @prefer_delayed_commit = nil
+       @process = nil
+       @format = nil
+       @write = nil
+       @try_write = nil
+     end
+     def prefer_buffered_processing
+       @prefer_buffered_processing ? @prefer_buffered_processing.call : false
+     end
+     def prefer_delayed_commit
+       @prefer_delayed_commit ? @prefer_delayed_commit.call : false
+     end
+     def process(tag, es)
+       @process ? @process.call(tag, es) : nil
+     end
+     def format(tag, time, record)
+       @format ? @format.call(tag, time, record) : [tag, time, record].to_json
+     end
+     def write(chunk)
+       @write ? @write.call(chunk) : nil
+     end
+     def try_write(chunk)
+       @try_write ? @try_write.call(chunk) : nil
+     end
+   end
+   class DummyFullFeatureOutput2 < DummyFullFeatureOutput
+     def prefer_buffered_processing; true; end
+     def prefer_delayed_commit; super; end
+     def format(tag, time, record); super; end
+     def write(chunk); super; end
+     def try_write(chunk); super; end
+   end
+ end
+
+ class BufferedOutputRetryTest < Test::Unit::TestCase
+   def create_output(type=:full)
+     case type
+     when :bare then FluentPluginOutputAsBufferedRetryTest::DummyBareOutput.new
+     when :sync then FluentPluginOutputAsBufferedRetryTest::DummySyncOutput.new
+     when :full then FluentPluginOutputAsBufferedRetryTest::DummyFullFeatureOutput.new
+     else
+       raise ArgumentError, "unknown type: #{type}"
+     end
+   end
+   def create_metadata(timekey: nil, tag: nil, variables: nil)
+     Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
+   end
+   def waiting(seconds)
+     begin
+       Timeout.timeout(seconds) do
+         yield
+       end
+     rescue Timeout::Error
+       STDERR.print(*@i.log.out.logs)
+       raise
+     end
+   end
+   def dummy_event_stream
+     Fluent::ArrayEventStream.new([
+       [ event_time('2016-04-13 18:33:00'), {"name" => "moris", "age" => 36, "message" => "data1"} ],
+       [ event_time('2016-04-13 18:33:13'), {"name" => "moris", "age" => 36, "message" => "data2"} ],
+       [ event_time('2016-04-13 18:33:32'), {"name" => "moris", "age" => 36, "message" => "data3"} ],
+     ])
+   end
+   def get_log_time(msg, logs)
+     log_time = nil
+     log = logs.select{|l| l.include?(msg) }.first
+     if log && /^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4}) \[error\]/ =~ log
+       log_time = Time.parse($1)
+     end
+     log_time
+   end
+
+   setup do
+     @i = create_output
+   end
+
+   teardown do
+     if @i
+       @i.stop unless @i.stopped?
+       @i.before_shutdown unless @i.before_shutdown?
+       @i.shutdown unless @i.shutdown?
+       @i.after_shutdown unless @i.after_shutdown?
+       @i.close unless @i.closed?
+       @i.terminate unless @i.terminated?
+     end
+     Timecop.return
+   end
+
+   sub_test_case 'buffered output for retries with exponential backoff' do
+     test 'exponential backoff is default strategy for retries' do
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_randomize' => false,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.start
+       @i.after_start
+
+       assert_equal :exponential_backoff, @i.buffer_config.retry_type
+       assert_equal 1, @i.buffer_config.retry_wait
+       assert_equal 2.0, @i.buffer_config.retry_exponential_backoff_base
+       assert !@i.buffer_config.retry_randomize
+
+       now = Time.parse('2016-04-13 18:17:00 -0700')
+       Timecop.freeze( now )
+
+       retry_state = @i.retry_state( @i.buffer_config.retry_randomize )
+       retry_state.step
+       assert_equal 1, (retry_state.next_time - now)
+       retry_state.step
+       assert_equal (1 * (2 ** 1)), (retry_state.next_time - now)
+       retry_state.step
+       assert_equal (1 * (2 ** 2)), (retry_state.next_time - now)
+       retry_state.step
+       assert_equal (1 * (2 ** 3)), (retry_state.next_time - now)
+     end
+
+     test 'does retries correctly when #write fails' do
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_randomize' => false,
+         'retry_max_interval' => 60 * 60,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:32 -0700')
+       Timecop.freeze( now )
+
+       @i.enqueue_thread_wait
+
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       now = @i.next_flush_time
+       Timecop.freeze( now )
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 1 }
+
+       assert{ @i.write_count > 1 }
+       assert{ @i.num_errors > 1 }
+     end
+
+     test 'max retry interval is limited by retry_max_interval' do
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_randomize' => false,
+         'retry_max_interval' => 60,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:32 -0700')
+       Timecop.freeze( now )
+
+       @i.enqueue_thread_wait
+
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       prev_write_count = @i.write_count
+       prev_num_errors = @i.num_errors
+
+       10.times do
+         now = @i.next_flush_time
+         Timecop.freeze( now )
+         @i.flush_thread_wakeup
+         waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+         assert{ @i.write_count > prev_write_count }
+         assert{ @i.num_errors > prev_num_errors }
+
+         prev_write_count = @i.write_count
+         prev_num_errors = @i.num_errors
+       end
+       # exponential backoff interval: 1 * 2 ** 10 == 1024
+       # but it should be limited by retry_max_interval=60
+       assert_equal 60, (@i.next_flush_time - now)
+     end
+
+     test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
+       written_tags = []
+
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_randomize' => false,
+         'retry_timeout' => 3600,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:31 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.2", dummy_event_stream())
+
+       assert_equal 0, @i.write_count
+       assert_equal 0, @i.num_errors
+
+       @i.enqueue_thread_wait
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+       assert{ @i.buffer.queue.size > 0 }
+       assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       prev_write_count = @i.write_count
+       prev_num_errors = @i.num_errors
+
+       first_failure = @i.retry.start
+
+       15.times do |i| # large enough
+         now = @i.next_flush_time
+         # p({i: i, now: now, diff: (now - Time.now)})
+         # * if loop count is 12:
+         # {:i=>0, :now=>2016-04-13 18:33:32 -0700, :diff=>1.0}
+         # {:i=>1, :now=>2016-04-13 18:33:34 -0700, :diff=>2.0}
+         # {:i=>2, :now=>2016-04-13 18:33:38 -0700, :diff=>4.0}
+         # {:i=>3, :now=>2016-04-13 18:33:46 -0700, :diff=>8.0}
+         # {:i=>4, :now=>2016-04-13 18:34:02 -0700, :diff=>16.0}
+         # {:i=>5, :now=>2016-04-13 18:34:34 -0700, :diff=>32.0}
+         # {:i=>6, :now=>2016-04-13 18:35:38 -0700, :diff=>64.0}
+         # {:i=>7, :now=>2016-04-13 18:37:46 -0700, :diff=>128.0}
+         # {:i=>8, :now=>2016-04-13 18:42:02 -0700, :diff=>256.0}
+         # {:i=>9, :now=>2016-04-13 18:50:34 -0700, :diff=>512.0}
+         # {:i=>10, :now=>2016-04-13 19:07:38 -0700, :diff=>1024.0}
+         # {:i=>11, :now=>2016-04-13 19:33:31 -0700, :diff=>1553.0} # clear_queue!
+
+         Timecop.freeze( now )
+         @i.enqueue_thread_wait
+         @i.flush_thread_wakeup
+         waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+         assert{ @i.write_count > prev_write_count }
+         assert{ @i.num_errors > prev_num_errors }
+
+         break if @i.buffer.queue.size == 0
+
+         prev_write_count = @i.write_count
+         prev_num_errors = @i.num_errors
+
+         assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+       end
+       assert{ now >= first_failure + 3600 }
+
+       assert{ @i.buffer.stage.size == 0 }
+       assert{ written_tags.all?{|t| t == 'test.tag.1' } }
+
+       @i.emit_events("test.tag.3", dummy_event_stream())
+
+       logs = @i.log.out.logs
+       assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") } }
+     end
+
+     test 'output plugin give retries up by retry_max_times, and clear queue in buffer' do
+       written_tags = []
+
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_randomize' => false,
+         'retry_max_times' => 10,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:31 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.2", dummy_event_stream())
+
+       assert_equal 0, @i.write_count
+       assert_equal 0, @i.num_errors
+
+       @i.enqueue_thread_wait
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+       assert{ @i.buffer.queue.size > 0 }
+       assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       prev_write_count = @i.write_count
+       prev_num_errors = @i.num_errors
+
+       _first_failure = @i.retry.start
+
+       chunks = @i.buffer.queue.dup
+
+       20.times do |i| # large times enough
+         now = @i.next_flush_time
+
+         Timecop.freeze( now )
+         @i.enqueue_thread_wait
+         @i.flush_thread_wakeup
+         waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+         assert{ @i.write_count > prev_write_count }
+         assert{ @i.num_errors > prev_num_errors }
+
+         break if @i.buffer.queue.size == 0
+
+         prev_write_count = @i.write_count
+         prev_num_errors = @i.num_errors
+
+         assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+       end
+       assert{ @i.buffer.stage.size == 0 }
+       assert{ written_tags.all?{|t| t == 'test.tag.1' } }
+
+       @i.emit_events("test.tag.3", dummy_event_stream())
+
+       logs = @i.log.out.logs
+       assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }
+
+       assert{ @i.buffer.queue.size == 0 }
+       assert{ @i.buffer.stage.size == 1 }
+       assert{ chunks.all?{|c| c.empty? } }
+     end
+   end
+
+   sub_test_case 'bufferd output for retries with periodical retry' do
+     test 'periodical retries should retry to write in failing status per retry_wait' do
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_type' => :periodic,
+         'retry_wait' => 3,
+         'retry_randomize' => false,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:32 -0700')
+       Timecop.freeze( now )
+
+       @i.enqueue_thread_wait
+
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       now = @i.next_flush_time
+       Timecop.freeze( now )
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 1 }
+
+       assert{ @i.write_count > 1 }
+       assert{ @i.num_errors > 1 }
+     end
+
+     test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
+       written_tags = []
+
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_type' => :periodic,
+         'retry_wait' => 30,
+         'retry_randomize' => false,
+         'retry_timeout' => 120,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:31 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.2", dummy_event_stream())
+
+       assert_equal 0, @i.write_count
+       assert_equal 0, @i.num_errors
+
+       @i.enqueue_thread_wait
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+       assert{ @i.buffer.queue.size > 0 }
+       assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       prev_write_count = @i.write_count
+       prev_num_errors = @i.num_errors
+
+       first_failure = @i.retry.start
+
+       3.times do |i|
+         now = @i.next_flush_time
+
+         Timecop.freeze( now )
+         @i.enqueue_thread_wait
+         @i.flush_thread_wakeup
+         waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+         assert{ @i.write_count > prev_write_count }
+         assert{ @i.num_errors > prev_num_errors }
+
+         prev_write_count = @i.write_count
+         prev_num_errors = @i.num_errors
+       end
+
+       assert{ @i.next_flush_time >= first_failure + 120 }
+
+       assert{ @i.buffer.queue.size == 2 }
+       assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+       assert{ @i.buffer.stage.size == 0 }
+
+       assert{ written_tags.all?{|t| t == 'test.tag.1' } }
+
+       chunks = @i.buffer.queue.dup
+
+       @i.emit_events("test.tag.3", dummy_event_stream())
+
+       now = @i.next_flush_time
+       Timecop.freeze( now )
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+       logs = @i.log.out.logs
+
+       target_time = Time.parse("2016-04-13 18:35:31 -0700")
+       target_msg = "[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue."
+       assert{ logs.any?{|l| l.include?(target_msg) } }
+
+       log_time = get_log_time(target_msg, logs)
+       assert_equal target_time.localtime, log_time.localtime
+
+       assert{ @i.buffer.queue.size == 0 }
+       assert{ @i.buffer.stage.size == 1 }
+       assert{ chunks.all?{|c| c.empty? } }
+     end
+
+     test 'retry_max_times can limit maximum times for retries' do
+       written_tags = []
+
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_type' => :periodic,
+         'retry_wait' => 3,
+         'retry_randomize' => false,
+         'retry_max_times' => 10,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:31 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.2", dummy_event_stream())
+
+       assert_equal 0, @i.write_count
+       assert_equal 0, @i.num_errors
+
+       @i.enqueue_thread_wait
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+       assert{ @i.buffer.queue.size > 0 }
+       assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       prev_write_count = @i.write_count
+       prev_num_errors = @i.num_errors
+
+       _first_failure = @i.retry.start
+
+       chunks = @i.buffer.queue.dup
+
+       20.times do |i|
+         now = @i.next_flush_time
+
+         Timecop.freeze( now )
+         @i.enqueue_thread_wait
+         @i.flush_thread_wakeup
+         waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+         assert{ @i.write_count > prev_write_count }
+         assert{ @i.num_errors > prev_num_errors }
+
+         break if @i.buffer.queue.size == 0
+
+         prev_write_count = @i.write_count
+         prev_num_errors = @i.num_errors
+
+         assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+       end
+       assert{ @i.buffer.stage.size == 0 }
+       assert{ written_tags.all?{|t| t == 'test.tag.1' } }
+
+
+       @i.emit_events("test.tag.3", dummy_event_stream())
+
+       logs = @i.log.out.logs
+       assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }
+
+       assert{ @i.buffer.queue.size == 0 }
+       assert{ @i.buffer.stage.size == 1 }
+       assert{ chunks.all?{|c| c.empty? } }
+     end
+   end
+
+   sub_test_case 'buffered output configured as retry_forever' do
+     test 'configuration error will be raised if secondary section is configured' do
+       chunk_key = 'tag'
+       hash = {
+         'retry_forever' => true,
+         'retry_randomize' => false,
+       }
+       i = create_output()
+       assert_raise Fluent::ConfigError do
+         i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash),config_element('secondary','')]))
+       end
+     end
+
+     test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for exponential backoff' do
+       written_tags = []
+
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_type' => :exponential_backoff,
+         'retry_forever' => true,
+         'retry_randomize' => false,
+         'retry_timeout' => 3600,
+         'retry_max_times' => 10,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:31 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.2", dummy_event_stream())
+
+       assert_equal 0, @i.write_count
+       assert_equal 0, @i.num_errors
+
+       @i.enqueue_thread_wait
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+       assert{ @i.buffer.queue.size > 0 }
+       assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       prev_write_count = @i.write_count
+       prev_num_errors = @i.num_errors
+
+       first_failure = @i.retry.start
+
+       15.times do |i|
+         now = @i.next_flush_time
+
+         Timecop.freeze( now )
+         @i.enqueue_thread_wait
+         @i.flush_thread_wakeup
+         waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+         assert{ @i.write_count > prev_write_count }
+         assert{ @i.num_errors > prev_num_errors }
+
+         prev_write_count = @i.write_count
+         prev_num_errors = @i.num_errors
+       end
+
+       assert{ @i.buffer.queue.size == 2 }
+       assert{ @i.retry.steps > 10 }
+       assert{ now > first_failure + 3600 }
+     end
+
+     test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for periodical retries' do
+       written_tags = []
+
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_type' => :periodic,
+         'retry_forever' => true,
+         'retry_randomize' => false,
+         'retry_wait' => 30,
+         'retry_timeout' => 360,
+         'retry_max_times' => 10,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:31 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.2", dummy_event_stream())
+
+       assert_equal 0, @i.write_count
+       assert_equal 0, @i.num_errors
+
+       @i.enqueue_thread_wait
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
+
+       assert{ @i.buffer.queue.size > 0 }
+       assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       prev_write_count = @i.write_count
+       prev_num_errors = @i.num_errors
+
+       first_failure = @i.retry.start
+
+       15.times do |i|
+         now = @i.next_flush_time
+
+         Timecop.freeze( now )
+         @i.enqueue_thread_wait
+         @i.flush_thread_wakeup
+         waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
+
+         assert{ @i.write_count > prev_write_count }
+         assert{ @i.num_errors > prev_num_errors }
+
+         prev_write_count = @i.write_count
+         prev_num_errors = @i.num_errors
+       end
+
+       assert{ @i.buffer.queue.size == 2 }
+       assert{ @i.retry.steps > 10 }
+       assert{ now > first_failure + 360 }
+     end
+   end
+
+   sub_test_case 'buffered output with delayed commit' do
+     test 'does retries correctly when #try_write fails' do
+       chunk_key = 'tag'
+       hash = {
+         'flush_interval' => 1,
+         'flush_thread_burst_interval' => 0.1,
+         'retry_randomize' => false,
+         'retry_max_interval' => 60 * 60,
+       }
+       @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
+       @i.register(:prefer_buffered_processing){ true }
+       @i.register(:prefer_delayed_commit){ true }
+       @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
+       @i.register(:try_write){|chunk| raise "yay, your #write must fail" }
+       @i.start
+       @i.after_start
+
+       @i.interrupt_flushes
+
+       now = Time.parse('2016-04-13 18:33:30 -0700')
+       Timecop.freeze( now )
+
+       @i.emit_events("test.tag.1", dummy_event_stream())
+
+       now = Time.parse('2016-04-13 18:33:32 -0700')
+       Timecop.freeze( now )
+
+       @i.enqueue_thread_wait
+
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 0 }
+
+       assert{ @i.write_count > 0 }
+       assert{ @i.num_errors > 0 }
+
+       now = @i.next_flush_time
+       Timecop.freeze( now )
+       @i.flush_thread_wakeup
+       waiting(4){ Thread.pass until @i.write_count > 1 }
+
+       assert{ @i.write_count > 1 }
+       assert{ @i.num_errors > 1 }
+     end
+   end
+ end