fluentd 0.12.43 → 0.14.0

Sign up to get free protection for your applications and to gain access to all of the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (253) hide show
  1. checksums.yaml +4 -4
  2. data/.github/ISSUE_TEMPLATE.md +6 -0
  3. data/.gitignore +2 -0
  4. data/.travis.yml +33 -21
  5. data/CONTRIBUTING.md +1 -0
  6. data/ChangeLog +1239 -0
  7. data/README.md +0 -25
  8. data/Rakefile +2 -1
  9. data/Vagrantfile +17 -0
  10. data/appveyor.yml +35 -0
  11. data/example/filter_stdout.conf +5 -5
  12. data/example/in_forward.conf +2 -2
  13. data/example/in_http.conf +2 -2
  14. data/example/in_out_forward.conf +17 -0
  15. data/example/in_syslog.conf +2 -2
  16. data/example/in_tail.conf +2 -2
  17. data/example/in_tcp.conf +2 -2
  18. data/example/in_udp.conf +2 -2
  19. data/example/out_copy.conf +4 -4
  20. data/example/out_file.conf +2 -2
  21. data/example/out_forward.conf +2 -2
  22. data/example/out_forward_buf_file.conf +23 -0
  23. data/example/v0_12_filter.conf +8 -8
  24. data/fluent.conf +29 -0
  25. data/fluentd.gemspec +18 -11
  26. data/lib/fluent/agent.rb +60 -58
  27. data/lib/fluent/command/cat.rb +1 -1
  28. data/lib/fluent/command/debug.rb +7 -5
  29. data/lib/fluent/command/fluentd.rb +97 -2
  30. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  31. data/lib/fluent/compat/filter.rb +50 -0
  32. data/lib/fluent/compat/formatter.rb +109 -0
  33. data/lib/fluent/compat/input.rb +50 -0
  34. data/lib/fluent/compat/output.rb +617 -0
  35. data/lib/fluent/compat/output_chain.rb +60 -0
  36. data/lib/fluent/compat/parser.rb +163 -0
  37. data/lib/fluent/compat/propagate_default.rb +62 -0
  38. data/lib/fluent/config.rb +23 -20
  39. data/lib/fluent/config/configure_proxy.rb +119 -70
  40. data/lib/fluent/config/dsl.rb +5 -18
  41. data/lib/fluent/config/element.rb +72 -8
  42. data/lib/fluent/config/error.rb +0 -3
  43. data/lib/fluent/config/literal_parser.rb +0 -2
  44. data/lib/fluent/config/parser.rb +4 -4
  45. data/lib/fluent/config/section.rb +39 -28
  46. data/lib/fluent/config/types.rb +2 -13
  47. data/lib/fluent/config/v1_parser.rb +1 -3
  48. data/lib/fluent/configurable.rb +48 -16
  49. data/lib/fluent/daemon.rb +15 -0
  50. data/lib/fluent/engine.rb +26 -52
  51. data/lib/fluent/env.rb +6 -4
  52. data/lib/fluent/event.rb +58 -11
  53. data/lib/fluent/event_router.rb +5 -5
  54. data/lib/fluent/filter.rb +2 -50
  55. data/lib/fluent/formatter.rb +4 -293
  56. data/lib/fluent/input.rb +2 -32
  57. data/lib/fluent/label.rb +2 -2
  58. data/lib/fluent/load.rb +3 -2
  59. data/lib/fluent/log.rb +107 -38
  60. data/lib/fluent/match.rb +0 -36
  61. data/lib/fluent/mixin.rb +117 -7
  62. data/lib/fluent/msgpack_factory.rb +62 -0
  63. data/lib/fluent/output.rb +7 -612
  64. data/lib/fluent/output_chain.rb +23 -0
  65. data/lib/fluent/parser.rb +4 -800
  66. data/lib/fluent/plugin.rb +100 -121
  67. data/lib/fluent/plugin/bare_output.rb +63 -0
  68. data/lib/fluent/plugin/base.rb +121 -0
  69. data/lib/fluent/plugin/buf_file.rb +101 -182
  70. data/lib/fluent/plugin/buf_memory.rb +9 -92
  71. data/lib/fluent/plugin/buffer.rb +473 -0
  72. data/lib/fluent/plugin/buffer/chunk.rb +135 -0
  73. data/lib/fluent/plugin/buffer/file_chunk.rb +339 -0
  74. data/lib/fluent/plugin/buffer/memory_chunk.rb +100 -0
  75. data/lib/fluent/plugin/exec_util.rb +80 -75
  76. data/lib/fluent/plugin/file_util.rb +33 -28
  77. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  78. data/lib/fluent/plugin/filter.rb +51 -0
  79. data/lib/fluent/plugin/filter_grep.rb +13 -40
  80. data/lib/fluent/plugin/filter_record_transformer.rb +22 -18
  81. data/lib/fluent/plugin/formatter.rb +93 -0
  82. data/lib/fluent/plugin/formatter_csv.rb +48 -0
  83. data/lib/fluent/plugin/formatter_hash.rb +32 -0
  84. data/lib/fluent/plugin/formatter_json.rb +47 -0
  85. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  86. data/lib/fluent/plugin/formatter_msgpack.rb +32 -0
  87. data/lib/fluent/plugin/formatter_out_file.rb +45 -0
  88. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  89. data/lib/fluent/plugin/formatter_stdout.rb +39 -0
  90. data/lib/fluent/plugin/in_debug_agent.rb +4 -0
  91. data/lib/fluent/plugin/in_dummy.rb +22 -18
  92. data/lib/fluent/plugin/in_exec.rb +18 -8
  93. data/lib/fluent/plugin/in_forward.rb +36 -79
  94. data/lib/fluent/plugin/in_gc_stat.rb +4 -0
  95. data/lib/fluent/plugin/in_http.rb +21 -18
  96. data/lib/fluent/plugin/in_monitor_agent.rb +15 -48
  97. data/lib/fluent/plugin/in_object_space.rb +6 -1
  98. data/lib/fluent/plugin/in_stream.rb +7 -3
  99. data/lib/fluent/plugin/in_syslog.rb +46 -95
  100. data/lib/fluent/plugin/in_tail.rb +58 -640
  101. data/lib/fluent/plugin/in_tcp.rb +8 -1
  102. data/lib/fluent/plugin/in_udp.rb +8 -18
  103. data/lib/fluent/plugin/input.rb +33 -0
  104. data/lib/fluent/plugin/multi_output.rb +95 -0
  105. data/lib/fluent/plugin/out_buffered_null.rb +59 -0
  106. data/lib/fluent/plugin/out_copy.rb +11 -7
  107. data/lib/fluent/plugin/out_exec.rb +15 -11
  108. data/lib/fluent/plugin/out_exec_filter.rb +18 -10
  109. data/lib/fluent/plugin/out_file.rb +34 -5
  110. data/lib/fluent/plugin/out_forward.rb +25 -19
  111. data/lib/fluent/plugin/out_null.rb +0 -14
  112. data/lib/fluent/plugin/out_roundrobin.rb +11 -7
  113. data/lib/fluent/plugin/out_stdout.rb +5 -7
  114. data/lib/fluent/plugin/out_stream.rb +3 -1
  115. data/lib/fluent/plugin/output.rb +979 -0
  116. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  117. data/lib/fluent/plugin/parser.rb +244 -0
  118. data/lib/fluent/plugin/parser_apache.rb +24 -0
  119. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  120. data/lib/fluent/plugin/parser_apache_error.rb +21 -0
  121. data/lib/fluent/plugin/parser_csv.rb +31 -0
  122. data/lib/fluent/plugin/parser_json.rb +79 -0
  123. data/lib/fluent/plugin/parser_ltsv.rb +50 -0
  124. data/lib/fluent/plugin/parser_multiline.rb +102 -0
  125. data/lib/fluent/plugin/parser_nginx.rb +24 -0
  126. data/lib/fluent/plugin/parser_none.rb +36 -0
  127. data/lib/fluent/plugin/parser_syslog.rb +82 -0
  128. data/lib/fluent/plugin/parser_tsv.rb +37 -0
  129. data/lib/fluent/plugin/socket_util.rb +119 -117
  130. data/lib/fluent/plugin/storage.rb +84 -0
  131. data/lib/fluent/plugin/storage_local.rb +116 -0
  132. data/lib/fluent/plugin/string_util.rb +16 -13
  133. data/lib/fluent/plugin_helper.rb +39 -0
  134. data/lib/fluent/plugin_helper/child_process.rb +298 -0
  135. data/lib/fluent/plugin_helper/compat_parameters.rb +99 -0
  136. data/lib/fluent/plugin_helper/event_emitter.rb +80 -0
  137. data/lib/fluent/plugin_helper/event_loop.rb +118 -0
  138. data/lib/fluent/plugin_helper/retry_state.rb +177 -0
  139. data/lib/fluent/plugin_helper/storage.rb +308 -0
  140. data/lib/fluent/plugin_helper/thread.rb +147 -0
  141. data/lib/fluent/plugin_helper/timer.rb +85 -0
  142. data/lib/fluent/plugin_id.rb +63 -0
  143. data/lib/fluent/process.rb +21 -30
  144. data/lib/fluent/registry.rb +21 -9
  145. data/lib/fluent/root_agent.rb +115 -40
  146. data/lib/fluent/supervisor.rb +330 -320
  147. data/lib/fluent/system_config.rb +42 -18
  148. data/lib/fluent/test.rb +6 -1
  149. data/lib/fluent/test/base.rb +23 -3
  150. data/lib/fluent/test/driver/base.rb +247 -0
  151. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  152. data/lib/fluent/test/driver/filter.rb +35 -0
  153. data/lib/fluent/test/driver/input.rb +31 -0
  154. data/lib/fluent/test/driver/output.rb +78 -0
  155. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  156. data/lib/fluent/test/filter_test.rb +0 -1
  157. data/lib/fluent/test/formatter_test.rb +2 -1
  158. data/lib/fluent/test/input_test.rb +23 -17
  159. data/lib/fluent/test/output_test.rb +28 -39
  160. data/lib/fluent/test/parser_test.rb +1 -1
  161. data/lib/fluent/time.rb +104 -1
  162. data/lib/fluent/{status.rb → unique_id.rb} +15 -24
  163. data/lib/fluent/version.rb +1 -1
  164. data/lib/fluent/winsvc.rb +72 -0
  165. data/test/compat/test_calls_super.rb +164 -0
  166. data/test/config/test_config_parser.rb +83 -0
  167. data/test/config/test_configurable.rb +547 -274
  168. data/test/config/test_configure_proxy.rb +146 -29
  169. data/test/config/test_dsl.rb +3 -181
  170. data/test/config/test_element.rb +274 -0
  171. data/test/config/test_literal_parser.rb +1 -1
  172. data/test/config/test_section.rb +79 -7
  173. data/test/config/test_system_config.rb +21 -0
  174. data/test/config/test_types.rb +3 -26
  175. data/test/helper.rb +78 -8
  176. data/test/plugin/test_bare_output.rb +118 -0
  177. data/test/plugin/test_base.rb +75 -0
  178. data/test/plugin/test_buf_file.rb +420 -521
  179. data/test/plugin/test_buf_memory.rb +32 -194
  180. data/test/plugin/test_buffer.rb +981 -0
  181. data/test/plugin/test_buffer_chunk.rb +110 -0
  182. data/test/plugin/test_buffer_file_chunk.rb +770 -0
  183. data/test/plugin/test_buffer_memory_chunk.rb +265 -0
  184. data/test/plugin/test_filter.rb +255 -0
  185. data/test/plugin/test_filter_grep.rb +2 -73
  186. data/test/plugin/test_filter_record_transformer.rb +24 -68
  187. data/test/plugin/test_filter_stdout.rb +6 -6
  188. data/test/plugin/test_in_debug_agent.rb +2 -0
  189. data/test/plugin/test_in_dummy.rb +11 -17
  190. data/test/plugin/test_in_exec.rb +6 -25
  191. data/test/plugin/test_in_forward.rb +112 -151
  192. data/test/plugin/test_in_gc_stat.rb +2 -0
  193. data/test/plugin/test_in_http.rb +106 -157
  194. data/test/plugin/test_in_object_space.rb +21 -5
  195. data/test/plugin/test_in_stream.rb +14 -13
  196. data/test/plugin/test_in_syslog.rb +30 -275
  197. data/test/plugin/test_in_tail.rb +95 -282
  198. data/test/plugin/test_in_tcp.rb +14 -0
  199. data/test/plugin/test_in_udp.rb +21 -67
  200. data/test/plugin/test_input.rb +122 -0
  201. data/test/plugin/test_multi_output.rb +180 -0
  202. data/test/plugin/test_out_buffered_null.rb +79 -0
  203. data/test/plugin/test_out_copy.rb +15 -2
  204. data/test/plugin/test_out_exec.rb +75 -25
  205. data/test/plugin/test_out_exec_filter.rb +74 -8
  206. data/test/plugin/test_out_file.rb +61 -7
  207. data/test/plugin/test_out_forward.rb +92 -15
  208. data/test/plugin/test_out_roundrobin.rb +1 -0
  209. data/test/plugin/test_out_stdout.rb +22 -13
  210. data/test/plugin/test_out_stream.rb +18 -0
  211. data/test/plugin/test_output.rb +515 -0
  212. data/test/plugin/test_output_as_buffered.rb +1540 -0
  213. data/test/plugin/test_output_as_buffered_overflow.rb +247 -0
  214. data/test/plugin/test_output_as_buffered_retries.rb +808 -0
  215. data/test/plugin/test_output_as_buffered_secondary.rb +776 -0
  216. data/test/plugin/test_output_as_standard.rb +362 -0
  217. data/test/plugin/test_owned_by.rb +35 -0
  218. data/test/plugin/test_storage.rb +167 -0
  219. data/test/plugin/test_storage_local.rb +8 -0
  220. data/test/plugin_helper/test_child_process.rb +599 -0
  221. data/test/plugin_helper/test_compat_parameters.rb +175 -0
  222. data/test/plugin_helper/test_event_emitter.rb +51 -0
  223. data/test/plugin_helper/test_event_loop.rb +52 -0
  224. data/test/plugin_helper/test_retry_state.rb +399 -0
  225. data/test/plugin_helper/test_storage.rb +411 -0
  226. data/test/plugin_helper/test_thread.rb +164 -0
  227. data/test/plugin_helper/test_timer.rb +100 -0
  228. data/test/scripts/exec_script.rb +0 -6
  229. data/test/scripts/fluent/plugin/out_test.rb +3 -0
  230. data/test/test_config.rb +13 -4
  231. data/test/test_event.rb +24 -13
  232. data/test/test_event_router.rb +8 -7
  233. data/test/test_event_time.rb +187 -0
  234. data/test/test_formatter.rb +13 -51
  235. data/test/test_input.rb +1 -1
  236. data/test/test_log.rb +239 -16
  237. data/test/test_mixin.rb +1 -1
  238. data/test/test_output.rb +53 -66
  239. data/test/test_parser.rb +105 -323
  240. data/test/test_plugin_helper.rb +81 -0
  241. data/test/test_root_agent.rb +4 -52
  242. data/test/test_supervisor.rb +272 -0
  243. data/test/test_unique_id.rb +47 -0
  244. metadata +181 -55
  245. data/CHANGELOG.md +0 -710
  246. data/lib/fluent/buffer.rb +0 -365
  247. data/lib/fluent/plugin/filter_parser.rb +0 -107
  248. data/lib/fluent/plugin/in_status.rb +0 -76
  249. data/lib/fluent/test/helpers.rb +0 -86
  250. data/test/plugin/data/log/foo/bar2 +0 -0
  251. data/test/plugin/test_filter_parser.rb +0 -744
  252. data/test/plugin/test_in_status.rb +0 -38
  253. data/test/test_buffer.rb +0 -624
@@ -0,0 +1,247 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/output'
3
+ require 'fluent/plugin/buffer'
4
+ require 'fluent/event'
5
+
6
+ require 'json'
7
+ require 'time'
8
+ require 'timeout'
9
+ require 'timecop'
10
+
11
+ module FluentPluginOutputAsBufferedOverflowTest
12
+ class DummyBareOutput < Fluent::Plugin::Output
13
+ def register(name, &block)
14
+ instance_variable_set("@#{name}", block)
15
+ end
16
+ end
17
+ class DummyAsyncOutput < DummyBareOutput
18
+ def initialize
19
+ super
20
+ @format = @write = nil
21
+ end
22
+ def format(tag, time, record)
23
+ @format ? @format.call(tag, time, record) : [tag, time, record].to_json
24
+ end
25
+ def write(chunk)
26
+ @write ? @write.call(chunk) : nil
27
+ end
28
+ end
29
+ end
30
+
31
# Tests for buffer-overflow handling in buffered output plugins.
# Each sub_test_case configures a tiny buffer (chunk_limit_size=1024,
# total_limit_size=4096, i.e. room for 4 chunks) and verifies the
# behavior of one `overflow_action` setting when the buffer fills up.
class BufferedOutputOverflowTest < Test::Unit::TestCase
  # Builds a fresh dummy buffered output plugin instance.
  def create_output
    FluentPluginOutputAsBufferedOverflowTest::DummyAsyncOutput.new
  end
  # Convenience wrapper around buffer chunk metadata construction.
  def create_metadata(timekey: nil, tag: nil, variables: nil)
    Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
  end
  # Runs the given block under a timeout; on timeout, dumps the plugin's
  # captured logs to STDERR to aid debugging, then re-raises.
  def waiting(seconds)
    begin
      Timeout.timeout(seconds) do
        yield
      end
    rescue Timeout::Error
      logs = @i.log.out.logs
      STDERR.print(*logs)
      raise
    end
  end

  teardown do
    if @i
      # Drive the plugin through its full shutdown sequence, skipping any
      # phase that has already run inside the test body.
      @i.stop unless @i.stopped?
      @i.before_shutdown unless @i.before_shutdown?
      @i.shutdown unless @i.shutdown?
      @i.after_shutdown unless @i.after_shutdown?
      @i.close unless @i.closed?
      @i.terminate unless @i.terminated?
    end
    # Undo any Timecop.freeze left active by a test.
    Timecop.return
  end

  sub_test_case 'buffered output with default configuration (throws exception for buffer overflow)' do
    setup do
      # No overflow_action specified: the default raises on overflow.
      hash = {
        'flush_mode' => 'lazy',
        'flush_thread_burst_interval' => 0.01,
        'chunk_limit_size' => 1024,
        'total_limit_size' => 4096,
      }
      @i = create_output()
      @i.configure(config_element('ROOT','',{},[config_element('buffer','tag',hash)]))
      @i.start
    end

    test '#emit_events raises error when buffer is full' do
      @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)

      es = Fluent::ArrayEventStream.new([
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
      ])

      # Each distinct tag produces its own chunk; 8 emits fill the buffer.
      8.times do |i|
        @i.emit_events("tag#{i}", es)
      end

      assert !@i.buffer.storable?

      # The 9th emit has no space left and must raise.
      assert_raise(Fluent::Plugin::Buffer::BufferOverflowError) do
        @i.emit_events("tag9", es)
      end
      logs = @i.log.out.logs
      assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
    end
  end

  sub_test_case 'buffered output configured with "overflow_action block"' do
    setup do
      hash = {
        'flush_mode' => 'lazy',
        'flush_thread_burst_interval' => 0.01,
        'chunk_limit_size' => 1024,
        'total_limit_size' => 4096,
        'overflow_action' => "block",
      }
      @i = create_output()
      # debug log level so that the blocking/retrying messages are captured.
      @i.configure(config_element('ROOT','',{'log_level' => 'debug'},[config_element('buffer','tag',hash)]))
      @i.start
    end

    test '#emit_events blocks until any queues are flushed' do
      failing = true
      flushed_chunks = []
      @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
      # #write fails while `failing` is true, so flushes cannot free space
      # until the background thread below flips the flag.
      @i.register(:write) do |chunk|
        if failing
          raise "blocking"
        end
        flushed_chunks << chunk
      end

      es = Fluent::ArrayEventStream.new([
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
      ])

      # 8 records x 128 bytes = 1024 bytes per tag: 4 emits fill the buffer.
      4.times do |i|
        @i.emit_events("tag#{i}", es)
      end

      assert !@i.buffer.storable?

      # After 3 seconds, let #write start succeeding so the blocked emit
      # can complete once a flush frees buffer space.
      Thread.new do
        sleep 3
        failing = false
      end

      assert_nothing_raised do
        @i.emit_events("tag9", es)
      end

      assert !failing
      assert{ flushed_chunks.size > 0 }

      logs = @i.log.out.logs
      assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
      assert{ logs.any?{|line| line.include?("buffer.write is now blocking") } }
      assert{ logs.any?{|line| line.include?("retrying buffer.write after blocked operation") } }
    end
  end

  sub_test_case 'buffered output configured with "overflow_action drop_oldest_chunk"' do
    setup do
      hash = {
        'flush_mode' => 'lazy',
        'flush_thread_burst_interval' => 0.01,
        'chunk_limit_size' => 1024,
        'total_limit_size' => 4096,
        'overflow_action' => "drop_oldest_chunk",
      }
      @i = create_output()
      @i.configure(config_element('ROOT','',{'log_level' => 'debug'},[config_element('buffer','tag',hash)]))
      @i.start
    end

    test '#emit_events will success by dropping oldest chunk' do
      failing = true
      flushed_chunks = []
      @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)
      # #write always fails in this test, so queued chunks can only leave
      # the buffer by being dropped.
      @i.register(:write) do |chunk|
        if failing
          raise "blocking"
        end
        flushed_chunks << chunk
      end

      es = Fluent::ArrayEventStream.new([
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
      ])

      4.times do |i|
        @i.emit_events("tag#{i}", es)
      end

      assert !@i.buffer.storable?

      # Oldest chunks are at the head of the queue, in emit order.
      assert{ @i.buffer.queue[0].metadata.tag == "tag0" }
      assert{ @i.buffer.queue[1].metadata.tag == "tag1" }

      assert_nothing_raised do
        @i.emit_events("tag9", es)
      end

      # #write never succeeded; space was made by dropping, not flushing.
      assert failing
      assert{ flushed_chunks.size == 0 }

      # tag0's chunk was dropped, so tag1 is now the oldest queued chunk.
      assert{ @i.buffer.queue[0].metadata.tag == "tag1" }

      logs = @i.log.out.logs
      assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
      assert{ logs.any?{|line| line.include?("dropping oldest chunk to make space after buffer overflow") } }
    end

    test '#emit_events raises OverflowError if all buffer spaces are used by staged chunks' do
      @i.register(:format){|tag, time, record| "x" * 128 } # 128bytes per record (x4 -> 512bytes)

      es = Fluent::ArrayEventStream.new([
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
        [event_time(), {"message" => "test"}],
      ])

      # 4 records x 128 bytes = 512 bytes per tag: 8 staged chunks fill the
      # buffer without any of them being enqueued.
      8.times do |i|
        @i.emit_events("tag#{i}", es)
      end

      assert !@i.buffer.storable?

      # Nothing queued means drop_oldest_chunk has nothing to drop.
      assert{ @i.buffer.queue.size == 0 }
      assert{ @i.buffer.stage.size == 8 }

      assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
        @i.emit_events("tag9", es)
      end

      logs = @i.log.out.logs
      assert{ logs.any?{|line| line.include?("failed to write data into buffer by buffer overflow") } }
      assert{ logs.any?{|line| line.include?("no queued chunks to be dropped for drop_oldest_chunk") } }
    end
  end
end
@@ -0,0 +1,808 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/output'
3
+ require 'fluent/plugin/buffer'
4
+ require 'fluent/event'
5
+
6
+ require 'json'
7
+ require 'time'
8
+ require 'timeout'
9
+ require 'timecop'
10
+
11
# Test doubles for exercising retry behavior of Fluent::Plugin::Output.
module FluentPluginOutputAsBufferedRetryTest
  # Minimal output plugin whose hook methods can be stubbed per test:
  # `register(:write) { ... }` stores the block in @write, and so on.
  class DummyBareOutput < Fluent::Plugin::Output
    def register(name, &block)
      instance_variable_set(:"@#{name}", block)
    end
  end

  # Non-buffered output double whose #process delegates to a stub.
  class DummySyncOutput < DummyBareOutput
    def initialize
      super
      @process = nil
    end

    def process(tag, es)
      @process.call(tag, es) if @process
    end
  end

  # Output double implementing every overridable hook; each delegates to
  # a registered stub when one exists, otherwise returns a default value.
  class DummyFullFeatureOutput < DummyBareOutput
    def initialize
      super
      @prefer_buffered_processing = nil
      @prefer_delayed_commit = nil
      @process = nil
      @format = nil
      @write = nil
      @try_write = nil
    end

    # Defaults to false (non-buffered) unless a stub is registered.
    def prefer_buffered_processing
      if @prefer_buffered_processing
        @prefer_buffered_processing.call
      else
        false
      end
    end

    # Defaults to false (synchronous commit) unless a stub is registered.
    def prefer_delayed_commit
      if @prefer_delayed_commit
        @prefer_delayed_commit.call
      else
        false
      end
    end

    def process(tag, es)
      @process.call(tag, es) if @process
    end

    # Default formatting is a JSON-encoded [tag, time, record] triple.
    def format(tag, time, record)
      if @format
        @format.call(tag, time, record)
      else
        [tag, time, record].to_json
      end
    end

    def write(chunk)
      @write.call(chunk) if @write
    end

    def try_write(chunk)
      @try_write.call(chunk) if @try_write
    end
  end

  # Variant that always prefers buffered processing; every other hook
  # simply defers to the parent implementation.
  class DummyFullFeatureOutput2 < DummyFullFeatureOutput
    def prefer_buffered_processing
      true
    end

    def prefer_delayed_commit
      super
    end

    def format(tag, time, record)
      super
    end

    def write(chunk)
      super
    end

    def try_write(chunk)
      super
    end
  end
end
63
+
64
+ class BufferedOutputRetryTest < Test::Unit::TestCase
65
+ def create_output(type=:full)
66
+ case type
67
+ when :bare then FluentPluginOutputAsBufferedRetryTest::DummyBareOutput.new
68
+ when :sync then FluentPluginOutputAsBufferedRetryTest::DummySyncOutput.new
69
+ when :full then FluentPluginOutputAsBufferedRetryTest::DummyFullFeatureOutput.new
70
+ else
71
+ raise ArgumentError, "unknown type: #{type}"
72
+ end
73
+ end
74
+ def create_metadata(timekey: nil, tag: nil, variables: nil)
75
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
76
+ end
77
+ def waiting(seconds)
78
+ begin
79
+ Timeout.timeout(seconds) do
80
+ yield
81
+ end
82
+ rescue Timeout::Error
83
+ STDERR.print(*@i.log.out.logs)
84
+ raise
85
+ end
86
+ end
87
+ def dummy_event_stream
88
+ Fluent::ArrayEventStream.new([
89
+ [ event_time('2016-04-13 18:33:00'), {"name" => "moris", "age" => 36, "message" => "data1"} ],
90
+ [ event_time('2016-04-13 18:33:13'), {"name" => "moris", "age" => 36, "message" => "data2"} ],
91
+ [ event_time('2016-04-13 18:33:32'), {"name" => "moris", "age" => 36, "message" => "data3"} ],
92
+ ])
93
+ end
94
+ def get_log_time(msg, logs)
95
+ log_time = nil
96
+ log = logs.select{|l| l.include?(msg) }.first
97
+ if log && /^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4}) \[error\]/ =~ log
98
+ log_time = Time.parse($1)
99
+ end
100
+ log_time
101
+ end
102
+
103
+ setup do
104
+ @i = create_output
105
+ end
106
+
107
+ teardown do
108
+ if @i
109
+ @i.stop unless @i.stopped?
110
+ @i.before_shutdown unless @i.before_shutdown?
111
+ @i.shutdown unless @i.shutdown?
112
+ @i.after_shutdown unless @i.after_shutdown?
113
+ @i.close unless @i.closed?
114
+ @i.terminate unless @i.terminated?
115
+ end
116
+ Timecop.return
117
+ end
118
+
119
+ sub_test_case 'buffered output for retries with exponential backoff' do
120
+ test 'exponential backoff is default strategy for retries' do
121
+ chunk_key = 'tag'
122
+ hash = {
123
+ 'flush_interval' => 1,
124
+ 'flush_thread_burst_interval' => 0.1,
125
+ 'retry_randomize' => false,
126
+ }
127
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
128
+ @i.register(:prefer_buffered_processing){ true }
129
+ @i.start
130
+
131
+ assert_equal :exponential_backoff, @i.buffer_config.retry_type
132
+ assert_equal 1, @i.buffer_config.retry_wait
133
+ assert_equal 2.0, @i.buffer_config.retry_exponential_backoff_base
134
+ assert !@i.buffer_config.retry_randomize
135
+
136
+ now = Time.parse('2016-04-13 18:17:00 -0700')
137
+ Timecop.freeze( now )
138
+
139
+ retry_state = @i.retry_state( @i.buffer_config.retry_randomize )
140
+ retry_state.step
141
+ assert_equal 1, (retry_state.next_time - now)
142
+ retry_state.step
143
+ assert_equal (1 * (2 ** 1)), (retry_state.next_time - now)
144
+ retry_state.step
145
+ assert_equal (1 * (2 ** 2)), (retry_state.next_time - now)
146
+ retry_state.step
147
+ assert_equal (1 * (2 ** 3)), (retry_state.next_time - now)
148
+ end
149
+
150
+ test 'does retries correctly when #write fails' do
151
+ chunk_key = 'tag'
152
+ hash = {
153
+ 'flush_interval' => 1,
154
+ 'flush_thread_burst_interval' => 0.1,
155
+ 'retry_randomize' => false,
156
+ 'retry_max_interval' => 60 * 60,
157
+ }
158
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
159
+ @i.register(:prefer_buffered_processing){ true }
160
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
161
+ @i.register(:write){|chunk| raise "yay, your #write must fail" }
162
+ @i.start
163
+
164
+ now = Time.parse('2016-04-13 18:33:30 -0700')
165
+ Timecop.freeze( now )
166
+
167
+ @i.emit_events("test.tag.1", dummy_event_stream())
168
+
169
+ now = Time.parse('2016-04-13 18:33:32 -0700')
170
+ Timecop.freeze( now )
171
+
172
+ @i.enqueue_thread_wait
173
+
174
+ @i.flush_thread_wakeup
175
+ waiting(4){ Thread.pass until @i.write_count > 0 }
176
+
177
+ assert{ @i.write_count > 0 }
178
+ assert{ @i.num_errors > 0 }
179
+
180
+ now = @i.next_flush_time
181
+ Timecop.freeze( now )
182
+ @i.flush_thread_wakeup
183
+ waiting(4){ Thread.pass until @i.write_count > 1 }
184
+
185
+ assert{ @i.write_count > 1 }
186
+ assert{ @i.num_errors > 1 }
187
+ end
188
+
189
+ test 'max retry interval is limited by retry_max_interval' do
190
+ chunk_key = 'tag'
191
+ hash = {
192
+ 'flush_interval' => 1,
193
+ 'flush_thread_burst_interval' => 0.1,
194
+ 'retry_randomize' => false,
195
+ 'retry_max_interval' => 60,
196
+ }
197
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
198
+ @i.register(:prefer_buffered_processing){ true }
199
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
200
+ @i.register(:write){|chunk| raise "yay, your #write must fail" }
201
+ @i.start
202
+
203
+ now = Time.parse('2016-04-13 18:33:30 -0700')
204
+ Timecop.freeze( now )
205
+
206
+ @i.emit_events("test.tag.1", dummy_event_stream())
207
+
208
+ now = Time.parse('2016-04-13 18:33:32 -0700')
209
+ Timecop.freeze( now )
210
+
211
+ @i.enqueue_thread_wait
212
+
213
+ @i.flush_thread_wakeup
214
+ waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
215
+
216
+ assert{ @i.write_count > 0 }
217
+ assert{ @i.num_errors > 0 }
218
+
219
+ prev_write_count = @i.write_count
220
+ prev_num_errors = @i.num_errors
221
+
222
+ 10.times do
223
+ now = @i.next_flush_time
224
+ Timecop.freeze( now )
225
+ @i.flush_thread_wakeup
226
+ waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
227
+
228
+ assert{ @i.write_count > prev_write_count }
229
+ assert{ @i.num_errors > prev_num_errors }
230
+
231
+ prev_write_count = @i.write_count
232
+ prev_num_errors = @i.num_errors
233
+ end
234
+ # exponential backoff interval: 1 * 2 ** 10 == 1024
235
+ # but it should be limited by retry_max_interval=60
236
+ assert_equal 60, (@i.next_flush_time - now)
237
+ end
238
+
239
+ test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
240
+ written_tags = []
241
+
242
+ chunk_key = 'tag'
243
+ hash = {
244
+ 'flush_interval' => 1,
245
+ 'flush_thread_burst_interval' => 0.1,
246
+ 'retry_randomize' => false,
247
+ 'retry_timeout' => 3600,
248
+ }
249
+ @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
250
+ @i.register(:prefer_buffered_processing){ true }
251
+ @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
252
+ @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
253
+ @i.start
254
+
255
+ now = Time.parse('2016-04-13 18:33:30 -0700')
256
+ Timecop.freeze( now )
257
+
258
+ @i.emit_events("test.tag.1", dummy_event_stream())
259
+
260
+ now = Time.parse('2016-04-13 18:33:31 -0700')
261
+ Timecop.freeze( now )
262
+
263
+ @i.emit_events("test.tag.2", dummy_event_stream())
264
+
265
+ assert_equal 0, @i.write_count
266
+ assert_equal 0, @i.num_errors
267
+
268
+ @i.enqueue_thread_wait
269
+ @i.flush_thread_wakeup
270
+ waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }
271
+
272
+ assert{ @i.buffer.queue.size > 0 }
273
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
274
+
275
+ assert{ @i.write_count > 0 }
276
+ assert{ @i.num_errors > 0 }
277
+
278
+ prev_write_count = @i.write_count
279
+ prev_num_errors = @i.num_errors
280
+
281
+ first_failure = @i.retry.start
282
+
283
+ 15.times do |i| # large enough
284
+ now = @i.next_flush_time
285
+ # p({i: i, now: now, diff: (now - Time.now)})
286
+ # * if loop count is 12:
287
+ # {:i=>0, :now=>2016-04-13 18:33:32 -0700, :diff=>1.0}
288
+ # {:i=>1, :now=>2016-04-13 18:33:34 -0700, :diff=>2.0}
289
+ # {:i=>2, :now=>2016-04-13 18:33:38 -0700, :diff=>4.0}
290
+ # {:i=>3, :now=>2016-04-13 18:33:46 -0700, :diff=>8.0}
291
+ # {:i=>4, :now=>2016-04-13 18:34:02 -0700, :diff=>16.0}
292
+ # {:i=>5, :now=>2016-04-13 18:34:34 -0700, :diff=>32.0}
293
+ # {:i=>6, :now=>2016-04-13 18:35:38 -0700, :diff=>64.0}
294
+ # {:i=>7, :now=>2016-04-13 18:37:46 -0700, :diff=>128.0}
295
+ # {:i=>8, :now=>2016-04-13 18:42:02 -0700, :diff=>256.0}
296
+ # {:i=>9, :now=>2016-04-13 18:50:34 -0700, :diff=>512.0}
297
+ # {:i=>10, :now=>2016-04-13 19:07:38 -0700, :diff=>1024.0}
298
+ # {:i=>11, :now=>2016-04-13 19:33:31 -0700, :diff=>1553.0} # clear_queue!
299
+
300
+ Timecop.freeze( now )
301
+ @i.enqueue_thread_wait
302
+ @i.flush_thread_wakeup
303
+ waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }
304
+
305
+ assert{ @i.write_count > prev_write_count }
306
+ assert{ @i.num_errors > prev_num_errors }
307
+
308
+ break if @i.buffer.queue.size == 0
309
+
310
+ prev_write_count = @i.write_count
311
+ prev_num_errors = @i.num_errors
312
+
313
+ assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
314
+ end
315
+ assert{ now >= first_failure + 3600 }
316
+
317
+ assert{ @i.buffer.stage.size == 0 }
318
+ assert{ written_tags.all?{|t| t == 'test.tag.1' } }
319
+
320
+ @i.emit_events("test.tag.3", dummy_event_stream())
321
+
322
+ logs = @i.log.out.logs
323
+ assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") } }
324
+ end
325
+
326
# Verifies that with the default (exponential backoff) retry type, a buffered
# output whose #write always fails gives up after 'retry_max_times' retries,
# logs the drop, and clears all queued chunks so new events can be staged again.
# NOTE(review): @i, config_element, waiting and dummy_event_stream are presumably
# provided by the test's setup/helpers outside this file section — confirm there.
test 'output plugin give retries up by retry_max_times, and clear queue in buffer' do
  written_tags = []  # records the chunk tag seen by every (failing) #write call

  chunk_key = 'tag'
  hash = {
    'flush_interval' => 1,
    'flush_thread_burst_interval' => 0.1,
    'retry_randomize' => false,   # deterministic retry intervals for the assertions below
    'retry_max_times' => 10,      # give up after 10 retries
  }
  @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  # Every flush attempt fails, forcing the plugin into its retry state machine.
  @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
  @i.start

  # Freeze wall-clock time so flush/retry scheduling is fully controlled by the test.
  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.1", dummy_event_stream())

  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.2", dummy_event_stream())

  # Nothing has been flushed yet before the flush threads are woken up.
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }

  # First flush attempt failed: chunks stay queued, oldest (test.tag.1) first.
  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }

  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }

  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors

  _first_failure = @i.retry.start

  # Keep references to the queued chunks so we can assert they are purged later.
  chunks = @i.buffer.queue.dup

  20.times do |i| # large times enough
    # Jump time to the next scheduled retry and trigger another (failing) flush.
    now = @i.next_flush_time

    Timecop.freeze( now )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }

    assert{ @i.write_count > prev_write_count }
    assert{ @i.num_errors > prev_num_errors }

    # Once the retry limit is hit the plugin drops the whole queue.
    break if @i.buffer.queue.size == 0

    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors

    # Retries always target the oldest queued chunk first.
    assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  end
  assert{ @i.buffer.stage.size == 0 }
  assert{ written_tags.all?{|t| t == 'test.tag.1' } }

  # After giving up, the plugin must accept new events again.
  @i.emit_events("test.tag.3", dummy_event_stream())

  logs = @i.log.out.logs
  assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }

  assert{ @i.buffer.queue.size == 0 }   # queue was purged
  assert{ @i.buffer.stage.size == 1 }   # the new test.tag.3 events are staged
  assert{ chunks.all?{|c| c.empty? } }  # dropped chunks were emptied, not leaked
end
402
+ end
403
+
404
+ sub_test_case 'bufferd output for retries with periodical retry' do
405
# Verifies that with retry_type :periodic the plugin schedules another flush
# attempt after retry_wait when #write fails, and that the second attempt is
# actually executed once time advances to next_flush_time.
test 'periodical retries should retry to write in failing status per retry_wait' do
  chunk_key = 'tag'
  hash = {
    'flush_interval' => 1,
    'flush_thread_burst_interval' => 0.1,
    'retry_type' => :periodic,    # fixed-interval retries instead of exponential backoff
    'retry_wait' => 3,
    'retry_randomize' => false,   # keep intervals exact for next_flush_time
  }
  @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  # Every flush fails, so each wakeup produces one more write attempt + error.
  @i.register(:write){|chunk| raise "yay, your #write must fail" }
  @i.start

  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.1", dummy_event_stream())

  # Advance past flush_interval so the staged chunk becomes enqueueable.
  now = Time.parse('2016-04-13 18:33:32 -0700')
  Timecop.freeze( now )

  @i.enqueue_thread_wait

  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 0 }

  # First attempt happened and failed.
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }

  # Jump to the scheduled retry time; a second attempt must occur.
  now = @i.next_flush_time
  Timecop.freeze( now )
  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 1 }

  assert{ @i.write_count > 1 }
  assert{ @i.num_errors > 1 }
end
444
+
445
# Verifies that with periodic retries and retry_timeout=120s, the plugin stops
# retrying once the timeout since the first failure elapses, logs the drop at
# the expected timestamp, and purges the buffer queue.
test 'output plugin give retries up by retry_timeout, and clear queue in buffer' do
  written_tags = []  # tags of chunks passed to the always-failing #write

  chunk_key = 'tag'
  hash = {
    'flush_interval' => 1,
    'flush_thread_burst_interval' => 0.1,
    'retry_type' => :periodic,
    'retry_wait' => 30,
    'retry_randomize' => false,   # exact 30s spacing; 120s timeout == ~4 attempts
    'retry_timeout' => 120,
  }
  @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
  @i.start

  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.1", dummy_event_stream())

  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.2", dummy_event_stream())

  # No flush before the threads are explicitly woken up.
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }

  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }

  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }

  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors

  first_failure = @i.retry.start

  # Drive three more failing retries (at 30s spacing) toward the 120s timeout.
  3.times do |i|
    now = @i.next_flush_time

    Timecop.freeze( now )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }

    assert{ @i.write_count > prev_write_count }
    assert{ @i.num_errors > prev_num_errors }

    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors
  end

  # The next scheduled flush is at/after the retry_timeout deadline.
  assert{ @i.next_flush_time >= first_failure + 120 }

  # Both chunks are still queued; nothing succeeded or was dropped yet.
  assert{ @i.buffer.queue.size == 2 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  assert{ @i.buffer.stage.size == 0 }

  assert{ written_tags.all?{|t| t == 'test.tag.1' } }

  # Keep chunk references to assert they get purged after give-up.
  chunks = @i.buffer.queue.dup

  @i.emit_events("test.tag.3", dummy_event_stream())

  # This final wakeup crosses the timeout; the plugin gives up and drops the queue.
  now = @i.next_flush_time
  Timecop.freeze( now )
  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }

  logs = @i.log.out.logs

  # first_failure (18:33:31) + 120s retry_timeout => give-up logged at 18:35:31.
  target_time = Time.parse("2016-04-13 18:35:31 -0700")
  target_msg = "[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue."
  assert{ logs.any?{|l| l.include?(target_msg) } }

  log_time = get_log_time(target_msg, logs)
  assert_equal target_time.localtime, log_time.localtime

  assert{ @i.buffer.queue.size == 0 }   # queue purged on give-up
  assert{ @i.buffer.stage.size == 1 }   # new test.tag.3 events staged afterwards
  assert{ chunks.all?{|c| c.empty? } }  # dropped chunks were emptied
end
536
+
537
# Verifies that retry_max_times also bounds periodic retries: after 10 failing
# retries at retry_wait spacing the plugin gives up, logs "retry_times=10",
# and clears the buffer queue so new events can be staged.
test 'retry_max_times can limit maximum times for retries' do
  written_tags = []  # tags seen by the always-failing #write

  chunk_key = 'tag'
  hash = {
    'flush_interval' => 1,
    'flush_thread_burst_interval' => 0.1,
    'retry_type' => :periodic,
    'retry_wait' => 3,
    'retry_randomize' => false,
    'retry_max_times' => 10,
  }
  @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
  @i.start

  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.1", dummy_event_stream())

  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.2", dummy_event_stream())

  # Nothing flushed until the flush threads are woken up.
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }

  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }

  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }

  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors

  _first_failure = @i.retry.start

  # Keep references so we can assert the dropped chunks were emptied.
  chunks = @i.buffer.queue.dup

  # 20 iterations is comfortably more than retry_max_times (10); the loop
  # breaks as soon as the plugin gives up and purges the queue.
  20.times do |i|
    now = @i.next_flush_time

    Timecop.freeze( now )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }

    assert{ @i.write_count > prev_write_count }
    assert{ @i.num_errors > prev_num_errors }

    break if @i.buffer.queue.size == 0

    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors

    # Retries always operate on the oldest chunk first.
    assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }
  end
  assert{ @i.buffer.stage.size == 0 }
  assert{ written_tags.all?{|t| t == 'test.tag.1' } }

  # New events are accepted after give-up.
  @i.emit_events("test.tag.3", dummy_event_stream())

  logs = @i.log.out.logs
  assert{ logs.any?{|l| l.include?("[error]: failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.") && l.include?("retry_times=10") } }

  assert{ @i.buffer.queue.size == 0 }   # queue purged
  assert{ @i.buffer.stage.size == 1 }   # test.tag.3 staged
  assert{ chunks.all?{|c| c.empty? } }  # dropped chunks emptied
end
616
+ end
617
+
618
+ sub_test_case 'buffered output configured as retry_forever' do
619
# retry_forever keeps chunks queued indefinitely, so a <secondary> output
# (which only receives data on give-up) is contradictory; configuration
# of both together must raise Fluent::ConfigError.
test 'configuration error will be raised if secondary section is configured' do
  buffer_conf = config_element('buffer', 'tag', {
    'retry_forever' => true,
    'retry_randomize' => false,
  })
  secondary_conf = config_element('secondary', '')
  plugin = create_output()
  assert_raise Fluent::ConfigError do
    plugin.configure(config_element('ROOT', '', {}, [buffer_conf, secondary_conf]))
  end
end
630
+
631
# Verifies that retry_forever overrides both retry_timeout and retry_max_times
# for exponential backoff: after 15 failing retries the step count exceeds
# retry_max_times and the elapsed time exceeds retry_timeout, yet the queued
# chunks are still retained (not dropped).
test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for exponential backoff' do
  # NOTE(review): written_tags is collected but never asserted in this test —
  # consider asserting it (as sibling tests do) or dropping the collection.
  written_tags = []

  chunk_key = 'tag'
  hash = {
    'flush_interval' => 1,
    'flush_thread_burst_interval' => 0.1,
    'retry_type' => :exponential_backoff,
    'retry_forever' => true,      # should make the two limits below inert
    'retry_randomize' => false,
    'retry_timeout' => 3600,
    'retry_max_times' => 10,
  }
  @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
  @i.start

  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.1", dummy_event_stream())

  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.2", dummy_event_stream())

  # Nothing flushed before the wakeups below.
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }

  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }

  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }

  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors

  first_failure = @i.retry.start

  # 15 retries > retry_max_times(10), and the backoff growth pushes the
  # elapsed time past retry_timeout(3600s) — see the assertions after the loop.
  15.times do |i|
    now = @i.next_flush_time

    Timecop.freeze( now )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }

    assert{ @i.write_count > prev_write_count }
    assert{ @i.num_errors > prev_num_errors }

    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors
  end

  # Both limits were exceeded but the queue was NOT dropped: retry_forever wins.
  assert{ @i.buffer.queue.size == 2 }
  assert{ @i.retry.steps > 10 }
  assert{ now > first_failure + 3600 }
end
697
+
698
# Periodic-retry counterpart of the test above: with retry_forever true,
# retry_timeout(360s) and retry_max_times(10) are both exceeded after 15
# fixed-interval retries, yet the queued chunks are still retained.
test 'retry_timeout and retry_max_times will be ignored if retry_forever is true for periodical retries' do
  # NOTE(review): written_tags is collected but never asserted in this test —
  # consider asserting it (as sibling tests do) or dropping the collection.
  written_tags = []

  chunk_key = 'tag'
  hash = {
    'flush_interval' => 1,
    'flush_thread_burst_interval' => 0.1,
    'retry_type' => :periodic,
    'retry_forever' => true,      # should make the two limits below inert
    'retry_randomize' => false,
    'retry_wait' => 30,
    'retry_timeout' => 360,
    'retry_max_times' => 10,
  }
  @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  @i.register(:write){|chunk| written_tags << chunk.metadata.tag; raise "yay, your #write must fail" }
  @i.start

  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.1", dummy_event_stream())

  now = Time.parse('2016-04-13 18:33:31 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.2", dummy_event_stream())

  # Nothing flushed before the wakeups below.
  assert_equal 0, @i.write_count
  assert_equal 0, @i.num_errors

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 0 && @i.num_errors > 0 }

  assert{ @i.buffer.queue.size > 0 }
  assert{ @i.buffer.queue.first.metadata.tag == 'test.tag.1' }

  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }

  prev_write_count = @i.write_count
  prev_num_errors = @i.num_errors

  first_failure = @i.retry.start

  # 15 retries at 30s spacing = 450s elapsed > retry_timeout(360s),
  # and 15 > retry_max_times(10).
  15.times do |i|
    now = @i.next_flush_time

    Timecop.freeze( now )
    @i.enqueue_thread_wait
    @i.flush_thread_wakeup
    waiting(4){ Thread.pass until @i.write_count > prev_write_count && @i.num_errors > prev_num_errors }

    assert{ @i.write_count > prev_write_count }
    assert{ @i.num_errors > prev_num_errors }

    prev_write_count = @i.write_count
    prev_num_errors = @i.num_errors
  end

  # Both limits exceeded, queue still intact: retry_forever wins.
  assert{ @i.buffer.queue.size == 2 }
  assert{ @i.retry.steps > 10 }
  assert{ now > first_failure + 360 }
end
765
+ end
766
+
767
+ sub_test_case 'buffered output with delayed commit' do
768
# Verifies that the retry machinery also kicks in for delayed-commit outputs:
# when #try_write (instead of #write) raises, a retry is scheduled and a
# second attempt happens once time reaches next_flush_time.
test 'does retries correctly when #try_write fails' do
  chunk_key = 'tag'
  hash = {
    'flush_interval' => 1,
    'flush_thread_burst_interval' => 0.1,
    'retry_randomize' => false,
    'retry_max_interval' => 60 * 60,
  }
  @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
  @i.register(:prefer_buffered_processing){ true }
  @i.register(:prefer_delayed_commit){ true }  # route flushes through #try_write
  @i.register(:format){|tag,time,record| [tag,time.to_i,record].to_json + "\n" }
  # Every delayed-commit flush attempt fails.
  @i.register(:try_write){|chunk| raise "yay, your #write must fail" }
  @i.start

  now = Time.parse('2016-04-13 18:33:30 -0700')
  Timecop.freeze( now )

  @i.emit_events("test.tag.1", dummy_event_stream())

  # Advance past flush_interval so the staged chunk becomes enqueueable.
  now = Time.parse('2016-04-13 18:33:32 -0700')
  Timecop.freeze( now )

  @i.enqueue_thread_wait

  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 0 }

  # First attempt happened and failed.
  assert{ @i.write_count > 0 }
  assert{ @i.num_errors > 0 }

  # Jump to the scheduled retry time; a second attempt must occur.
  now = @i.next_flush_time
  Timecop.freeze( now )
  @i.flush_thread_wakeup
  waiting(4){ Thread.pass until @i.write_count > 1 }

  assert{ @i.write_count > 1 }
  assert{ @i.num_errors > 1 }
end
807
+ end
808
+ end