fluentd 0.12.43 → 0.14.0

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (253) hide show
  1. checksums.yaml +4 -4
  2. data/.github/ISSUE_TEMPLATE.md +6 -0
  3. data/.gitignore +2 -0
  4. data/.travis.yml +33 -21
  5. data/CONTRIBUTING.md +1 -0
  6. data/ChangeLog +1239 -0
  7. data/README.md +0 -25
  8. data/Rakefile +2 -1
  9. data/Vagrantfile +17 -0
  10. data/appveyor.yml +35 -0
  11. data/example/filter_stdout.conf +5 -5
  12. data/example/in_forward.conf +2 -2
  13. data/example/in_http.conf +2 -2
  14. data/example/in_out_forward.conf +17 -0
  15. data/example/in_syslog.conf +2 -2
  16. data/example/in_tail.conf +2 -2
  17. data/example/in_tcp.conf +2 -2
  18. data/example/in_udp.conf +2 -2
  19. data/example/out_copy.conf +4 -4
  20. data/example/out_file.conf +2 -2
  21. data/example/out_forward.conf +2 -2
  22. data/example/out_forward_buf_file.conf +23 -0
  23. data/example/v0_12_filter.conf +8 -8
  24. data/fluent.conf +29 -0
  25. data/fluentd.gemspec +18 -11
  26. data/lib/fluent/agent.rb +60 -58
  27. data/lib/fluent/command/cat.rb +1 -1
  28. data/lib/fluent/command/debug.rb +7 -5
  29. data/lib/fluent/command/fluentd.rb +97 -2
  30. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  31. data/lib/fluent/compat/filter.rb +50 -0
  32. data/lib/fluent/compat/formatter.rb +109 -0
  33. data/lib/fluent/compat/input.rb +50 -0
  34. data/lib/fluent/compat/output.rb +617 -0
  35. data/lib/fluent/compat/output_chain.rb +60 -0
  36. data/lib/fluent/compat/parser.rb +163 -0
  37. data/lib/fluent/compat/propagate_default.rb +62 -0
  38. data/lib/fluent/config.rb +23 -20
  39. data/lib/fluent/config/configure_proxy.rb +119 -70
  40. data/lib/fluent/config/dsl.rb +5 -18
  41. data/lib/fluent/config/element.rb +72 -8
  42. data/lib/fluent/config/error.rb +0 -3
  43. data/lib/fluent/config/literal_parser.rb +0 -2
  44. data/lib/fluent/config/parser.rb +4 -4
  45. data/lib/fluent/config/section.rb +39 -28
  46. data/lib/fluent/config/types.rb +2 -13
  47. data/lib/fluent/config/v1_parser.rb +1 -3
  48. data/lib/fluent/configurable.rb +48 -16
  49. data/lib/fluent/daemon.rb +15 -0
  50. data/lib/fluent/engine.rb +26 -52
  51. data/lib/fluent/env.rb +6 -4
  52. data/lib/fluent/event.rb +58 -11
  53. data/lib/fluent/event_router.rb +5 -5
  54. data/lib/fluent/filter.rb +2 -50
  55. data/lib/fluent/formatter.rb +4 -293
  56. data/lib/fluent/input.rb +2 -32
  57. data/lib/fluent/label.rb +2 -2
  58. data/lib/fluent/load.rb +3 -2
  59. data/lib/fluent/log.rb +107 -38
  60. data/lib/fluent/match.rb +0 -36
  61. data/lib/fluent/mixin.rb +117 -7
  62. data/lib/fluent/msgpack_factory.rb +62 -0
  63. data/lib/fluent/output.rb +7 -612
  64. data/lib/fluent/output_chain.rb +23 -0
  65. data/lib/fluent/parser.rb +4 -800
  66. data/lib/fluent/plugin.rb +100 -121
  67. data/lib/fluent/plugin/bare_output.rb +63 -0
  68. data/lib/fluent/plugin/base.rb +121 -0
  69. data/lib/fluent/plugin/buf_file.rb +101 -182
  70. data/lib/fluent/plugin/buf_memory.rb +9 -92
  71. data/lib/fluent/plugin/buffer.rb +473 -0
  72. data/lib/fluent/plugin/buffer/chunk.rb +135 -0
  73. data/lib/fluent/plugin/buffer/file_chunk.rb +339 -0
  74. data/lib/fluent/plugin/buffer/memory_chunk.rb +100 -0
  75. data/lib/fluent/plugin/exec_util.rb +80 -75
  76. data/lib/fluent/plugin/file_util.rb +33 -28
  77. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  78. data/lib/fluent/plugin/filter.rb +51 -0
  79. data/lib/fluent/plugin/filter_grep.rb +13 -40
  80. data/lib/fluent/plugin/filter_record_transformer.rb +22 -18
  81. data/lib/fluent/plugin/formatter.rb +93 -0
  82. data/lib/fluent/plugin/formatter_csv.rb +48 -0
  83. data/lib/fluent/plugin/formatter_hash.rb +32 -0
  84. data/lib/fluent/plugin/formatter_json.rb +47 -0
  85. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  86. data/lib/fluent/plugin/formatter_msgpack.rb +32 -0
  87. data/lib/fluent/plugin/formatter_out_file.rb +45 -0
  88. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  89. data/lib/fluent/plugin/formatter_stdout.rb +39 -0
  90. data/lib/fluent/plugin/in_debug_agent.rb +4 -0
  91. data/lib/fluent/plugin/in_dummy.rb +22 -18
  92. data/lib/fluent/plugin/in_exec.rb +18 -8
  93. data/lib/fluent/plugin/in_forward.rb +36 -79
  94. data/lib/fluent/plugin/in_gc_stat.rb +4 -0
  95. data/lib/fluent/plugin/in_http.rb +21 -18
  96. data/lib/fluent/plugin/in_monitor_agent.rb +15 -48
  97. data/lib/fluent/plugin/in_object_space.rb +6 -1
  98. data/lib/fluent/plugin/in_stream.rb +7 -3
  99. data/lib/fluent/plugin/in_syslog.rb +46 -95
  100. data/lib/fluent/plugin/in_tail.rb +58 -640
  101. data/lib/fluent/plugin/in_tcp.rb +8 -1
  102. data/lib/fluent/plugin/in_udp.rb +8 -18
  103. data/lib/fluent/plugin/input.rb +33 -0
  104. data/lib/fluent/plugin/multi_output.rb +95 -0
  105. data/lib/fluent/plugin/out_buffered_null.rb +59 -0
  106. data/lib/fluent/plugin/out_copy.rb +11 -7
  107. data/lib/fluent/plugin/out_exec.rb +15 -11
  108. data/lib/fluent/plugin/out_exec_filter.rb +18 -10
  109. data/lib/fluent/plugin/out_file.rb +34 -5
  110. data/lib/fluent/plugin/out_forward.rb +25 -19
  111. data/lib/fluent/plugin/out_null.rb +0 -14
  112. data/lib/fluent/plugin/out_roundrobin.rb +11 -7
  113. data/lib/fluent/plugin/out_stdout.rb +5 -7
  114. data/lib/fluent/plugin/out_stream.rb +3 -1
  115. data/lib/fluent/plugin/output.rb +979 -0
  116. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  117. data/lib/fluent/plugin/parser.rb +244 -0
  118. data/lib/fluent/plugin/parser_apache.rb +24 -0
  119. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  120. data/lib/fluent/plugin/parser_apache_error.rb +21 -0
  121. data/lib/fluent/plugin/parser_csv.rb +31 -0
  122. data/lib/fluent/plugin/parser_json.rb +79 -0
  123. data/lib/fluent/plugin/parser_ltsv.rb +50 -0
  124. data/lib/fluent/plugin/parser_multiline.rb +102 -0
  125. data/lib/fluent/plugin/parser_nginx.rb +24 -0
  126. data/lib/fluent/plugin/parser_none.rb +36 -0
  127. data/lib/fluent/plugin/parser_syslog.rb +82 -0
  128. data/lib/fluent/plugin/parser_tsv.rb +37 -0
  129. data/lib/fluent/plugin/socket_util.rb +119 -117
  130. data/lib/fluent/plugin/storage.rb +84 -0
  131. data/lib/fluent/plugin/storage_local.rb +116 -0
  132. data/lib/fluent/plugin/string_util.rb +16 -13
  133. data/lib/fluent/plugin_helper.rb +39 -0
  134. data/lib/fluent/plugin_helper/child_process.rb +298 -0
  135. data/lib/fluent/plugin_helper/compat_parameters.rb +99 -0
  136. data/lib/fluent/plugin_helper/event_emitter.rb +80 -0
  137. data/lib/fluent/plugin_helper/event_loop.rb +118 -0
  138. data/lib/fluent/plugin_helper/retry_state.rb +177 -0
  139. data/lib/fluent/plugin_helper/storage.rb +308 -0
  140. data/lib/fluent/plugin_helper/thread.rb +147 -0
  141. data/lib/fluent/plugin_helper/timer.rb +85 -0
  142. data/lib/fluent/plugin_id.rb +63 -0
  143. data/lib/fluent/process.rb +21 -30
  144. data/lib/fluent/registry.rb +21 -9
  145. data/lib/fluent/root_agent.rb +115 -40
  146. data/lib/fluent/supervisor.rb +330 -320
  147. data/lib/fluent/system_config.rb +42 -18
  148. data/lib/fluent/test.rb +6 -1
  149. data/lib/fluent/test/base.rb +23 -3
  150. data/lib/fluent/test/driver/base.rb +247 -0
  151. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  152. data/lib/fluent/test/driver/filter.rb +35 -0
  153. data/lib/fluent/test/driver/input.rb +31 -0
  154. data/lib/fluent/test/driver/output.rb +78 -0
  155. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  156. data/lib/fluent/test/filter_test.rb +0 -1
  157. data/lib/fluent/test/formatter_test.rb +2 -1
  158. data/lib/fluent/test/input_test.rb +23 -17
  159. data/lib/fluent/test/output_test.rb +28 -39
  160. data/lib/fluent/test/parser_test.rb +1 -1
  161. data/lib/fluent/time.rb +104 -1
  162. data/lib/fluent/{status.rb → unique_id.rb} +15 -24
  163. data/lib/fluent/version.rb +1 -1
  164. data/lib/fluent/winsvc.rb +72 -0
  165. data/test/compat/test_calls_super.rb +164 -0
  166. data/test/config/test_config_parser.rb +83 -0
  167. data/test/config/test_configurable.rb +547 -274
  168. data/test/config/test_configure_proxy.rb +146 -29
  169. data/test/config/test_dsl.rb +3 -181
  170. data/test/config/test_element.rb +274 -0
  171. data/test/config/test_literal_parser.rb +1 -1
  172. data/test/config/test_section.rb +79 -7
  173. data/test/config/test_system_config.rb +21 -0
  174. data/test/config/test_types.rb +3 -26
  175. data/test/helper.rb +78 -8
  176. data/test/plugin/test_bare_output.rb +118 -0
  177. data/test/plugin/test_base.rb +75 -0
  178. data/test/plugin/test_buf_file.rb +420 -521
  179. data/test/plugin/test_buf_memory.rb +32 -194
  180. data/test/plugin/test_buffer.rb +981 -0
  181. data/test/plugin/test_buffer_chunk.rb +110 -0
  182. data/test/plugin/test_buffer_file_chunk.rb +770 -0
  183. data/test/plugin/test_buffer_memory_chunk.rb +265 -0
  184. data/test/plugin/test_filter.rb +255 -0
  185. data/test/plugin/test_filter_grep.rb +2 -73
  186. data/test/plugin/test_filter_record_transformer.rb +24 -68
  187. data/test/plugin/test_filter_stdout.rb +6 -6
  188. data/test/plugin/test_in_debug_agent.rb +2 -0
  189. data/test/plugin/test_in_dummy.rb +11 -17
  190. data/test/plugin/test_in_exec.rb +6 -25
  191. data/test/plugin/test_in_forward.rb +112 -151
  192. data/test/plugin/test_in_gc_stat.rb +2 -0
  193. data/test/plugin/test_in_http.rb +106 -157
  194. data/test/plugin/test_in_object_space.rb +21 -5
  195. data/test/plugin/test_in_stream.rb +14 -13
  196. data/test/plugin/test_in_syslog.rb +30 -275
  197. data/test/plugin/test_in_tail.rb +95 -282
  198. data/test/plugin/test_in_tcp.rb +14 -0
  199. data/test/plugin/test_in_udp.rb +21 -67
  200. data/test/plugin/test_input.rb +122 -0
  201. data/test/plugin/test_multi_output.rb +180 -0
  202. data/test/plugin/test_out_buffered_null.rb +79 -0
  203. data/test/plugin/test_out_copy.rb +15 -2
  204. data/test/plugin/test_out_exec.rb +75 -25
  205. data/test/plugin/test_out_exec_filter.rb +74 -8
  206. data/test/plugin/test_out_file.rb +61 -7
  207. data/test/plugin/test_out_forward.rb +92 -15
  208. data/test/plugin/test_out_roundrobin.rb +1 -0
  209. data/test/plugin/test_out_stdout.rb +22 -13
  210. data/test/plugin/test_out_stream.rb +18 -0
  211. data/test/plugin/test_output.rb +515 -0
  212. data/test/plugin/test_output_as_buffered.rb +1540 -0
  213. data/test/plugin/test_output_as_buffered_overflow.rb +247 -0
  214. data/test/plugin/test_output_as_buffered_retries.rb +808 -0
  215. data/test/plugin/test_output_as_buffered_secondary.rb +776 -0
  216. data/test/plugin/test_output_as_standard.rb +362 -0
  217. data/test/plugin/test_owned_by.rb +35 -0
  218. data/test/plugin/test_storage.rb +167 -0
  219. data/test/plugin/test_storage_local.rb +8 -0
  220. data/test/plugin_helper/test_child_process.rb +599 -0
  221. data/test/plugin_helper/test_compat_parameters.rb +175 -0
  222. data/test/plugin_helper/test_event_emitter.rb +51 -0
  223. data/test/plugin_helper/test_event_loop.rb +52 -0
  224. data/test/plugin_helper/test_retry_state.rb +399 -0
  225. data/test/plugin_helper/test_storage.rb +411 -0
  226. data/test/plugin_helper/test_thread.rb +164 -0
  227. data/test/plugin_helper/test_timer.rb +100 -0
  228. data/test/scripts/exec_script.rb +0 -6
  229. data/test/scripts/fluent/plugin/out_test.rb +3 -0
  230. data/test/test_config.rb +13 -4
  231. data/test/test_event.rb +24 -13
  232. data/test/test_event_router.rb +8 -7
  233. data/test/test_event_time.rb +187 -0
  234. data/test/test_formatter.rb +13 -51
  235. data/test/test_input.rb +1 -1
  236. data/test/test_log.rb +239 -16
  237. data/test/test_mixin.rb +1 -1
  238. data/test/test_output.rb +53 -66
  239. data/test/test_parser.rb +105 -323
  240. data/test/test_plugin_helper.rb +81 -0
  241. data/test/test_root_agent.rb +4 -52
  242. data/test/test_supervisor.rb +272 -0
  243. data/test/test_unique_id.rb +47 -0
  244. metadata +181 -55
  245. data/CHANGELOG.md +0 -710
  246. data/lib/fluent/buffer.rb +0 -365
  247. data/lib/fluent/plugin/filter_parser.rb +0 -107
  248. data/lib/fluent/plugin/in_status.rb +0 -76
  249. data/lib/fluent/test/helpers.rb +0 -86
  250. data/test/plugin/data/log/foo/bar2 +0 -0
  251. data/test/plugin/test_filter_parser.rb +0 -744
  252. data/test/plugin/test_in_status.rb +0 -38
  253. data/test/test_buffer.rb +0 -624
@@ -0,0 +1,1540 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/output'
3
+ require 'fluent/plugin/buffer'
4
+ require 'fluent/event'
5
+
6
+ require 'json'
7
+ require 'time'
8
+ require 'timeout'
9
+ require 'timecop'
10
+
11
# Test doubles for exercising Fluent::Plugin::Output buffering behavior.
# Each dummy exposes #register(name){ ... } which stores the given block
# into @name; the overridden hook methods then delegate to that block,
# letting each test case inject its own process/format/write behavior.
module FluentPluginOutputAsBufferedTest
  # Base double: only provides the block-registration mechanism.
  class DummyBareOutput < Fluent::Plugin::Output
    def register(name, &block)
      instance_variable_set("@#{name}", block)
    end
  end

  # Synchronous (non-buffered) output: overridable #process only.
  class DummySyncOutput < DummyBareOutput
    def initialize
      super
      @process = nil
    end

    def process(tag, es)
      # Returns nil when no block was registered (same as the unset case).
      @process.call(tag, es) if @process
    end
  end

  # Buffered output: overridable #format and #write.
  class DummyAsyncOutput < DummyBareOutput
    def initialize
      super
      @format = nil
      @write = nil
    end

    def format(tag, time, record)
      # Default formatting is a JSON array of [tag, time, record].
      if @format
        @format.call(tag, time, record)
      else
        [tag, time, record].to_json
      end
    end

    def write(chunk)
      @write.call(chunk) if @write
    end
  end

  # Buffered output with delayed commit: overridable #format and #try_write,
  # plus an optional hook invoked at shutdown (before super).
  class DummyDelayedOutput < DummyBareOutput
    def initialize
      super
      @format = nil
      @try_write = nil
      @shutdown_hook = nil
    end

    def format(tag, time, record)
      if @format
        @format.call(tag, time, record)
      else
        [tag, time, record].to_json
      end
    end

    def try_write(chunk)
      @try_write.call(chunk) if @try_write
    end

    def shutdown
      @shutdown_hook.call if @shutdown_hook
      super
    end
  end

  # Full-featured double: every buffering-related hook is overridable.
  # The prefer_* predicates default to false when no block is registered.
  class DummyFullFeatureOutput < DummyBareOutput
    def initialize
      super
      @prefer_buffered_processing = nil
      @prefer_delayed_commit = nil
      @process = nil
      @format = nil
      @write = nil
      @try_write = nil
    end

    def prefer_buffered_processing
      return false unless @prefer_buffered_processing
      @prefer_buffered_processing.call
    end

    def prefer_delayed_commit
      return false unless @prefer_delayed_commit
      @prefer_delayed_commit.call
    end

    def process(tag, es)
      @process.call(tag, es) if @process
    end

    def format(tag, time, record)
      if @format
        @format.call(tag, time, record)
      else
        [tag, time, record].to_json
      end
    end

    def write(chunk)
      @write.call(chunk) if @write
    end

    def try_write(chunk)
      @try_write.call(chunk) if @try_write
    end
  end
end
89
+
90
+ class BufferedOutputTest < Test::Unit::TestCase
91
# Instantiate one of the dummy output doubles by symbolic type.
# Accepted types: :bare, :sync, :buffered, :delayed, :full (default).
# Raises ArgumentError for any other value.
def create_output(type=:full)
  dummy_classes = {
    bare:     FluentPluginOutputAsBufferedTest::DummyBareOutput,
    sync:     FluentPluginOutputAsBufferedTest::DummySyncOutput,
    buffered: FluentPluginOutputAsBufferedTest::DummyAsyncOutput,
    delayed:  FluentPluginOutputAsBufferedTest::DummyDelayedOutput,
    full:     FluentPluginOutputAsBufferedTest::DummyFullFeatureOutput,
  }
  klass = dummy_classes[type]
  raise ArgumentError, "unknown type: #{type}" unless klass
  klass.new
end
102
# Build a buffer chunk metadata object for tests.
# All three chunk keys default to nil (i.e. "no chunk key").
def create_metadata(timekey: nil, tag: nil, variables: nil)
  Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
end
105
# Run the given block with a hard deadline of +seconds+.
# Returns the block's value. On timeout, dumps the plugin instance's
# captured log lines (@i.log.out.logs) to STDERR as a debugging aid,
# then re-raises the Timeout::Error.
def waiting(seconds)
  Timeout.timeout(seconds) { yield }
rescue Timeout::Error
  STDERR.print(*@i.log.out.logs)
  raise
end
115
+
116
# Drive the plugin (when one was created by setup) through its remaining
# lifecycle phases in order, skipping any phase already completed, then
# restore the real system clock for tests that froze time with Timecop.
teardown do
  if @i
    [[:stop,            :stopped?],
     [:before_shutdown, :before_shutdown?],
     [:shutdown,        :shutdown?],
     [:after_shutdown,  :after_shutdown?],
     [:close,           :closed?],
     [:terminate,       :terminated?]].each do |step, already_done|
      @i.__send__(step) unless @i.__send__(already_done)
    end
  end
  Timecop.return
end
127
+
128
# Buffering behavior with no chunk key and flush_mode 'lazy':
# chunks are flushed only when full (or at shutdown), never on a timer.
sub_test_case 'buffered output feature without any buffer key, flush_mode: lazy' do
  setup do
    hash = {
      'flush_mode' => 'lazy',
      'flush_thread_burst_interval' => 0.01,
      'flush_thread_count' => 2,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
    @i.start
  end

  # lazy mode needs no periodic enqueuing, so no enqueue thread is expected.
  test '#start does not create enqueue thread, but creates flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:flush_thread_1)
    assert !@i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each events' do
    ary = []
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    # 4 emits x 2 events per stream = 8 format calls expected.
    4.times do
      @i.emit_events('tag.test', es)
    end

    assert_equal 8, ary.size
    4.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
    end
  end

  test '#write is called only when chunk bytes limit exceeded, and buffer chunk is purged' do
    ary = []
    @i.register(:write){|chunk| ary << chunk.read }

    tag = "test.tag"
    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    event_size = [tag, t, r].to_json.size # 195

    # Fill the staged chunk to ~90% of chunk_limit_size: nothing should
    # be enqueued or written yet.
    (1024 * 0.9 / event_size).to_i.times do |i|
      @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
    end
    assert{ @i.buffer.queue.size == 0 && ary.size == 0 }

    staged_chunk = @i.buffer.stage[@i.buffer.stage.keys.first]
    assert{ staged_chunk.size != 0 }

    # One more event pushes the chunk over the size limit, forcing enqueue.
    @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))

    assert{ @i.buffer.queue.size > 0 || @i.buffer.dequeued.size > 0 || ary.size > 0 }

    # Wait for the flush threads to drain the queue and purge the chunk.
    waiting(10) do
      Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
      Thread.pass until staged_chunk.size == 0
    end

    assert_equal 1, ary.size
    # NOTE: String#* truncates the Float multiplier via to_int, matching
    # the .to_i used for the emit count above.
    assert_equal [tag,t,r].to_json * (1024 / event_size), ary.first
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    ary = []
    @i.register(:write){|chunk| ary << chunk.read }

    tag = "test.tag"
    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    event_size = [tag, t, r].to_json.size # 195

    # Keep the chunk below the size limit so only shutdown can flush it.
    (1024 * 0.9 / event_size).to_i.times do |i|
      @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
    end
    assert{ @i.buffer.queue.size == 0 && ary.size == 0 }

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(10) do
      Thread.pass until ary.size == 1
    end
    assert_equal [tag,t,r].to_json * (1024 * 0.9 / event_size), ary.first
  end
end
228
+
229
# Buffering behavior with no chunk key and flush_mode 'interval':
# an enqueue thread flushes staged chunks every flush_interval second.
sub_test_case 'buffered output feature without any buffer key, flush_mode: interval' do
  setup do
    hash = {
      'flush_mode' => 'interval',
      'flush_interval' => 1,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.01,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
    @i.start
  end

  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    ary = []
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    # 4 emits x 2 events per stream = 8 format calls expected.
    4.times do
      @i.emit_events('tag.test', es)
    end

    assert_equal 8, ary.size
    4.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
    end
  end

  test '#write is called per flush_interval, and buffer chunk is purged' do
    @i.thread_wait_until_start

    ary = []
    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| ary << data } }

    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end

    3.times do |i|
      rand_records = rand(1..4)
      es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
      assert_equal rand_records, es.size

      # Pause automatic flushing so the assertions below are deterministic.
      @i.interrupt_flushes

      assert{ @i.buffer.queue.size == 0 }

      @i.emit_events("test.tag", es)

      # Events stay staged (not enqueued) until the enqueue thread runs.
      assert{ @i.buffer.queue.size == 0 }
      assert{ @i.buffer.stage.size == 1 }

      staged_chunk = @i.instance_eval{ @buffer.stage[@buffer.stage.keys.first] }
      assert{ staged_chunk.size != 0 }

      # Release the enqueue thread, then wait for full drain and purge.
      @i.enqueue_thread_wait

      waiting(10) do
        Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
        Thread.pass until staged_chunk.size == 0
      end

      assert_equal rand_records, ary.size
      # Clear collected lines in place for the next iteration.
      ary.reject!{|e| true }
    end
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    ary = []
    @i.register(:write){|chunk| ary << chunk.read }

    tag = "test.tag"
    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    event_size = [tag, t, r].to_json.size # 195

    # Keep the staged chunk below chunk_limit_size so nothing flushes early.
    (1024 * 0.9 / event_size).to_i.times do |i|
      @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))
    end
    assert{ @i.buffer.queue.size == 0 && ary.size == 0 }

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(10) do
      Thread.pass until ary.size == 1
    end
    assert_equal [tag,t,r].to_json * (1024 * 0.9 / event_size), ary.first
  end
end
338
+
339
# Buffering behavior with no chunk key and flush_mode 'immediate':
# every emit enqueues its chunk right away; no enqueue thread exists.
sub_test_case 'buffered output feature without any buffer key, flush_mode: immediate' do
  setup do
    hash = {
      'flush_mode' => 'immediate',
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.01,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer','',hash)]))
    @i.start
  end

  test '#start does not create enqueue thread, but creates flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert !@i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    ary = []
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    # 4 emits x 2 events per stream = 8 format calls expected.
    4.times do
      @i.emit_events('tag.test', es)
    end

    assert_equal 8, ary.size
    4.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
    end
  end

  test '#write is called every time for each emits, and buffer chunk is purged' do
    @i.thread_wait_until_start

    ary = []
    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| ary << data } }

    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end

    3.times do |i|
      rand_records = rand(1..5)
      es = Fluent::ArrayEventStream.new([ [t, r] ] * rand_records)
      assert_equal rand_records, es.size
      @i.emit_events("test.tag", es)

      # Immediate mode: the chunk bypasses the stage; it is either queued,
      # dequeued by a flush thread, or already written at this point.
      assert{ @i.buffer.stage.size == 0 && (@i.buffer.queue.size == 1 || @i.buffer.dequeued.size == 1 || ary.size > 0) }

      waiting(10) do
        Thread.pass until @i.buffer.queue.size == 0 && @i.buffer.dequeued.size == 0
      end

      assert_equal rand_records, ary.size
      # Clear collected lines in place for the next iteration.
      ary.reject!{|e| true }
    end
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    ary = []
    @i.register(:write){|chunk| ary << chunk.read }

    tag = "test.tag"
    t = event_time()
    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    @i.emit_events("test.tag", Fluent::ArrayEventStream.new([ [t, r] ]))

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(10) do
      Thread.pass until ary.size == 1
    end
    assert_equal [tag,t,r].to_json, ary.first
  end
end
430
+
431
# Buffering behavior with 'time' as the chunk key: events are partitioned
# into 30-second ranges, and a chunk for a range is flushed only after
# the range has closed plus timekey_wait seconds. Uses Timecop to freeze
# the wall clock (restored by the suite-level teardown).
sub_test_case 'buffered output feature with timekey and range' do
  setup do
    chunk_key = 'time'
    hash = {
      'timekey' => 30, # per 30seconds
      'timekey_wait' => 5, # 5 second delay for flush
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.01,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer',chunk_key,hash)]))
    @i.start
  end

  # A 'time' chunk key without a 'timekey' parameter is a config error.
  test '#configure raises config error if timekey is not specified' do
    i = create_output(:buffered)
    assert_raise Fluent::ConfigError do
      i.configure(config_element('ROOT','',{},[config_element('buffer','time',)]))
    end
  end

  test 'default flush_mode is set to :lazy' do
    assert_equal :lazy, @i.instance_eval{ @flush_mode }
  end

  # Even in lazy mode, a timekey buffer needs the enqueue thread to close
  # expired time ranges.
  test '#start creates enqueue thread and flush threads' do
    @i.thread_wait_until_start

    assert @i.thread_exist?(:flush_thread_0)
    assert @i.thread_exist?(:enqueue_thread)
  end

  test '#format is called for each event streams' do
    ary = []
    @i.register(:format){|tag, time, record| ary << [tag, time, record]; '' }

    t = event_time()
    es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

    # 5 emits x 2 events per stream = 10 format calls expected.
    5.times do
      @i.emit_events('tag.test', es)
    end

    assert_equal 10, ary.size
    5.times do |i|
      assert_equal ["tag.test", t, {"key" => "value1"}], ary[i*2]
      assert_equal ["tag.test", t, {"key" => "value2"}], ary[i*2+1]
    end
  end

  test '#write is called per time ranges after timekey_wait, and buffer chunk is purged' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )

    @i.thread_wait_until_start

    ary = []
    metachecks = []

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    # Each written event must fall inside its chunk's 30s timekey range.
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30) } }

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
    ]
    # 3 events in the first range, 6 in the second, 2 in the third.
    events = [
      ["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    # Shuffled emission order must not affect range partitioning.
    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 0 }

    @i.enqueue_thread_wait

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    # At 14:04:00 only the first range (closed 14:03:30, wait elapsed)
    # is flushable: one write, two chunks still staged.
    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    assert_equal 3, ary.size
    assert_equal 2, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 1, ary.select{|e| e[0] == "test.tag.2" }.size

    # 14:04:04: second range closed at 14:04:00 but timekey_wait (5s)
    # has not elapsed yet, so still nothing new is flushed.
    Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    # 14:04:06: wait elapsed for the second range; expect a second write.
    Timecop.freeze( Time.parse('2016-04-13 14:04:06 +0900') )

    @i.enqueue_thread_wait
    waiting(4) do
      Thread.pass until @i.write_count > 1
    end

    assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }

    assert_equal 9, ary.size
    assert_equal 7, ary.select{|e| e[0] == "test.tag.1" }.size
    assert_equal 2, ary.select{|e| e[0] == "test.tag.2" }.size

    assert metachecks.all?{|e| e }
  end

  test 'flush_at_shutdown work well when plugin is shutdown' do
    Timecop.freeze( Time.parse('2016-04-13 14:04:00 +0900') )

    @i.thread_wait_until_start

    ary = []
    metachecks = []

    @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
    @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.timekey.to_i <= e[1].to_i && e[1].to_i < chunk.metadata.timekey.to_i + 30) } }

    r = {}
    (0...10).each do |i|
      r["key#{i}"] = "value #{i}"
    end
    ts = [
      Fluent::EventTime.parse('2016-04-13 14:03:21 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:23 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:29 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:30 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:33 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:38 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:03:43 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:49 +0900'), Fluent::EventTime.parse('2016-04-13 14:03:51 +0900'),
      Fluent::EventTime.parse('2016-04-13 14:04:00 +0900'), Fluent::EventTime.parse('2016-04-13 14:04:01 +0900'),
    ]
    events = [
      ["test.tag.1", ts[0], r], # range 14:03:00 - 03:29
      ["test.tag.2", ts[1], r],
      ["test.tag.1", ts[2], r],
      ["test.tag.1", ts[3], r], # range 14:03:30 - 04:00
      ["test.tag.1", ts[4], r],
      ["test.tag.1", ts[5], r],
      ["test.tag.1", ts[6], r],
      ["test.tag.1", ts[7], r],
      ["test.tag.2", ts[8], r],
      ["test.tag.1", ts[9], r], # range 14:04:00 - 04:29
      ["test.tag.2", ts[10], r],
    ]

    assert_equal 0, @i.write_count

    @i.interrupt_flushes

    events.shuffle.each do |tag, time, record|
      @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
    end
    assert{ @i.buffer.stage.size == 3 }
    assert{ @i.write_count == 0 }

    @i.enqueue_thread_wait

    waiting(4) do
      Thread.pass until @i.write_count > 0
    end

    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )

    @i.enqueue_thread_wait

    assert{ @i.buffer.stage.size == 2 && @i.write_count == 1 }

    Timecop.freeze( Time.parse('2016-04-13 14:04:06 +0900') )

    @i.enqueue_thread_wait
    waiting(4) do
      Thread.pass until @i.write_count > 1
    end

    assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }

    # The last range (14:04:00-) has not closed: its 2 events can only be
    # flushed by shutdown (flush_at_shutdown).
    Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )

    assert_equal 9, ary.size

    @i.stop
    @i.before_shutdown
    @i.shutdown
    @i.after_shutdown

    waiting(4) do
      Thread.pass until @i.write_count > 2
    end

    assert_equal 11, ary.size
    assert metachecks.all?{|e| e }
  end
end
645
+
646
+ sub_test_case 'buffered output feature with tag key' do
647
# Builds a buffered output chunking by tag only, with a small chunk size
# (1024 bytes) so that a handful of events overflows a chunk.
setup do
  buffer_opts = {
    'flush_interval' => 10,
    'flush_thread_count' => 1,
    'flush_thread_burst_interval' => 0.1,
    'chunk_limit_size' => 1024,
  }
  @i = create_output(:buffered)
  @i.configure(config_element('ROOT','',{},[config_element('buffer','tag',buffer_opts)]))
  @i.start
end
659
+
660
# Without a time chunk key, the flush mode must default to :interval.
test 'default flush_mode is set to :interval' do
  actual_mode = @i.instance_eval{ @flush_mode }
  assert_equal :interval, actual_mode
end
663
+
664
# #start must spawn one flush thread (flush_thread_count == 1) plus the
# enqueue thread.
test '#start creates enqueue thread and flush threads' do
  @i.thread_wait_until_start

  [:flush_thread_0, :enqueue_thread].each do |thread_name|
    assert @i.thread_exist?(thread_name)
  end
end
670
+
671
# Every event of every emitted stream must pass through #format exactly once,
# in emission order.
test '#format is called for each event streams' do
  formatted = []
  @i.register(:format){|tag, time, record| formatted << [tag, time, record]; '' }

  t = event_time()
  es = Fluent::ArrayEventStream.new([ [t, {"key" => "value1"}], [t, {"key" => "value2"}] ])

  5.times{ @i.emit_events('tag.test', es) }

  assert_equal 10, formatted.size
  formatted.each_slice(2) do |first, second|
    assert_equal ["tag.test", t, {"key" => "value1"}], first
    assert_equal ["tag.test", t, {"key" => "value2"}], second
  end
end
688
+
689
# Chunking by tag: #write must be called once per tag chunk, triggered either
# by chunk_limit_size overflow or by flush_interval expiry, and the flushed
# chunks must be purged from the buffer.
test '#write is called per tags, per flush_interval & chunk sizes, and buffer chunk is purged' do
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  written = []
  meta_ok = []

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  # every written line must come from a chunk whose metadata.tag matches it
  @i.register(:write) do |chunk|
    chunk.read.split("\n").reject{|l| l.empty? }.each do |line|
      event = JSON.parse(line)
      written << event
      meta_ok << (chunk.metadata.tag == event[0])
    end
  end

  @i.thread_wait_until_start

  record = (0...10).map{|i| ["key#{i}", "value #{i}"] }.to_h
  times = [
    '14:03:21', '14:03:23', '14:03:29',
    '14:03:30', '14:03:33', '14:03:38',
    '14:03:43', '14:03:49', '14:03:51',
    '14:04:00', '14:04:01',
  ].map{|hms| event_time("2016-04-13 #{hms} +0900") }
  # size of a event is 197
  events = [
    ["test.tag.1", times[0], record],
    ["test.tag.2", times[1], record],
    ["test.tag.1", times[2], record],
    ["test.tag.1", times[3], record],
    ["test.tag.1", times[4], record],
    ["test.tag.1", times[5], record],
    ["test.tag.1", times[6], record],
    ["test.tag.1", times[7], record],
    ["test.tag.2", times[8], record],
    ["test.tag.1", times[9], record],
    ["test.tag.2", times[10], record],
  ]

  assert_equal 0, @i.write_count

  @i.interrupt_flushes

  events.shuffle.each do |tag, time, r|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, r] ]))
  end
  assert{ @i.buffer.stage.size == 2 } # one staged chunk per tag

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4){ Thread.pass until @i.write_count > 0 }

  assert{ @i.buffer.stage.size == 2 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }

  # the first five test.tag.1 events overflowed chunk_limit_size, so that
  # chunk was enqueued and written immediately; the rest is still staged
  assert_equal 5, written.size
  assert_equal 5, written.count{|e| e[0] == "test.tag.1" }
  assert_equal 0, written.count{|e| e[0] == "test.tag.2" }

  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

  @i.enqueue_thread_wait

  assert{ @i.buffer.stage.size == 2 }

  # to trigger try_flush with flush_thread_burst_interval
  Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:15 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  assert{ @i.buffer.stage.size == 0 }

  waiting(4){ Thread.pass until @i.write_count > 2 }

  assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }

  assert_equal 11, written.size
  assert_equal 8, written.count{|e| e[0] == "test.tag.1" }
  assert_equal 3, written.count{|e| e[0] == "test.tag.2" }

  assert meta_ok.all?
end
779
+
780
# flush_at_shutdown: any chunks still staged when the plugin is shut down
# must be flushed during the shutdown sequence.
test 'flush_at_shutdown work well when plugin is shutdown' do
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  ary = []
  metachecks = []

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  # every written line must come from a chunk whose metadata.tag matches it
  @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (chunk.metadata.tag == e[0]) } }

  @i.thread_wait_until_start

  r = {}
  (0...10).each do |i|
    r["key#{i}"] = "value #{i}"
  end
  ts = [
    event_time('2016-04-13 14:03:21 +0900'), event_time('2016-04-13 14:03:23 +0900'), event_time('2016-04-13 14:03:29 +0900'),
    event_time('2016-04-13 14:03:30 +0900'), event_time('2016-04-13 14:03:33 +0900'), event_time('2016-04-13 14:03:38 +0900'),
    event_time('2016-04-13 14:03:43 +0900'), event_time('2016-04-13 14:03:49 +0900'), event_time('2016-04-13 14:03:51 +0900'),
    event_time('2016-04-13 14:04:00 +0900'), event_time('2016-04-13 14:04:01 +0900'),
  ]
  # size of a event is 197
  events = [
    ["test.tag.1", ts[0], r],
    ["test.tag.2", ts[1], r],
    ["test.tag.1", ts[2], r],
    ["test.tag.1", ts[3], r],
    ["test.tag.1", ts[4], r],
    ["test.tag.1", ts[5], r],
    ["test.tag.1", ts[6], r],
    ["test.tag.1", ts[7], r],
    ["test.tag.2", ts[8], r],
    ["test.tag.1", ts[9], r],
    ["test.tag.2", ts[10], r],
  ]

  assert_equal 0, @i.write_count

  @i.interrupt_flushes

  events.shuffle.each do |tag, time, record|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
  end
  assert{ @i.buffer.stage.size == 2 } # test.tag.1 x1, test.tag.2 x1

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count > 0
  end

  assert{ @i.buffer.stage.size == 2 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }

  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
  assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

  @i.stop
  @i.before_shutdown
  @i.shutdown
  @i.after_shutdown

  # BUGFIX: wait until BOTH remaining staged chunks (one per tag) have been
  # flushed at shutdown, i.e. write_count goes 1 -> 3. The previous condition
  # `> 1` could resume while the third write was still in flight, making the
  # `write_count == 3` assertion below flaky. (The time-key shutdown test uses
  # the same final-count-minus-one convention.)
  waiting(4) do
    Thread.pass until @i.write_count > 2
  end

  assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 3 }

  assert_equal 11, ary.size
  assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

  assert metachecks.all?{|e| e }
end
860
+ end
861
+
862
+ sub_test_case 'buffered output feature with variables' do
863
# Builds a buffered output chunking by the record variables `name` and
# `service`, so distinct value combinations get distinct chunks.
setup do
  buffer_opts = {
    'flush_interval' => 10,
    'flush_thread_count' => 1,
    'flush_thread_burst_interval' => 0.1,
    'chunk_limit_size' => 1024,
  }
  @i = create_output(:buffered)
  @i.configure(config_element('ROOT','',{},[config_element('buffer','name,service',buffer_opts)]))
  @i.start
end
875
+
876
# Variable chunk keys without time: flush mode still defaults to :interval.
test 'default flush_mode is set to :interval' do
  assert_equal :interval, @i.instance_eval{ @flush_mode }
end
879
+
880
# One flush thread plus the enqueue thread must exist after #start.
test '#start creates enqueue thread and flush threads' do
  @i.thread_wait_until_start

  %i[flush_thread_0 enqueue_thread].each{|name| assert @i.thread_exist?(name) }
end
886
+
887
# #format must be invoked once per event, preserving emission order.
test '#format is called for each event streams' do
  seen = []
  @i.register(:format){|tag, time, record| seen << [tag, time, record]; '' }

  t = event_time()
  stream = Fluent::ArrayEventStream.new([
    [t, {"key" => "value1", "name" => "moris", "service" => "a"}],
    [t, {"key" => "value2", "name" => "moris", "service" => "b"}],
  ])

  5.times{ @i.emit_events('tag.test', stream) }

  assert_equal 10, seen.size
  seen.each_slice(2) do |first, second|
    assert_equal ["tag.test", t, {"key" => "value1", "name" => "moris", "service" => "a"}], first
    assert_equal ["tag.test", t, {"key" => "value2", "name" => "moris", "service" => "b"}], second
  end
end
907
+
908
# Chunking by record variables: #write must be called once per distinct
# (name, service) combination, triggered by chunk_limit_size overflow or
# flush_interval expiry, and flushed chunks must be purged from the buffer.
test '#write is called per value combination of variables, per flush_interval & chunk sizes, and buffer chunk is purged' do
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  ary = []
  metachecks = []

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  # every written line must come from a chunk whose metadata variables match it
  @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (e[2]["name"] == chunk.metadata.variables[:name] && e[2]["service"] == chunk.metadata.variables[:service]) } }

  @i.thread_wait_until_start

  # size of a event is 195
  dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  events = [
    ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1) xxx-a (6 events)
    ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2) yyy-a (3 events)
    ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3) xxx-b (2 events)
    ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3)
    ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
    ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
  ]

  assert_equal 0, @i.write_count

  @i.interrupt_flushes

  events.shuffle.each do |tag, time, record|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
  end
  assert{ @i.buffer.stage.size == 3 }

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count > 0
  end

  assert{ @i.buffer.stage.size == 3 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }

  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
  assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
  assert ary[0...5].all?{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }

  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

  @i.enqueue_thread_wait

  assert{ @i.buffer.stage.size == 3 }

  # to trigger try_flush with flush_thread_burst_interval
  Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  assert{ @i.buffer.stage.size == 0 }

  # BUGFIX: wait until all three remaining staged chunks have been written,
  # i.e. write_count goes 1 -> 4. The previous condition `> 1` could resume
  # at 2 or 3 writes and make the `write_count == 4` assertion below flaky.
  waiting(4) do
    Thread.pass until @i.write_count > 3
  end

  assert{ @i.buffer.stage.size == 0 && @i.write_count == 4 }

  assert_equal 11, ary.size
  assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
  assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
  assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
  assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size

  assert metachecks.all?{|e| e }
end
997
+
998
# flush_at_shutdown with variable chunk keys: all chunks still staged at
# shutdown (one per remaining variable combination) must be flushed.
test 'flush_at_shutdown work well when plugin is shutdown' do
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  ary = []
  metachecks = []

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  # every written line must come from a chunk whose metadata variables match it
  @i.register(:write){|chunk| chunk.read.split("\n").reject{|l| l.empty? }.each{|data| e = JSON.parse(data); ary << e; metachecks << (e[2]["name"] == chunk.metadata.variables[:name] && e[2]["service"] == chunk.metadata.variables[:service]) } }

  @i.thread_wait_until_start

  # size of a event is 195
  dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  events = [
    ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1) xxx-a (6 events)
    ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2) yyy-a (3 events)
    ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3) xxx-b (2 events)
    ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}], #(3)
    ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
    ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}], #(1)
    ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}], #(2)
  ]

  assert_equal 0, @i.write_count

  @i.interrupt_flushes

  events.shuffle.each do |tag, time, record|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
  end
  assert{ @i.buffer.stage.size == 3 }

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count > 0
  end

  assert{ @i.buffer.stage.size == 3 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }

  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
  assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

  @i.stop
  @i.before_shutdown
  @i.shutdown
  @i.after_shutdown

  # BUGFIX: wait until all three staged chunks have been flushed at shutdown,
  # i.e. write_count goes 1 -> 4. The previous condition `> 1` could resume
  # at 2 or 3 writes and make the `write_count == 4` assertion below flaky.
  waiting(4) do
    Thread.pass until @i.write_count > 3
  end

  assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 4 }

  assert_equal 11, ary.size
  assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
  assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
  assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
  assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size

  assert metachecks.all?{|e| e }
end
1072
+ end
1073
+
1074
# The default flush mode depends only on whether 'time' is among the chunk
# keys: :lazy (timekey-driven) when present, :interval otherwise.
sub_test_case 'buffered output feature with many keys' do
  test 'default flush mode is set to :interval if keys does not include time' do
    buffer_conf = {
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer','name,service,tag',buffer_conf)]))
    @i.start

    assert_equal :interval, @i.instance_eval{ @flush_mode }
  end

  test 'default flush mode is set to :lazy if keys includes time' do
    buffer_conf = {
      'timekey' => 60,
      'flush_interval' => 10,
      'flush_thread_count' => 1,
      'flush_thread_burst_interval' => 0.1,
      'chunk_limit_size' => 1024,
    }
    @i = create_output(:buffered)
    @i.configure(config_element('ROOT','',{},[config_element('buffer','name,service,tag,time',buffer_conf)]))
    @i.start

    assert_equal :lazy, @i.instance_eval{ @flush_mode }
  end
end
1106
+
1107
+ sub_test_case 'buffered output feature with delayed commit' do
1108
# Builds a delayed-commit output chunking by tag: #try_write is used instead
# of #write, and chunks wait in `dequeued` for #commit_write, up to
# delayed_commit_timeout (30s) before being rolled back.
setup do
  buffer_opts = {
    'flush_interval' => 10,
    'flush_thread_count' => 1,
    'flush_thread_burst_interval' => 0.1,
    'delayed_commit_timeout' => 30,
    'chunk_limit_size' => 1024,
  }
  @i = create_output(:delayed)
  @i.configure(config_element('ROOT','',{},[config_element('buffer','tag',buffer_opts)]))
  @i.start
end
1121
+
1122
# Delayed-commit outputs still format every event exactly once, in order.
test '#format is called for each event streams' do
  formatted = []
  @i.register(:format){|tag, time, record| formatted << [tag, time, record]; '' }

  t = event_time()
  pair = [
    [t, {"key" => "value1", "name" => "moris", "service" => "a"}],
    [t, {"key" => "value2", "name" => "moris", "service" => "b"}],
  ]
  es = Fluent::ArrayEventStream.new(pair)

  5.times{ @i.emit_events('tag.test', es) }

  assert_equal 10, formatted.size
  (0...5).each do |n|
    assert_equal ["tag.test", t, pair[0][1]], formatted[n*2]
    assert_equal ["tag.test", t, pair[1][1]], formatted[n*2 + 1]
  end
end
1142
+
1143
# Delayed commit: #try_write is called per flush, but the chunk stays in the
# buffer's `dequeued` set (not purged) until #commit_write is called with its
# unique_id; committing twice is harmless.
test '#try_write is called per flush, buffer chunk is not purged until #commit_write is called' do
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  ary = []
  metachecks = []
  chunks = []

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  @i.register(:try_write) do |chunk|
    chunks << chunk
    chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
      e = JSON.parse(data)
      ary << e
      metachecks << (e[0] == chunk.metadata.tag)
    end
  end

  @i.thread_wait_until_start

  # size of a event is 195
  dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  events = [
    ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
    ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
    ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
  ]

  assert_equal 0, @i.write_count

  @i.interrupt_flushes

  events.shuffle.each do |tag, time, record|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
  end
  assert{ @i.buffer.stage.size == 2 }

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4) do
    Thread.pass until @i.write_count > 0
  end

  assert{ @i.buffer.stage.size == 2 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }
  assert{ @i.buffer.dequeued.size == 1 }

  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
  assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size

  assert_equal 1, chunks.size
  assert !chunks.first.empty?

  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

  @i.enqueue_thread_wait

  assert{ @i.buffer.stage.size == 2 }

  # to trigger try_flush with flush_thread_burst_interval
  Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
  @i.enqueue_thread_wait
  Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  assert{ @i.buffer.stage.size == 0 }

  # BUGFIX: wait until both remaining chunks are flushed (write_count 1 -> 3);
  # the previous condition `> 1` could resume while the third try_write was
  # still pending, making the `write_count == 3` assertion below flaky. The
  # sibling rollback test already waits for `> 2` at this same point.
  waiting(4) do
    Thread.pass until @i.write_count > 2
  end

  assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
  assert{ @i.buffer.dequeued.size == 3 }

  assert_equal 11, ary.size
  assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
  assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size

  assert_equal 3, chunks.size
  assert chunks.all?{|c| !c.empty? }

  assert metachecks.all?{|e| e }

  # chunks remain in `dequeued` until each unique_id is committed
  @i.commit_write(chunks[0].unique_id)
  assert{ @i.buffer.dequeued.size == 2 }
  assert chunks[0].empty?

  @i.commit_write(chunks[1].unique_id)
  assert{ @i.buffer.dequeued.size == 1 }
  assert chunks[1].empty?

  @i.commit_write(chunks[2].unique_id)
  assert{ @i.buffer.dequeued.size == 0 }
  assert chunks[2].empty?

  # no problem to commit chunks already committed
  assert_nothing_raised do
    @i.commit_write(chunks[2].unique_id)
  end
end
1261
+
1262
# Uncommitted delayed chunks can be rolled back — explicitly via
# #rollback_write, or automatically by the timer (#try_rollback_write) once
# delayed_commit_timeout expires — and then become flushable again with the
# same unique_id.
test '#rollback_write and #try_rollback_write can rollback buffer chunks for delayed commit after timeout, and then be able to write it again' do
  Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )

  written = []
  meta_ok = []
  flushed_chunks = []

  @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
  @i.register(:try_write) do |chunk|
    flushed_chunks << chunk
    chunk.read.split("\n").reject{|l| l.empty? }.each do |line|
      event = JSON.parse(line)
      written << event
      meta_ok << (event[0] == chunk.metadata.tag)
    end
  end

  @i.thread_wait_until_start

  # size of a event is 195
  payload = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
  events = [
    ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => payload, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => payload, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => payload, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => payload, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => payload, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => payload, "name" => "xxx", "service" => "b"}],
    ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => payload, "name" => "xxx", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => payload, "name" => "xxx", "service" => "b"}],
    ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => payload, "name" => "yyy", "service" => "a"}],
    ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => payload, "name" => "xxx", "service" => "a"}],
    ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => payload, "name" => "yyy", "service" => "a"}],
  ]

  assert_equal 0, @i.write_count

  @i.interrupt_flushes

  events.shuffle.each do |tag, time, record|
    @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
  end
  assert{ @i.buffer.stage.size == 2 }

  Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )

  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4){ Thread.pass until @i.write_count > 0 }

  assert{ @i.buffer.stage.size == 2 }
  assert{ @i.write_count == 1 }
  assert{ @i.buffer.queue.size == 0 }
  assert{ @i.buffer.dequeued.size == 1 }

  # the overflowing test.tag.1 chunk was enqueued and flushed immediately
  assert_equal 5, written.size
  assert_equal 5, written.count{|e| e[0] == "test.tag.1" }
  assert_equal 0, written.count{|e| e[0] == "test.tag.2" }

  assert_equal 1, flushed_chunks.size
  assert !flushed_chunks.first.empty?

  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )

  @i.enqueue_thread_wait

  assert{ @i.buffer.stage.size == 2 }

  # advance time step by step to trigger try_flush with flush_thread_burst_interval
  ['14:04:11', '14:04:12', '14:04:13', '14:04:14'].each do |hms|
    Timecop.freeze( Time.parse("2016-04-13 #{hms} +0900") )
    @i.enqueue_thread_wait
  end
  @i.flush_thread_wakeup

  assert{ @i.buffer.stage.size == 0 }

  waiting(4){ Thread.pass until @i.write_count > 2 }

  assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
  assert{ @i.buffer.dequeued.size == 3 }

  assert_equal 11, written.size
  assert_equal 8, written.count{|e| e[0] == "test.tag.1" }
  assert_equal 3, written.count{|e| e[0] == "test.tag.2" }

  assert_equal 3, flushed_chunks.size
  assert flushed_chunks.all?{|c| !c.empty? }

  assert meta_ok.all?

  @i.interrupt_flushes

  # explicit rollback: the chunk returns to the queue, keeping its unique_id
  @i.rollback_write(flushed_chunks[2].unique_id)

  assert{ @i.buffer.dequeued.size == 2 }
  assert{ @i.buffer.queue.size == 1 && @i.buffer.queue.first.unique_id == flushed_chunks[2].unique_id }

  Timecop.freeze( Time.parse('2016-04-13 14:04:15 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4){ Thread.pass until @i.write_count > 3 }

  assert{ @i.write_count == 4 }
  assert{ @i.rollback_count == 1 }
  assert{ @i.instance_eval{ @dequeued_chunks.size } == 3 }
  assert{ @i.buffer.dequeued.size == 3 }
  assert{ @i.buffer.queue.size == 0 }

  # the rolled-back chunk was flushed again under the same unique_id
  assert_equal 4, flushed_chunks.size
  assert flushed_chunks[2].unique_id == flushed_chunks[3].unique_id

  written.clear
  flushed_chunks.clear

  # let delayed_commit_timeout (30s) expire so the timer rolls back all
  # uncommitted chunks via try_rollback_write
  Timecop.freeze( Time.parse('2016-04-13 14:04:46 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4){ Thread.pass until @i.rollback_count == 4 }

  assert{ flushed_chunks[0...3].all?{|c| !c.empty? } }

  # rollback is in progress, but some may be flushed again after rollback
  Timecop.freeze( Time.parse('2016-04-13 14:04:46 +0900') )
  @i.enqueue_thread_wait
  @i.flush_thread_wakeup

  waiting(4){ Thread.pass until @i.write_count == 7 }

  assert{ @i.write_count == 7 }
  assert_equal 11, written.size
  assert_equal 8, written.count{|e| e[0] == "test.tag.1" }
  assert_equal 3, written.count{|e| e[0] == "test.tag.2" }
  assert{ flushed_chunks.size == 3 }
  assert{ flushed_chunks.all?{|c| !c.empty? } }

  flushed_chunks.each{|c| @i.commit_write(c.unique_id) }
  assert{ flushed_chunks.all?{|c| c.empty? } }

  assert{ @i.buffer.dequeued.size == 0 }
end
1421
+
1422
+ test '#try_rollback_all will be called for all waiting chunks after shutdown' do
1423
+ Timecop.freeze( Time.parse('2016-04-13 14:04:01 +0900') )
1424
+
1425
+ ary = []
1426
+ metachecks = []
1427
+ chunks = []
1428
+
1429
+ @i.register(:format){|tag,time,record| [tag,time,record].to_json + "\n" }
1430
+ @i.register(:try_write) do |chunk|
1431
+ chunks << chunk
1432
+ chunk.read.split("\n").reject{|l| l.empty? }.each do |data|
1433
+ e = JSON.parse(data)
1434
+ ary << e
1435
+ metachecks << (e[0] == chunk.metadata.tag)
1436
+ end
1437
+ end
1438
+
1439
+ @i.thread_wait_until_start
1440
+
1441
+ # size of a event is 195
1442
+ dummy_data = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
1443
+ events = [
1444
+ ["test.tag.1", event_time('2016-04-13 14:03:21 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1445
+ ["test.tag.2", event_time('2016-04-13 14:03:23 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1446
+ ["test.tag.1", event_time('2016-04-13 14:03:29 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1447
+ ["test.tag.1", event_time('2016-04-13 14:03:30 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1448
+ ["test.tag.1", event_time('2016-04-13 14:03:33 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1449
+ ["test.tag.1", event_time('2016-04-13 14:03:38 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
1450
+ ["test.tag.1", event_time('2016-04-13 14:03:43 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1451
+ ["test.tag.1", event_time('2016-04-13 14:03:49 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "b"}],
1452
+ ["test.tag.2", event_time('2016-04-13 14:03:51 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1453
+ ["test.tag.1", event_time('2016-04-13 14:04:00 +0900'), {"data" => dummy_data, "name" => "xxx", "service" => "a"}],
1454
+ ["test.tag.2", event_time('2016-04-13 14:04:01 +0900'), {"data" => dummy_data, "name" => "yyy", "service" => "a"}],
1455
+ ]
1456
+
1457
+ assert_equal 0, @i.write_count
1458
+
1459
+ @i.interrupt_flushes
1460
+
1461
+ events.shuffle.each do |tag, time, record|
1462
+ @i.emit_events(tag, Fluent::ArrayEventStream.new([ [time, record] ]))
1463
+ end
1464
+ assert{ @i.buffer.stage.size == 2 }
1465
+
1466
+ Timecop.freeze( Time.parse('2016-04-13 14:04:02 +0900') )
1467
+
1468
+ @i.enqueue_thread_wait
1469
+ @i.flush_thread_wakeup
1470
+
1471
+ waiting(4) do
1472
+ Thread.pass until @i.write_count > 0
1473
+ end
1474
+
1475
+ assert{ @i.buffer.stage.size == 2 }
1476
+ assert{ @i.write_count == 1 }
1477
+ assert{ @i.buffer.queue.size == 0 }
1478
+ assert{ @i.buffer.dequeued.size == 1 }
1479
+
1480
+ # events fulfills a chunk (and queued immediately)
1481
+ assert_equal 5, ary.size
1482
+ assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
1483
+ assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
1484
+
1485
+ assert_equal 1, chunks.size
1486
+ assert !chunks.first.empty?
1487
+
1488
+ Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )
1489
+
1490
+ @i.enqueue_thread_wait
1491
+
1492
+ assert{ @i.buffer.stage.size == 2 }
1493
+
1494
+ # to trigger try_flush with flush_thread_burst_interval
1495
+ Timecop.freeze( Time.parse('2016-04-13 14:04:11 +0900') )
1496
+ @i.enqueue_thread_wait
1497
+ Timecop.freeze( Time.parse('2016-04-13 14:04:12 +0900') )
1498
+ @i.enqueue_thread_wait
1499
+ Timecop.freeze( Time.parse('2016-04-13 14:04:13 +0900') )
1500
+ @i.enqueue_thread_wait
1501
+ Timecop.freeze( Time.parse('2016-04-13 14:04:14 +0900') )
1502
+ @i.enqueue_thread_wait
1503
+ @i.flush_thread_wakeup
1504
+
1505
+ assert{ @i.buffer.stage.size == 0 }
1506
+
1507
+ waiting(4) do
1508
+ Thread.pass until @i.write_count > 2
1509
+ end
1510
+
1511
+ assert{ @i.buffer.stage.size == 0 }
1512
+ assert{ @i.buffer.queue.size == 0 }
1513
+ assert{ @i.buffer.dequeued.size == 3 }
1514
+ assert{ @i.write_count == 3 }
1515
+ assert{ @i.rollback_count == 0 }
1516
+
1517
+ assert_equal 11, ary.size
1518
+ assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
1519
+ assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
1520
+
1521
+ assert{ chunks.size == 3 }
1522
+ assert{ chunks.all?{|c| !c.empty? } }
1523
+
1524
+ @i.register(:shutdown_hook){ @i.commit_write(chunks[1].unique_id) }
1525
+
1526
+ @i.stop
1527
+ @i.before_shutdown
1528
+ @i.shutdown
1529
+
1530
+ assert{ @i.buffer.dequeued.size == 2 }
1531
+ assert{ !chunks[0].empty? }
1532
+ assert{ chunks[1].empty? }
1533
+ assert{ !chunks[2].empty? }
1534
+
1535
+ @i.after_shutdown
1536
+
1537
+ assert{ @i.rollback_count == 2 }
1538
+ end
1539
+ end
1540
+ end