fluentd 0.12.43 → 0.14.0

Files changed (253)
  1. checksums.yaml +4 -4
  2. data/.github/ISSUE_TEMPLATE.md +6 -0
  3. data/.gitignore +2 -0
  4. data/.travis.yml +33 -21
  5. data/CONTRIBUTING.md +1 -0
  6. data/ChangeLog +1239 -0
  7. data/README.md +0 -25
  8. data/Rakefile +2 -1
  9. data/Vagrantfile +17 -0
  10. data/appveyor.yml +35 -0
  11. data/example/filter_stdout.conf +5 -5
  12. data/example/in_forward.conf +2 -2
  13. data/example/in_http.conf +2 -2
  14. data/example/in_out_forward.conf +17 -0
  15. data/example/in_syslog.conf +2 -2
  16. data/example/in_tail.conf +2 -2
  17. data/example/in_tcp.conf +2 -2
  18. data/example/in_udp.conf +2 -2
  19. data/example/out_copy.conf +4 -4
  20. data/example/out_file.conf +2 -2
  21. data/example/out_forward.conf +2 -2
  22. data/example/out_forward_buf_file.conf +23 -0
  23. data/example/v0_12_filter.conf +8 -8
  24. data/fluent.conf +29 -0
  25. data/fluentd.gemspec +18 -11
  26. data/lib/fluent/agent.rb +60 -58
  27. data/lib/fluent/command/cat.rb +1 -1
  28. data/lib/fluent/command/debug.rb +7 -5
  29. data/lib/fluent/command/fluentd.rb +97 -2
  30. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  31. data/lib/fluent/compat/filter.rb +50 -0
  32. data/lib/fluent/compat/formatter.rb +109 -0
  33. data/lib/fluent/compat/input.rb +50 -0
  34. data/lib/fluent/compat/output.rb +617 -0
  35. data/lib/fluent/compat/output_chain.rb +60 -0
  36. data/lib/fluent/compat/parser.rb +163 -0
  37. data/lib/fluent/compat/propagate_default.rb +62 -0
  38. data/lib/fluent/config.rb +23 -20
  39. data/lib/fluent/config/configure_proxy.rb +119 -70
  40. data/lib/fluent/config/dsl.rb +5 -18
  41. data/lib/fluent/config/element.rb +72 -8
  42. data/lib/fluent/config/error.rb +0 -3
  43. data/lib/fluent/config/literal_parser.rb +0 -2
  44. data/lib/fluent/config/parser.rb +4 -4
  45. data/lib/fluent/config/section.rb +39 -28
  46. data/lib/fluent/config/types.rb +2 -13
  47. data/lib/fluent/config/v1_parser.rb +1 -3
  48. data/lib/fluent/configurable.rb +48 -16
  49. data/lib/fluent/daemon.rb +15 -0
  50. data/lib/fluent/engine.rb +26 -52
  51. data/lib/fluent/env.rb +6 -4
  52. data/lib/fluent/event.rb +58 -11
  53. data/lib/fluent/event_router.rb +5 -5
  54. data/lib/fluent/filter.rb +2 -50
  55. data/lib/fluent/formatter.rb +4 -293
  56. data/lib/fluent/input.rb +2 -32
  57. data/lib/fluent/label.rb +2 -2
  58. data/lib/fluent/load.rb +3 -2
  59. data/lib/fluent/log.rb +107 -38
  60. data/lib/fluent/match.rb +0 -36
  61. data/lib/fluent/mixin.rb +117 -7
  62. data/lib/fluent/msgpack_factory.rb +62 -0
  63. data/lib/fluent/output.rb +7 -612
  64. data/lib/fluent/output_chain.rb +23 -0
  65. data/lib/fluent/parser.rb +4 -800
  66. data/lib/fluent/plugin.rb +100 -121
  67. data/lib/fluent/plugin/bare_output.rb +63 -0
  68. data/lib/fluent/plugin/base.rb +121 -0
  69. data/lib/fluent/plugin/buf_file.rb +101 -182
  70. data/lib/fluent/plugin/buf_memory.rb +9 -92
  71. data/lib/fluent/plugin/buffer.rb +473 -0
  72. data/lib/fluent/plugin/buffer/chunk.rb +135 -0
  73. data/lib/fluent/plugin/buffer/file_chunk.rb +339 -0
  74. data/lib/fluent/plugin/buffer/memory_chunk.rb +100 -0
  75. data/lib/fluent/plugin/exec_util.rb +80 -75
  76. data/lib/fluent/plugin/file_util.rb +33 -28
  77. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  78. data/lib/fluent/plugin/filter.rb +51 -0
  79. data/lib/fluent/plugin/filter_grep.rb +13 -40
  80. data/lib/fluent/plugin/filter_record_transformer.rb +22 -18
  81. data/lib/fluent/plugin/formatter.rb +93 -0
  82. data/lib/fluent/plugin/formatter_csv.rb +48 -0
  83. data/lib/fluent/plugin/formatter_hash.rb +32 -0
  84. data/lib/fluent/plugin/formatter_json.rb +47 -0
  85. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  86. data/lib/fluent/plugin/formatter_msgpack.rb +32 -0
  87. data/lib/fluent/plugin/formatter_out_file.rb +45 -0
  88. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  89. data/lib/fluent/plugin/formatter_stdout.rb +39 -0
  90. data/lib/fluent/plugin/in_debug_agent.rb +4 -0
  91. data/lib/fluent/plugin/in_dummy.rb +22 -18
  92. data/lib/fluent/plugin/in_exec.rb +18 -8
  93. data/lib/fluent/plugin/in_forward.rb +36 -79
  94. data/lib/fluent/plugin/in_gc_stat.rb +4 -0
  95. data/lib/fluent/plugin/in_http.rb +21 -18
  96. data/lib/fluent/plugin/in_monitor_agent.rb +15 -48
  97. data/lib/fluent/plugin/in_object_space.rb +6 -1
  98. data/lib/fluent/plugin/in_stream.rb +7 -3
  99. data/lib/fluent/plugin/in_syslog.rb +46 -95
  100. data/lib/fluent/plugin/in_tail.rb +58 -640
  101. data/lib/fluent/plugin/in_tcp.rb +8 -1
  102. data/lib/fluent/plugin/in_udp.rb +8 -18
  103. data/lib/fluent/plugin/input.rb +33 -0
  104. data/lib/fluent/plugin/multi_output.rb +95 -0
  105. data/lib/fluent/plugin/out_buffered_null.rb +59 -0
  106. data/lib/fluent/plugin/out_copy.rb +11 -7
  107. data/lib/fluent/plugin/out_exec.rb +15 -11
  108. data/lib/fluent/plugin/out_exec_filter.rb +18 -10
  109. data/lib/fluent/plugin/out_file.rb +34 -5
  110. data/lib/fluent/plugin/out_forward.rb +25 -19
  111. data/lib/fluent/plugin/out_null.rb +0 -14
  112. data/lib/fluent/plugin/out_roundrobin.rb +11 -7
  113. data/lib/fluent/plugin/out_stdout.rb +5 -7
  114. data/lib/fluent/plugin/out_stream.rb +3 -1
  115. data/lib/fluent/plugin/output.rb +979 -0
  116. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  117. data/lib/fluent/plugin/parser.rb +244 -0
  118. data/lib/fluent/plugin/parser_apache.rb +24 -0
  119. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  120. data/lib/fluent/plugin/parser_apache_error.rb +21 -0
  121. data/lib/fluent/plugin/parser_csv.rb +31 -0
  122. data/lib/fluent/plugin/parser_json.rb +79 -0
  123. data/lib/fluent/plugin/parser_ltsv.rb +50 -0
  124. data/lib/fluent/plugin/parser_multiline.rb +102 -0
  125. data/lib/fluent/plugin/parser_nginx.rb +24 -0
  126. data/lib/fluent/plugin/parser_none.rb +36 -0
  127. data/lib/fluent/plugin/parser_syslog.rb +82 -0
  128. data/lib/fluent/plugin/parser_tsv.rb +37 -0
  129. data/lib/fluent/plugin/socket_util.rb +119 -117
  130. data/lib/fluent/plugin/storage.rb +84 -0
  131. data/lib/fluent/plugin/storage_local.rb +116 -0
  132. data/lib/fluent/plugin/string_util.rb +16 -13
  133. data/lib/fluent/plugin_helper.rb +39 -0
  134. data/lib/fluent/plugin_helper/child_process.rb +298 -0
  135. data/lib/fluent/plugin_helper/compat_parameters.rb +99 -0
  136. data/lib/fluent/plugin_helper/event_emitter.rb +80 -0
  137. data/lib/fluent/plugin_helper/event_loop.rb +118 -0
  138. data/lib/fluent/plugin_helper/retry_state.rb +177 -0
  139. data/lib/fluent/plugin_helper/storage.rb +308 -0
  140. data/lib/fluent/plugin_helper/thread.rb +147 -0
  141. data/lib/fluent/plugin_helper/timer.rb +85 -0
  142. data/lib/fluent/plugin_id.rb +63 -0
  143. data/lib/fluent/process.rb +21 -30
  144. data/lib/fluent/registry.rb +21 -9
  145. data/lib/fluent/root_agent.rb +115 -40
  146. data/lib/fluent/supervisor.rb +330 -320
  147. data/lib/fluent/system_config.rb +42 -18
  148. data/lib/fluent/test.rb +6 -1
  149. data/lib/fluent/test/base.rb +23 -3
  150. data/lib/fluent/test/driver/base.rb +247 -0
  151. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  152. data/lib/fluent/test/driver/filter.rb +35 -0
  153. data/lib/fluent/test/driver/input.rb +31 -0
  154. data/lib/fluent/test/driver/output.rb +78 -0
  155. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  156. data/lib/fluent/test/filter_test.rb +0 -1
  157. data/lib/fluent/test/formatter_test.rb +2 -1
  158. data/lib/fluent/test/input_test.rb +23 -17
  159. data/lib/fluent/test/output_test.rb +28 -39
  160. data/lib/fluent/test/parser_test.rb +1 -1
  161. data/lib/fluent/time.rb +104 -1
  162. data/lib/fluent/{status.rb → unique_id.rb} +15 -24
  163. data/lib/fluent/version.rb +1 -1
  164. data/lib/fluent/winsvc.rb +72 -0
  165. data/test/compat/test_calls_super.rb +164 -0
  166. data/test/config/test_config_parser.rb +83 -0
  167. data/test/config/test_configurable.rb +547 -274
  168. data/test/config/test_configure_proxy.rb +146 -29
  169. data/test/config/test_dsl.rb +3 -181
  170. data/test/config/test_element.rb +274 -0
  171. data/test/config/test_literal_parser.rb +1 -1
  172. data/test/config/test_section.rb +79 -7
  173. data/test/config/test_system_config.rb +21 -0
  174. data/test/config/test_types.rb +3 -26
  175. data/test/helper.rb +78 -8
  176. data/test/plugin/test_bare_output.rb +118 -0
  177. data/test/plugin/test_base.rb +75 -0
  178. data/test/plugin/test_buf_file.rb +420 -521
  179. data/test/plugin/test_buf_memory.rb +32 -194
  180. data/test/plugin/test_buffer.rb +981 -0
  181. data/test/plugin/test_buffer_chunk.rb +110 -0
  182. data/test/plugin/test_buffer_file_chunk.rb +770 -0
  183. data/test/plugin/test_buffer_memory_chunk.rb +265 -0
  184. data/test/plugin/test_filter.rb +255 -0
  185. data/test/plugin/test_filter_grep.rb +2 -73
  186. data/test/plugin/test_filter_record_transformer.rb +24 -68
  187. data/test/plugin/test_filter_stdout.rb +6 -6
  188. data/test/plugin/test_in_debug_agent.rb +2 -0
  189. data/test/plugin/test_in_dummy.rb +11 -17
  190. data/test/plugin/test_in_exec.rb +6 -25
  191. data/test/plugin/test_in_forward.rb +112 -151
  192. data/test/plugin/test_in_gc_stat.rb +2 -0
  193. data/test/plugin/test_in_http.rb +106 -157
  194. data/test/plugin/test_in_object_space.rb +21 -5
  195. data/test/plugin/test_in_stream.rb +14 -13
  196. data/test/plugin/test_in_syslog.rb +30 -275
  197. data/test/plugin/test_in_tail.rb +95 -282
  198. data/test/plugin/test_in_tcp.rb +14 -0
  199. data/test/plugin/test_in_udp.rb +21 -67
  200. data/test/plugin/test_input.rb +122 -0
  201. data/test/plugin/test_multi_output.rb +180 -0
  202. data/test/plugin/test_out_buffered_null.rb +79 -0
  203. data/test/plugin/test_out_copy.rb +15 -2
  204. data/test/plugin/test_out_exec.rb +75 -25
  205. data/test/plugin/test_out_exec_filter.rb +74 -8
  206. data/test/plugin/test_out_file.rb +61 -7
  207. data/test/plugin/test_out_forward.rb +92 -15
  208. data/test/plugin/test_out_roundrobin.rb +1 -0
  209. data/test/plugin/test_out_stdout.rb +22 -13
  210. data/test/plugin/test_out_stream.rb +18 -0
  211. data/test/plugin/test_output.rb +515 -0
  212. data/test/plugin/test_output_as_buffered.rb +1540 -0
  213. data/test/plugin/test_output_as_buffered_overflow.rb +247 -0
  214. data/test/plugin/test_output_as_buffered_retries.rb +808 -0
  215. data/test/plugin/test_output_as_buffered_secondary.rb +776 -0
  216. data/test/plugin/test_output_as_standard.rb +362 -0
  217. data/test/plugin/test_owned_by.rb +35 -0
  218. data/test/plugin/test_storage.rb +167 -0
  219. data/test/plugin/test_storage_local.rb +8 -0
  220. data/test/plugin_helper/test_child_process.rb +599 -0
  221. data/test/plugin_helper/test_compat_parameters.rb +175 -0
  222. data/test/plugin_helper/test_event_emitter.rb +51 -0
  223. data/test/plugin_helper/test_event_loop.rb +52 -0
  224. data/test/plugin_helper/test_retry_state.rb +399 -0
  225. data/test/plugin_helper/test_storage.rb +411 -0
  226. data/test/plugin_helper/test_thread.rb +164 -0
  227. data/test/plugin_helper/test_timer.rb +100 -0
  228. data/test/scripts/exec_script.rb +0 -6
  229. data/test/scripts/fluent/plugin/out_test.rb +3 -0
  230. data/test/test_config.rb +13 -4
  231. data/test/test_event.rb +24 -13
  232. data/test/test_event_router.rb +8 -7
  233. data/test/test_event_time.rb +187 -0
  234. data/test/test_formatter.rb +13 -51
  235. data/test/test_input.rb +1 -1
  236. data/test/test_log.rb +239 -16
  237. data/test/test_mixin.rb +1 -1
  238. data/test/test_output.rb +53 -66
  239. data/test/test_parser.rb +105 -323
  240. data/test/test_plugin_helper.rb +81 -0
  241. data/test/test_root_agent.rb +4 -52
  242. data/test/test_supervisor.rb +272 -0
  243. data/test/test_unique_id.rb +47 -0
  244. metadata +181 -55
  245. data/CHANGELOG.md +0 -710
  246. data/lib/fluent/buffer.rb +0 -365
  247. data/lib/fluent/plugin/filter_parser.rb +0 -107
  248. data/lib/fluent/plugin/in_status.rb +0 -76
  249. data/lib/fluent/test/helpers.rb +0 -86
  250. data/test/plugin/data/log/foo/bar2 +0 -0
  251. data/test/plugin/test_filter_parser.rb +0 -744
  252. data/test/plugin/test_in_status.rb +0 -38
  253. data/test/test_buffer.rb +0 -624
@@ -68,24 +68,13 @@ class TestConfigTypes < ::Test::Unit::TestCase
  assert_equal ' ', Config::STRING_TYPE.call(' ', {})
  end
 
- data('latin' => 'Märch',
- 'ascii' => 'ascii',
- 'space' => ' ',
- 'number' => '1',
- 'Hiragana' => 'あいうえお')
- test 'string w/ binary' do |str|
- actual = Config::STRING_TYPE.call(str.b, {})
- assert_equal str, actual
- assert_equal Encoding::UTF_8, actual.encoding
- end
-
  test 'enum' do
  assert_equal :val, Config::ENUM_TYPE.call('val', {list: [:val, :value, :v]})
  assert_equal :v, Config::ENUM_TYPE.call('v', {list: [:val, :value, :v]})
  assert_equal :value, Config::ENUM_TYPE.call('value', {list: [:val, :value, :v]})
- assert_raises(Fluent::ConfigError.new("valid options are val,value,v but got x")){ Config::ENUM_TYPE.call('x', {list: [:val, :value, :v]}) }
- assert_raises(RuntimeError.new("Plugin BUG: config type 'enum' requires :list of symbols")){ Config::ENUM_TYPE.call('val', {}) }
- assert_raises(RuntimeError.new("Plugin BUG: config type 'enum' requires :list of symbols")){ Config::ENUM_TYPE.call('val', {list: ["val", "value", "v"]}) }
+ assert_raises(Fluent::ConfigError){ Config::ENUM_TYPE.call('x', {list: [:val, :value, :v]}) }
+ assert_raises(RuntimeError){ Config::ENUM_TYPE.call('val', {}) }
+ assert_raises(RuntimeError){ Config::ENUM_TYPE.call('val', {list: ["val", "value", "v"]}) }
  end
 
  test 'integer' do
@@ -149,16 +138,6 @@ class TestConfigTypes < ::Test::Unit::TestCase
 
  assert_equal({"x"=>1,"y"=>60,"z"=>3600}, Config::HASH_TYPE.call('{"x":"1s","y":"1m","z":"1h"}', {value_type: :time}))
  assert_equal({"x"=>1,"y"=>60,"z"=>3600}, Config::HASH_TYPE.call('x:1s,y:1m,z:1h', {value_type: :time}))
-
- assert_raise(RuntimeError.new("unknown type in REFORMAT: foo")){ Config::HASH_TYPE.call("x:1,y:2", {value_type: :foo}) }
- end
-
- data('latin' => ['3:Märch', {"3"=>"Märch"}],
- 'ascii' => ['ascii:ascii', {"ascii"=>"ascii"}],
- 'number' => ['number:1', {"number"=>"1"}],
- 'Hiragana' => ['hiragana:あいうえお', {"hiragana"=>"あいうえお"}])
- test 'hash w/ binary' do |(target, expected)|
- assert_equal(expected, Config::HASH_TYPE.call(target.b, { value_type: :string }))
  end
 
  test 'array' do
@@ -183,8 +162,6 @@ class TestConfigTypes < ::Test::Unit::TestCase
  }
  assert_equal(["1","2"], Config::ARRAY_TYPE.call('["1","2"]', array_options))
  assert_equal(["3"], Config::ARRAY_TYPE.call('["3"]', array_options))
-
- assert_raise(RuntimeError.new("unknown type in REFORMAT: foo")){ Config::ARRAY_TYPE.call("1,2", {value_type: :foo}) }
  end
  end
  end
data/test/helper.rb CHANGED
@@ -39,22 +39,88 @@ require 'rr'
  require 'test/unit'
  require 'test/unit/rr'
  require 'fileutils'
+ require 'fluent/config/element'
  require 'fluent/log'
  require 'fluent/test'
- require 'fluent/test/helpers'
+ require 'fluent/plugin/base'
+ require 'fluent/log'
+ require 'fluent/plugin_id'
+ require 'fluent/plugin_helper'
+ require 'fluent/msgpack_factory'
+ require 'fluent/time'
+ require 'serverengine'
+
+ module Fluent
+ module Plugin
+ class TestBase < Base
+ # a base plugin class, but not input nor output
+ # mainly for helpers and owned plugins
+ include PluginId
+ include PluginLoggerMixin
+ include PluginHelper::Mixin
+ end
+ end
+ end
 
  unless defined?(Test::Unit::AssertionFailedError)
  class Test::Unit::AssertionFailedError < StandardError
  end
  end
 
- include Fluent::Test::Helpers
+ def config_element(name = 'test', argument = '', params = {}, elements = [])
+ Fluent::Config::Element.new(name, argument, params, elements)
+ end
+
+ def event_time(str=nil)
+ if str
+ Fluent::EventTime.parse(str)
+ else
+ Fluent::EventTime.now
+ end
+ end
+
+ def msgpack(type)
+ case type
+ when :factory
+ Fluent::MessagePackFactory.factory
+ when :packer
+ Fluent::MessagePackFactory.packer
+ when :unpacker
+ Fluent::MessagePackFactory.unpacker
+ else
+ raise ArgumentError, "unknown msgpack object type '#{type}'"
+ end
+ end
+
+ def unused_port(num = 1)
+ ports = []
+ sockets = []
+ num.times do
+ s = TCPServer.open(0)
+ sockets << s
+ ports << s.addr[1]
+ end
+ sockets.each{|s| s.close }
+ if num == 1
+ return ports.first
+ else
+ return *ports
+ end
+ end
 
- def unused_port
- s = TCPServer.open(0)
- port = s.addr[1]
- s.close
- port
+ def waiting(seconds, logs: nil, plugin: nil)
+ begin
+ Timeout.timeout(seconds) do
+ yield
+ end
+ rescue Timeout::Error
+ if logs
+ STDERR.print(*logs)
+ elsif plugin
+ STDERR.print(*plugin.log.out.logs)
+ end
+ raise
+ end
  end
 
  def ipv6_enabled?
@@ -68,4 +134,8 @@ def ipv6_enabled?
  end
  end
 
- $log = Fluent::Log.new(Fluent::Test::DummyLogDevice.new, Fluent::Log::LEVEL_WARN)
+ dl_opts = {}
+ dl_opts[:log_level] = ServerEngine::DaemonLogger::WARN
+ logdev = Fluent::Test::DummyLogDevice.new
+ logger = ServerEngine::DaemonLogger.new(logdev, dl_opts)
+ $log ||= Fluent::Log.new(logger)
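A minimal sketch, assuming fluentd 0.14.0, of how the new top-level helpers defined above (config_element and event_time) are called from a test case; the HelperSmokeTest class below is hypothetical and would live next to helper.rb under test/:

# Hypothetical test case exercising only the helpers defined in helper.rb above.
require_relative 'helper'
require 'time'

class HelperSmokeTest < Test::Unit::TestCase
  test 'config_element builds a Fluent::Config::Element tree' do
    conf = config_element('ROOT', '', {'@log_level' => 'debug'},
                          [config_element('buffer', '', {'path' => '/tmp/buf/example.*.log'})])
    assert_equal 'ROOT', conf.name
    assert_equal 'debug', conf['@log_level']
    assert_equal 'buffer', conf.elements.first.name
  end

  test 'event_time parses a timestamp into Fluent::EventTime' do
    t = event_time('2016-05-21 18:37:31 +0900')
    assert_equal Time.parse('2016-05-21 18:37:31 +0900').to_i, t.to_i
  end
end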
data/test/plugin/test_bare_output.rb ADDED
@@ -0,0 +1,118 @@
+ require_relative '../helper'
+ require 'fluent/plugin/bare_output'
+ require 'fluent/event'
+
+ module FluentPluginBareOutputTest
+ class DummyPlugin < Fluent::Plugin::BareOutput
+ attr_reader :store
+ def initialize
+ super
+ @store = []
+ end
+ def process(tag, es)
+ es.each do |time, record|
+ @store << [tag, time, record]
+ end
+ end
+ end
+ end
+
+ class BareOutputTest < Test::Unit::TestCase
+ setup do
+ Fluent::Test.setup
+ @p = FluentPluginBareOutputTest::DummyPlugin.new
+ end
+
+ test 'has healthy lifecycle' do
+ assert !@p.configured?
+ @p.configure(config_element())
+ assert @p.configured?
+
+ assert !@p.started?
+ @p.start
+ assert @p.start
+
+ assert !@p.stopped?
+ @p.stop
+ assert @p.stopped?
+
+ assert !@p.before_shutdown?
+ @p.before_shutdown
+ assert @p.before_shutdown?
+
+ assert !@p.shutdown?
+ @p.shutdown
+ assert @p.shutdown?
+
+ assert !@p.after_shutdown?
+ @p.after_shutdown
+ assert @p.after_shutdown?
+
+ assert !@p.closed?
+ @p.close
+ assert @p.closed?
+
+ assert !@p.terminated?
+ @p.terminate
+ assert @p.terminated?
+ end
+
+ test 'has plugin_id automatically generated' do
+ assert @p.respond_to?(:plugin_id_configured?)
+ assert @p.respond_to?(:plugin_id)
+
+ @p.configure(config_element())
+
+ assert !@p.plugin_id_configured?
+ assert @p.plugin_id
+ assert{ @p.plugin_id != 'mytest' }
+ end
+
+ test 'has plugin_id manually configured' do
+ @p.configure(config_element('ROOT', '', {'@id' => 'mytest'}))
+ assert @p.plugin_id_configured?
+ assert_equal 'mytest', @p.plugin_id
+ end
+
+ test 'has plugin logger' do
+ assert @p.respond_to?(:log)
+ assert @p.log
+
+ # default logger
+ original_logger = @p.log
+
+ @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
+
+ assert{ @p.log.object_id != original_logger.object_id }
+ assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
+ end
+
+ test 'can load plugin helpers' do
+ assert_nothing_raised do
+ class FluentPluginBareOutputTest::DummyPlugin2 < Fluent::Plugin::BareOutput
+ helpers :storage
+ end
+ end
+ end
+
+ test 'can get input event stream to write' do
+ @p.configure(config_element('ROOT'))
+ @p.start
+
+ es1 = Fluent::OneEventStream.new(event_time('2016-05-21 18:37:31 +0900'), {'k1' => 'v1'})
+ es2 = Fluent::ArrayEventStream.new([
+ [event_time('2016-05-21 18:38:33 +0900'), {'k2' => 'v2'}],
+ [event_time('2016-05-21 18:39:10 +0900'), {'k3' => 'v3'}],
+ ])
+ @p.emit_events('mytest1', es1)
+ @p.emit_events('mytest2', es2)
+
+ all_events = [
+ ['mytest1', event_time('2016-05-21 18:37:31 +0900'), {'k1' => 'v1'}],
+ ['mytest2', event_time('2016-05-21 18:38:33 +0900'), {'k2' => 'v2'}],
+ ['mytest2', event_time('2016-05-21 18:39:10 +0900'), {'k3' => 'v3'}],
+ ]
+
+ assert_equal all_events, @p.store
+ end
+ end
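For reference, a sketch (assuming fluentd 0.14.0) of a registered plugin built on the Fluent::Plugin::BareOutput API exercised above; the SampleStoreOutput class and the plugin name 'sample_store' are hypothetical, modeled directly on DummyPlugin:

require 'fluent/plugin/bare_output'

module Fluent
  module Plugin
    # Hypothetical plugin; a BareOutput subclass only needs #process(tag, es).
    class SampleStoreOutput < BareOutput
      Fluent::Plugin.register_output('sample_store', self)

      attr_reader :store

      def initialize
        super
        @store = []
      end

      # The event stream yields [time, record] pairs for the given tag.
      def process(tag, es)
        es.each do |time, record|
          @store << [tag, time, record]
        end
      end
    end
  end
end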
data/test/plugin/test_base.rb ADDED
@@ -0,0 +1,75 @@
+ require_relative '../helper'
+ require 'fluent/plugin/base'
+
+ module FluentPluginBaseTest
+ class DummyPlugin < Fluent::Plugin::Base
+ end
+ end
+
+ class BaseTest < Test::Unit::TestCase
+ setup do
+ @p = FluentPluginBaseTest::DummyPlugin.new
+ end
+
+ test 'has methods for phases of plugin life cycle, and methods to know "super"s were correctly called or not' do
+ assert !@p.configured?
+ @p.configure(config_element())
+ assert @p.configured?
+
+ assert !@p.started?
+ @p.start
+ assert @p.start
+
+ assert !@p.stopped?
+ @p.stop
+ assert @p.stopped?
+
+ assert !@p.before_shutdown?
+ @p.before_shutdown
+ assert @p.before_shutdown?
+
+ assert !@p.shutdown?
+ @p.shutdown
+ assert @p.shutdown?
+
+ assert !@p.after_shutdown?
+ @p.after_shutdown
+ assert @p.after_shutdown?
+
+ assert !@p.closed?
+ @p.close
+ assert @p.closed?
+
+ assert !@p.terminated?
+ @p.terminate
+ assert @p.terminated?
+ end
+
+ test 'can access system config' do
+ assert @p.system_config
+
+ @p.system_config_override({'process_name' => 'mytest'})
+ assert_equal 'mytest', @p.system_config.process_name
+ end
+
+ test 'does not have router in default' do
+ assert !@p.has_router?
+ end
+
+ test 'is configurable by config_param and config_section' do
+ assert_nothing_raised do
+ class FluentPluginBaseTest::DummyPlugin2 < Fluent::Plugin::TestBase
+ config_param :myparam1, :string
+ config_section :mysection, multi: false do
+ config_param :myparam2, :integer
+ end
+ end
+ end
+ p2 = FluentPluginBaseTest::DummyPlugin2.new
+ assert_nothing_raised do
+ p2.configure(config_element('ROOT', '', {'myparam1' => 'myvalue1'}, [config_element('mysection', '', {'myparam2' => 99})]))
+ end
+ assert_equal 'myvalue1', p2.myparam1
+ assert_equal 99, p2.mysection.myparam2
+ end
+ end
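The long test_buf_file.rb diff that follows replaces the old Fluent::FileBuffer tests with tests for the new Fluent::Plugin::FileBuffer. The setup pattern repeated across its sub_test_cases looks roughly like this sketch (assembled from the diff; the buffer path is an example):

require_relative '../helper'
require 'fluent/plugin/buf_file'
require 'fluent/plugin/output'

# Dummy owner plugin, as registered in the test below.
class DummyOutputPlugin < Fluent::Plugin::Output
  Fluent::Plugin.register_output('buffer_file_test_output', self)
end

Fluent::Test.setup

owner  = DummyOutputPlugin.new
buffer = Fluent::Plugin::FileBuffer.new
buffer.owner = owner
buffer.configure(config_element('buffer', '', {'path' => '/tmp/buffer_file_dir/buffer.*.log'}))
buffer.start   # creates the buffer directory and resumes any existing chunks

# Chunks are generated per metadata (timekey/tag/variables) and stored under path.
chunk = buffer.generate_chunk(Fluent::Plugin::Buffer::Metadata.new(nil, nil, nil))
chunk.purge

buffer.stop; buffer.before_shutdown; buffer.shutdown
buffer.after_shutdown; buffer.close; buffer.terminate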
data/test/plugin/test_buf_file.rb CHANGED
@@ -1,604 +1,503 @@
1
- # -*- coding: utf-8 -*-
2
1
  require_relative '../helper'
3
- require 'fluent/test'
4
2
  require 'fluent/plugin/buf_file'
3
+ require 'fluent/plugin/output'
4
+ require 'fluent/unique_id'
5
+ require 'fluent/system_config'
6
+ require 'fluent/env'
5
7
 
6
- require 'fileutils'
7
-
8
- require 'stringio'
9
8
  require 'msgpack'
10
9
 
11
- module FluentFileBufferTest
12
- class FileBufferChunkTest < Test::Unit::TestCase
13
- BUF_FILE_TMPDIR = File.expand_path(File.join(File.dirname(__FILE__), '..', 'tmp', 'buf_file_chunk'))
14
-
15
- def setup
16
- if Dir.exists? BUF_FILE_TMPDIR
17
- FileUtils.remove_entry_secure BUF_FILE_TMPDIR
18
- end
19
- FileUtils.mkdir_p BUF_FILE_TMPDIR
20
- end
21
-
22
- def bufpath(unique, link=false)
23
- File.join(BUF_FILE_TMPDIR, unique + '.log' + (link ? '.link' : ''))
24
- end
25
-
26
- def filebufferchunk(key, unique, opts={})
27
- Fluent::FileBufferChunk.new(key, bufpath(unique), unique, opts[:mode] || "a+", opts[:symlink])
28
- end
29
-
30
- def test_init
31
- chunk = filebufferchunk('key', 'init1')
32
- assert_equal 'key', chunk.key
33
- assert_equal 'init1', chunk.unique_id
34
- assert_equal bufpath('init1'), chunk.path
35
-
36
- chunk.close # size==0, then, unlinked
37
-
38
- symlink_path = bufpath('init2', true)
39
-
40
- chunk = filebufferchunk('key2', 'init2', symlink: symlink_path)
41
- assert_equal 'key2', chunk.key
42
- assert_equal 'init2', chunk.unique_id
43
- assert File.exists?(symlink_path) && File.symlink?(symlink_path)
44
-
45
- chunk.close # unlink
46
-
47
- assert File.symlink?(symlink_path)
48
- File.unlink(symlink_path)
49
- end
50
-
51
- def test_buffer_chunk_interface
52
- chunk = filebufferchunk('key', 'interface1')
10
+ module FluentPluginFileBufferTest
11
+ class DummyOutputPlugin < Fluent::Plugin::Output
12
+ Fluent::Plugin.register_output('buffer_file_test_output', self)
13
+ end
14
+ end
53
15
 
54
- assert chunk.respond_to?(:empty?)
55
- assert chunk.respond_to?(:<<)
56
- assert chunk.respond_to?(:size)
57
- assert chunk.respond_to?(:close)
58
- assert chunk.respond_to?(:purge)
59
- assert chunk.respond_to?(:read)
60
- assert chunk.respond_to?(:open)
61
- assert chunk.respond_to?(:write_to)
62
- assert chunk.respond_to?(:msgpack_each)
16
+ class FileBufferTest < Test::Unit::TestCase
17
+ def metadata(timekey: nil, tag: nil, variables: nil)
18
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
19
+ end
63
20
 
64
- chunk.close
21
+ def write_metadata(path, chunk_id, metadata, size, ctime, mtime)
22
+ metadata = {
23
+ timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
24
+ id: chunk_id,
25
+ s: size,
26
+ c: ctime,
27
+ m: mtime,
28
+ }
29
+ File.open(path, 'wb') do |f|
30
+ f.write metadata.to_msgpack
65
31
  end
32
+ end
66
33
 
67
- def test_empty?
68
- chunk = filebufferchunk('e1', 'empty1')
69
- assert chunk.empty?
70
- chunk.close
34
+ sub_test_case 'non configured buffer plugin instance' do
35
+ setup do
36
+ Fluent::Test.setup
71
37
 
72
- open(bufpath('empty2'), 'w') do |file|
73
- file.write "data1\ndata2\n"
38
+ @dir = File.expand_path('../../tmp/buffer_file_dir', __FILE__)
39
+ unless File.exist?(@dir)
40
+ FileUtils.mkdir_p @dir
74
41
  end
75
- chunk = filebufferchunk('e2', 'empty2')
76
- assert !(chunk.empty?)
77
- chunk.close
78
- end
79
-
80
- def test_append_close_purge
81
- chunk = filebufferchunk('a1', 'append1')
82
- assert chunk.empty?
83
-
84
- test_data1 = ("1" * 9 + "\n" + "2" * 9 + "\n").force_encoding('ASCII-8BIT')
85
- test_data2 = "日本語Japanese\n".force_encoding('UTF-8')
86
- chunk << test_data1
87
- chunk << test_data2
88
- assert_equal 38, chunk.size
89
- chunk.close
90
-
91
- assert File.exists?(bufpath('append1'))
92
-
93
- chunk = filebufferchunk('a1', 'append1', mode: 'r')
94
- test_data = test_data1.force_encoding('ASCII-8BIT') + test_data2.force_encoding('ASCII-8BIT')
95
-
96
- #### TODO: This assertion currently fails. Oops.
97
- # FileBuffer#read does NOT do force_encoding('ASCII-8BIT'). So encoding of output string instance are 'UTF-8'.
98
- # I think it is a kind of bug, but fixing it may break some behavior of buf_file. So I cannot be sure to fix it just now.
99
- #
100
- # assert_equal test_data, chunk.read
101
-
102
- chunk.purge
103
-
104
- assert !(File.exists?(bufpath('append1')))
105
- end
106
-
107
- def test_empty_chunk_key # for BufferedOutput#emit
108
- chunk = filebufferchunk('', 'append1')
109
- assert chunk.empty?
110
-
111
- test_data1 = ("1" * 9 + "\n" + "2" * 9 + "\n").force_encoding('ASCII-8BIT')
112
- test_data2 = "日本語Japanese\n".force_encoding('UTF-8')
113
- chunk << test_data1
114
- chunk << test_data2
115
- assert_equal 38, chunk.size
116
- chunk.close
117
- end
118
-
119
- def test_read
120
- chunk = filebufferchunk('r1', 'read1')
121
- assert chunk.empty?
122
-
123
- d1 = "abcde" * 200 + "\n"
124
- chunk << d1
125
- d2 = "12345" * 200 + "\n"
126
- chunk << d2
127
- assert_equal (d1.size + d2.size), chunk.size
128
-
129
- read_data = chunk.read
130
- assert_equal (d1 + d2), read_data
131
-
132
- chunk.purge
133
- end
134
-
135
- def test_open
136
- chunk = filebufferchunk('o1', 'open1')
137
- assert chunk.empty?
138
-
139
- d1 = "abcde" * 200 + "\n"
140
- chunk << d1
141
- d2 = "12345" * 200 + "\n"
142
- chunk << d2
143
- assert_equal (d1.size + d2.size), chunk.size
144
-
145
- read_data = chunk.open do |io|
146
- io.read
42
+ Dir.glob(File.join(@dir, '*')).each do |path|
43
+ next if ['.', '..'].include?(File.basename(path))
44
+ File.delete(path)
147
45
  end
148
- assert_equal (d1 + d2), read_data
149
-
150
- chunk.purge
151
46
  end
152
47
 
153
- def test_write_to
154
- chunk = filebufferchunk('w1', 'write1')
155
- assert chunk.empty?
156
-
157
- d1 = "abcde" * 200 + "\n"
158
- chunk << d1
159
- d2 = "12345" * 200 + "\n"
160
- chunk << d2
161
- assert_equal (d1.size + d2.size), chunk.size
162
-
163
- dummy_dst = StringIO.new
164
-
165
- chunk.write_to(dummy_dst)
166
- assert_equal (d1 + d2), dummy_dst.string
167
-
168
- chunk.purge
48
+ test 'path should include * normally' do
49
+ d = FluentPluginFileBufferTest::DummyOutputPlugin.new
50
+ p = Fluent::Plugin::FileBuffer.new
51
+ p.owner = d
52
+ p.configure(config_element('buffer', '', {'path' => File.join(@dir, 'buffer.*.file')}))
53
+ assert_equal File.join(@dir, 'buffer.*.file'), p.path
169
54
  end
170
55
 
171
- def test_msgpack_each
172
- chunk = filebufferchunk('m1', 'msgpack1')
173
- assert chunk.empty?
174
-
175
- d0 = MessagePack.pack([[1, "foo"], [2, "bar"], [3, "baz"]])
176
- d1 = MessagePack.pack({"key1" => "value1", "key2" => "value2"})
177
- d2 = MessagePack.pack("string1")
178
- d3 = MessagePack.pack(1)
179
- d4 = MessagePack.pack(nil)
180
- chunk << d0
181
- chunk << d1
182
- chunk << d2
183
- chunk << d3
184
- chunk << d4
185
-
186
- store = []
187
- chunk.msgpack_each do |data|
188
- store << data
189
- end
190
-
191
- assert_equal 5, store.size
192
- assert_equal [[1, "foo"], [2, "bar"], [3, "baz"]], store[0]
193
- assert_equal({"key1" => "value1", "key2" => "value2"}, store[1])
194
- assert_equal "string1", store[2]
195
- assert_equal 1, store[3]
196
- assert_equal nil, store[4]
197
-
198
- chunk.purge
56
+ test 'existing directory will be used with additional default file name' do
57
+ d = FluentPluginFileBufferTest::DummyOutputPlugin.new
58
+ p = Fluent::Plugin::FileBuffer.new
59
+ p.owner = d
60
+ p.configure(config_element('buffer', '', {'path' => @dir}))
61
+ assert_equal File.join(@dir, 'buffer.*.log'), p.path
199
62
  end
200
63
 
201
- def test_mv
202
- chunk = filebufferchunk('m1', 'move1')
203
- assert chunk.empty?
204
-
205
- d1 = "abcde" * 200 + "\n"
206
- chunk << d1
207
- d2 = "12345" * 200 + "\n"
208
- chunk << d2
209
- assert_equal (d1.size + d2.size), chunk.size
210
-
211
- assert_equal bufpath('move1'), chunk.path
212
-
213
- assert File.exists?( bufpath( 'move1' ) )
214
- assert !(File.exists?( bufpath( 'move2' ) ))
215
-
216
- chunk.mv(bufpath('move2'))
217
-
218
- assert !(File.exists?( bufpath( 'move1' ) ))
219
- assert File.exists?( bufpath( 'move2' ) )
220
-
221
- assert_equal bufpath('move2'), chunk.path
222
-
223
- chunk.purge
64
+ test 'unexisting path without * handled as directory' do
65
+ d = FluentPluginFileBufferTest::DummyOutputPlugin.new
66
+ p = Fluent::Plugin::FileBuffer.new
67
+ p.owner = d
68
+ p.configure(config_element('buffer', '', {'path' => File.join(@dir, 'buffer')}))
69
+ assert_equal File.join(@dir, 'buffer', 'buffer.*.log'), p.path
224
70
  end
225
71
  end
226
72
 
227
- class FileBufferTest < Test::Unit::TestCase
228
- BUF_FILE_TMPDIR = File.expand_path(File.join(File.dirname(__FILE__), '..', 'tmp', 'buf_file'))
229
-
230
- def setup
231
- if Dir.exists? BUF_FILE_TMPDIR
232
- FileUtils.remove_entry_secure BUF_FILE_TMPDIR
73
+ sub_test_case 'buffer plugin configured only with path' do
74
+ setup do
75
+ @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
76
+ @bufpath = File.join(@bufdir, 'testbuf.*.log')
77
+ FileUtils.rm_r @bufdir if File.exist?(@bufdir)
78
+
79
+ Fluent::Test.setup
80
+ @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
81
+ @p = Fluent::Plugin::FileBuffer.new
82
+ @p.owner = @d
83
+ @p.configure(config_element('buffer', '', {'path' => @bufpath}))
84
+ @p.start
85
+ end
86
+
87
+ teardown do
88
+ if @p
89
+ @p.stop unless @p.stopped?
90
+ @p.before_shutdown unless @p.before_shutdown?
91
+ @p.shutdown unless @p.shutdown?
92
+ @p.after_shutdown unless @p.after_shutdown?
93
+ @p.close unless @p.closed?
94
+ @p.terminate unless @p.terminated?
233
95
  end
234
- FileUtils.mkdir_p BUF_FILE_TMPDIR
235
- end
236
-
237
- def bufpath(basename)
238
- File.join(BUF_FILE_TMPDIR, basename)
239
- end
240
-
241
- def filebuffer(key, unique, opts={})
242
- Fluent::FileBufferChunk.new(key, bufpath(unique), unique, opts[:mode] || "a+", opts[:symlink])
243
- end
244
-
245
- def test_init_configure
246
- buf = Fluent::FileBuffer.new
247
-
248
- assert_raise(Fluent::ConfigError){ buf.configure({}) }
249
-
250
- buf.configure({'buffer_path' => bufpath('configure1.*.log')})
251
- assert_equal bufpath('configure1.*.log'), buf.buffer_path
252
- assert_equal nil, buf.symlink_path
253
- assert_equal false, buf.instance_eval{ @flush_at_shutdown }
254
-
255
- buf2 = Fluent::FileBuffer.new
256
-
257
- # Same buffer_path value is rejected, not to overwrite exisitng buffer file.
258
- assert_raise(Fluent::ConfigError){ buf2.configure({'buffer_path' => bufpath('configure1.*.log')}) }
259
-
260
- buf2.configure({'buffer_path' => bufpath('configure2.*.log'), 'flush_at_shutdown' => ''})
261
- assert_equal bufpath('configure2.*.log'), buf2.buffer_path
262
- assert_equal true, buf2.instance_eval{ @flush_at_shutdown }
263
- end
264
-
265
- def test_configure_path_prefix_suffix
266
- # With '*' in path, prefix is the part before '*', suffix is the part after '*'
267
- buf = Fluent::FileBuffer.new
268
-
269
- path1 = bufpath('suffpref1.*.log')
270
- prefix1, suffix1 = path1.split('*', 2)
271
- buf.configure({'buffer_path' => path1})
272
- assert_equal prefix1, buf.instance_eval{ @buffer_path_prefix }
273
- assert_equal suffix1, buf.instance_eval{ @buffer_path_suffix }
274
-
275
- # Without '*', prefix is the string of whole path + '.', suffix is '.log'
276
- path2 = bufpath('suffpref2')
277
- buf.configure({'buffer_path' => path2})
278
- assert_equal path2 + '.', buf.instance_eval{ @buffer_path_prefix }
279
- assert_equal '.log', buf.instance_eval{ @buffer_path_suffix }
280
- end
281
-
282
- class DummyOutput
283
- attr_accessor :written
284
-
285
- def write(chunk)
286
- @written ||= []
287
- @written.push chunk
288
- "return value"
96
+ if @bufdir
97
+ Dir.glob(File.join(@bufdir, '*')).each do |path|
98
+ next if ['.', '..'].include?(File.basename(path))
99
+ File.delete(path)
100
+ end
289
101
  end
290
102
  end
291
103
 
292
- def test_encode_key
293
- buf = Fluent::FileBuffer.new
294
- safe_chars = '-_.abcdefgxyzABCDEFGXYZ0123456789'
295
- assert_equal safe_chars, buf.send(:encode_key, safe_chars)
296
- unsafe_chars = '-_.abcdefgxyzABCDEFGXYZ0123456789 ~/*()'
297
- assert_equal safe_chars + '%20%7E%2F%2A%28%29', buf.send(:encode_key, unsafe_chars)
104
+ test 'this is persistent plugin' do
105
+ assert @p.persistent?
298
106
  end
299
107
 
300
- def test_decode_key
301
- buf = Fluent::FileBuffer.new
302
- safe_chars = '-_.abcdefgxyzABCDEFGXYZ0123456789'
303
- assert_equal safe_chars, buf.send(:decode_key, safe_chars)
304
- unsafe_chars = '-_.abcdefgxyzABCDEFGXYZ0123456789 ~/*()'
305
- assert_equal unsafe_chars, buf.send(:decode_key, safe_chars + '%20%7E%2F%2A%28%29')
108
+ test '#start creates directory for buffer chunks' do
109
+ plugin = Fluent::Plugin::FileBuffer.new
110
+ plugin.owner = @d
111
+ rand_num = rand(0..100)
112
+ bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
113
+ bufdir = File.dirname(bufpath)
306
114
 
307
- assert_equal safe_chars, buf.send(:decode_key, buf.send(:encode_key, safe_chars))
308
- assert_equal unsafe_chars, buf.send(:decode_key, buf.send(:encode_key, unsafe_chars))
309
- end
115
+ FileUtils.rm_r bufdir if File.exist?(bufdir)
116
+ assert !File.exist?(bufdir)
310
117
 
311
- def test_make_path
312
- buf = Fluent::FileBuffer.new
313
- buf.configure({'buffer_path' => bufpath('makepath.*.log')})
314
- prefix = buf.instance_eval{ @buffer_path_prefix }
315
- suffix = buf.instance_eval{ @buffer_path_suffix }
118
+ plugin.configure(config_element('buffer', '', {'path' => bufpath}))
119
+ assert !File.exist?(bufdir)
316
120
 
317
- path,tsuffix = buf.send(:make_path, buf.send(:encode_key, 'foo bar'), 'b')
318
- assert path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.[bq][0-9a-f]+#{suffix}\Z/, "invalid format:#{path}"
319
- assert tsuffix =~ /\A[0-9a-f]+\Z/, "invalid hexadecimal:#{tsuffix}"
121
+ plugin.start
122
+ assert File.exist?(bufdir)
123
+ assert{ File.stat(bufdir).mode.to_s(8).end_with?('755') }
320
124
 
321
- path,tsuffix = buf.send(:make_path, buf.send(:encode_key, 'baz 123'), 'q')
322
- assert path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.[bq][0-9a-f]+#{suffix}\Z/, "invalid format:#{path}"
323
- assert tsuffix =~ /\A[0-9a-f]+\Z/, "invalid hexadecimal:#{tsuffix}"
125
+ plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
126
+ FileUtils.rm_r bufdir
324
127
  end
325
128
 
326
- def test_tsuffix_to_unique_id
327
- buf = Fluent::FileBuffer.new
328
- # why *2 ? frsyuki said "I forgot why completely."
329
- assert_equal "\xFF\xFF\xFF\xFF".force_encoding('ASCII-8BIT'), buf.send(:tsuffix_to_unique_id, 'ffff')
330
- assert_equal "\x88\x00\xFF\x00\x11\xEE\x88\x00\xFF\x00\x11\xEE".force_encoding('ASCII-8BIT'), buf.send(:tsuffix_to_unique_id, '8800ff0011ee')
331
- end
332
-
333
- def test_start_makes_parent_directories
334
- buf = Fluent::FileBuffer.new
335
- buf.configure({'buffer_path' => bufpath('start/base.*.log')})
336
- parent_dirname = File.dirname(buf.instance_eval{ @buffer_path_prefix })
337
- assert !(Dir.exists?(parent_dirname))
338
- buf.start
339
- assert Dir.exists?(parent_dirname)
340
- end
129
+ test '#start creates directory for buffer chunks with specified permission' do
130
+ omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
341
131
 
342
- def test_new_chunk
343
- buf = Fluent::FileBuffer.new
344
- buf.configure({'buffer_path' => bufpath('new_chunk_1')})
345
- prefix = buf.instance_eval{ @buffer_path_prefix }
346
- suffix = buf.instance_eval{ @buffer_path_suffix }
347
-
348
- chunk = buf.new_chunk('key1')
349
- assert chunk
350
- assert File.exists?(chunk.path)
351
- assert chunk.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/, "path from new_chunk must be a 'b' buffer chunk"
352
- chunk.close
353
- end
132
+ plugin = Fluent::Plugin::FileBuffer.new
133
+ plugin.owner = @d
134
+ rand_num = rand(0..100)
135
+ bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
136
+ bufdir = File.dirname(bufpath)
354
137
 
355
- def test_chunk_identifier_in_path
356
- buf1 = Fluent::FileBuffer.new
357
- buf1.configure({'buffer_path' => bufpath('chunkid1')})
358
- prefix1 = buf1.instance_eval{ @buffer_path_prefix }
359
- suffix1 = buf1.instance_eval{ @buffer_path_suffix }
138
+ FileUtils.rm_r bufdir if File.exist?(bufdir)
139
+ assert !File.exist?(bufdir)
360
140
 
361
- chunk1 = buf1.new_chunk('key1')
362
- assert_equal chunk1.path, prefix1 + buf1.chunk_identifier_in_path(chunk1.path) + suffix1
141
+ plugin.configure(config_element('buffer', '', {'path' => bufpath, 'dir_permission' => 0700}))
142
+ assert !File.exist?(bufdir)
363
143
 
364
- buf2 = Fluent::FileBuffer.new
365
- buf2.configure({'buffer_path' => bufpath('chunkid2')})
366
- prefix2 = buf2.instance_eval{ @buffer_path_prefix }
367
- suffix2 = buf2.instance_eval{ @buffer_path_suffix }
144
+ plugin.start
145
+ assert File.exist?(bufdir)
146
+ assert{ File.stat(bufdir).mode.to_s(8).end_with?('700') }
368
147
 
369
- chunk2 = buf2.new_chunk('key2')
370
- assert_equal chunk2.path, prefix2 + buf2.chunk_identifier_in_path(chunk2.path) + suffix2
148
+ plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
149
+ FileUtils.rm_r bufdir
371
150
  end
372
151
 
373
- def test_enqueue_moves_chunk_from_b_to_q
374
- buf = Fluent::FileBuffer.new
375
- buf.configure({'buffer_path' => bufpath('enqueue1')})
376
- prefix = buf.instance_eval{ @buffer_path_prefix }
377
- suffix = buf.instance_eval{ @buffer_path_suffix }
152
+ test '#start creates directory for buffer chunks with specified permission via system config' do
153
+ omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
378
154
 
379
- chunk = buf.new_chunk('key1')
380
- chunk << "data1\ndata2\n"
155
+ sysconf = {'dir_permission' => '700'}
156
+ Fluent::SystemConfig.overwrite_system_config(sysconf) do
157
+ plugin = Fluent::Plugin::FileBuffer.new
158
+ plugin.owner = @d
159
+ rand_num = rand(0..100)
160
+ bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
161
+ bufdir = File.dirname(bufpath)
381
162
 
382
- assert chunk
383
- old_path = chunk.path.dup
384
- assert File.exists?(chunk.path)
385
- assert chunk.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/, "path from new_chunk must be a 'b' buffer chunk"
163
+ FileUtils.rm_r bufdir if File.exist?(bufdir)
164
+ assert !File.exist?(bufdir)
386
165
 
387
- buf.enqueue(chunk)
166
+ plugin.configure(config_element('buffer', '', {'path' => bufpath}))
167
+ assert !File.exist?(bufdir)
388
168
 
389
- assert chunk
390
- assert File.exists?(chunk.path)
391
- assert !(File.exists?(old_path))
392
- assert chunk.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.q[0-9a-f]+#{suffix}\Z/, "enqueued chunk's path must be a 'q' buffer chunk"
169
+ plugin.start
170
+ assert File.exist?(bufdir)
171
+ assert{ File.stat(bufdir).mode.to_s(8).end_with?('700') }
393
172
 
394
- data = chunk.read
395
- assert "data1\ndata2\n", data
173
+ plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
174
+ FileUtils.rm_r bufdir
175
+ end
396
176
  end
397
177
 
398
- # empty chunk keys are used w/ BufferedOutput
399
- # * ObjectBufferedOutput's keys are tag
400
- # * TimeSlicedOutput's keys are time_key
401
- def test_enqueue_chunk_with_empty_key
402
- buf = Fluent::FileBuffer.new
403
- buf.configure({'buffer_path' => bufpath('enqueue2')})
404
- prefix = buf.instance_eval{ @buffer_path_prefix }
405
- suffix = buf.instance_eval{ @buffer_path_suffix }
406
-
407
- chunk = buf.new_chunk('')
408
- chunk << "data1\ndata2\n"
409
-
410
- assert chunk
411
- old_path = chunk.path.dup
412
- assert File.exists?(chunk.path)
413
- # chunk key is empty
414
- assert chunk.path =~ /\A#{prefix}\.b[0-9a-f]+#{suffix}\Z/, "path from new_chunk must be a 'b' buffer chunk"
415
-
416
- buf.enqueue(chunk)
417
-
418
- assert chunk
419
- assert File.exists?(chunk.path)
420
- assert !(File.exists?(old_path))
421
- # chunk key is empty
422
- assert chunk.path =~ /\A#{prefix}\.q[0-9a-f]+#{suffix}\Z/, "enqueued chunk's path must be a 'q' buffer chunk"
423
-
424
- data = chunk.read
425
- assert "data1\ndata2\n", data
178
+ test '#generate_chunk generates blank file chunk on path from unique_id of metadata' do
179
+ m1 = metadata()
180
+ c1 = @p.generate_chunk(m1)
181
+ assert c1.is_a? Fluent::Plugin::Buffer::FileChunk
182
+ assert_equal m1, c1.metadata
183
+ assert c1.empty?
184
+ assert_equal :staged, c1.state
185
+ assert_equal Fluent::Plugin::Buffer::FileChunk::FILE_PERMISSION, c1.permission
186
+ assert_equal @bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c1.unique_id)}."), c1.path
187
+ assert{ File.stat(c1.path).mode.to_s(8).end_with?('644') }
188
+
189
+ m2 = metadata(timekey: event_time('2016-04-17 11:15:00 -0700').to_i)
190
+ c2 = @p.generate_chunk(m2)
191
+ assert c2.is_a? Fluent::Plugin::Buffer::FileChunk
192
+ assert_equal m2, c2.metadata
193
+ assert c2.empty?
194
+ assert_equal :staged, c2.state
195
+ assert_equal Fluent::Plugin::Buffer::FileChunk::FILE_PERMISSION, c2.permission
196
+ assert_equal @bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c2.unique_id)}."), c2.path
197
+ assert{ File.stat(c2.path).mode.to_s(8).end_with?('644') }
198
+
199
+ c1.purge
200
+ c2.purge
201
+ end
202
+
203
+ test '#generate_chunk generates blank file chunk with specified permission' do
204
+ omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
205
+
206
+ plugin = Fluent::Plugin::FileBuffer.new
207
+ plugin.owner = @d
208
+ rand_num = rand(0..100)
209
+ bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
210
+ bufdir = File.dirname(bufpath)
211
+
212
+ FileUtils.rm_r bufdir if File.exist?(bufdir)
213
+ assert !File.exist?(bufdir)
214
+
215
+ plugin.configure(config_element('buffer', '', {'path' => bufpath, 'file_permission' => 0600}))
216
+ assert !File.exist?(bufdir)
217
+ plugin.start
218
+
219
+ m = metadata()
220
+ c = plugin.generate_chunk(m)
221
+ assert c.is_a? Fluent::Plugin::Buffer::FileChunk
222
+ assert_equal m, c.metadata
223
+ assert c.empty?
224
+ assert_equal :staged, c.state
225
+ assert_equal 0600, c.permission
226
+ assert_equal bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c.unique_id)}."), c.path
227
+ assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
228
+
229
+ c.purge
230
+
231
+ plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
232
+ FileUtils.rm_r bufdir
426
233
  end
234
+ end
427
235
 
428
- def test_before_shutdown_without_flush_at_shutdown
429
- buf = Fluent::FileBuffer.new
430
- buf.configure({'buffer_path' => bufpath('before_shutdown1')})
431
- buf.start
432
-
433
- # before_shutdown does nothing
434
-
435
- c1 = [ buf.new_chunk('k0'), buf.new_chunk('k1'), buf.new_chunk('k2'), buf.new_chunk('k3') ]
436
- c2 = [ buf.new_chunk('q0'), buf.new_chunk('q1') ]
437
236
 
438
- buf.instance_eval do
439
- @map = {
440
- 'k0' => c1[0], 'k1' => c1[1], 'k2' => c1[2], 'k3' => c1[3],
441
- 'q0' => c2[0], 'q1' => c2[1]
442
- }
237
+ sub_test_case 'there are no existing file chunks' do
238
+ setup do
239
+ @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
240
+ @bufpath = File.join(@bufdir, 'testbuf.*.log')
241
+ FileUtils.rm_r @bufdir if File.exist?(@bufdir)
242
+
243
+ Fluent::Test.setup
244
+ @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
245
+ @p = Fluent::Plugin::FileBuffer.new
246
+ @p.owner = @d
247
+ @p.configure(config_element('buffer', '', {'path' => @bufpath}))
248
+ @p.start
249
+ end
250
+ teardown do
251
+ if @p
252
+ @p.stop unless @p.stopped?
253
+ @p.before_shutdown unless @p.before_shutdown?
254
+ @p.shutdown unless @p.shutdown?
255
+ @p.after_shutdown unless @p.after_shutdown?
256
+ @p.close unless @p.closed?
257
+ @p.terminate unless @p.terminated?
443
258
  end
444
- c1[0] << "data1\ndata2\n"
445
- c1[1] << "data1\ndata2\n"
446
- c1[2] << "data1\ndata2\n"
447
- # k3 chunk is empty!
448
-
449
- c2[0] << "data1\ndata2\n"
450
- c2[1] << "data1\ndata2\n"
451
- buf.push('q0')
452
- buf.push('q1')
453
-
454
- buf.instance_eval do
455
- @enqueue_hook_times = 0
456
- def enqueue(chunk)
457
- @enqueue_hook_times += 1
259
+ if @bufdir
260
+ Dir.glob(File.join(@bufdir, '*')).each do |path|
261
+ next if ['.', '..'].include?(File.basename(path))
262
+ File.delete(path)
458
263
  end
459
264
  end
460
- assert_equal 0, buf.instance_eval{ @enqueue_hook_times }
461
-
462
- out = DummyOutput.new
463
- assert_equal nil, out.written
464
-
465
- buf.before_shutdown(out)
466
-
467
- assert_equal 0, buf.instance_eval{ @enqueue_hook_times } # k0, k1, k2
468
- assert_nil out.written
469
265
  end
470
266
 
471
- def test_before_shutdown_with_flush_at_shutdown
472
- buf = Fluent::FileBuffer.new
473
- buf.configure({'buffer_path' => bufpath('before_shutdown2'), 'flush_at_shutdown' => 'true'})
474
- buf.start
475
-
476
- # before_shutdown flushes all chunks in @map and @queue
477
-
478
- c1 = [ buf.new_chunk('k0'), buf.new_chunk('k1'), buf.new_chunk('k2'), buf.new_chunk('k3') ]
479
- c2 = [ buf.new_chunk('q0'), buf.new_chunk('q1') ]
267
+ test '#resume returns empty buffer state' do
268
+ ary = @p.resume
269
+ assert_equal({}, ary[0])
270
+ assert_equal([], ary[1])
271
+ end
272
+ end
480
273
 
481
- buf.instance_eval do
482
- @map = {
483
- 'k0' => c1[0], 'k1' => c1[1], 'k2' => c1[2], 'k3' => c1[3],
484
- 'q0' => c2[0], 'q1' => c2[1]
485
- }
274
+ sub_test_case 'there are some existing file chunks' do
275
+ setup do
276
+ @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
277
+ FileUtils.mkdir_p @bufdir unless File.exist?(@bufdir)
278
+
279
+ @c1id = Fluent::UniqueId.generate
280
+ p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
281
+ File.open(p1, 'wb') do |f|
282
+ f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
283
+ f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
284
+ f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
285
+ f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
286
+ end
287
+ write_metadata(
288
+ p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
289
+ 4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
290
+ )
291
+
292
+ @c2id = Fluent::UniqueId.generate
293
+ p2 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c2id)}.log")
294
+ File.open(p2, 'wb') do |f|
295
+ f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
296
+ f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
297
+ f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
298
+ end
299
+ write_metadata(
300
+ p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
301
+ 3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
302
+ )
303
+
304
+ @c3id = Fluent::UniqueId.generate
305
+ p3 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c3id)}.log")
306
+ File.open(p3, 'wb') do |f|
307
+ f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
308
+ f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
309
+ f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
310
+ f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
486
311
  end
487
- c1[0] << "data1\ndata2\n"
488
- c1[1] << "data1\ndata2\n"
489
- c1[2] << "data1\ndata2\n"
490
- # k3 chunk is empty!
491
-
492
- c2[0] << "data1\ndata2\n"
493
- c2[1] << "data1\ndata2\n"
494
- buf.push('q0')
495
- buf.push('q1')
496
-
497
- buf.instance_eval do
498
- @enqueue_hook_times = 0
499
- def enqueue(chunk)
500
- @enqueue_hook_times += 1
312
+ write_metadata(
313
+ p3 + '.meta', @c3id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
314
+ 4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
315
+ )
316
+
317
+ @c4id = Fluent::UniqueId.generate
318
+ p4 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c4id)}.log")
319
+ File.open(p4, 'wb') do |f|
320
+ f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
321
+ f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
322
+ f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
323
+ end
324
+ write_metadata(
325
+ p4 + '.meta', @c4id, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
326
+ 3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
327
+ )
328
+
329
+ @bufpath = File.join(@bufdir, 'etest.*.log')
330
+
331
+ Fluent::Test.setup
332
+ @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
333
+ @p = Fluent::Plugin::FileBuffer.new
334
+ @p.owner = @d
335
+ @p.configure(config_element('buffer', '', {'path' => @bufpath}))
336
+ @p.start
337
+ end
338
+
339
+ teardown do
340
+ if @p
341
+ @p.stop unless @p.stopped?
342
+ @p.before_shutdown unless @p.before_shutdown?
343
+ @p.shutdown unless @p.shutdown?
344
+ @p.after_shutdown unless @p.after_shutdown?
345
+ @p.close unless @p.closed?
346
+ @p.terminate unless @p.terminated?
347
+ end
348
+ if @bufdir
349
+ Dir.glob(File.join(@bufdir, '*')).each do |path|
350
+ next if ['.', '..'].include?(File.basename(path))
351
+ File.delete(path)
501
352
  end
502
353
  end
503
- assert_equal 0, buf.instance_eval{ @enqueue_hook_times }
504
-
505
- out = DummyOutput.new
506
- assert_equal nil, out.written
507
-
508
- buf.before_shutdown(out)
509
-
510
- assert_equal 3, buf.instance_eval{ @enqueue_hook_times } # k0, k1, k2
511
- assert_equal 5, out.written.size
512
- assert_equal [c2[0], c2[1], c1[0], c1[1], c1[2]], out.written
513
354
  end
514
355
 
515
- def test_resume
516
- buffer_path_for_resume_test = bufpath('resume')
517
-
518
- buf1 = Fluent::FileBuffer.new
519
- buf1.configure({'buffer_path' => buffer_path_for_resume_test})
520
- prefix = buf1.instance_eval{ @buffer_path_prefix }
521
- suffix = buf1.instance_eval{ @buffer_path_suffix }
522
-
523
- buf1.start
356
+ test '#resume returns staged/queued chunks with metadata' do
357
+ assert_equal 2, @p.stage.size
358
+ assert_equal 2, @p.queue.size
524
359
 
525
- chunk1 = buf1.new_chunk('key1')
526
- chunk1 << "data1\ndata2\n"
360
+ stage = @p.stage
527
361
 
528
- chunk2 = buf1.new_chunk('key2')
529
- chunk2 << "data3\ndata4\n"
362
+ m3 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
363
+ assert_equal @c3id, stage[m3].unique_id
364
+ assert_equal 4, stage[m3].size
365
+ assert_equal :staged, stage[m3].state
530
366
 
531
- assert chunk1
532
- assert chunk1.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/, "path from new_chunk must be a 'b' buffer chunk"
533
-
534
- buf1.enqueue(chunk1)
535
-
536
- assert chunk1
537
- assert chunk1.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.q[0-9a-f]+#{suffix}\Z/, "chunk1 must be enqueued"
538
- assert chunk2
539
- assert chunk2.path =~ /\A#{prefix}[-_.a-zA-Z0-9\%]+\.b[0-9a-f]+#{suffix}\Z/, "chunk2 is not enqueued yet"
540
-
541
- buf1.shutdown
367
+ m4 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
368
+ assert_equal @c4id, stage[m4].unique_id
369
+ assert_equal 3, stage[m4].size
370
+ assert_equal :staged, stage[m4].state
371
+ end
542
372
 
543
- buf2 = Fluent::FileBuffer.new
544
- Fluent::FileBuffer.send(:class_variable_set, :'@@buffer_paths', {})
545
- buf2.configure({'buffer_path' => buffer_path_for_resume_test})
546
- prefix = buf2.instance_eval{ @buffer_path_prefix }
547
- suffix = buf2.instance_eval{ @buffer_path_suffix }
373
+ test '#resume returns queued chunks ordered by last modified time (FIFO)' do
374
+ assert_equal 2, @p.stage.size
375
+ assert_equal 2, @p.queue.size
548
376
 
549
- # buf1.start -> resume is normal operation, but now, we cannot it.
550
- queue, map = buf2.resume
377
+ queue = @p.queue
551
378
 
552
- assert_equal 1, queue.size
553
- assert_equal 1, map.size
379
+ assert{ queue[0].modified_at < queue[1].modified_at }
554
380
 
555
- resumed_chunk1 = queue.first
556
- assert_equal chunk1.path, resumed_chunk1.path
557
- resumed_chunk2 = map['key2']
558
- assert_equal chunk2.path, resumed_chunk2.path
381
+ assert_equal @c1id, queue[0].unique_id
382
+ assert_equal :queued, queue[0].state
383
+ assert_equal event_time('2016-04-17 13:58:00 -0700').to_i, queue[0].metadata.timekey
384
+ assert_nil queue[0].metadata.tag
385
+ assert_nil queue[0].metadata.variables
386
+ assert_equal Time.parse('2016-04-17 13:58:00 -0700').localtime, queue[0].created_at
387
+ assert_equal Time.parse('2016-04-17 13:58:22 -0700').localtime, queue[0].modified_at
388
+ assert_equal 4, queue[0].size
559
389
 
560
- assert_equal "data1\ndata2\n", resumed_chunk1.read
561
- assert_equal "data3\ndata4\n", resumed_chunk2.read
390
+ assert_equal @c2id, queue[1].unique_id
391
+ assert_equal :queued, queue[1].state
392
+ assert_equal event_time('2016-04-17 13:59:00 -0700').to_i, queue[1].metadata.timekey
393
+ assert_nil queue[1].metadata.tag
394
+ assert_nil queue[1].metadata.variables
395
+ assert_equal Time.parse('2016-04-17 13:59:00 -0700').localtime, queue[1].created_at
396
+ assert_equal Time.parse('2016-04-17 13:59:23 -0700').localtime, queue[1].modified_at
397
+ assert_equal 3, queue[1].size
562
398
  end
399
+ end
563
400
 
564
- class DummyChain
565
- def next
566
- true
401
+ sub_test_case 'there are some existing file chunks without metadata file' do
402
+ setup do
403
+ @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
404
+
405
+ @c1id = Fluent::UniqueId.generate
406
+ p1 = File.join(@bufdir, "etest.201604171358.q#{Fluent::UniqueId.hex(@c1id)}.log")
407
+ File.open(p1, 'wb') do |f|
408
+ f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
409
+ f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
410
+ f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
411
+ f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
412
+ end
413
+ FileUtils.touch(p1, mtime: Time.parse('2016-04-17 13:58:28 -0700'))
414
+
415
+ @c2id = Fluent::UniqueId.generate
416
+ p2 = File.join(@bufdir, "etest.201604171359.q#{Fluent::UniqueId.hex(@c2id)}.log")
417
+ File.open(p2, 'wb') do |f|
418
+ f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
419
+ f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
420
+ f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
421
+ end
422
+ FileUtils.touch(p2, mtime: Time.parse('2016-04-17 13:59:30 -0700'))
423
+
424
+ @c3id = Fluent::UniqueId.generate
425
+ p3 = File.join(@bufdir, "etest.201604171400.b#{Fluent::UniqueId.hex(@c3id)}.log")
426
+ File.open(p3, 'wb') do |f|
427
+ f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
428
+ f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
429
+ f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
430
+ f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
431
+ end
432
+ FileUtils.touch(p3, mtime: Time.parse('2016-04-17 14:00:29 -0700'))
433
+
434
+ @c4id = Fluent::UniqueId.generate
435
+ p4 = File.join(@bufdir, "etest.201604171401.b#{Fluent::UniqueId.hex(@c4id)}.log")
436
+ File.open(p4, 'wb') do |f|
437
+ f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
438
+ f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
439
+ f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
440
+ end
441
+ FileUtils.touch(p4, mtime: Time.parse('2016-04-17 14:01:22 -0700'))
442
+
443
+ @bufpath = File.join(@bufdir, 'etest.*.log')
444
+
445
+ Fluent::Test.setup
446
+ @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
447
+ @p = Fluent::Plugin::FileBuffer.new
448
+ @p.owner = @d
449
+ @p.configure(config_element('buffer', '', {'path' => @bufpath}))
450
+ @p.start
451
+ end
452
+
453
+ teardown do
454
+ if @p
455
+ @p.stop unless @p.stopped?
456
+ @p.before_shutdown unless @p.before_shutdown?
457
+ @p.shutdown unless @p.shutdown?
458
+ @p.after_shutdown unless @p.after_shutdown?
459
+ @p.close unless @p.closed?
460
+ @p.terminate unless @p.terminated?
461
+ end
462
+ if @bufdir
463
+ Dir.glob(File.join(@bufdir, '*')).each do |path|
464
+ next if ['.', '..'].include?(File.basename(path))
465
+ File.delete(path)
466
+ end
567
467
  end
568
468
  end
569
469
 
570
- def test_resume_only_for_my_buffer_path
571
- chain = DummyChain.new
572
-
573
- buffer_path_for_resume_test_1 = bufpath('resume_fix.1.*.log')
574
- buffer_path_for_resume_test_2 = bufpath('resume_fix.*.log')
575
-
576
- buf1 = Fluent::FileBuffer.new
577
- buf1.configure({'buffer_path' => buffer_path_for_resume_test_1})
578
- buf1.start
579
-
580
- buf1.emit('key1', "x1\ty1\tz1\n", chain)
581
- buf1.emit('key1', "x2\ty2\tz2\n", chain)
470
+ test '#resume returns queued chunks for files without metadata' do
471
+ assert_equal 0, @p.stage.size
472
+ assert_equal 4, @p.queue.size
582
473
 
583
- assert buf1.instance_eval{ @map['key1'] }
474
+ queue = @p.queue
584
475
 
585
- buf1.shutdown
476
+ m = metadata()
586
477
 
587
- buf2 = Fluent::FileBuffer.new
588
- buf2.configure({'buffer_path' => buffer_path_for_resume_test_2}) # other buffer_path
478
+ assert_equal @c1id, queue[0].unique_id
479
+ assert_equal m, queue[0].metadata
480
+ assert_equal 0, queue[0].size
481
+ assert_equal :queued, queue[0].state
482
+ assert_equal Time.parse('2016-04-17 13:58:28 -0700'), queue[0].modified_at
589
483
 
590
- queue, map = buf2.resume
484
+ assert_equal @c2id, queue[1].unique_id
485
+ assert_equal m, queue[1].metadata
486
+ assert_equal 0, queue[1].size
487
+ assert_equal :queued, queue[1].state
488
+ assert_equal Time.parse('2016-04-17 13:59:30 -0700'), queue[1].modified_at
591
489
 
592
- assert_equal 0, queue.size
490
+ assert_equal @c3id, queue[2].unique_id
491
+ assert_equal m, queue[2].metadata
492
+ assert_equal 0, queue[2].size
493
+ assert_equal :queued, queue[2].state
494
+ assert_equal Time.parse('2016-04-17 14:00:29 -0700'), queue[2].modified_at
593
495
 
594
- ### TODO: This map size MUST be 0, but actually, 1
595
- # This is because 1.XXXXX is misunderstood like chunk key of resume_fix.*.log.
596
- # This may be a kind of bug, but we cannot decide whether 1. is a part of chunk key or not,
597
- # because current version of buffer plugin uses '.'(dot) as a one of chars for chunk encoding.
598
- # I think that this is a mistake of design, but we cannot fix it because updated plugin become
599
- # not to be able to resume existing file buffer chunk.
600
- # We will fix it in next API version of buffer plugin.
601
- assert_equal 1, map.size
496
+ assert_equal @c4id, queue[3].unique_id
497
+ assert_equal m, queue[3].metadata
498
+ assert_equal 0, queue[3].size
499
+ assert_equal :queued, queue[3].state
500
+ assert_equal Time.parse('2016-04-17 14:01:22 -0700'), queue[3].modified_at
602
501
  end
603
502
  end
604
503
  end