fluentd 1.16.5-x86-mingw32 → 1.17.1-x86-mingw32

Files changed (268)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +88 -0
  3. data/README.md +2 -1
  4. data/Rakefile +1 -1
  5. data/SECURITY.md +2 -2
  6. data/fluent.conf +14 -14
  7. data/lib/fluent/command/binlog_reader.rb +1 -1
  8. data/lib/fluent/command/cap_ctl.rb +4 -4
  9. data/lib/fluent/compat/call_super_mixin.rb +3 -3
  10. data/lib/fluent/compat/propagate_default.rb +4 -4
  11. data/lib/fluent/config/configure_proxy.rb +2 -2
  12. data/lib/fluent/config/types.rb +1 -1
  13. data/lib/fluent/config/yaml_parser/parser.rb +4 -0
  14. data/lib/fluent/configurable.rb +2 -2
  15. data/lib/fluent/counter/mutex_hash.rb +1 -1
  16. data/lib/fluent/fluent_log_event_router.rb +0 -2
  17. data/lib/fluent/log/console_adapter.rb +4 -2
  18. data/lib/fluent/plugin/buf_file.rb +1 -1
  19. data/lib/fluent/plugin/buffer/file_chunk.rb +1 -1
  20. data/lib/fluent/plugin/buffer/file_single_chunk.rb +2 -3
  21. data/lib/fluent/plugin/filter_parser.rb +26 -8
  22. data/lib/fluent/plugin/in_exec.rb +14 -2
  23. data/lib/fluent/plugin/in_http.rb +19 -54
  24. data/lib/fluent/plugin/in_sample.rb +13 -7
  25. data/lib/fluent/plugin/in_tail.rb +99 -25
  26. data/lib/fluent/plugin/out_copy.rb +1 -1
  27. data/lib/fluent/plugin/out_file.rb +8 -0
  28. data/lib/fluent/plugin/out_http.rb +137 -13
  29. data/lib/fluent/plugin/owned_by_mixin.rb +0 -1
  30. data/lib/fluent/plugin/parser_json.rb +26 -17
  31. data/lib/fluent/plugin/parser_msgpack.rb +24 -3
  32. data/lib/fluent/plugin_helper/http_server/server.rb +1 -1
  33. data/lib/fluent/plugin_helper/metrics.rb +2 -2
  34. data/lib/fluent/registry.rb +6 -6
  35. data/lib/fluent/test/output_test.rb +1 -1
  36. data/lib/fluent/unique_id.rb +1 -1
  37. data/lib/fluent/version.rb +1 -1
  38. data/templates/new_gem/fluent-plugin.gemspec.erb +6 -5
  39. metadata +109 -459
  40. data/.github/ISSUE_TEMPLATE/bug_report.yml +0 -71
  41. data/.github/ISSUE_TEMPLATE/config.yml +0 -5
  42. data/.github/ISSUE_TEMPLATE/feature_request.yml +0 -39
  43. data/.github/ISSUE_TEMPLATE.md +0 -17
  44. data/.github/PULL_REQUEST_TEMPLATE.md +0 -14
  45. data/.github/workflows/stale-actions.yml +0 -24
  46. data/.github/workflows/test.yml +0 -32
  47. data/.gitignore +0 -30
  48. data/Gemfile +0 -9
  49. data/fluentd.gemspec +0 -54
  50. data/test/command/test_binlog_reader.rb +0 -362
  51. data/test/command/test_ca_generate.rb +0 -70
  52. data/test/command/test_cap_ctl.rb +0 -100
  53. data/test/command/test_cat.rb +0 -128
  54. data/test/command/test_ctl.rb +0 -56
  55. data/test/command/test_fluentd.rb +0 -1291
  56. data/test/command/test_plugin_config_formatter.rb +0 -397
  57. data/test/command/test_plugin_generator.rb +0 -109
  58. data/test/compat/test_calls_super.rb +0 -166
  59. data/test/compat/test_parser.rb +0 -92
  60. data/test/config/assertions.rb +0 -42
  61. data/test/config/test_config_parser.rb +0 -551
  62. data/test/config/test_configurable.rb +0 -1784
  63. data/test/config/test_configure_proxy.rb +0 -604
  64. data/test/config/test_dsl.rb +0 -415
  65. data/test/config/test_element.rb +0 -518
  66. data/test/config/test_literal_parser.rb +0 -309
  67. data/test/config/test_plugin_configuration.rb +0 -56
  68. data/test/config/test_section.rb +0 -191
  69. data/test/config/test_system_config.rb +0 -195
  70. data/test/config/test_types.rb +0 -408
  71. data/test/counter/test_client.rb +0 -563
  72. data/test/counter/test_error.rb +0 -44
  73. data/test/counter/test_mutex_hash.rb +0 -179
  74. data/test/counter/test_server.rb +0 -589
  75. data/test/counter/test_store.rb +0 -258
  76. data/test/counter/test_validator.rb +0 -137
  77. data/test/helper.rb +0 -155
  78. data/test/helpers/fuzzy_assert.rb +0 -89
  79. data/test/helpers/process_extenstion.rb +0 -33
  80. data/test/log/test_console_adapter.rb +0 -110
  81. data/test/plugin/data/2010/01/20100102-030405.log +0 -0
  82. data/test/plugin/data/2010/01/20100102-030406.log +0 -0
  83. data/test/plugin/data/2010/01/20100102.log +0 -0
  84. data/test/plugin/data/log/bar +0 -0
  85. data/test/plugin/data/log/foo/bar.log +0 -0
  86. data/test/plugin/data/log/foo/bar2 +0 -0
  87. data/test/plugin/data/log/test.log +0 -0
  88. data/test/plugin/data/sd_file/config +0 -11
  89. data/test/plugin/data/sd_file/config.json +0 -17
  90. data/test/plugin/data/sd_file/config.yaml +0 -11
  91. data/test/plugin/data/sd_file/config.yml +0 -11
  92. data/test/plugin/data/sd_file/invalid_config.yml +0 -7
  93. data/test/plugin/in_tail/test_fifo.rb +0 -121
  94. data/test/plugin/in_tail/test_io_handler.rb +0 -150
  95. data/test/plugin/in_tail/test_position_file.rb +0 -346
  96. data/test/plugin/out_forward/test_ack_handler.rb +0 -140
  97. data/test/plugin/out_forward/test_connection_manager.rb +0 -145
  98. data/test/plugin/out_forward/test_handshake_protocol.rb +0 -112
  99. data/test/plugin/out_forward/test_load_balancer.rb +0 -106
  100. data/test/plugin/out_forward/test_socket_cache.rb +0 -174
  101. data/test/plugin/test_bare_output.rb +0 -131
  102. data/test/plugin/test_base.rb +0 -247
  103. data/test/plugin/test_buf_file.rb +0 -1314
  104. data/test/plugin/test_buf_file_single.rb +0 -898
  105. data/test/plugin/test_buf_memory.rb +0 -42
  106. data/test/plugin/test_buffer.rb +0 -1493
  107. data/test/plugin/test_buffer_chunk.rb +0 -209
  108. data/test/plugin/test_buffer_file_chunk.rb +0 -871
  109. data/test/plugin/test_buffer_file_single_chunk.rb +0 -611
  110. data/test/plugin/test_buffer_memory_chunk.rb +0 -339
  111. data/test/plugin/test_compressable.rb +0 -87
  112. data/test/plugin/test_file_util.rb +0 -96
  113. data/test/plugin/test_filter.rb +0 -368
  114. data/test/plugin/test_filter_grep.rb +0 -697
  115. data/test/plugin/test_filter_parser.rb +0 -731
  116. data/test/plugin/test_filter_record_transformer.rb +0 -577
  117. data/test/plugin/test_filter_stdout.rb +0 -207
  118. data/test/plugin/test_formatter_csv.rb +0 -136
  119. data/test/plugin/test_formatter_hash.rb +0 -38
  120. data/test/plugin/test_formatter_json.rb +0 -61
  121. data/test/plugin/test_formatter_ltsv.rb +0 -70
  122. data/test/plugin/test_formatter_msgpack.rb +0 -28
  123. data/test/plugin/test_formatter_out_file.rb +0 -116
  124. data/test/plugin/test_formatter_single_value.rb +0 -44
  125. data/test/plugin/test_formatter_tsv.rb +0 -76
  126. data/test/plugin/test_in_debug_agent.rb +0 -49
  127. data/test/plugin/test_in_exec.rb +0 -261
  128. data/test/plugin/test_in_forward.rb +0 -1178
  129. data/test/plugin/test_in_gc_stat.rb +0 -62
  130. data/test/plugin/test_in_http.rb +0 -1102
  131. data/test/plugin/test_in_monitor_agent.rb +0 -922
  132. data/test/plugin/test_in_object_space.rb +0 -66
  133. data/test/plugin/test_in_sample.rb +0 -190
  134. data/test/plugin/test_in_syslog.rb +0 -505
  135. data/test/plugin/test_in_tail.rb +0 -3288
  136. data/test/plugin/test_in_tcp.rb +0 -328
  137. data/test/plugin/test_in_udp.rb +0 -296
  138. data/test/plugin/test_in_unix.rb +0 -181
  139. data/test/plugin/test_input.rb +0 -137
  140. data/test/plugin/test_metadata.rb +0 -89
  141. data/test/plugin/test_metrics.rb +0 -294
  142. data/test/plugin/test_metrics_local.rb +0 -96
  143. data/test/plugin/test_multi_output.rb +0 -204
  144. data/test/plugin/test_out_copy.rb +0 -308
  145. data/test/plugin/test_out_exec.rb +0 -312
  146. data/test/plugin/test_out_exec_filter.rb +0 -606
  147. data/test/plugin/test_out_file.rb +0 -1038
  148. data/test/plugin/test_out_forward.rb +0 -1349
  149. data/test/plugin/test_out_http.rb +0 -429
  150. data/test/plugin/test_out_null.rb +0 -105
  151. data/test/plugin/test_out_relabel.rb +0 -28
  152. data/test/plugin/test_out_roundrobin.rb +0 -146
  153. data/test/plugin/test_out_secondary_file.rb +0 -458
  154. data/test/plugin/test_out_stdout.rb +0 -205
  155. data/test/plugin/test_out_stream.rb +0 -103
  156. data/test/plugin/test_output.rb +0 -1334
  157. data/test/plugin/test_output_as_buffered.rb +0 -2024
  158. data/test/plugin/test_output_as_buffered_backup.rb +0 -363
  159. data/test/plugin/test_output_as_buffered_compress.rb +0 -179
  160. data/test/plugin/test_output_as_buffered_overflow.rb +0 -250
  161. data/test/plugin/test_output_as_buffered_retries.rb +0 -966
  162. data/test/plugin/test_output_as_buffered_secondary.rb +0 -882
  163. data/test/plugin/test_output_as_standard.rb +0 -374
  164. data/test/plugin/test_owned_by.rb +0 -35
  165. data/test/plugin/test_parser.rb +0 -399
  166. data/test/plugin/test_parser_apache.rb +0 -42
  167. data/test/plugin/test_parser_apache2.rb +0 -47
  168. data/test/plugin/test_parser_apache_error.rb +0 -45
  169. data/test/plugin/test_parser_csv.rb +0 -200
  170. data/test/plugin/test_parser_json.rb +0 -138
  171. data/test/plugin/test_parser_labeled_tsv.rb +0 -160
  172. data/test/plugin/test_parser_multiline.rb +0 -111
  173. data/test/plugin/test_parser_nginx.rb +0 -88
  174. data/test/plugin/test_parser_none.rb +0 -52
  175. data/test/plugin/test_parser_regexp.rb +0 -284
  176. data/test/plugin/test_parser_syslog.rb +0 -650
  177. data/test/plugin/test_parser_tsv.rb +0 -122
  178. data/test/plugin/test_sd_file.rb +0 -228
  179. data/test/plugin/test_sd_srv.rb +0 -230
  180. data/test/plugin/test_storage.rb +0 -167
  181. data/test/plugin/test_storage_local.rb +0 -335
  182. data/test/plugin/test_string_util.rb +0 -26
  183. data/test/plugin_helper/data/cert/cert-key.pem +0 -27
  184. data/test/plugin_helper/data/cert/cert-with-CRLF.pem +0 -19
  185. data/test/plugin_helper/data/cert/cert-with-no-newline.pem +0 -19
  186. data/test/plugin_helper/data/cert/cert.pem +0 -19
  187. data/test/plugin_helper/data/cert/cert_chains/ca-cert-key.pem +0 -27
  188. data/test/plugin_helper/data/cert/cert_chains/ca-cert.pem +0 -20
  189. data/test/plugin_helper/data/cert/cert_chains/cert-key.pem +0 -27
  190. data/test/plugin_helper/data/cert/cert_chains/cert.pem +0 -40
  191. data/test/plugin_helper/data/cert/empty.pem +0 -0
  192. data/test/plugin_helper/data/cert/generate_cert.rb +0 -125
  193. data/test/plugin_helper/data/cert/with_ca/ca-cert-key-pass.pem +0 -30
  194. data/test/plugin_helper/data/cert/with_ca/ca-cert-key.pem +0 -27
  195. data/test/plugin_helper/data/cert/with_ca/ca-cert-pass.pem +0 -20
  196. data/test/plugin_helper/data/cert/with_ca/ca-cert.pem +0 -20
  197. data/test/plugin_helper/data/cert/with_ca/cert-key-pass.pem +0 -30
  198. data/test/plugin_helper/data/cert/with_ca/cert-key.pem +0 -27
  199. data/test/plugin_helper/data/cert/with_ca/cert-pass.pem +0 -21
  200. data/test/plugin_helper/data/cert/with_ca/cert.pem +0 -21
  201. data/test/plugin_helper/data/cert/without_ca/cert-key-pass.pem +0 -30
  202. data/test/plugin_helper/data/cert/without_ca/cert-key.pem +0 -27
  203. data/test/plugin_helper/data/cert/without_ca/cert-pass.pem +0 -20
  204. data/test/plugin_helper/data/cert/without_ca/cert.pem +0 -20
  205. data/test/plugin_helper/http_server/test_app.rb +0 -65
  206. data/test/plugin_helper/http_server/test_route.rb +0 -32
  207. data/test/plugin_helper/service_discovery/test_manager.rb +0 -93
  208. data/test/plugin_helper/service_discovery/test_round_robin_balancer.rb +0 -21
  209. data/test/plugin_helper/test_cert_option.rb +0 -25
  210. data/test/plugin_helper/test_child_process.rb +0 -862
  211. data/test/plugin_helper/test_compat_parameters.rb +0 -358
  212. data/test/plugin_helper/test_event_emitter.rb +0 -80
  213. data/test/plugin_helper/test_event_loop.rb +0 -52
  214. data/test/plugin_helper/test_extract.rb +0 -194
  215. data/test/plugin_helper/test_formatter.rb +0 -255
  216. data/test/plugin_helper/test_http_server_helper.rb +0 -372
  217. data/test/plugin_helper/test_inject.rb +0 -561
  218. data/test/plugin_helper/test_metrics.rb +0 -137
  219. data/test/plugin_helper/test_parser.rb +0 -264
  220. data/test/plugin_helper/test_record_accessor.rb +0 -238
  221. data/test/plugin_helper/test_retry_state.rb +0 -1006
  222. data/test/plugin_helper/test_server.rb +0 -1895
  223. data/test/plugin_helper/test_service_discovery.rb +0 -165
  224. data/test/plugin_helper/test_socket.rb +0 -146
  225. data/test/plugin_helper/test_storage.rb +0 -542
  226. data/test/plugin_helper/test_thread.rb +0 -164
  227. data/test/plugin_helper/test_timer.rb +0 -130
  228. data/test/scripts/exec_script.rb +0 -32
  229. data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +0 -7
  230. data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +0 -7
  231. data/test/scripts/fluent/plugin/formatter_known.rb +0 -8
  232. data/test/scripts/fluent/plugin/out_test.rb +0 -81
  233. data/test/scripts/fluent/plugin/out_test2.rb +0 -80
  234. data/test/scripts/fluent/plugin/parser_known.rb +0 -4
  235. data/test/test_capability.rb +0 -74
  236. data/test/test_clock.rb +0 -164
  237. data/test/test_config.rb +0 -369
  238. data/test/test_configdsl.rb +0 -148
  239. data/test/test_daemonizer.rb +0 -91
  240. data/test/test_engine.rb +0 -203
  241. data/test/test_event.rb +0 -531
  242. data/test/test_event_router.rb +0 -348
  243. data/test/test_event_time.rb +0 -199
  244. data/test/test_file_wrapper.rb +0 -53
  245. data/test/test_filter.rb +0 -121
  246. data/test/test_fluent_log_event_router.rb +0 -99
  247. data/test/test_formatter.rb +0 -369
  248. data/test/test_input.rb +0 -31
  249. data/test/test_log.rb +0 -1076
  250. data/test/test_match.rb +0 -148
  251. data/test/test_mixin.rb +0 -351
  252. data/test/test_msgpack_factory.rb +0 -50
  253. data/test/test_oj_options.rb +0 -55
  254. data/test/test_output.rb +0 -278
  255. data/test/test_plugin.rb +0 -251
  256. data/test/test_plugin_classes.rb +0 -370
  257. data/test/test_plugin_helper.rb +0 -81
  258. data/test/test_plugin_id.rb +0 -119
  259. data/test/test_process.rb +0 -14
  260. data/test/test_root_agent.rb +0 -951
  261. data/test/test_static_config_analysis.rb +0 -177
  262. data/test/test_supervisor.rb +0 -821
  263. data/test/test_test_drivers.rb +0 -136
  264. data/test/test_time_formatter.rb +0 -301
  265. data/test/test_time_parser.rb +0 -362
  266. data/test/test_tls.rb +0 -65
  267. data/test/test_unique_id.rb +0 -47
  268. data/test/test_variable_store.rb +0 -65
data/test/plugin/test_buf_file.rb
@@ -1,1314 +0,0 @@
1
- require_relative '../helper'
2
- require 'fluent/plugin/buf_file'
3
- require 'fluent/plugin/output'
4
- require 'fluent/unique_id'
5
- require 'fluent/system_config'
6
- require 'fluent/env'
7
-
8
- require 'msgpack'
9
-
10
- module FluentPluginFileBufferTest
11
- class DummyOutputPlugin < Fluent::Plugin::Output
12
- Fluent::Plugin.register_output('buffer_file_test_output', self)
13
- config_section :buffer do
14
- config_set_default :@type, 'file'
15
- end
16
- def multi_workers_ready?
17
- true
18
- end
19
- def write(chunk)
20
- # drop
21
- end
22
- end
23
- end
24
-
25
- class FileBufferTest < Test::Unit::TestCase
26
- def metadata(timekey: nil, tag: nil, variables: nil, seq: 0)
27
- m = Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
28
- m.seq = seq
29
- m
30
- end
31
-
32
- def write_metadata_old(path, chunk_id, metadata, size, ctime, mtime)
33
- metadata = {
34
- timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
35
- id: chunk_id,
36
- s: size,
37
- c: ctime,
38
- m: mtime,
39
- }
40
- File.open(path, 'wb') do |f|
41
- f.write metadata.to_msgpack
42
- end
43
- end
44
-
45
- def write_metadata(path, chunk_id, metadata, size, ctime, mtime)
46
- metadata = {
47
- timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
48
- seq: metadata.seq,
49
- id: chunk_id,
50
- s: size,
51
- c: ctime,
52
- m: mtime,
53
- }
54
-
55
- data = metadata.to_msgpack
56
- size = [data.size].pack('N')
57
- File.open(path, 'wb') do |f|
58
- f.write(Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER + size + data)
59
- end
60
- end
61
-
62
- sub_test_case 'non configured buffer plugin instance' do
63
- setup do
64
- Fluent::Test.setup
65
-
66
- @dir = File.expand_path('../../tmp/buffer_file_dir', __FILE__)
67
- FileUtils.rm_rf @dir
68
- FileUtils.mkdir_p @dir
69
- end
70
-
71
- test 'path should include * normally' do
72
- d = FluentPluginFileBufferTest::DummyOutputPlugin.new
73
- p = Fluent::Plugin::FileBuffer.new
74
- p.owner = d
75
- p.configure(config_element('buffer', '', {'path' => File.join(@dir, 'buffer.*.file')}))
76
- assert_equal File.join(@dir, 'buffer.*.file'), p.path
77
- end
78
-
79
- data('default' => [nil, 'log'],
80
- 'conf' => ['.buf', 'buf'])
81
- test 'existing directory will be used with additional default file name' do |params|
82
- conf, suffix = params
83
- d = FluentPluginFileBufferTest::DummyOutputPlugin.new
84
- p = Fluent::Plugin::FileBuffer.new
85
- p.owner = d
86
- c = {'path' => @dir}
87
- c['path_suffix'] = conf if conf
88
- p.configure(config_element('buffer', '', c))
89
- assert_equal File.join(@dir, "buffer.*.#{suffix}"), p.path
90
- end
91
-
92
- data('default' => [nil, 'log'],
93
- 'conf' => ['.buf', 'buf'])
94
- test 'unexisting path without * handled as directory' do |params|
95
- conf, suffix = params
96
- d = FluentPluginFileBufferTest::DummyOutputPlugin.new
97
- p = Fluent::Plugin::FileBuffer.new
98
- p.owner = d
99
- c = {'path' => File.join(@dir, 'buffer')}
100
- c['path_suffix'] = conf if conf
101
- p.configure(config_element('buffer', '', c))
102
- assert_equal File.join(@dir, 'buffer', "buffer.*.#{suffix}"), p.path
103
- end
104
- end
105
-
106
- sub_test_case 'buffer configurations and workers' do
107
- setup do
108
- @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
109
- FileUtils.rm_rf @bufdir
110
- Fluent::Test.setup
111
-
112
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
113
- @p = Fluent::Plugin::FileBuffer.new
114
- @p.owner = @d
115
- end
116
-
117
- test 'raise error if configured path is of existing file' do
118
- @bufpath = File.join(@bufdir, 'buf')
119
- FileUtils.mkdir_p @bufdir
120
- File.open(@bufpath, 'w'){|f| } # create and close the file
121
- assert File.exist?(@bufpath)
122
- assert File.file?(@bufpath)
123
-
124
- buf_conf = config_element('buffer', '', {'path' => @bufpath})
125
- assert_raise Fluent::ConfigError.new("Plugin 'file' does not support multi workers configuration (Fluent::Plugin::FileBuffer)") do
126
- Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
127
- @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
128
- end
129
- end
130
- end
131
-
132
- test 'raise error if fluentd is configured to use file path pattern and multi workers' do
133
- @bufpath = File.join(@bufdir, 'testbuf.*.log')
134
- buf_conf = config_element('buffer', '', {'path' => @bufpath})
135
- assert_raise Fluent::ConfigError.new("Plugin 'file' does not support multi workers configuration (Fluent::Plugin::FileBuffer)") do
136
- Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
137
- @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
138
- end
139
- end
140
- end
141
-
142
- test 'enables multi worker configuration with unexisting directory path' do
143
- assert_false File.exist?(@bufdir)
144
- buf_conf = config_element('buffer', '', {'path' => @bufdir})
145
- assert_nothing_raised do
146
- Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
147
- @d.configure(config_element('ROOT', '', {}, [buf_conf]))
148
- end
149
- end
150
- end
151
-
152
- test 'enables multi worker configuration with existing directory path' do
153
- FileUtils.mkdir_p @bufdir
154
- buf_conf = config_element('buffer', '', {'path' => @bufdir})
155
- assert_nothing_raised do
156
- Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
157
- @d.configure(config_element('ROOT', '', {}, [buf_conf]))
158
- end
159
- end
160
- end
161
-
162
- test 'enables multi worker configuration with root dir' do
163
- buf_conf = config_element('buffer', '')
164
- assert_nothing_raised do
165
- Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
166
- @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
167
- end
168
- end
169
- end
170
- end
171
-
172
- sub_test_case 'buffer plugin configured only with path' do
173
- setup do
174
- @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
175
- @bufpath = File.join(@bufdir, 'testbuf.*.log')
176
- FileUtils.rm_r @bufdir if File.exist?(@bufdir)
177
-
178
- Fluent::Test.setup
179
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
180
- @p = Fluent::Plugin::FileBuffer.new
181
- @p.owner = @d
182
- @p.configure(config_element('buffer', '', {'path' => @bufpath}))
183
- @p.start
184
- end
185
-
186
- teardown do
187
- if @p
188
- @p.stop unless @p.stopped?
189
- @p.before_shutdown unless @p.before_shutdown?
190
- @p.shutdown unless @p.shutdown?
191
- @p.after_shutdown unless @p.after_shutdown?
192
- @p.close unless @p.closed?
193
- @p.terminate unless @p.terminated?
194
- end
195
- if @bufdir
196
- Dir.glob(File.join(@bufdir, '*')).each do |path|
197
- next if ['.', '..'].include?(File.basename(path))
198
- File.delete(path)
199
- end
200
- end
201
- end
202
-
203
- test 'this is persistent plugin' do
204
- assert @p.persistent?
205
- end
206
-
207
- test '#start creates directory for buffer chunks' do
208
- plugin = Fluent::Plugin::FileBuffer.new
209
- plugin.owner = @d
210
- rand_num = rand(0..100)
211
- bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
212
- bufdir = File.dirname(bufpath)
213
-
214
- FileUtils.rm_r bufdir if File.exist?(bufdir)
215
- assert !File.exist?(bufdir)
216
-
217
- plugin.configure(config_element('buffer', '', {'path' => bufpath}))
218
- assert !File.exist?(bufdir)
219
-
220
- plugin.start
221
- assert File.exist?(bufdir)
222
- assert{ File.stat(bufdir).mode.to_s(8).end_with?('755') }
223
-
224
- plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
225
- FileUtils.rm_r bufdir
226
- end
227
-
228
- test '#start creates directory for buffer chunks with specified permission' do
229
- omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
230
-
231
- plugin = Fluent::Plugin::FileBuffer.new
232
- plugin.owner = @d
233
- rand_num = rand(0..100)
234
- bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
235
- bufdir = File.dirname(bufpath)
236
-
237
- FileUtils.rm_r bufdir if File.exist?(bufdir)
238
- assert !File.exist?(bufdir)
239
-
240
- plugin.configure(config_element('buffer', '', {'path' => bufpath, 'dir_permission' => '0700'}))
241
- assert !File.exist?(bufdir)
242
-
243
- plugin.start
244
- assert File.exist?(bufdir)
245
- assert{ File.stat(bufdir).mode.to_s(8).end_with?('700') }
246
-
247
- plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
248
- FileUtils.rm_r bufdir
249
- end
250
-
251
- test '#start creates directory for buffer chunks with specified permission via system config' do
252
- omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
253
-
254
- sysconf = {'dir_permission' => '700'}
255
- Fluent::SystemConfig.overwrite_system_config(sysconf) do
256
- plugin = Fluent::Plugin::FileBuffer.new
257
- plugin.owner = @d
258
- rand_num = rand(0..100)
259
- bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
260
- bufdir = File.dirname(bufpath)
261
-
262
- FileUtils.rm_r bufdir if File.exist?(bufdir)
263
- assert !File.exist?(bufdir)
264
-
265
- plugin.configure(config_element('buffer', '', {'path' => bufpath}))
266
- assert !File.exist?(bufdir)
267
-
268
- plugin.start
269
- assert File.exist?(bufdir)
270
- assert{ File.stat(bufdir).mode.to_s(8).end_with?('700') }
271
-
272
- plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
273
- FileUtils.rm_r bufdir
274
- end
275
- end
276
-
277
- test '#generate_chunk generates blank file chunk on path from unique_id of metadata' do
278
- m1 = metadata()
279
- c1 = @p.generate_chunk(m1)
280
- assert c1.is_a? Fluent::Plugin::Buffer::FileChunk
281
- assert_equal m1, c1.metadata
282
- assert c1.empty?
283
- assert_equal :unstaged, c1.state
284
- assert_equal Fluent::DEFAULT_FILE_PERMISSION, c1.permission
285
- assert_equal @bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c1.unique_id)}."), c1.path
286
- assert{ File.stat(c1.path).mode.to_s(8).end_with?('644') }
287
-
288
- m2 = metadata(timekey: event_time('2016-04-17 11:15:00 -0700').to_i)
289
- c2 = @p.generate_chunk(m2)
290
- assert c2.is_a? Fluent::Plugin::Buffer::FileChunk
291
- assert_equal m2, c2.metadata
292
- assert c2.empty?
293
- assert_equal :unstaged, c2.state
294
- assert_equal Fluent::DEFAULT_FILE_PERMISSION, c2.permission
295
- assert_equal @bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c2.unique_id)}."), c2.path
296
- assert{ File.stat(c2.path).mode.to_s(8).end_with?('644') }
297
-
298
- c1.purge
299
- c2.purge
300
- end
301
-
302
- test '#generate_chunk generates blank file chunk with specified permission' do
303
- omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
304
-
305
- plugin = Fluent::Plugin::FileBuffer.new
306
- plugin.owner = @d
307
- rand_num = rand(0..100)
308
- bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
309
- bufdir = File.dirname(bufpath)
310
-
311
- FileUtils.rm_r bufdir if File.exist?(bufdir)
312
- assert !File.exist?(bufdir)
313
-
314
- plugin.configure(config_element('buffer', '', {'path' => bufpath, 'file_permission' => '0600'}))
315
- assert !File.exist?(bufdir)
316
- plugin.start
317
-
318
- m = metadata()
319
- c = plugin.generate_chunk(m)
320
- assert c.is_a? Fluent::Plugin::Buffer::FileChunk
321
- assert_equal m, c.metadata
322
- assert c.empty?
323
- assert_equal :unstaged, c.state
324
- assert_equal 0600, c.permission
325
- assert_equal bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c.unique_id)}."), c.path
326
- assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
327
-
328
- c.purge
329
-
330
- plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
331
- FileUtils.rm_r bufdir
332
- end
333
-
334
- test '#generate_chunk generates blank file chunk with specified permission with system_config' do
335
- omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
336
-
337
- begin
338
- plugin = Fluent::Plugin::FileBuffer.new
339
- plugin.owner = @d
340
- rand_num = rand(0..100)
341
- bufpath = File.join(File.expand_path("../../tmp/buffer_file_#{rand_num}", __FILE__), 'testbuf.*.log')
342
- bufdir = File.dirname(bufpath)
343
-
344
- FileUtils.rm_r bufdir if File.exist?(bufdir)
345
- assert !File.exist?(bufdir)
346
-
347
- plugin.configure(config_element('buffer', '', { 'path' => bufpath }))
348
-
349
- assert !File.exist?(bufdir)
350
- plugin.start
351
-
352
- m = metadata()
353
- c = nil
354
- Fluent::SystemConfig.overwrite_system_config("file_permission" => "700") do
355
- c = plugin.generate_chunk(m)
356
- end
357
-
358
- assert c.is_a? Fluent::Plugin::Buffer::FileChunk
359
- assert_equal m, c.metadata
360
- assert c.empty?
361
- assert_equal :unstaged, c.state
362
- assert_equal 0700, c.permission
363
- assert_equal bufpath.gsub('.*.', ".b#{Fluent::UniqueId.hex(c.unique_id)}."), c.path
364
- assert{ File.stat(c.path).mode.to_s(8).end_with?('700') }
365
-
366
- c.purge
367
-
368
- plugin.stop; plugin.before_shutdown; plugin.shutdown; plugin.after_shutdown; plugin.close; plugin.terminate
369
- ensure
370
- FileUtils.rm_r bufdir
371
- end
372
- end
373
- end
374
-
375
- sub_test_case 'configured with system root directory and plugin @id' do
376
- setup do
377
- @root_dir = File.expand_path('../../tmp/buffer_file_root', __FILE__)
378
- FileUtils.rm_rf @root_dir
379
-
380
- Fluent::Test.setup
381
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
382
- @p = Fluent::Plugin::FileBuffer.new
383
- @p.owner = @d
384
- end
385
-
386
- teardown do
387
- if @p
388
- @p.stop unless @p.stopped?
389
- @p.before_shutdown unless @p.before_shutdown?
390
- @p.shutdown unless @p.shutdown?
391
- @p.after_shutdown unless @p.after_shutdown?
392
- @p.close unless @p.closed?
393
- @p.terminate unless @p.terminated?
394
- end
395
- end
396
-
397
- data('default' => [nil, 'log'],
398
- 'conf' => ['.buf', 'buf'])
399
- test '#start creates directory for buffer chunks' do |params|
400
- conf, suffix = params
401
- c = {}
402
- c['path_suffix'] = conf if conf
403
- Fluent::SystemConfig.overwrite_system_config('root_dir' => @root_dir) do
404
- @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}))
405
- @p.configure(config_element('buffer', '', c))
406
- end
407
-
408
- expected_buffer_path = File.join(@root_dir, 'worker0', 'dummy_output_with_buf', 'buffer', "buffer.*.#{suffix}")
409
- expected_buffer_dir = File.dirname(expected_buffer_path)
410
- assert_equal expected_buffer_path, @p.path
411
- assert_false Dir.exist?(expected_buffer_dir)
412
-
413
- @p.start
414
-
415
- assert Dir.exist?(expected_buffer_dir)
416
- end
417
- end
418
-
419
- sub_test_case 'there are no existing file chunks' do
420
- setup do
421
- @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
422
- @bufpath = File.join(@bufdir, 'testbuf.*.log')
423
- FileUtils.rm_r @bufdir if File.exist?(@bufdir)
424
-
425
- Fluent::Test.setup
426
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
427
- @p = Fluent::Plugin::FileBuffer.new
428
- @p.owner = @d
429
- @p.configure(config_element('buffer', '', {'path' => @bufpath}))
430
- @p.start
431
- end
432
- teardown do
433
- if @p
434
- @p.stop unless @p.stopped?
435
- @p.before_shutdown unless @p.before_shutdown?
436
- @p.shutdown unless @p.shutdown?
437
- @p.after_shutdown unless @p.after_shutdown?
438
- @p.close unless @p.closed?
439
- @p.terminate unless @p.terminated?
440
- end
441
- if @bufdir
442
- Dir.glob(File.join(@bufdir, '*')).each do |path|
443
- next if ['.', '..'].include?(File.basename(path))
444
- File.delete(path)
445
- end
446
- end
447
- end
448
-
449
- test '#resume returns empty buffer state' do
450
- ary = @p.resume
451
- assert_equal({}, ary[0])
452
- assert_equal([], ary[1])
453
- end
454
- end
455
-
456
- sub_test_case 'there are some existing file chunks' do
457
- setup do
458
- @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
459
- FileUtils.mkdir_p @bufdir unless File.exist?(@bufdir)
460
-
461
- @c1id = Fluent::UniqueId.generate
462
- p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
463
- File.open(p1, 'wb') do |f|
464
- f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
465
- f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
466
- f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
467
- f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
468
- end
469
- write_metadata(
470
- p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
471
- 4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
472
- )
473
-
474
- @c2id = Fluent::UniqueId.generate
475
- p2 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c2id)}.log")
476
- File.open(p2, 'wb') do |f|
477
- f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
478
- f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
479
- f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
480
- end
481
- write_metadata(
482
- p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
483
- 3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
484
- )
485
-
486
- @c3id = Fluent::UniqueId.generate
487
- p3 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c3id)}.log")
488
- File.open(p3, 'wb') do |f|
489
- f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
490
- f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
491
- f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
492
- f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
493
- end
494
- write_metadata(
495
- p3 + '.meta', @c3id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
496
- 4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
497
- )
498
-
499
- @c4id = Fluent::UniqueId.generate
500
- p4 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c4id)}.log")
501
- File.open(p4, 'wb') do |f|
502
- f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
503
- f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
504
- f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
505
- end
506
- write_metadata(
507
- p4 + '.meta', @c4id, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
508
- 3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
509
- )
510
-
511
- @bufpath = File.join(@bufdir, 'etest.*.log')
512
-
513
- Fluent::Test.setup
514
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
515
- @p = Fluent::Plugin::FileBuffer.new
516
- @p.owner = @d
517
- @p.configure(config_element('buffer', '', {'path' => @bufpath}))
518
- @p.start
519
- end
520
-
521
- teardown do
522
- if @p
523
- @p.stop unless @p.stopped?
524
- @p.before_shutdown unless @p.before_shutdown?
525
- @p.shutdown unless @p.shutdown?
526
- @p.after_shutdown unless @p.after_shutdown?
527
- @p.close unless @p.closed?
528
- @p.terminate unless @p.terminated?
529
- end
530
- if @bufdir
531
- Dir.glob(File.join(@bufdir, '*')).each do |path|
532
- next if ['.', '..'].include?(File.basename(path))
533
- File.delete(path)
534
- end
535
- end
536
- end
537
-
538
- test '#resume returns staged/queued chunks with metadata' do
539
- assert_equal 2, @p.stage.size
540
- assert_equal 2, @p.queue.size
541
-
542
- stage = @p.stage
543
-
544
- m3 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
545
- assert_equal @c3id, stage[m3].unique_id
546
- assert_equal 4, stage[m3].size
547
- assert_equal :staged, stage[m3].state
548
-
549
- m4 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
550
- assert_equal @c4id, stage[m4].unique_id
551
- assert_equal 3, stage[m4].size
552
- assert_equal :staged, stage[m4].state
553
- end
554
-
555
- test '#resume returns queued chunks ordered by last modified time (FIFO)' do
556
- assert_equal 2, @p.stage.size
557
- assert_equal 2, @p.queue.size
558
-
559
- queue = @p.queue
560
-
561
- assert{ queue[0].modified_at < queue[1].modified_at }
562
-
563
- assert_equal @c1id, queue[0].unique_id
564
- assert_equal :queued, queue[0].state
565
- assert_equal event_time('2016-04-17 13:58:00 -0700').to_i, queue[0].metadata.timekey
566
- assert_nil queue[0].metadata.tag
567
- assert_nil queue[0].metadata.variables
568
- assert_equal Time.parse('2016-04-17 13:58:00 -0700').localtime, queue[0].created_at
569
- assert_equal Time.parse('2016-04-17 13:58:22 -0700').localtime, queue[0].modified_at
570
- assert_equal 4, queue[0].size
571
-
572
- assert_equal @c2id, queue[1].unique_id
573
- assert_equal :queued, queue[1].state
574
- assert_equal event_time('2016-04-17 13:59:00 -0700').to_i, queue[1].metadata.timekey
575
- assert_nil queue[1].metadata.tag
576
- assert_nil queue[1].metadata.variables
577
- assert_equal Time.parse('2016-04-17 13:59:00 -0700').localtime, queue[1].created_at
578
- assert_equal Time.parse('2016-04-17 13:59:23 -0700').localtime, queue[1].modified_at
579
- assert_equal 3, queue[1].size
580
- end
581
- end
582
-
583
- sub_test_case 'there are some existing file chunks with placeholders path' do
584
- setup do
585
- @bufdir = File.expand_path('../../tmp/buffer_${test}_file', __FILE__)
586
- FileUtils.rm_rf(@bufdir)
587
- FileUtils.mkdir_p(@bufdir)
588
-
589
- @c1id = Fluent::UniqueId.generate
590
- p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
591
- File.open(p1, 'wb') do |f|
592
- f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
593
- end
594
- write_metadata(
595
- p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
596
- 1, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
597
- )
598
-
599
- @c2id = Fluent::UniqueId.generate
600
- p2 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c2id)}.log")
601
- File.open(p2, 'wb') do |f|
602
- f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
603
- end
604
- write_metadata(
605
- p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
606
- 1, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
607
- )
608
-
609
- @bufpath = File.join(@bufdir, 'etest.*.log')
610
-
611
- Fluent::Test.setup
612
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
613
- @p = Fluent::Plugin::FileBuffer.new
614
- @p.owner = @d
615
- @p.configure(config_element('buffer', '', {'path' => @bufpath}))
616
- @p.start
617
- end
618
-
619
- teardown do
620
- if @p
621
- @p.stop unless @p.stopped?
622
- @p.before_shutdown unless @p.before_shutdown?
623
- @p.shutdown unless @p.shutdown?
624
- @p.after_shutdown unless @p.after_shutdown?
625
- @p.close unless @p.closed?
626
- @p.terminate unless @p.terminated?
627
- end
628
- FileUtils.rm_rf(@bufdir)
629
- end
630
-
631
- test '#resume returns staged/queued chunks with metadata' do
632
- assert_equal 1, @p.stage.size
633
- assert_equal 1, @p.queue.size
634
- end
635
- end
636
-
637
- sub_test_case 'there are some existing file chunks, both in specified path and per-worker directory under specified path, configured as multi workers' do
638
- setup do
639
- @bufdir = File.expand_path('../../tmp/buffer_file/path', __FILE__)
640
- @worker0_dir = File.join(@bufdir, "worker0")
641
- @worker1_dir = File.join(@bufdir, "worker1")
642
- FileUtils.rm_rf @bufdir
643
- FileUtils.mkdir_p @worker0_dir
644
- FileUtils.mkdir_p @worker1_dir
645
-
646
- @bufdir_chunk_1 = Fluent::UniqueId.generate
647
- bc1 = File.join(@bufdir, "buffer.q#{Fluent::UniqueId.hex(@bufdir_chunk_1)}.log")
648
- File.open(bc1, 'wb') do |f|
649
- f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
650
- f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
651
- f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
652
- f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
653
- end
654
- write_metadata(
655
- bc1 + '.meta', @bufdir_chunk_1, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
656
- 4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
657
- )
658
-
659
- @bufdir_chunk_2 = Fluent::UniqueId.generate
660
- bc2 = File.join(@bufdir, "buffer.q#{Fluent::UniqueId.hex(@bufdir_chunk_2)}.log")
661
- File.open(bc2, 'wb') do |f|
662
- f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
663
- f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
664
- f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
665
- f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
666
- end
667
- write_metadata(
668
- bc2 + '.meta', @bufdir_chunk_2, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
669
- 4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
670
- )
671
-
672
- @worker_dir_chunk_1 = Fluent::UniqueId.generate
673
- wc0_1 = File.join(@worker0_dir, "buffer.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.log")
674
- wc1_1 = File.join(@worker1_dir, "buffer.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.log")
675
- [wc0_1, wc1_1].each do |chunk_path|
676
- File.open(chunk_path, 'wb') do |f|
677
- f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
678
- f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
679
- f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
680
- end
681
- write_metadata(
682
- chunk_path + '.meta', @worker_dir_chunk_1, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
683
- 3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
684
- )
685
- end
686
-
687
- @worker_dir_chunk_2 = Fluent::UniqueId.generate
688
- wc0_2 = File.join(@worker0_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.log")
689
- wc1_2 = File.join(@worker1_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.log")
690
- [wc0_2, wc1_2].each do |chunk_path|
691
- File.open(chunk_path, 'wb') do |f|
692
- f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
693
- f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
694
- f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
695
- f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
696
- end
697
- write_metadata(
698
- chunk_path + '.meta', @worker_dir_chunk_2, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
699
- 4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
700
- )
701
- end
702
-
703
- @worker_dir_chunk_3 = Fluent::UniqueId.generate
704
- wc0_3 = File.join(@worker0_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.log")
705
- wc1_3 = File.join(@worker1_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.log")
706
- [wc0_3, wc1_3].each do |chunk_path|
707
- File.open(chunk_path, 'wb') do |f|
708
- f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
709
- f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
710
- f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
711
- end
712
- write_metadata(
713
- chunk_path + '.meta', @worker_dir_chunk_3, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
714
- 3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
715
- )
716
- end
717
-
718
- Fluent::Test.setup
719
- end
720
-
721
- teardown do
722
- if @p
723
- @p.stop unless @p.stopped?
724
- @p.before_shutdown unless @p.before_shutdown?
725
- @p.shutdown unless @p.shutdown?
726
- @p.after_shutdown unless @p.after_shutdown?
727
- @p.close unless @p.closed?
728
- @p.terminate unless @p.terminated?
729
- end
730
- end
731
-
732
- test 'worker(id=0) #resume returns staged/queued chunks with metadata, not only in worker dir, including the directory specified by path' do
733
- ENV['SERVERENGINE_WORKER_ID'] = '0'
734
-
735
- buf_conf = config_element('buffer', '', {'path' => @bufdir})
736
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
737
- with_worker_config(workers: 2, worker_id: 0) do
738
- @d.configure(config_element('output', '', {}, [buf_conf]))
739
- end
740
-
741
- @d.start
742
- @p = @d.buffer
743
-
744
- assert_equal 2, @p.stage.size
745
- assert_equal 3, @p.queue.size
746
-
747
- stage = @p.stage
748
-
749
- m1 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
750
- assert_equal @worker_dir_chunk_2, stage[m1].unique_id
751
- assert_equal 4, stage[m1].size
752
- assert_equal :staged, stage[m1].state
753
-
754
- m2 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
755
- assert_equal @worker_dir_chunk_3, stage[m2].unique_id
756
- assert_equal 3, stage[m2].size
757
- assert_equal :staged, stage[m2].state
758
-
759
- queue = @p.queue
760
-
761
- assert_equal [@bufdir_chunk_1, @bufdir_chunk_2, @worker_dir_chunk_1].sort, queue.map(&:unique_id).sort
762
- assert_equal [3, 4, 4], queue.map(&:size).sort
763
- assert_equal [:queued, :queued, :queued], queue.map(&:state)
764
- end
765
-
766
- test 'worker(id=1) #resume returns staged/queued chunks with metadata, only in worker dir' do
767
- buf_conf = config_element('buffer', '', {'path' => @bufdir})
768
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
769
- with_worker_config(workers: 2, worker_id: 1) do
770
- @d.configure(config_element('output', '', {}, [buf_conf]))
771
- end
772
-
773
- @d.start
774
- @p = @d.buffer
775
-
776
- assert_equal 2, @p.stage.size
777
- assert_equal 1, @p.queue.size
778
-
779
- stage = @p.stage
780
-
781
- m1 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
782
- assert_equal @worker_dir_chunk_2, stage[m1].unique_id
783
- assert_equal 4, stage[m1].size
784
- assert_equal :staged, stage[m1].state
785
-
786
- m2 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
787
- assert_equal @worker_dir_chunk_3, stage[m2].unique_id
788
- assert_equal 3, stage[m2].size
789
- assert_equal :staged, stage[m2].state
790
-
791
- queue = @p.queue
792
-
793
- assert_equal @worker_dir_chunk_1, queue[0].unique_id
794
- assert_equal 3, queue[0].size
795
- assert_equal :queued, queue[0].state
796
- end
797
- end
798
-
799
- sub_test_case 'there are some existing file chunks with old format metadta' do
800
- setup do
801
- @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
802
- FileUtils.mkdir_p @bufdir unless File.exist?(@bufdir)
803
-
804
- @c1id = Fluent::UniqueId.generate
805
- p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
806
- File.open(p1, 'wb') do |f|
807
- f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
808
- f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
809
- f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
810
- f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
811
- end
812
- write_metadata_old(
813
- p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
814
- 4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
815
- )
816
-
817
- @c2id = Fluent::UniqueId.generate
818
- p2 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c2id)}.log")
819
- File.open(p2, 'wb') do |f|
820
- f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
821
- f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
822
- f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
823
- end
824
- write_metadata_old(
825
- p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
826
- 3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
827
- )
828
-
829
- @c3id = Fluent::UniqueId.generate
830
- p3 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c3id)}.log")
831
- File.open(p3, 'wb') do |f|
832
- f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
833
- f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
834
- f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
835
- f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
836
- end
837
- write_metadata_old(
838
- p3 + '.meta', @c3id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
839
- 4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
840
- )
841
-
842
- @c4id = Fluent::UniqueId.generate
843
- p4 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c4id)}.log")
844
- File.open(p4, 'wb') do |f|
845
- f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
846
- f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
847
- f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
848
- end
849
- write_metadata_old(
850
- p4 + '.meta', @c4id, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
851
- 3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
852
- )
853
-
854
- @bufpath = File.join(@bufdir, 'etest.*.log')
855
-
856
- Fluent::Test.setup
857
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
858
- @p = Fluent::Plugin::FileBuffer.new
859
- @p.owner = @d
860
- @p.configure(config_element('buffer', '', {'path' => @bufpath}))
861
- @p.start
862
- end
863
-
864
- teardown do
865
- if @p
866
- @p.stop unless @p.stopped?
867
- @p.before_shutdown unless @p.before_shutdown?
868
- @p.shutdown unless @p.shutdown?
869
- @p.after_shutdown unless @p.after_shutdown?
870
- @p.close unless @p.closed?
871
- @p.terminate unless @p.terminated?
872
- end
873
- if @bufdir
874
- Dir.glob(File.join(@bufdir, '*')).each do |path|
875
- next if ['.', '..'].include?(File.basename(path))
876
- File.delete(path)
877
- end
878
- end
879
- end
880
-
881
- test '#resume returns staged/queued chunks with metadata' do
882
- assert_equal 2, @p.stage.size
883
- assert_equal 2, @p.queue.size
884
-
885
- stage = @p.stage
886
-
887
- m3 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
888
- assert_equal @c3id, stage[m3].unique_id
889
- assert_equal 4, stage[m3].size
890
- assert_equal :staged, stage[m3].state
891
-
892
- m4 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
893
- assert_equal @c4id, stage[m4].unique_id
894
- assert_equal 3, stage[m4].size
895
- assert_equal :staged, stage[m4].state
896
- end
897
- end
898
-
899
- sub_test_case 'there are some existing file chunks with old format metadata file' do
900
- setup do
901
- @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
902
-
903
- @c1id = Fluent::UniqueId.generate
904
- p1 = File.join(@bufdir, "etest.201604171358.q#{Fluent::UniqueId.hex(@c1id)}.log")
905
- File.open(p1, 'wb') do |f|
906
- f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
907
- f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
908
- f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
909
- f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
910
- end
911
- FileUtils.touch(p1, mtime: Time.parse('2016-04-17 13:58:28 -0700'))
912
-
913
- @c2id = Fluent::UniqueId.generate
914
- p2 = File.join(@bufdir, "etest.201604171359.q#{Fluent::UniqueId.hex(@c2id)}.log")
915
- File.open(p2, 'wb') do |f|
916
- f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
917
- f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
918
- f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
919
- end
920
- FileUtils.touch(p2, mtime: Time.parse('2016-04-17 13:59:30 -0700'))
921
-
922
- @c3id = Fluent::UniqueId.generate
923
- p3 = File.join(@bufdir, "etest.201604171400.b#{Fluent::UniqueId.hex(@c3id)}.log")
924
- File.open(p3, 'wb') do |f|
925
- f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
926
- f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
927
- f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
928
- f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
929
- end
930
- FileUtils.touch(p3, mtime: Time.parse('2016-04-17 14:00:29 -0700'))
931
-
932
- @c4id = Fluent::UniqueId.generate
933
- p4 = File.join(@bufdir, "etest.201604171401.b#{Fluent::UniqueId.hex(@c4id)}.log")
934
- File.open(p4, 'wb') do |f|
935
- f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
936
- f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
937
- f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
938
- end
939
- FileUtils.touch(p4, mtime: Time.parse('2016-04-17 14:01:22 -0700'))
940
-
941
- @bufpath = File.join(@bufdir, 'etest.*.log')
942
-
943
- Fluent::Test.setup
944
- @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
945
- @p = Fluent::Plugin::FileBuffer.new
946
- @p.owner = @d
947
- @p.configure(config_element('buffer', '', {'path' => @bufpath}))
948
- @p.start
949
- end
950
-
951
- teardown do
952
- if @p
953
- @p.stop unless @p.stopped?
954
- @p.before_shutdown unless @p.before_shutdown?
955
- @p.shutdown unless @p.shutdown?
956
- @p.after_shutdown unless @p.after_shutdown?
957
- @p.close unless @p.closed?
958
- @p.terminate unless @p.terminated?
959
- end
960
- if @bufdir
961
- Dir.glob(File.join(@bufdir, '*')).each do |path|
962
- next if ['.', '..'].include?(File.basename(path))
963
- File.delete(path)
964
- end
965
- end
966
- end
967
-
968
- test '#resume returns queued chunks for files without metadata' do
969
- assert_equal 0, @p.stage.size
970
- assert_equal 4, @p.queue.size
971
-
972
- queue = @p.queue
973
-
974
- m = metadata()
975
-
976
- assert_equal @c1id, queue[0].unique_id
977
- assert_equal m, queue[0].metadata
978
- assert_equal 0, queue[0].size
979
- assert_equal :queued, queue[0].state
980
- assert_equal Time.parse('2016-04-17 13:58:28 -0700'), queue[0].modified_at
981
-
982
- assert_equal @c2id, queue[1].unique_id
983
- assert_equal m, queue[1].metadata
984
- assert_equal 0, queue[1].size
985
- assert_equal :queued, queue[1].state
986
- assert_equal Time.parse('2016-04-17 13:59:30 -0700'), queue[1].modified_at
987
-
988
- assert_equal @c3id, queue[2].unique_id
989
- assert_equal m, queue[2].metadata
990
- assert_equal 0, queue[2].size
991
- assert_equal :queued, queue[2].state
992
- assert_equal Time.parse('2016-04-17 14:00:29 -0700'), queue[2].modified_at
993
-
994
- assert_equal @c4id, queue[3].unique_id
995
- assert_equal m, queue[3].metadata
996
- assert_equal 0, queue[3].size
997
- assert_equal :queued, queue[3].state
998
- assert_equal Time.parse('2016-04-17 14:01:22 -0700'), queue[3].modified_at
999
- end
1000
- end
1001
-
-   sub_test_case 'there are the same timekey metadata in stage' do
-     setup do
-       @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
-       @bufpath = File.join(@bufdir, 'testbuf.*.log')
-       FileUtils.rm_r(@bufdir) if File.exist?(@bufdir)
-       FileUtils.mkdir_p(@bufdir)
-
-       m = metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i)
-
-       c1id = Fluent::UniqueId.generate
-       p1 = File.join(@bufdir, "testbuf.b#{Fluent::UniqueId.hex(c1id)}.log")
-       File.open(p1, 'wb') do |f|
-         f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay1"}].to_json + "\n"
-         f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay2"}].to_json + "\n"
-       end
-       write_metadata(p1 + '.meta', c1id, m, 2, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i)
-
-       c2id = Fluent::UniqueId.generate
-       p2 = File.join(@bufdir, "testbuf.b#{Fluent::UniqueId.hex(c2id)}.log")
-       File.open(p2, 'wb') do |f|
-         f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay3"}].to_json + "\n"
-         f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay4"}].to_json + "\n"
-       end
-       m2 = m.dup_next
-       write_metadata(p2 + '.meta', c2id, m2, 2, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i)
-
-       c3id = Fluent::UniqueId.generate
-       p3 = File.join(@bufdir, "testbuf.b#{Fluent::UniqueId.hex(c3id)}.log")
-       File.open(p3, 'wb') do |f|
-         f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay5"}].to_json + "\n"
-         f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay6"}].to_json + "\n"
-       end
-       m3 = m2.dup_next
-       write_metadata(p3 + '.meta', c3id, m3, 2, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i)
-
-       c4id = Fluent::UniqueId.generate
-       p4 = File.join(@bufdir, "testbuf.b#{Fluent::UniqueId.hex(c4id)}.log")
-       File.open(p4, 'wb') do |f|
-         f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay5"}].to_json + "\n"
-         f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay6"}].to_json + "\n"
-       end
-       write_metadata(p4 + '.meta', c4id, m3, 2, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i)
-
-       Fluent::Test.setup
-       @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
-       @p = Fluent::Plugin::FileBuffer.new
-       @p.owner = @d
-       @p.configure(config_element('buffer', '', {'path' => @bufpath}))
-       @p.start
-     end
-     teardown do
-       if @p
-         @p.stop unless @p.stopped?
-         @p.before_shutdown unless @p.before_shutdown?
-         @p.shutdown unless @p.shutdown?
-         @p.after_shutdown unless @p.after_shutdown?
-         @p.close unless @p.closed?
-         @p.terminate unless @p.terminated?
-       end
-
-       if @bufdir
-         Dir.glob(File.join(@bufdir, '*')).each do |path|
-           next if ['.', '..'].include?(File.basename(path))
-           # Windows does not permit deleting files that are in use by another process.
-           # Just ignore removal failures.
-           File.delete(path) rescue nil
-         end
-       end
-     end
-
-     test '#resume returns each chunk' do
-       s, e = @p.resume
-       assert_equal 3, s.size
-       assert_equal [0, 1, 2], s.keys.map(&:seq).sort
-       assert_equal 1, e.size
-       assert_equal [0], e.map { |c| c.metadata.seq }
-     end
-   end
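The test above exercises seq-based disambiguation: chunks sharing a timekey are told apart by an incrementing seq, which `dup_next` produces, and the fourth chunk (whose metadata duplicates m3) is enqueued rather than staged. A minimal sketch using a simplified value object, not fluentd's real Metadata class:

    # Simplified stand-in for buffer metadata with a timekey and a seq.
    Metadata = Struct.new(:timekey, :seq) do
      # Return a copy with the next sequence number, as used when
      # several staged chunks share the same timekey.
      def dup_next
        self.class.new(timekey, seq + 1)
      end
    end

    m0 = Metadata.new(1460926680, 0)
    m1 = m0.dup_next # seq = 1
    m2 = m1.dup_next # seq = 2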
-
-   sub_test_case 'there are some non-buffer chunk files, with a path without buffer chunk ids' do
-     setup do
-       @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
-
-       FileUtils.rm_rf @bufdir
-       FileUtils.mkdir_p @bufdir
-
-       @c1id = Fluent::UniqueId.generate
-       p1 = File.join(@bufdir, "etest.201604171358.q#{Fluent::UniqueId.hex(@c1id)}.log")
-       File.open(p1, 'wb') do |f|
-         f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-       end
-       FileUtils.touch(p1, mtime: Time.parse('2016-04-17 13:58:28 -0700'))
-
-       @not_chunk = File.join(@bufdir, 'etest.20160416.log')
-       File.open(@not_chunk, 'wb') do |f|
-         f.write ["t1.test", event_time('2016-04-16 23:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t2.test", event_time('2016-04-16 23:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t3.test", event_time('2016-04-16 23:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t4.test", event_time('2016-04-16 23:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-       end
-       FileUtils.touch(@not_chunk, mtime: Time.parse('2016-04-17 00:00:00 -0700'))
-
-       @bufpath = File.join(@bufdir, 'etest.*.log')
-
-       Fluent::Test.setup
-       @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
-       @p = Fluent::Plugin::FileBuffer.new
-       @p.owner = @d
-       @p.configure(config_element('buffer', '', {'path' => @bufpath}))
-       @p.start
-     end
-
-     teardown do
-       if @p
-         @p.stop unless @p.stopped?
-         @p.before_shutdown unless @p.before_shutdown?
-         @p.shutdown unless @p.shutdown?
-         @p.after_shutdown unless @p.after_shutdown?
-         @p.close unless @p.closed?
-         @p.terminate unless @p.terminated?
-       end
-       if @bufdir
-         Dir.glob(File.join(@bufdir, '*')).each do |path|
-           next if ['.', '..'].include?(File.basename(path))
-           File.delete(path)
-         end
-       end
-     end
-
-     test '#resume returns queued chunks for files without metadata, ignoring files that do not look like chunks' do
-       assert_equal 0, @p.stage.size
-       assert_equal 1, @p.queue.size
-
-       queue = @p.queue
-
-       m = metadata()
-
-       assert_equal @c1id, queue[0].unique_id
-       assert_equal m, queue[0].metadata
-       assert_equal 0, queue[0].size
-       assert_equal :queued, queue[0].state
-       assert_equal Time.parse('2016-04-17 13:58:28 -0700'), queue[0].modified_at
-
-       assert File.exist?(@not_chunk)
-     end
-   end
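The point of this case: the configured glob 'etest.*.log' matches both 'etest.201604171358.q<hex>.log' and 'etest.20160416.log', so #resume must apply a stricter check than the glob to skip files whose names carry no chunk-id segment. A sketch of that filtering; the regex is illustrative, not fluentd's exact implementation:

    # Only names ending in ".b<hex>.log" or ".q<hex>.log" are chunks.
    CHUNK_RE = /\.[bq][0-9a-f]+\.log\z/

    Dir.glob('etest.*.log').each do |path|
      next unless path =~ CHUNK_RE # 'etest.20160416.log' is skipped
      # ... load the chunk and its metadata ...
    end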
-
-   sub_test_case 'there are existing broken file chunks' do
-     setup do
-       @id_output = 'backup_test'
-       @bufdir = File.expand_path('../../tmp/broken_buffer_file', __FILE__)
-       FileUtils.rm_rf @bufdir rescue nil
-       FileUtils.mkdir_p @bufdir
-       @bufpath = File.join(@bufdir, 'broken_test.*.log')
-
-       Fluent::Test.setup
-     end
-
-     teardown do
-       if @p
-         @p.stop unless @p.stopped?
-         @p.before_shutdown unless @p.before_shutdown?
-         @p.shutdown unless @p.shutdown?
-         @p.after_shutdown unless @p.after_shutdown?
-         @p.close unless @p.closed?
-         @p.terminate unless @p.terminated?
-       end
-     end
-
-     def setup_plugins(buf_conf)
-       @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
-       @d.configure(config_element('ROOT', '', {'@id' => @id_output}, [config_element('buffer', '', buf_conf)]))
-       @p = @d.buffer
-     end
-
-     def create_first_chunk(mode)
-       cid = Fluent::UniqueId.generate
-       path = File.join(@bufdir, "broken_test.#{mode}#{Fluent::UniqueId.hex(cid)}.log")
-       File.open(path, 'wb') do |f|
-         f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-       end
-       write_metadata(
-         path + '.meta', cid, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
-         4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
-       )
-
-       return cid, path
-     end
-
-     def create_second_chunk(mode)
-       cid = Fluent::UniqueId.generate
-       path = File.join(@bufdir, "broken_test.#{mode}#{Fluent::UniqueId.hex(cid)}.log")
-       File.open(path, 'wb') do |f|
-         f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-         f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
-       end
-       write_metadata(
-         path + '.meta', cid, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
-         3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
-       )
-
-       return cid, path
-     end
-
-     def compare_staged_chunk(staged, id, time, num, mode)
-       assert_equal 1, staged.size
-       m = metadata(timekey: event_time(time).to_i)
-       assert_equal id, staged[m].unique_id
-       assert_equal num, staged[m].size
-       assert_equal mode, staged[m].state
-     end
-
-     def compare_queued_chunk(queued, id, num, mode)
-       assert_equal 1, queued.size
-       assert_equal id, queued[0].unique_id
-       assert_equal num, queued[0].size
-       assert_equal mode, queued[0].state
-     end
-
-     def compare_log(plugin, msg)
-       logs = plugin.log.out.logs
-       assert { logs.any? { |log| log.include?(msg) } }
-     end
-
-     test '#resume backs up staged empty chunk' do
-       setup_plugins({'path' => @bufpath})
-       c1id, p1 = create_first_chunk('b')
-       File.open(p1, 'wb') { |f| } # create staged empty chunk file
-       c2id, _ = create_second_chunk('b')
-
-       Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
-         @p.start
-       end
-
-       compare_staged_chunk(@p.stage, c2id, '2016-04-17 14:01:00 -0700', 3, :staged)
-       compare_log(@p, 'staged file chunk is empty')
-       assert { not File.exist?(p1) }
-       assert { File.exist?("#{@bufdir}/backup/worker0/#{@id_output}/#{@d.dump_unique_id_hex(c1id)}.log") }
-     end
-
-     test '#resume backs up staged broken metadata' do
-       setup_plugins({'path' => @bufpath})
-       c1id, _ = create_first_chunk('b')
-       c2id, p2 = create_second_chunk('b')
-       File.open(p2 + '.meta', 'wb') { |f| f.write("\0" * 70) } # create staged broken meta file
-
-       Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
-         @p.start
-       end
-
-       compare_staged_chunk(@p.stage, c1id, '2016-04-17 14:00:00 -0700', 4, :staged)
-       compare_log(@p, 'staged meta file is broken')
-       assert { not File.exist?(p2) }
-       assert { File.exist?("#{@bufdir}/backup/worker0/#{@id_output}/#{@d.dump_unique_id_hex(c2id)}.log") }
-     end
-
-     test '#resume backs up enqueued empty chunk' do
-       setup_plugins({'path' => @bufpath})
-       c1id, p1 = create_first_chunk('q')
-       File.open(p1, 'wb') { |f| } # create enqueued empty chunk file
-       c2id, _ = create_second_chunk('q')
-
-       Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
-         @p.start
-       end
-
-       compare_queued_chunk(@p.queue, c2id, 3, :queued)
-       compare_log(@p, 'enqueued file chunk is empty')
-       assert { not File.exist?(p1) }
-       assert { File.exist?("#{@bufdir}/backup/worker0/#{@id_output}/#{@d.dump_unique_id_hex(c1id)}.log") }
-     end
-
-     test '#resume backs up enqueued broken metadata' do
-       setup_plugins({'path' => @bufpath})
-       c1id, _ = create_first_chunk('q')
-       c2id, p2 = create_second_chunk('q')
-       File.open(p2 + '.meta', 'wb') { |f| f.write("\0" * 70) } # create enqueued broken meta file
-
-       Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
-         @p.start
-       end
-
-       compare_queued_chunk(@p.queue, c1id, 4, :queued)
-       compare_log(@p, 'enqueued meta file is broken')
-       assert { not File.exist?(p2) }
-       assert { File.exist?("#{@bufdir}/backup/worker0/#{@id_output}/#{@d.dump_unique_id_hex(c2id)}.log") }
-     end
-
-     test '#resume throws away broken chunk with disable_chunk_backup' do
-       setup_plugins({'path' => @bufpath, 'disable_chunk_backup' => true})
-       c1id, _ = create_first_chunk('b')
-       c2id, p2 = create_second_chunk('b')
-       File.open(p2 + '.meta', 'wb') { |f| f.write("\0" * 70) } # create staged broken meta file
-
-       Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
-         @p.start
-       end
-
-       compare_staged_chunk(@p.stage, c1id, '2016-04-17 14:00:00 -0700', 4, :staged)
-       compare_log(@p, 'staged meta file is broken')
-       compare_log(@p, 'disable_chunk_backup is true')
-       assert { not File.exist?(p2) }
-       assert { not File.exist?("#{@bufdir}/backup/worker0/#{@id_output}/#{@d.dump_unique_id_hex(c2id)}.log") }
-     end
-   end
- end