fluentd 0.12.40 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (252):
  1. checksums.yaml +4 -4
  2. data/.github/ISSUE_TEMPLATE.md +6 -0
  3. data/.gitignore +2 -0
  4. data/.travis.yml +33 -21
  5. data/CONTRIBUTING.md +1 -0
  6. data/ChangeLog +810 -237
  7. data/README.md +0 -25
  8. data/Rakefile +2 -1
  9. data/Vagrantfile +17 -0
  10. data/appveyor.yml +35 -0
  11. data/example/filter_stdout.conf +5 -5
  12. data/example/in_forward.conf +2 -2
  13. data/example/in_http.conf +2 -2
  14. data/example/in_out_forward.conf +17 -0
  15. data/example/in_syslog.conf +2 -2
  16. data/example/in_tail.conf +2 -2
  17. data/example/in_tcp.conf +2 -2
  18. data/example/in_udp.conf +2 -2
  19. data/example/out_copy.conf +4 -4
  20. data/example/out_file.conf +2 -2
  21. data/example/out_forward.conf +2 -2
  22. data/example/out_forward_buf_file.conf +23 -0
  23. data/example/v0_12_filter.conf +8 -8
  24. data/fluent.conf +29 -0
  25. data/fluentd.gemspec +18 -11
  26. data/lib/fluent/agent.rb +60 -58
  27. data/lib/fluent/command/cat.rb +1 -1
  28. data/lib/fluent/command/debug.rb +7 -5
  29. data/lib/fluent/command/fluentd.rb +97 -2
  30. data/lib/fluent/compat/call_super_mixin.rb +67 -0
  31. data/lib/fluent/compat/filter.rb +50 -0
  32. data/lib/fluent/compat/formatter.rb +109 -0
  33. data/lib/fluent/compat/input.rb +50 -0
  34. data/lib/fluent/compat/output.rb +617 -0
  35. data/lib/fluent/compat/output_chain.rb +60 -0
  36. data/lib/fluent/compat/parser.rb +163 -0
  37. data/lib/fluent/compat/propagate_default.rb +62 -0
  38. data/lib/fluent/config.rb +23 -20
  39. data/lib/fluent/config/configure_proxy.rb +119 -70
  40. data/lib/fluent/config/dsl.rb +5 -18
  41. data/lib/fluent/config/element.rb +72 -8
  42. data/lib/fluent/config/error.rb +0 -3
  43. data/lib/fluent/config/literal_parser.rb +0 -2
  44. data/lib/fluent/config/parser.rb +4 -4
  45. data/lib/fluent/config/section.rb +39 -28
  46. data/lib/fluent/config/types.rb +2 -13
  47. data/lib/fluent/config/v1_parser.rb +1 -3
  48. data/lib/fluent/configurable.rb +48 -16
  49. data/lib/fluent/daemon.rb +15 -0
  50. data/lib/fluent/engine.rb +26 -52
  51. data/lib/fluent/env.rb +6 -4
  52. data/lib/fluent/event.rb +58 -11
  53. data/lib/fluent/event_router.rb +5 -5
  54. data/lib/fluent/filter.rb +2 -50
  55. data/lib/fluent/formatter.rb +4 -293
  56. data/lib/fluent/input.rb +2 -32
  57. data/lib/fluent/label.rb +2 -2
  58. data/lib/fluent/load.rb +3 -2
  59. data/lib/fluent/log.rb +107 -38
  60. data/lib/fluent/match.rb +0 -36
  61. data/lib/fluent/mixin.rb +117 -7
  62. data/lib/fluent/msgpack_factory.rb +62 -0
  63. data/lib/fluent/output.rb +7 -612
  64. data/lib/fluent/output_chain.rb +23 -0
  65. data/lib/fluent/parser.rb +4 -800
  66. data/lib/fluent/plugin.rb +100 -121
  67. data/lib/fluent/plugin/bare_output.rb +63 -0
  68. data/lib/fluent/plugin/base.rb +121 -0
  69. data/lib/fluent/plugin/buf_file.rb +101 -182
  70. data/lib/fluent/plugin/buf_memory.rb +9 -92
  71. data/lib/fluent/plugin/buffer.rb +473 -0
  72. data/lib/fluent/plugin/buffer/chunk.rb +135 -0
  73. data/lib/fluent/plugin/buffer/file_chunk.rb +339 -0
  74. data/lib/fluent/plugin/buffer/memory_chunk.rb +100 -0
  75. data/lib/fluent/plugin/exec_util.rb +80 -75
  76. data/lib/fluent/plugin/file_util.rb +33 -28
  77. data/lib/fluent/plugin/file_wrapper.rb +120 -0
  78. data/lib/fluent/plugin/filter.rb +51 -0
  79. data/lib/fluent/plugin/filter_grep.rb +13 -40
  80. data/lib/fluent/plugin/filter_record_transformer.rb +22 -18
  81. data/lib/fluent/plugin/formatter.rb +93 -0
  82. data/lib/fluent/plugin/formatter_csv.rb +48 -0
  83. data/lib/fluent/plugin/formatter_hash.rb +32 -0
  84. data/lib/fluent/plugin/formatter_json.rb +47 -0
  85. data/lib/fluent/plugin/formatter_ltsv.rb +42 -0
  86. data/lib/fluent/plugin/formatter_msgpack.rb +32 -0
  87. data/lib/fluent/plugin/formatter_out_file.rb +45 -0
  88. data/lib/fluent/plugin/formatter_single_value.rb +34 -0
  89. data/lib/fluent/plugin/formatter_stdout.rb +39 -0
  90. data/lib/fluent/plugin/in_debug_agent.rb +4 -0
  91. data/lib/fluent/plugin/in_dummy.rb +22 -18
  92. data/lib/fluent/plugin/in_exec.rb +18 -8
  93. data/lib/fluent/plugin/in_forward.rb +36 -79
  94. data/lib/fluent/plugin/in_gc_stat.rb +4 -0
  95. data/lib/fluent/plugin/in_http.rb +21 -18
  96. data/lib/fluent/plugin/in_monitor_agent.rb +15 -48
  97. data/lib/fluent/plugin/in_object_space.rb +6 -1
  98. data/lib/fluent/plugin/in_stream.rb +7 -3
  99. data/lib/fluent/plugin/in_syslog.rb +46 -95
  100. data/lib/fluent/plugin/in_tail.rb +51 -595
  101. data/lib/fluent/plugin/in_tcp.rb +8 -1
  102. data/lib/fluent/plugin/in_udp.rb +8 -14
  103. data/lib/fluent/plugin/input.rb +33 -0
  104. data/lib/fluent/plugin/multi_output.rb +95 -0
  105. data/lib/fluent/plugin/out_buffered_null.rb +59 -0
  106. data/lib/fluent/plugin/out_copy.rb +11 -7
  107. data/lib/fluent/plugin/out_exec.rb +15 -11
  108. data/lib/fluent/plugin/out_exec_filter.rb +18 -10
  109. data/lib/fluent/plugin/out_file.rb +34 -5
  110. data/lib/fluent/plugin/out_forward.rb +19 -9
  111. data/lib/fluent/plugin/out_null.rb +0 -14
  112. data/lib/fluent/plugin/out_roundrobin.rb +11 -7
  113. data/lib/fluent/plugin/out_stdout.rb +5 -7
  114. data/lib/fluent/plugin/out_stream.rb +3 -1
  115. data/lib/fluent/plugin/output.rb +979 -0
  116. data/lib/fluent/plugin/owned_by_mixin.rb +42 -0
  117. data/lib/fluent/plugin/parser.rb +244 -0
  118. data/lib/fluent/plugin/parser_apache.rb +24 -0
  119. data/lib/fluent/plugin/parser_apache2.rb +84 -0
  120. data/lib/fluent/plugin/parser_apache_error.rb +21 -0
  121. data/lib/fluent/plugin/parser_csv.rb +31 -0
  122. data/lib/fluent/plugin/parser_json.rb +79 -0
  123. data/lib/fluent/plugin/parser_ltsv.rb +50 -0
  124. data/lib/fluent/plugin/parser_multiline.rb +102 -0
  125. data/lib/fluent/plugin/parser_nginx.rb +24 -0
  126. data/lib/fluent/plugin/parser_none.rb +36 -0
  127. data/lib/fluent/plugin/parser_syslog.rb +82 -0
  128. data/lib/fluent/plugin/parser_tsv.rb +37 -0
  129. data/lib/fluent/plugin/socket_util.rb +120 -114
  130. data/lib/fluent/plugin/storage.rb +84 -0
  131. data/lib/fluent/plugin/storage_local.rb +116 -0
  132. data/lib/fluent/plugin/string_util.rb +16 -13
  133. data/lib/fluent/plugin_helper.rb +39 -0
  134. data/lib/fluent/plugin_helper/child_process.rb +298 -0
  135. data/lib/fluent/plugin_helper/compat_parameters.rb +99 -0
  136. data/lib/fluent/plugin_helper/event_emitter.rb +80 -0
  137. data/lib/fluent/plugin_helper/event_loop.rb +118 -0
  138. data/lib/fluent/plugin_helper/retry_state.rb +177 -0
  139. data/lib/fluent/plugin_helper/storage.rb +308 -0
  140. data/lib/fluent/plugin_helper/thread.rb +147 -0
  141. data/lib/fluent/plugin_helper/timer.rb +85 -0
  142. data/lib/fluent/plugin_id.rb +63 -0
  143. data/lib/fluent/process.rb +21 -30
  144. data/lib/fluent/registry.rb +21 -9
  145. data/lib/fluent/root_agent.rb +115 -40
  146. data/lib/fluent/supervisor.rb +330 -320
  147. data/lib/fluent/system_config.rb +42 -18
  148. data/lib/fluent/test.rb +6 -1
  149. data/lib/fluent/test/base.rb +23 -3
  150. data/lib/fluent/test/driver/base.rb +247 -0
  151. data/lib/fluent/test/driver/event_feeder.rb +98 -0
  152. data/lib/fluent/test/driver/filter.rb +35 -0
  153. data/lib/fluent/test/driver/input.rb +31 -0
  154. data/lib/fluent/test/driver/output.rb +78 -0
  155. data/lib/fluent/test/driver/test_event_router.rb +45 -0
  156. data/lib/fluent/test/filter_test.rb +0 -1
  157. data/lib/fluent/test/formatter_test.rb +2 -1
  158. data/lib/fluent/test/input_test.rb +23 -17
  159. data/lib/fluent/test/output_test.rb +28 -39
  160. data/lib/fluent/test/parser_test.rb +1 -1
  161. data/lib/fluent/time.rb +104 -1
  162. data/lib/fluent/{status.rb → unique_id.rb} +15 -24
  163. data/lib/fluent/version.rb +1 -1
  164. data/lib/fluent/winsvc.rb +72 -0
  165. data/test/compat/test_calls_super.rb +164 -0
  166. data/test/config/test_config_parser.rb +83 -0
  167. data/test/config/test_configurable.rb +547 -274
  168. data/test/config/test_configure_proxy.rb +146 -29
  169. data/test/config/test_dsl.rb +3 -181
  170. data/test/config/test_element.rb +274 -0
  171. data/test/config/test_literal_parser.rb +1 -1
  172. data/test/config/test_section.rb +79 -7
  173. data/test/config/test_system_config.rb +21 -0
  174. data/test/config/test_types.rb +3 -26
  175. data/test/helper.rb +78 -8
  176. data/test/plugin/test_bare_output.rb +118 -0
  177. data/test/plugin/test_base.rb +75 -0
  178. data/test/plugin/test_buf_file.rb +420 -521
  179. data/test/plugin/test_buf_memory.rb +32 -194
  180. data/test/plugin/test_buffer.rb +981 -0
  181. data/test/plugin/test_buffer_chunk.rb +110 -0
  182. data/test/plugin/test_buffer_file_chunk.rb +770 -0
  183. data/test/plugin/test_buffer_memory_chunk.rb +265 -0
  184. data/test/plugin/test_filter.rb +255 -0
  185. data/test/plugin/test_filter_grep.rb +2 -73
  186. data/test/plugin/test_filter_record_transformer.rb +24 -68
  187. data/test/plugin/test_filter_stdout.rb +6 -6
  188. data/test/plugin/test_in_debug_agent.rb +2 -0
  189. data/test/plugin/test_in_dummy.rb +11 -17
  190. data/test/plugin/test_in_exec.rb +6 -25
  191. data/test/plugin/test_in_forward.rb +112 -151
  192. data/test/plugin/test_in_gc_stat.rb +2 -0
  193. data/test/plugin/test_in_http.rb +106 -157
  194. data/test/plugin/test_in_object_space.rb +21 -5
  195. data/test/plugin/test_in_stream.rb +14 -13
  196. data/test/plugin/test_in_syslog.rb +30 -275
  197. data/test/plugin/test_in_tail.rb +95 -234
  198. data/test/plugin/test_in_tcp.rb +14 -0
  199. data/test/plugin/test_in_udp.rb +21 -13
  200. data/test/plugin/test_input.rb +122 -0
  201. data/test/plugin/test_multi_output.rb +180 -0
  202. data/test/plugin/test_out_buffered_null.rb +79 -0
  203. data/test/plugin/test_out_copy.rb +15 -2
  204. data/test/plugin/test_out_exec.rb +75 -25
  205. data/test/plugin/test_out_exec_filter.rb +74 -8
  206. data/test/plugin/test_out_file.rb +61 -7
  207. data/test/plugin/test_out_forward.rb +92 -15
  208. data/test/plugin/test_out_roundrobin.rb +1 -0
  209. data/test/plugin/test_out_stdout.rb +22 -13
  210. data/test/plugin/test_out_stream.rb +18 -0
  211. data/test/plugin/test_output.rb +515 -0
  212. data/test/plugin/test_output_as_buffered.rb +1540 -0
  213. data/test/plugin/test_output_as_buffered_overflow.rb +247 -0
  214. data/test/plugin/test_output_as_buffered_retries.rb +808 -0
  215. data/test/plugin/test_output_as_buffered_secondary.rb +776 -0
  216. data/test/plugin/test_output_as_standard.rb +362 -0
  217. data/test/plugin/test_owned_by.rb +35 -0
  218. data/test/plugin/test_storage.rb +167 -0
  219. data/test/plugin/test_storage_local.rb +8 -0
  220. data/test/plugin_helper/test_child_process.rb +599 -0
  221. data/test/plugin_helper/test_compat_parameters.rb +175 -0
  222. data/test/plugin_helper/test_event_emitter.rb +51 -0
  223. data/test/plugin_helper/test_event_loop.rb +52 -0
  224. data/test/plugin_helper/test_retry_state.rb +399 -0
  225. data/test/plugin_helper/test_storage.rb +411 -0
  226. data/test/plugin_helper/test_thread.rb +164 -0
  227. data/test/plugin_helper/test_timer.rb +100 -0
  228. data/test/scripts/exec_script.rb +0 -6
  229. data/test/scripts/fluent/plugin/out_test.rb +3 -0
  230. data/test/test_config.rb +13 -4
  231. data/test/test_event.rb +24 -13
  232. data/test/test_event_router.rb +8 -7
  233. data/test/test_event_time.rb +187 -0
  234. data/test/test_formatter.rb +13 -51
  235. data/test/test_input.rb +1 -1
  236. data/test/test_log.rb +239 -16
  237. data/test/test_mixin.rb +1 -1
  238. data/test/test_output.rb +53 -66
  239. data/test/test_parser.rb +105 -323
  240. data/test/test_plugin_helper.rb +81 -0
  241. data/test/test_root_agent.rb +4 -52
  242. data/test/test_supervisor.rb +272 -0
  243. data/test/test_unique_id.rb +47 -0
  244. metadata +180 -54
  245. data/lib/fluent/buffer.rb +0 -365
  246. data/lib/fluent/plugin/filter_parser.rb +0 -107
  247. data/lib/fluent/plugin/in_status.rb +0 -76
  248. data/lib/fluent/test/helpers.rb +0 -86
  249. data/test/plugin/data/log/foo/bar2 +0 -0
  250. data/test/plugin/test_filter_parser.rb +0 -744
  251. data/test/plugin/test_in_status.rb +0 -38
  252. data/test/test_buffer.rb +0 -624
require_relative '../helper'
require 'fluent/plugin/buffer/chunk'
# hoisted here from method bodies: both are needed by tests below,
# and top-level requires make the dependency explicit
require 'stringio'
require 'msgpack'

# Tests for the abstract buffer chunk base class: lifecycle flags,
# the abstract-method contract (NotImplementedError), and the default
# behavior a minimal subclass (defining only #size and #open) inherits.
class BufferChunkTest < Test::Unit::TestCase
  sub_test_case 'blank buffer chunk' do
    test 'has generated unique id, given metadata, created_at and modified_at' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
      assert{ chunk.unique_id.bytesize == 16 } # 128bit id generated by the chunk itself
      assert{ chunk.metadata.object_id == meta.object_id } # metadata held by reference, not copied
      assert{ chunk.created_at.is_a? Time }
      assert{ chunk.modified_at.is_a? Time }
      assert chunk.staged?
      assert !chunk.queued?
      assert !chunk.closed?
    end

    test 'has many methods for chunks, but not implemented' do
      meta = Object.new
      chunk = Fluent::Plugin::Buffer::Chunk.new(meta)

      # the abstract base declares the full chunk API...
      assert chunk.respond_to?(:append)
      assert chunk.respond_to?(:commit)
      assert chunk.respond_to?(:rollback)
      assert chunk.respond_to?(:bytesize)
      assert chunk.respond_to?(:size)
      assert chunk.respond_to?(:length)
      assert chunk.respond_to?(:empty?)
      assert chunk.respond_to?(:read)
      assert chunk.respond_to?(:open)
      assert chunk.respond_to?(:write_to)
      assert chunk.respond_to?(:msgpack_each)
      # ...but every operation must be provided by a concrete subclass
      assert_raise(NotImplementedError){ chunk.append(nil) }
      assert_raise(NotImplementedError){ chunk.commit }
      assert_raise(NotImplementedError){ chunk.rollback }
      assert_raise(NotImplementedError){ chunk.bytesize }
      assert_raise(NotImplementedError){ chunk.size }
      assert_raise(NotImplementedError){ chunk.length }
      assert_raise(NotImplementedError){ chunk.empty? }
      assert_raise(NotImplementedError){ chunk.read }
      assert_raise(NotImplementedError){ chunk.open(){} }
      assert_raise(NotImplementedError){ chunk.write_to(nil) }
      assert_raise(NotImplementedError){ chunk.msgpack_each(){|v| v} }
    end
  end

  # Minimal concrete chunk: only #size and #open are implemented, so the
  # tests below exercise behavior inherited from the base class.
  class TestChunk < Fluent::Plugin::Buffer::Chunk
    attr_accessor :data
    def initialize(meta)
      super
      @data = ''
    end
    def size
      @data.size
    end
    def open
      io = StringIO.new(@data)
      yield io
    end
  end

  sub_test_case 'minimum chunk implements #size and #open' do
    test 'chunk lifecycle' do
      c = TestChunk.new(Object.new)
      assert c.staged?
      assert !c.queued?
      assert !c.closed?

      c.enqueued!

      assert !c.staged?
      assert c.queued?
      assert !c.closed?

      c.close

      assert !c.staged?
      assert !c.queued?
      assert c.closed?
    end

    test 'can respond to #empty? correctly' do
      c = TestChunk.new(Object.new)
      assert_equal 0, c.size
      assert c.empty?
    end

    test 'can write its contents to io object' do
      c = TestChunk.new(Object.new)
      c.data << "my data\nyour data\n"
      io = StringIO.new
      c.write_to(io)
      # FIX: the original `assert "my data\nyour data\n", io.to_s` always passed —
      # the string was taken as assert's *failure message*, and StringIO#to_s is
      # not the buffered contents. Compare the actual contents instead.
      assert_equal "my data\nyour data\n", io.string
    end

    test 'can feed objects into blocks with unpacking msgpack' do
      c = TestChunk.new(Object.new)
      c.data << MessagePack.pack(['my data', 1])
      c.data << MessagePack.pack(['your data', 2])
      ary = []
      c.msgpack_each do |obj|
        ary << obj
      end
      assert_equal ['my data', 1], ary[0]
      assert_equal ['your data', 2], ary[1]
    end
  end
end
@@ -0,0 +1,770 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/buffer/file_chunk'
3
+ require 'fluent/unique_id'
4
+
5
+ require 'fileutils'
6
+ require 'msgpack'
7
+ require 'time'
8
+ require 'timecop'
9
+
10
+ class BufferFileChunkTest < Test::Unit::TestCase
11
+ setup do
12
+ @klass = Fluent::Plugin::Buffer::FileChunk
13
+ @chunkdir = File.expand_path('../../tmp/buffer_file_chunk', __FILE__)
14
+ FileUtils.rm_r @chunkdir rescue nil
15
+ FileUtils.mkdir_p @chunkdir
16
+ end
17
+ teardown do
18
+ Timecop.return
19
+ end
20
+
21
+ Metadata = Struct.new(:timekey, :tag, :variables)
22
+ def gen_metadata(timekey: nil, tag: nil, variables: nil)
23
+ Metadata.new(timekey, tag, variables)
24
+ end
25
+
26
+ def read_metadata_file(path)
27
+ File.open(path, 'rb'){|f| MessagePack.unpack(f.read, symbolize_keys: true) }
28
+ end
29
+
30
+ def gen_path(path)
31
+ File.join(@chunkdir, path)
32
+ end
33
+
34
+ def gen_test_chunk_id
35
+ require 'time'
36
+ now = Time.parse('2016-04-07 14:31:33 +0900')
37
+ u1 = ((now.to_i * 1000 * 1000 + now.usec) << 12 | 1725) # 1725 is one of `rand(0xfff)`
38
+ u3 = 2979763054 # one of rand(0xffffffff)
39
+ u4 = 438020492 # ditto
40
+ [u1 >> 32, u1 & 0xffffffff, u3, u4].pack('NNNN')
41
+ # unique_id.unpack('N*').map{|n| n.to_s(16)}.join => "52fde6425d7406bdb19b936e1a1ba98c"
42
+ end
43
+
44
+ def hex_id(id)
45
+ id.unpack('N*').map{|n| n.to_s(16)}.join
46
+ end
47
+
48
+ sub_test_case 'classmethods' do
49
+ data(
50
+ correct_staged: ['/mydir/mypath/myfile.b00ff.log', :staged],
51
+ correct_queued: ['/mydir/mypath/myfile.q00ff.log', :queued],
52
+ incorrect_staged: ['/mydir/mypath/myfile.b00ff.log/unknown', :queued],
53
+ incorrect_queued: ['/mydir/mypath/myfile.q00ff.log/unknown', :queued],
54
+ )
55
+ test 'can .assume_chunk_state' do |data|
56
+ path, expected = data
57
+ assert_equal expected, @klass.assume_chunk_state(path)
58
+ end
59
+
60
+ test '.generate_stage_chunk_path generates path with staged mark & chunk unique_id' do
61
+ assert_equal gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), @klass.generate_stage_chunk_path(gen_path("mychunk.*.log"), gen_test_chunk_id)
62
+ assert_raise "BUG: buffer chunk path on stage MUST have '.*.'" do
63
+ @klass.generate_stage_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
64
+ end
65
+ assert_raise "BUG: buffer chunk path on stage MUST have '.*.'" do
66
+ @klass.generate_stage_chunk_path(gen_path("mychunk.*"), gen_test_chunk_id)
67
+ end
68
+ assert_raise "BUG: buffer chunk path on stage MUST have '.*.'" do
69
+ @klass.generate_stage_chunk_path(gen_path("*.log"), gen_test_chunk_id)
70
+ end
71
+ end
72
+
73
+ test '.generate_queued_chunk_path generates path with enqueued mark for staged chunk path' do
74
+ assert_equal(
75
+ gen_path("mychunk.q52fde6425d7406bdb19b936e1a1ba98c.log"),
76
+ @klass.generate_queued_chunk_path(gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), gen_test_chunk_id)
77
+ )
78
+ end
79
+
80
+ test '.generate_queued_chunk_path generates special path with chunk unique_id for non staged chunk path' do
81
+ assert_equal(
82
+ gen_path("mychunk.log.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
83
+ @klass.generate_queued_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
84
+ )
85
+ assert_equal(
86
+ gen_path("mychunk.q55555555555555555555555555555555.log.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
87
+ @klass.generate_queued_chunk_path(gen_path("mychunk.q55555555555555555555555555555555.log"), gen_test_chunk_id)
88
+ )
89
+ end
90
+
91
+ test '.unique_id_from_path recreates unique_id from file path to assume unique_id for v0.12 chunks' do
92
+ assert_equal gen_test_chunk_id, @klass.unique_id_from_path(gen_path("mychunk.q52fde6425d7406bdb19b936e1a1ba98c.log"))
93
+ end
94
+ end
95
+
96
+ sub_test_case 'newly created chunk' do
97
+ setup do
98
+ @chunk_path = File.join(@chunkdir, 'test.*.log')
99
+ @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :create)
100
+ end
101
+
102
+ def gen_chunk_path(prefix, unique_id)
103
+ File.join(@chunkdir, "test.#{prefix}#{Fluent::UniqueId.hex(unique_id)}.log")
104
+ end
105
+
106
+ teardown do
107
+ if @c
108
+ @c.purge rescue nil
109
+ end
110
+ if File.exist? @chunk_path
111
+ File.unlink @chunk_path
112
+ end
113
+ end
114
+
115
+ test 'creates new files for chunk and metadata with specified path & permission' do
116
+ assert{ @c.unique_id.size == 16 }
117
+ assert_equal gen_chunk_path('b', @c.unique_id), @c.path
118
+
119
+ assert File.exist?(gen_chunk_path('b', @c.unique_id))
120
+ assert{ File.stat(gen_chunk_path('b', @c.unique_id)).mode.to_s(8).end_with?(@klass.const_get('FILE_PERMISSION').to_s(8)) }
121
+
122
+ assert File.exist?(gen_chunk_path('b', @c.unique_id) + '.meta')
123
+ assert{ File.stat(gen_chunk_path('b', @c.unique_id) + '.meta').mode.to_s(8).end_with?(@klass.const_get('FILE_PERMISSION').to_s(8)) }
124
+
125
+ assert_equal :staged, @c.state
126
+ assert @c.empty?
127
+ end
128
+
129
+ test 'can #append, #commit and #read it' do
130
+ assert @c.empty?
131
+
132
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
133
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
134
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
135
+ @c.append(data)
136
+ @c.commit
137
+
138
+ content = @c.read
139
+ ds = content.split("\n").select{|d| !d.empty? }
140
+
141
+ assert_equal 2, ds.size
142
+ assert_equal d1, JSON.parse(ds[0])
143
+ assert_equal d2, JSON.parse(ds[1])
144
+
145
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
146
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
147
+ @c.append([d3.to_json + "\n", d4.to_json + "\n"])
148
+ @c.commit
149
+
150
+ content = @c.read
151
+ ds = content.split("\n").select{|d| !d.empty? }
152
+
153
+ assert_equal 4, ds.size
154
+ assert_equal d1, JSON.parse(ds[0])
155
+ assert_equal d2, JSON.parse(ds[1])
156
+ assert_equal d3, JSON.parse(ds[2])
157
+ assert_equal d4, JSON.parse(ds[3])
158
+ end
159
+
160
+ test 'can #concat, #commit and #read it' do
161
+ assert @c.empty?
162
+
163
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
164
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
165
+ data = [d1.to_json + "\n", d2.to_json + "\n"].join
166
+ @c.concat(data, 2)
167
+ @c.commit
168
+
169
+ content = @c.read
170
+ ds = content.split("\n").select{|d| !d.empty? }
171
+
172
+ assert_equal 2, ds.size
173
+ assert_equal d1, JSON.parse(ds[0])
174
+ assert_equal d2, JSON.parse(ds[1])
175
+
176
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
177
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
178
+ @c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
179
+ @c.commit
180
+
181
+ content = @c.read
182
+ ds = content.split("\n").select{|d| !d.empty? }
183
+
184
+ assert_equal 4, ds.size
185
+ assert_equal d1, JSON.parse(ds[0])
186
+ assert_equal d2, JSON.parse(ds[1])
187
+ assert_equal d3, JSON.parse(ds[2])
188
+ assert_equal d4, JSON.parse(ds[3])
189
+ end
190
+
191
+ test 'has its contents in binary (ascii-8bit)' do
192
+ data1 = "aaa bbb ccc".force_encoding('utf-8')
193
+ @c.append([data1])
194
+ @c.commit
195
+ assert_equal Encoding::ASCII_8BIT, @c.instance_eval{ @chunk.external_encoding }
196
+
197
+ content = @c.read
198
+ assert_equal Encoding::ASCII_8BIT, content.encoding
199
+ end
200
+
201
+ test 'has #bytesize and #size' do
202
+ assert @c.empty?
203
+
204
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
205
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
206
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
207
+ @c.append(data)
208
+
209
+ assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
210
+ assert_equal 2, @c.size
211
+
212
+ @c.commit
213
+
214
+ assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
215
+ assert_equal 2, @c.size
216
+
217
+ first_bytesize = @c.bytesize
218
+
219
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
220
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
221
+ @c.append([d3.to_json + "\n", d4.to_json + "\n"])
222
+
223
+ assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
224
+ assert_equal 4, @c.size
225
+
226
+ @c.commit
227
+
228
+ assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
229
+ assert_equal 4, @c.size
230
+ end
231
+
232
+ test 'can #rollback to revert non-committed data' do
233
+ assert @c.empty?
234
+
235
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
236
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
237
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
238
+ @c.append(data)
239
+
240
+ assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
241
+ assert_equal 2, @c.size
242
+
243
+ @c.rollback
244
+
245
+ assert @c.empty?
246
+
247
+ assert_equal '', File.open(@c.path, 'rb'){|f| f.read }
248
+
249
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
250
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
251
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
252
+ @c.append(data)
253
+ @c.commit
254
+
255
+ assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
256
+ assert_equal 2, @c.size
257
+
258
+ first_bytesize = @c.bytesize
259
+
260
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
261
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
262
+ @c.append([d3.to_json + "\n", d4.to_json + "\n"])
263
+
264
+ assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
265
+ assert_equal 4, @c.size
266
+
267
+ @c.rollback
268
+
269
+ assert_equal first_bytesize, @c.bytesize
270
+ assert_equal 2, @c.size
271
+
272
+ assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.open(@c.path, 'rb'){|f| f.read }
273
+ end
274
+
275
+ test 'can #rollback to revert non-committed data from #concat' do
276
+ assert @c.empty?
277
+
278
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
279
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
280
+ data = [d1.to_json + "\n", d2.to_json + "\n"].join
281
+ @c.concat(data, 2)
282
+
283
+ assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
284
+ assert_equal 2, @c.size
285
+
286
+ @c.rollback
287
+
288
+ assert @c.empty?
289
+
290
+ assert_equal '', File.open(@c.path, 'rb'){|f| f.read }
291
+
292
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
293
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
294
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
295
+ @c.append(data)
296
+ @c.commit
297
+
298
+ assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
299
+ assert_equal 2, @c.size
300
+
301
+ first_bytesize = @c.bytesize
302
+
303
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
304
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
305
+ @c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
306
+
307
+ assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
308
+ assert_equal 4, @c.size
309
+
310
+ @c.rollback
311
+
312
+ assert_equal first_bytesize, @c.bytesize
313
+ assert_equal 2, @c.size
314
+
315
+ assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.open(@c.path, 'rb'){|f| f.read }
316
+ end
317
+
318
+ test 'can store its data by #close' do
319
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
320
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
321
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
322
+ @c.append(data)
323
+ @c.commit
324
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
325
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
326
+ @c.append([d3.to_json + "\n", d4.to_json + "\n"])
327
+ @c.commit
328
+
329
+ content = @c.read
330
+
331
+ unique_id = @c.unique_id
332
+ size = @c.size
333
+ created_at = @c.created_at
334
+ modified_at = @c.modified_at
335
+
336
+ @c.close
337
+
338
+ assert_equal content, File.open(@c.path, 'rb'){|f| f.read }
339
+
340
+ stored_meta = {
341
+ timekey: nil, tag: nil, variables: nil,
342
+ id: unique_id,
343
+ s: size,
344
+ c: created_at.to_i,
345
+ m: modified_at.to_i,
346
+ }
347
+
348
+ assert_equal stored_meta, read_metadata_file(@c.path + '.meta')
349
+ end
350
+
351
+ test 'deletes all data by #purge' do
352
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
353
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
354
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
355
+ @c.append(data)
356
+ @c.commit
357
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
358
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
359
+ @c.append([d3.to_json + "\n", d4.to_json + "\n"])
360
+ @c.commit
361
+
362
+ @c.purge
363
+
364
+ assert @c.empty?
365
+ assert_equal 0, @c.bytesize
366
+ assert_equal 0, @c.size
367
+
368
+ assert !File.exist?(@c.path)
369
+ assert !File.exist?(@c.path + '.meta')
370
+ end
371
+
372
+ test 'can #open its contents as io' do
373
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
374
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
375
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
376
+ @c.append(data)
377
+ @c.commit
378
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
379
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
380
+ @c.append([d3.to_json + "\n", d4.to_json + "\n"])
381
+ @c.commit
382
+
383
+ lines = []
384
+ @c.open do |io|
385
+ assert io
386
+ io.readlines.each do |l|
387
+ lines << l
388
+ end
389
+ end
390
+
391
+ assert_equal d1.to_json + "\n", lines[0]
392
+ assert_equal d2.to_json + "\n", lines[1]
393
+ assert_equal d3.to_json + "\n", lines[2]
394
+ assert_equal d4.to_json + "\n", lines[3]
395
+ end
396
+
397
+ test 'can refer system config for file permission' do
398
+ omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
399
+
400
+ chunk_path = File.join(@chunkdir, 'testperm.*.log')
401
+ Fluent::SystemConfig.overwrite_system_config("file_permission" => "600") do
402
+ c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, chunk_path, :create)
403
+ assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
404
+ assert{ File.stat(c.path + '.meta').mode.to_s(8).end_with?('600') }
405
+ end
406
+ end
407
+
408
+ test '#write_metadata tries to store metadata on file' do
409
+ d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
410
+ d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
411
+ data = [d1.to_json + "\n", d2.to_json + "\n"]
412
+ @c.append(data)
413
+ @c.commit
414
+
415
+ expected = {
416
+ timekey: nil, tag: nil, variables: nil,
417
+ id: @c.unique_id,
418
+ s: @c.size,
419
+ c: @c.created_at.to_i,
420
+ m: @c.modified_at.to_i,
421
+ }
422
+ assert_equal expected, read_metadata_file(@c.path + '.meta')
423
+
424
+ d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
425
+ d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
426
+ @c.append([d3.to_json + "\n", d4.to_json + "\n"])
427
+ # append does write_metadata
428
+
429
+ dummy_now = Time.parse('2016-04-07 16:59:59 +0900')
430
+ Timecop.freeze(dummy_now)
431
+ @c.write_metadata
432
+
433
+ expected = {
434
+ timekey: nil, tag: nil, variables: nil,
435
+ id: @c.unique_id,
436
+ s: @c.size,
437
+ c: @c.created_at.to_i,
438
+ m: dummy_now.to_i,
439
+ }
440
+ assert_equal expected, read_metadata_file(@c.path + '.meta')
441
+
442
+ @c.commit
443
+
444
+ expected = {
445
+ timekey: nil, tag: nil, variables: nil,
446
+ id: @c.unique_id,
447
+ s: @c.size,
448
+ c: @c.created_at.to_i,
449
+ m: @c.modified_at.to_i,
450
+ }
451
+ assert_equal expected, read_metadata_file(@c.path + '.meta')
452
+
453
+ content = @c.read
454
+
455
+ unique_id = @c.unique_id
456
+ size = @c.size
457
+ created_at = @c.created_at
458
+ modified_at = @c.modified_at
459
+
460
+ @c.close
461
+
462
+ assert_equal content, File.open(@c.path, 'rb'){|f| f.read }
463
+
464
+ stored_meta = {
465
+ timekey: nil, tag: nil, variables: nil,
466
+ id: unique_id,
467
+ s: size,
468
+ c: created_at.to_i,
469
+ m: modified_at.to_i,
470
+ }
471
+
472
+ assert_equal stored_meta, read_metadata_file(@c.path + '.meta')
473
+ end
474
+ end
475
+
476
+ sub_test_case 'chunk with file for staged chunk' do
477
+ setup do
478
+ @chunk_id = gen_test_chunk_id
479
+ @chunk_path = File.join(@chunkdir, "test_staged.b#{hex_id(@chunk_id)}.log")
480
+ @enqueued_path = File.join(@chunkdir, "test_staged.q#{hex_id(@chunk_id)}.log")
481
+
482
+ @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
483
+ @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
484
+ @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
485
+ @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
486
+ @d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
487
+ File.open(@chunk_path, 'wb') do |f|
488
+ f.write @d
489
+ end
490
+
491
+ @metadata = {
492
+ timekey: nil, tag: 'testing', variables: {k: "x"},
493
+ id: @chunk_id,
494
+ s: 4,
495
+ c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
496
+ m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
497
+ }
498
+ File.open(@chunk_path + '.meta', 'wb') do |f|
499
+ f.write @metadata.to_msgpack
500
+ end
501
+
502
+ @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :staged)
503
+ end
504
+
505
+ teardown do
506
+ if @c
507
+ @c.purge rescue nil
508
+ end
509
+ [@chunk_path, @chunk_path + '.meta', @enqueued_path, @enqueued_path + '.meta'].each do |path|
510
+ File.unlink path if File.exist? path
511
+ end
512
+ end
513
+
514
+ test 'can load as staged chunk from file with metadata' do
515
+ assert_equal @chunk_path, @c.path
516
+ assert_equal :staged, @c.state
517
+
518
+ assert_nil @c.metadata.timekey
519
+ assert_equal 'testing', @c.metadata.tag
520
+ assert_equal({k: "x"}, @c.metadata.variables)
521
+
522
+ assert_equal 4, @c.size
523
+ assert_equal Time.parse('2016-04-07 17:44:00 +0900'), @c.created_at
524
+ assert_equal Time.parse('2016-04-07 17:44:13 +0900'), @c.modified_at
525
+
526
+ content = @c.read
527
+ assert_equal @d, content
528
+ end
529
+
530
+ test 'can be enqueued' do
531
+ stage_path = @c.path
532
+ queue_path = @enqueued_path
533
+ assert File.exist?(stage_path)
534
+ assert File.exist?(stage_path + '.meta')
535
+ assert !File.exist?(queue_path)
536
+ assert !File.exist?(queue_path + '.meta')
537
+
538
+ @c.enqueued!
539
+
540
+ assert_equal queue_path, @c.path
541
+
542
+ assert !File.exist?(stage_path)
543
+ assert !File.exist?(stage_path + '.meta')
544
+ assert File.exist?(queue_path)
545
+ assert File.exist?(queue_path + '.meta')
546
+
547
+ assert_nil @c.metadata.timekey
548
+ assert_equal 'testing', @c.metadata.tag
549
+ assert_equal({k: "x"}, @c.metadata.variables)
550
+
551
+ assert_equal 4, @c.size
552
+ assert_equal Time.parse('2016-04-07 17:44:00 +0900'), @c.created_at
553
+ assert_equal Time.parse('2016-04-07 17:44:13 +0900'), @c.modified_at
554
+
555
+ assert_equal @d, File.open(@c.path, 'rb'){|f| f.read }
556
+ assert_equal @metadata, read_metadata_file(@c.path + '.meta')
557
+ end
558
+
559
+ test '#write_metadata tries to store metadata on file with non-committed data' do
560
+ d5 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
561
+ d5s = d5.to_json + "\n"
562
+ @c.append([d5s])
563
+
564
+ metadata = {
565
+ timekey: nil, tag: 'testing', variables: {k: "x"},
566
+ id: @chunk_id,
567
+ s: 4,
568
+ c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
569
+ m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
570
+ }
571
+ assert_equal metadata, read_metadata_file(@c.path + '.meta')
572
+
573
+ @c.write_metadata
574
+
575
+ metadata = {
576
+ timekey: nil, tag: 'testing', variables: {k: "x"},
577
+ id: @chunk_id,
578
+ s: 5,
579
+ c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
580
+ m: Time.parse('2016-04-07 17:44:38 +0900').to_i,
581
+ }
582
+
583
+ dummy_now = Time.parse('2016-04-07 17:44:38 +0900')
584
+ Timecop.freeze(dummy_now)
585
+ @c.write_metadata
586
+
587
+ assert_equal metadata, read_metadata_file(@c.path + '.meta')
588
+ end
589
+
590
+ test '#file_rename can rename chunk files even in windows, and call callback with file size' do
591
+ data = "aaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccccccccc"
592
+
593
+ testing_file1 = gen_path('rename1.test')
594
+ testing_file2 = gen_path('rename2.test')
595
+ f = File.open(testing_file1, 'wb', @c.permission)
596
+ f.set_encoding(Encoding::ASCII_8BIT)
597
+ f.sync = true
598
+ f.binmode
599
+ f.write data
600
+ pos = f.pos
601
+
602
+ assert f.binmode?
603
+ assert f.sync
604
+ assert_equal data.bytesize, f.size
605
+
606
+ io = nil
607
+ @c.file_rename(f, testing_file1, testing_file2, ->(new_io){ io = new_io })
608
+ assert io
609
+ if Fluent.windows?
610
+ assert{ f != io }
611
+ else
612
+ assert_equal f, io
613
+ end
614
+ assert_equal Encoding::ASCII_8BIT, io.external_encoding
615
+ assert io.sync
616
+ assert io.binmode?
617
+ assert_equal data.bytesize, io.size
618
+
619
+ assert_equal pos, io.pos
620
+
621
+ assert_equal '', io.read
622
+
623
+ io.rewind
624
+ assert_equal data, io.read
625
+ end
626
+ end
627
+
628
+ sub_test_case 'chunk with file for enqueued chunk' do
629
+ setup do
630
+ @chunk_id = gen_test_chunk_id
631
+ @enqueued_path = File.join(@chunkdir, "test_staged.q#{hex_id(@chunk_id)}.log")
632
+
633
+ @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
634
+ @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
635
+ @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
636
+ @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
637
+ @d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
638
+ File.open(@enqueued_path, 'wb') do |f|
639
+ f.write @d
640
+ end
641
+
642
+ @dummy_timekey = Time.parse('2016-04-07 17:40:00 +0900').to_i
643
+
644
+ @metadata = {
645
+ timekey: @dummy_timekey, tag: 'testing', variables: {k: "x"},
646
+ id: @chunk_id,
647
+ s: 4,
648
+ c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
649
+ m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
650
+ }
651
+ File.open(@enqueued_path + '.meta', 'wb') do |f|
652
+ f.write @metadata.to_msgpack
653
+ end
654
+
655
+ @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @enqueued_path, :queued)
656
+ end
657
+
658
+ teardown do
659
+ if @c
660
+ @c.purge rescue nil
661
+ end
662
+ [@enqueued_path, @enqueued_path + '.meta'].each do |path|
663
+ File.unlink path if File.exist? path
664
+ end
665
+ end
666
+
667
+ test 'can load as queued chunk (read only) with metadata' do
668
+ assert @c
669
+ assert_equal @chunk_id, @c.unique_id
670
+ assert_equal :queued, @c.state
671
+ assert_equal gen_metadata(timekey: @dummy_timekey, tag: 'testing', variables: {k: "x"}), @c.metadata
672
+ assert_equal Time.at(@metadata[:c]), @c.created_at
673
+ assert_equal Time.at(@metadata[:m]), @c.modified_at
674
+ assert_equal @metadata[:s], @c.size
675
+ assert_equal @d.bytesize, @c.bytesize
676
+ assert_equal @d, @c.read
677
+
678
+ assert_raise "BUG: appending to non-staged chunk, now 'queued'" do
679
+ @c.append(["queued chunk is read only"])
680
+ end
681
+ assert_raise IOError do
682
+ @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
683
+ end
684
+ end
685
+ end
686
+
687
+ sub_test_case 'chunk with queued chunk file of v0.12, without metadata' do
688
+ setup do
689
+ @chunk_id = gen_test_chunk_id
690
+ @chunk_path = File.join(@chunkdir, "test_v12.2016040811.q#{hex_id(@chunk_id)}.log")
691
+
692
+ @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
693
+ @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
694
+ @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
695
+ @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
696
+ @d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
697
+ File.open(@chunk_path, 'wb') do |f|
698
+ f.write @d
699
+ end
700
+
701
+ @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :queued)
702
+ end
703
+
704
+ teardown do
705
+ if @c
706
+ @c.purge rescue nil
707
+ end
708
+ File.unlink @chunk_path if File.exist? @chunk_path
709
+ end
710
+
711
+ test 'can load as queued chunk from file without metadata' do
712
+ assert @c
713
+ assert_equal :queued, @c.state
714
+ assert_equal @chunk_id, @c.unique_id
715
+ assert_equal gen_metadata, @c.metadata
716
+ assert_equal @d.bytesize, @c.bytesize
717
+ assert_equal 0, @c.size
718
+ assert_equal @d, @c.read
719
+
720
+ assert_raise "BUG: appending to non-staged chunk, now 'queued'" do
721
+ @c.append(["queued chunk is read only"])
722
+ end
723
+ assert_raise IOError do
724
+ @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
725
+ end
726
+ end
727
+ end
728
+
729
+ sub_test_case 'chunk with staged chunk file of v0.12, without metadata' do
730
+ setup do
731
+ @chunk_id = gen_test_chunk_id
732
+ @chunk_path = File.join(@chunkdir, "test_v12.2016040811.b#{hex_id(@chunk_id)}.log")
733
+
734
+ @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
735
+ @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
736
+ @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
737
+ @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
738
+ @d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
739
+ File.open(@chunk_path, 'wb') do |f|
740
+ f.write @d
741
+ end
742
+
743
+ @c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :staged)
744
+ end
745
+
746
+ teardown do
747
+ if @c
748
+ @c.purge rescue nil
749
+ end
750
+ File.unlink @chunk_path if File.exist? @chunk_path
751
+ end
752
+
753
+ test 'can load as queued chunk from file without metadata even if it was loaded as staged chunk' do
754
+ assert @c
755
+ assert_equal :queued, @c.state
756
+ assert_equal @chunk_id, @c.unique_id
757
+ assert_equal gen_metadata, @c.metadata
758
+ assert_equal @d.bytesize, @c.bytesize
759
+ assert_equal 0, @c.size
760
+ assert_equal @d, @c.read
761
+
762
+ assert_raise "BUG: appending to non-staged chunk, now 'queued'" do
763
+ @c.append(["queued chunk is read only"])
764
+ end
765
+ assert_raise IOError do
766
+ @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
767
+ end
768
+ end
769
+ end
770
+ end