fluentd 1.6.3 → 1.7.0.rc1

Potentially problematic release: 1.7.0.rc1 is a release-candidate build, so this version of fluentd might be problematic.

Files changed (78)
  1. checksums.yaml +4 -4
  2. data/.drone.yml +35 -0
  3. data/.github/ISSUE_TEMPLATE/bug_report.md +2 -0
  4. data/README.md +5 -1
  5. data/fluentd.gemspec +1 -1
  6. data/lib/fluent/clock.rb +4 -0
  7. data/lib/fluent/compat/output.rb +3 -3
  8. data/lib/fluent/compat/socket_util.rb +1 -1
  9. data/lib/fluent/config/element.rb +3 -3
  10. data/lib/fluent/config/literal_parser.rb +1 -1
  11. data/lib/fluent/config/section.rb +4 -1
  12. data/lib/fluent/error.rb +4 -0
  13. data/lib/fluent/event.rb +28 -24
  14. data/lib/fluent/event_router.rb +2 -1
  15. data/lib/fluent/log.rb +1 -1
  16. data/lib/fluent/msgpack_factory.rb +8 -0
  17. data/lib/fluent/plugin/bare_output.rb +4 -4
  18. data/lib/fluent/plugin/buf_file_single.rb +211 -0
  19. data/lib/fluent/plugin/buffer.rb +62 -63
  20. data/lib/fluent/plugin/buffer/chunk.rb +21 -3
  21. data/lib/fluent/plugin/buffer/file_chunk.rb +37 -12
  22. data/lib/fluent/plugin/buffer/file_single_chunk.rb +314 -0
  23. data/lib/fluent/plugin/buffer/memory_chunk.rb +2 -1
  24. data/lib/fluent/plugin/compressable.rb +10 -6
  25. data/lib/fluent/plugin/filter_grep.rb +2 -2
  26. data/lib/fluent/plugin/formatter_csv.rb +10 -6
  27. data/lib/fluent/plugin/in_syslog.rb +10 -3
  28. data/lib/fluent/plugin/in_tail.rb +7 -2
  29. data/lib/fluent/plugin/in_tcp.rb +34 -7
  30. data/lib/fluent/plugin/multi_output.rb +4 -4
  31. data/lib/fluent/plugin/out_exec_filter.rb +1 -0
  32. data/lib/fluent/plugin/out_file.rb +13 -3
  33. data/lib/fluent/plugin/out_forward.rb +126 -588
  34. data/lib/fluent/plugin/out_forward/ack_handler.rb +161 -0
  35. data/lib/fluent/plugin/out_forward/connection_manager.rb +113 -0
  36. data/lib/fluent/plugin/out_forward/error.rb +28 -0
  37. data/lib/fluent/plugin/out_forward/failure_detector.rb +84 -0
  38. data/lib/fluent/plugin/out_forward/handshake_protocol.rb +121 -0
  39. data/lib/fluent/plugin/out_forward/load_balancer.rb +111 -0
  40. data/lib/fluent/plugin/out_forward/socket_cache.rb +138 -0
  41. data/lib/fluent/plugin/out_http.rb +231 -0
  42. data/lib/fluent/plugin/output.rb +29 -35
  43. data/lib/fluent/plugin/parser.rb +77 -0
  44. data/lib/fluent/plugin/parser_csv.rb +75 -0
  45. data/lib/fluent/plugin_helper/server.rb +1 -1
  46. data/lib/fluent/plugin_helper/thread.rb +1 -0
  47. data/lib/fluent/root_agent.rb +1 -1
  48. data/lib/fluent/time.rb +4 -2
  49. data/lib/fluent/timezone.rb +21 -7
  50. data/lib/fluent/version.rb +1 -1
  51. data/test/command/test_fluentd.rb +1 -1
  52. data/test/command/test_plugin_generator.rb +18 -2
  53. data/test/config/test_configurable.rb +78 -40
  54. data/test/counter/test_store.rb +1 -1
  55. data/test/helper.rb +1 -0
  56. data/test/helpers/process_extenstion.rb +33 -0
  57. data/test/plugin/out_forward/test_ack_handler.rb +101 -0
  58. data/test/plugin/out_forward/test_connection_manager.rb +145 -0
  59. data/test/plugin/out_forward/test_handshake_protocol.rb +103 -0
  60. data/test/plugin/out_forward/test_load_balancer.rb +60 -0
  61. data/test/plugin/out_forward/test_socket_cache.rb +139 -0
  62. data/test/plugin/test_buf_file.rb +118 -2
  63. data/test/plugin/test_buf_file_single.rb +734 -0
  64. data/test/plugin/test_buffer.rb +4 -48
  65. data/test/plugin/test_buffer_file_chunk.rb +19 -1
  66. data/test/plugin/test_buffer_file_single_chunk.rb +620 -0
  67. data/test/plugin/test_formatter_csv.rb +16 -0
  68. data/test/plugin/test_in_syslog.rb +56 -6
  69. data/test/plugin/test_in_tail.rb +1 -1
  70. data/test/plugin/test_in_tcp.rb +25 -0
  71. data/test/plugin/test_out_forward.rb +75 -201
  72. data/test/plugin/test_out_http.rb +352 -0
  73. data/test/plugin/test_output_as_buffered.rb +27 -24
  74. data/test/plugin/test_parser.rb +40 -0
  75. data/test/plugin/test_parser_csv.rb +83 -0
  76. data/test/plugin_helper/test_record_accessor.rb +1 -1
  77. data/test/test_time_formatter.rb +140 -121
  78. metadata +35 -6
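
Reading the list as a whole: the headline change is the decomposition of out_forward.rb, which drops 588 lines while adding back only 126, splitting into six single-purpose components (ack handling, connection management, failure detection, handshake, load balancing, socket caching), each with its own test file; the release also adds a file_single buffer type, an out_http output, and CSV parser/formatter work. The hunks excerpted below cover only the buffer-side changes; note that three of the test_buffer.rb hunks change trailing whitespace only, so their - and + lines look identical once the line-number gutters are removed. As orientation for the out_forward refactor, here is a hypothetical Ruby sketch of the role a file like load_balancer.rb typically plays; the class and behavior are assumptions based on the file name alone, not fluentd's implementation:

# Toy round-robin balancer; entirely illustrative, not fluentd code.
class ToyLoadBalancer
  def initialize(nodes)
    @nodes = nodes # e.g. [{ name: 'node1', available: true }, ...]
    @rr = 0        # round-robin cursor
  end

  # Return the next available node, skipping unhealthy ones.
  def select_node
    @nodes.size.times do
      node = @nodes[@rr % @nodes.size]
      @rr += 1
      return node if node[:available]
    end
    raise 'no nodes available'
  end
end

lb = ToyLoadBalancer.new([{ name: 'a', available: false },
                          { name: 'b', available: true }])
lb.select_node # => { name: 'b', available: true }
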
data/test/plugin/test_buffer.rb:
@@ -185,7 +185,6 @@ class BufferTest < Test::Unit::TestCase
     assert_equal([], plugin.queue)
     assert_equal({}, plugin.dequeued)
     assert_equal({}, plugin.queued_num)
-    assert_equal([], plugin.metadata_list)

     assert_equal 0, plugin.stage_size
     assert_equal 0, plugin.queue_size
@@ -207,7 +206,6 @@ class BufferTest < Test::Unit::TestCase
     assert_equal 203, @p.queue_size

     # staged, queued
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
     assert_equal 1, @p.queued_num[@dm0]
     assert_equal 2, @p.queued_num[@dm1]
   end
@@ -240,46 +238,11 @@ class BufferTest < Test::Unit::TestCase
     assert_nil @p.queue
     assert_nil @p.dequeued
     assert_nil @p.queued_num
-    assert_nil @p.instance_eval{ @metadata_list } # #metadata_list does #dup for @metadata_list
     assert_equal 0, @p.stage_size
     assert_equal 0, @p.queue_size
     assert_equal [], @p.timekeys
   end

-  test '#metadata_list returns list of metadata on stage or in queue' do
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
-  end
-
-  test '#new_metadata creates metadata instance without inserting metadata_list' do
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
-    _m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
-  end
-
-  test '#add_metadata adds unknown metadata into list, or return known metadata if already exists' do
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
-
-    m = @p.new_metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
-    _mx = @p.add_metadata(m)
-    assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
-    assert_equal m.object_id, m.object_id
-
-    my = @p.add_metadata(@dm1)
-    assert_equal [@dm2,@dm3,@dm0,@dm1,m], @p.metadata_list
-    assert_equal @dm1, my
-    assert{ @dm1.object_id != my.object_id } # 'my' is an object created in #resume
-  end
-
-  test '#metadata is utility method to create-add-and-return metadata' do
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
-
-    m1 = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
-    assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
-    m2 = @p.metadata(timekey: @dm3.timekey)
-    assert_equal [@dm2,@dm3,@dm0,@dm1,m1], @p.metadata_list
-    assert_equal @dm3, m2
-  end
-
   test '#queued_records returns total number of size in all chunks in queue' do
     assert_equal 3, @p.queue.size

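The tests deleted above are the clearest record of what 1.7.0.rc1 removes from the buffer API: #metadata_list enumerated metadata on stage or in queue, #new_metadata built an instance without registering it, #add_metadata registered unknown metadata or returned the already-known instance, and #metadata combined "create-add-and-return". A standalone toy model of that retired contract, for readers tracking the change (ToyBuffer is illustrative, not fluentd's Buffer class):

# Toy model of the buffer-metadata contract documented by the deleted tests.
class ToyBuffer
  Metadata = Struct.new(:timekey)

  def initialize
    @metadata_list = []
  end

  # builds an instance without inserting it into @metadata_list
  def new_metadata(timekey:)
    Metadata.new(timekey)
  end

  # adds unknown metadata to the list, or returns the known instance
  def add_metadata(m)
    known = @metadata_list.find { |x| x == m }
    known || (@metadata_list << m; m)
  end

  # "create-add-and-return", as the deleted test title put it
  def metadata(timekey:)
    add_metadata(new_metadata(timekey: timekey))
  end
end
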
@@ -430,7 +393,6 @@ class BufferTest < Test::Unit::TestCase
   test '#purge_chunk removes a chunk specified by argument id from dequeued chunks' do
     assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
     assert_equal({}, @p.dequeued)
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

     m0 = @p.dequeue_chunk
     m1 = @p.dequeue_chunk
@@ -447,13 +409,11 @@ class BufferTest < Test::Unit::TestCase

     assert_equal [@dm0,@dm1], @p.queue.map(&:metadata)
     assert_equal({}, @p.dequeued)
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list
   end

-  test '#purge_chunk removes an argument metadata from metadata_list if no chunks exist on stage or in queue' do
+  test '#purge_chunk removes an argument metadata if no chunks exist on stage or in queue' do
     assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
     assert_equal({}, @p.dequeued)
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

     m0 = @p.dequeue_chunk

@@ -467,13 +427,11 @@ class BufferTest < Test::Unit::TestCase

     assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
     assert_equal({}, @p.dequeued)
-    assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
   end

   test '#takeback_chunk returns false if specified chunk_id is already purged' do
     assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
     assert_equal({}, @p.dequeued)
-    assert_equal [@dm2,@dm3,@dm0,@dm1], @p.metadata_list

     m0 = @p.dequeue_chunk

@@ -487,13 +445,11 @@ class BufferTest < Test::Unit::TestCase

     assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
     assert_equal({}, @p.dequeued)
-    assert_equal [@dm2,@dm3,@dm1], @p.metadata_list

     assert !@p.takeback_chunk(m0.unique_id)

     assert_equal [@dm1,@dm1], @p.queue.map(&:metadata)
     assert_equal({}, @p.dequeued)
-    assert_equal [@dm2,@dm3,@dm1], @p.metadata_list
   end

   test '#clear_queue! removes all chunks in queue, but leaves staged chunks' do
@@ -575,7 +531,7 @@ class BufferTest < Test::Unit::TestCase
     assert !@p.timekeys.include?(timekey)

     prev_stage_size = @p.stage_size
-
+
     m = @p.metadata(timekey: timekey)

     @p.write({m => ["x" * 256, "y" * 256, "z" * 256]})
@@ -695,7 +651,7 @@ class BufferTest < Test::Unit::TestCase

     assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
     assert_equal [@dm2,@dm3], @p.stage.keys
-
+
     timekey = Time.parse('2016-04-11 16:40:00 +0000').to_i
     assert !@p.timekeys.include?(timekey)

@@ -718,7 +674,7 @@ class BufferTest < Test::Unit::TestCase
     assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
     assert_equal [@dm2,@dm3,m], @p.stage.keys
     assert_equal 1, @p.stage[m].append_count
-
+
     assert @p.timekeys.include?(timekey)
   end

data/test/plugin/test_buffer_file_chunk.rb:
@@ -27,7 +27,25 @@ class BufferFileChunkTest < Test::Unit::TestCase
   end

   def read_metadata_file(path)
-    File.open(path, 'rb'){|f| MessagePack.unpack(f.read, symbolize_keys: true) }
+    File.open(path, 'rb') do |f|
+      chunk = f.read
+      if chunk.size <= 6 # size of BUFFER_HEADER (2) + size of data(4)
+        return nil
+      end
+
+      data = nil
+      if chunk.slice(0, 2) == Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER
+        size = chunk.slice(2, 4).unpack('N').first
+        if size
+          data = MessagePack.unpack(chunk.slice(6, size), symbolize_keys: true)
+        end
+      else
+        # old type
+        data = MessagePack.unpack(chunk, symbolize_keys: true)
+      end
+
+      data
+    end
   end

   def gen_path(path)
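
The rewritten read_metadata_file helper tracks a new on-disk framing for file-chunk metadata: a 2-byte BUFFER_HEADER, a 4-byte big-endian payload size, then the msgpack body, with bare msgpack still accepted as the pre-1.7 fallback. A self-contained round-trip of that framing, using placeholder header bytes (an assumption; in practice the constant is Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER):

require 'msgpack'

BUFFER_HEADER = "\xc1\x00".b # assumed stand-in for FileChunk::BUFFER_HEADER

meta = { timekey: nil, tag: 'testing', variables: nil }
payload = MessagePack.pack(meta)

# header + 4-byte big-endian ('N') length + msgpack body
record = BUFFER_HEADER + [payload.bytesize].pack('N') + payload

# reading mirrors the new read_metadata_file above
data =
  if record.byteslice(0, 2) == BUFFER_HEADER
    size = record.byteslice(2, 4).unpack('N').first
    MessagePack.unpack(record.byteslice(6, size), symbolize_keys: true)
  else
    MessagePack.unpack(record, symbolize_keys: true) # old bare-msgpack format
  end
data # => {:timekey=>nil, :tag=>"testing", :variables=>nil}
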
data/test/plugin/test_buffer_file_single_chunk.rb (new file):
@@ -0,0 +1,620 @@
+require_relative '../helper'
+require 'fluent/plugin/buffer/file_single_chunk'
+require 'fluent/plugin/compressable'
+require 'fluent/unique_id'
+
+require 'fileutils'
+require 'msgpack'
+require 'time'
+
+class BufferFileSingleChunkTest < Test::Unit::TestCase
+  include Fluent::Plugin::Compressable
+
+  setup do
+    @klass = Fluent::Plugin::Buffer::FileSingleChunk
+    @chunkdir = File.expand_path('../../tmp/buffer_file_single_chunk', __FILE__)
+    FileUtils.rm_r(@chunkdir) rescue nil
+    FileUtils.mkdir_p(@chunkdir)
+  end
+
+  Metadata = Struct.new(:timekey, :tag, :variables)
+  def gen_metadata(timekey: nil, tag: 'testing', variables: nil)
+    Metadata.new(timekey, tag, variables)
+  end
+
+  def gen_path(path)
+    File.join(@chunkdir, path)
+  end
+
+  def gen_test_chunk_id
+    now = Time.parse('2016-04-07 14:31:33 +0900')
+    u1 = ((now.to_i * 1000 * 1000 + now.usec) << 12 | 1725) # 1725 is one of `rand(0xfff)`
+    u3 = 2979763054 # one of rand(0xffffffff)
+    u4 = 438020492 # ditto
+    [u1 >> 32, u1 & 0xffffffff, u3, u4].pack('NNNN')
+    # unique_id.unpack('N*').map{|n| n.to_s(16)}.join => "52fde6425d7406bdb19b936e1a1ba98c"
+  end
+
+  def hex_id(id)
+    id.unpack('N*').map { |n| n.to_s(16) }.join
+  end
+
+  sub_test_case 'classmethods' do
+    data(
+      correct_staged: ['/mydir/mypath/fsb.b00ff.buf', :staged],
+      correct_queued: ['/mydir/mypath/fsb.q00ff.buf', :queued],
+      incorrect_staged: ['/mydir/mypath/fsb.b00ff.buf/unknown', :unknown],
+      incorrect_queued: ['/mydir/mypath/fsb.q00ff.buf/unknown', :unknown],
+      output_file: ['/mydir/mypath/fsb.20160716.buf', :unknown],
+    )
+    test 'can .assume_chunk_state' do |data|
+      path, expected = data
+      assert_equal expected, @klass.assume_chunk_state(path)
+    end
+
+    test '.generate_stage_chunk_path generates path with staged mark & chunk unique_id' do
+      assert_equal gen_path("fsb.foo.b52fde6425d7406bdb19b936e1a1ba98c.buf"), @klass.generate_stage_chunk_path(gen_path("fsb.*.buf"), 'foo', gen_test_chunk_id)
+      assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
+        @klass.generate_stage_chunk_path(gen_path("fsb.buf"), 'foo', gen_test_chunk_id)
+      end
+      assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
+        @klass.generate_stage_chunk_path(gen_path("fsb.*"), 'foo', gen_test_chunk_id)
+      end
+      assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
+        @klass.generate_stage_chunk_path(gen_path("*.buf"), 'foo', gen_test_chunk_id)
+      end
+    end
+
+    test '.generate_queued_chunk_path generates path with enqueued mark for staged chunk path' do
+      assert_equal(
+        gen_path("fsb.q52fde6425d7406bdb19b936e1a1ba98c.buf"),
+        @klass.generate_queued_chunk_path(gen_path("fsb.b52fde6425d7406bdb19b936e1a1ba98c.buf"), gen_test_chunk_id)
+      )
+    end
+
+    test '.generate_queued_chunk_path generates special path with chunk unique_id for non staged chunk path' do
+      assert_equal(
+        gen_path("fsb.buf.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
+        @klass.generate_queued_chunk_path(gen_path("fsb.buf"), gen_test_chunk_id)
+      )
+      assert_equal(
+        gen_path("fsb.q55555555555555555555555555555555.buf.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
+        @klass.generate_queued_chunk_path(gen_path("fsb.q55555555555555555555555555555555.buf"), gen_test_chunk_id)
+      )
+    end
+
+    data('1 word tag' => 'foo',
+         '2 words tag' => 'test.log',
+         'empty' => '')
+    test '.unique_id_and_key_from_path recreates unique_id and key from file path' do |key|
+      path = @klass.unique_id_and_key_from_path(gen_path("fsb.#{key}.q52fde6425d7406bdb19b936e1a1ba98c.buf"))
+      assert_equal [gen_test_chunk_id, key], path
+    end
+  end
+
+  sub_test_case 'newly created chunk' do
+    setup do
+      @path_conf = File.join(@chunkdir, 'fsb.*.buf')
+      @chunk_path = File.join(@chunkdir, "fsb.testing.b#{hex_id(gen_test_chunk_id)}.buf")
+      @c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, @path_conf, :create, nil)
+    end
+
+    def gen_chunk_path(prefix, unique_id)
+      File.join(@chunkdir, "fsb.testing.#{prefix}#{Fluent::UniqueId.hex(unique_id)}.buf")
+    end
+
+    teardown do
+      if @c
+        @c.purge rescue nil
+      end
+      if File.exist?(@chunk_path)
+        File.unlink(@chunk_path)
+      end
+    end
+
+    test 'creates new files for chunk and metadata with specified path & permission' do
+      assert_equal 16, @c.unique_id.size
+      assert_equal gen_chunk_path('b', @c.unique_id), @c.path
+
+      assert File.exist?(gen_chunk_path('b', @c.unique_id))
+      assert { File.stat(gen_chunk_path('b', @c.unique_id)).mode.to_s(8).end_with?(@klass.const_get('FILE_PERMISSION').to_s(8)) }
+
+      assert_equal :unstaged, @c.state
+      assert @c.empty?
+    end
+
+    test 'can #append, #commit and #read it' do
+      assert @c.empty?
+
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"]
+      @c.append(data)
+      @c.commit
+
+      ds = @c.read.split("\n").select { |d| !d.empty? }
+      assert_equal 2, ds.size
+      assert_equal d1, JSON.parse(ds[0])
+      assert_equal d2, JSON.parse(ds[1])
+
+      d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @c.append([d3.to_json + "\n", d4.to_json + "\n"])
+      @c.commit
+
+      ds = @c.read.split("\n").select{|d| !d.empty? }
+      assert_equal 4, ds.size
+      assert_equal d1, JSON.parse(ds[0])
+      assert_equal d2, JSON.parse(ds[1])
+      assert_equal d3, JSON.parse(ds[2])
+      assert_equal d4, JSON.parse(ds[3])
+    end
+
+    test 'can #concat, #commit and #read it' do
+      assert @c.empty?
+
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"].join
+      @c.concat(data, 2)
+      @c.commit
+
+      ds = @c.read.split("\n").select{|d| !d.empty? }
+      assert_equal 2, ds.size
+      assert_equal d1, JSON.parse(ds[0])
+      assert_equal d2, JSON.parse(ds[1])
+
+      d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
+      @c.commit
+
+      ds = @c.read.split("\n").select { |d| !d.empty? }
+      assert_equal 4, ds.size
+      assert_equal d1, JSON.parse(ds[0])
+      assert_equal d2, JSON.parse(ds[1])
+      assert_equal d3, JSON.parse(ds[2])
+      assert_equal d4, JSON.parse(ds[3])
+    end
+
+    test 'has its contents in binary (ascii-8bit)' do
+      data1 = "aaa bbb ccc".force_encoding('utf-8')
+      @c.append([data1])
+      @c.commit
+      assert_equal Encoding::ASCII_8BIT, @c.instance_eval{ @chunk.external_encoding }
+      assert_equal Encoding::ASCII_8BIT, @c.read.encoding
+    end
+
+    test 'has #bytesize and #size' do
+      assert @c.empty?
+
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"]
+      @c.append(data)
+
+      assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 2, @c.size
+
+      @c.commit
+
+      assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 2, @c.size
+
+      first_bytesize = @c.bytesize
+      d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @c.append([d3.to_json + "\n", d4.to_json + "\n"])
+
+      assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 4, @c.size
+
+      @c.commit
+
+      assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 4, @c.size
+    end
+
+    test 'can #rollback to revert non-committed data' do
+      assert @c.empty?
+
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"]
+      @c.append(data)
+
+      assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 2, @c.size
+
+      @c.rollback
+
+      assert @c.empty?
+      assert_equal '', File.read(@c.path)
+
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"]
+      @c.append(data)
+      @c.commit
+
+      assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 2, @c.size
+
+      first_bytesize = @c.bytesize
+      d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @c.append([d3.to_json + "\n", d4.to_json + "\n"])
+
+      assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 4, @c.size
+
+      @c.rollback
+
+      assert_equal first_bytesize, @c.bytesize
+      assert_equal 2, @c.size
+      assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.read(@c.path)
+    end
+
+    test 'can #rollback to revert non-committed data from #concat' do
+      assert @c.empty?
+
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"].join
+      @c.concat(data, 2)
+
+      assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 2, @c.size
+
+      @c.rollback
+
+      assert @c.empty?
+      assert_equal '', File.read(@c.path)
+
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"]
+      @c.append(data)
+      @c.commit
+
+      assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 2, @c.size
+
+      first_bytesize = @c.bytesize
+      d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
+
+      assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
+      assert_equal 4, @c.size
+
+      @c.rollback
+
+      assert_equal first_bytesize, @c.bytesize
+      assert_equal 2, @c.size
+      assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.read(@c.path)
+    end
+
+    test 'can store its data by #close' do
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"]
+      @c.append(data)
+      @c.commit
+      d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @c.append([d3.to_json + "\n", d4.to_json + "\n"])
+      @c.commit
+      content = @c.read
+      @c.close
+
+      assert_equal content, File.read(@c.path)
+    end
+
+    test 'deletes all data by #purge' do
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"]
+      @c.append(data)
+      @c.commit
+      d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @c.append([d3.to_json + "\n", d4.to_json + "\n"])
+      @c.commit
+      @c.purge
+
+      assert @c.empty?
+      assert_equal 0, @c.bytesize
+      assert_equal 0, @c.size
+      assert !File.exist?(@c.path)
+    end
+
+    test 'can #open its contents as io' do
+      d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      data = [d1.to_json + "\n", d2.to_json + "\n"]
+      @c.append(data)
+      @c.commit
+      d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @c.append([d3.to_json + "\n", d4.to_json + "\n"])
+      @c.commit
+
+      lines = []
+      @c.open do |io|
+        assert io
+        io.readlines.each do |l|
+          lines << l
+        end
+      end
+
+      assert_equal d1.to_json + "\n", lines[0]
+      assert_equal d2.to_json + "\n", lines[1]
+      assert_equal d3.to_json + "\n", lines[2]
+      assert_equal d4.to_json + "\n", lines[3]
+    end
+
+    test 'can refer system config for file permission' do
+      omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
+
+      chunk_path = File.join(@chunkdir, 'fsb.*.buf')
+      Fluent::SystemConfig.overwrite_system_config("file_permission" => "600") do
+        c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, chunk_path, :create, nil)
+        assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
+      end
+    end
+  end
+
+  sub_test_case 'chunk with file for staged chunk' do
+    setup do
+      @chunk_id = gen_test_chunk_id
+      @staged_path = File.join(@chunkdir, "fsb.testing.b#{hex_id(@chunk_id)}.buf")
+      @enqueued_path = File.join(@chunkdir, "fsb.testing.q#{hex_id(@chunk_id)}.buf")
+
+      @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @d = [@d1, @d2, @d3, @d4].map{ |d| d.to_json + "\n" }.join
+      File.write(@staged_path, @d, :mode => 'wb')
+
+      @c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, @staged_path, :staged, nil)
+    end
+
+    teardown do
+      if @c
+        @c.purge rescue nil
+      end
+      [@staged_path, @enqueued_path].each do |path|
+        File.unlink(path) if File.exist?(path)
+      end
+    end
+
+    test 'can load as staged chunk from file with metadata' do
+      assert_equal @staged_path, @c.path
+      assert_equal :staged, @c.state
+
+      assert_nil @c.metadata.timekey
+      assert_equal 'testing', @c.metadata.tag
+      assert_nil @c.metadata.variables
+      assert_equal 0, @c.size
+      assert_equal @d, @c.read
+
+      @c.restore_size(:text)
+      assert_equal 4, @c.size
+    end
+
+    test 'can be enqueued' do
+      stage_path = @c.path
+      queue_path = @enqueued_path
+      assert File.exist?(stage_path)
+      assert !File.exist?(queue_path)
+
+      @c.enqueued!
+
+      assert_equal queue_path, @c.path
+      assert !File.exist?(stage_path)
+      assert File.exist?(queue_path)
+
+      assert_nil @c.metadata.timekey
+      assert_equal 'testing', @c.metadata.tag
+      assert_nil @c.metadata.variables
+
+      assert_equal 0, @c.size
+      assert_equal @d, File.read(@c.path)
+
+      @c.restore_size(:text)
+      assert_equal 4, @c.size
+    end
+
+    test '#file_rename can rename chunk files even in windows, and call callback with file size' do
+      data = "aaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccccccccc"
+
+      testing_file1 = gen_path('rename1.test')
+      testing_file2 = gen_path('rename2.test')
+      f = File.open(testing_file1, 'wb', @c.permission)
+      f.set_encoding(Encoding::ASCII_8BIT)
+      f.sync = true
+      f.binmode
+      f.write data
+      pos = f.pos
+
+      assert f.binmode?
+      assert f.sync
+      assert_equal data.bytesize, f.size
+
+      io = nil
+      @c.file_rename(f, testing_file1, testing_file2, ->(new_io){ io = new_io })
+      assert io
+      if Fluent.windows?
+        assert { f != io }
+      else
+        assert_equal f, io
+      end
+      assert_equal Encoding::ASCII_8BIT, io.external_encoding
+      assert io.sync
+      assert io.binmode?
+      assert_equal data.bytesize, io.size
+      assert_equal pos, io.pos
+      assert_equal '', io.read
+
+      io.rewind
+      assert_equal data, io.read
+    end
+  end
+
+  sub_test_case 'chunk with file for enqueued chunk' do
+    setup do
+      @chunk_id = gen_test_chunk_id
+      @enqueued_path = File.join(@chunkdir, "fsb.testing.q#{hex_id(@chunk_id)}.buf")
+
+      @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @d = [@d1, @d2, @d3, @d4].map { |d| d.to_json + "\n" }.join
+      File.write(@enqueued_path, @d, :mode => 'wb')
+
+      @c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, @enqueued_path, :queued, nil)
+    end
+
+    teardown do
+      if @c
+        @c.purge rescue nil
+      end
+      File.unlink(@enqueued_path) if File.exist?(@enqueued_path)
+    end
+
+    test 'can load as queued chunk (read only) with metadata' do
+      assert @c
+      assert_equal @chunk_id, @c.unique_id
+      assert_equal :queued, @c.state
+      stat = File.stat(@enqueued_path)
+      assert_equal stat.ctime.to_i, @c.created_at.to_i
+      assert_equal stat.mtime.to_i, @c.modified_at.to_i
+      assert_equal 0, @c.size
+      assert_equal @d.bytesize, @c.bytesize
+      assert_equal @d, @c.read
+
+      @c.restore_size(:text)
+      assert_equal 4, @c.size
+
+      assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
+        @c.append(["queued chunk is read only"])
+      end
+      assert_raise IOError do
+        @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
+      end
+    end
+  end
+
+  sub_test_case 'chunk with queued chunk file' do
+    setup do
+      @chunk_id = gen_test_chunk_id
+      @chunk_path = File.join(@chunkdir, "fsb.testing.q#{hex_id(@chunk_id)}.buf")
+
+      @d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
+      @d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
+      @d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
+      @d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
+      @d = [@d1, @d2, @d3, @d4].map { |d| d.to_json + "\n" }.join
+      File.write(@chunk_path, @d, :mode => 'wb')
+
+      @c = Fluent::Plugin::Buffer::FileSingleChunk.new(gen_metadata, @chunk_path, :queued, nil)
+    end
+
+    teardown do
+      if @c
+        @c.purge rescue nil
+      end
+      File.unlink(@chunk_path) if File.exist?(@chunk_path)
+    end
+
+    test 'can load as queued chunk' do
+      assert @c
+      assert_equal :queued, @c.state
+      assert_equal @chunk_id, @c.unique_id
+      assert_equal gen_metadata, @c.metadata
+      assert_equal @d.bytesize, @c.bytesize
+      assert_equal 0, @c.size
+      assert_equal @d, @c.read
+
+      assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
+        @c.append(["queued chunk is read only"])
+      end
+      assert_raise IOError do
+        @c.instance_eval{ @chunk }.write "chunk io is opened as read only"
+      end
+    end
+  end
+
+  sub_test_case 'compressed buffer' do
+    setup do
+      @src = 'text data for compressing' * 5
+      @gzipped_src = compress(@src)
+    end
+
+    test '#append with compress option writes compressed data to chunk when compress is gzip' do
+      c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
+      c.append([@src, @src], compress: :gzip)
+      c.commit
+
+      # check chunk is compressed
+      assert c.read(compressed: :gzip).size < [@src, @src].join("").size
+
+      assert_equal @src + @src, c.read
+    end
+
+    test '#open passes io object having decompressed data to a block when compress is gzip' do
+      c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
+      c.concat(@gzipped_src, @src.size)
+      c.commit
+
+      decomressed_data = c.open do |io|
+        v = io.read
+        assert_equal @src, v
+        v
+      end
+      assert_equal @src, decomressed_data
+    end
+
+    test '#open with compressed option passes io object having decompressed data to a block when compress is gzip' do
+      c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
+      c.concat(@gzipped_src, @src.size)
+      c.commit
+
+      comressed_data = c.open(compressed: :gzip) do |io|
+        v = io.read
+        assert_equal @gzipped_src, v
+        v
+      end
+      assert_equal @gzipped_src, comressed_data
+    end
+
+    test '#write_to writes decompressed data when compress is gzip' do
+      c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
+      c.concat(@gzipped_src, @src.size)
+      c.commit
+
+      assert_equal @src, c.read
+      assert_equal @gzipped_src, c.read(compressed: :gzip)
+
+      io = StringIO.new
+      c.write_to(io)
+      assert_equal @src, io.string
+    end
+
+    test '#write_to with compressed option writes compressed data when compress is gzip' do
+      c = @klass.new(gen_metadata, File.join(@chunkdir,'fsb.*.buf'), :create, nil, compress: :gzip)
+      c.concat(@gzipped_src, @src.size)
+      c.commit
+
+      assert_equal @src, c.read
+      assert_equal @gzipped_src, c.read(compressed: :gzip)
+
+      io = StringIO.new
+      c.write_to(io, compressed: :gzip)
+      assert_equal @gzipped_src, io.string
+    end
+  end
+end
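
One detail worth pulling out of the new file: gen_test_chunk_id pins the otherwise-random 128-bit chunk id to a constant so the path assertions stay deterministic. It packs (microsecond timestamp << 12 | 12 random bits) plus two fixed 32-bit randoms as four big-endian words. Run standalone, it reproduces the hex id embedded throughout the expected paths (the value below comes from the test's own comment):

require 'time'

now = Time.parse('2016-04-07 14:31:33 +0900')
u1 = ((now.to_i * 1000 * 1000 + now.usec) << 12 | 1725) # 1725 stands in for rand(0xfff)
id = [u1 >> 32, u1 & 0xffffffff, 2979763054, 438020492].pack('NNNN')

id.unpack('N*').map { |n| n.to_s(16) }.join
# => "52fde6425d7406bdb19b936e1a1ba98c", the id in names like
#    "fsb.testing.b52fde6425d7406bdb19b936e1a1ba98c.buf"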