fluentd 1.6.3 → 1.7.1

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (84) hide show
  1. checksums.yaml +4 -4
  2. data/.drone.yml +35 -0
  3. data/.github/ISSUE_TEMPLATE/bug_report.md +2 -0
  4. data/CHANGELOG.md +83 -0
  5. data/README.md +5 -1
  6. data/fluentd.gemspec +3 -2
  7. data/lib/fluent/clock.rb +4 -0
  8. data/lib/fluent/compat/output.rb +3 -3
  9. data/lib/fluent/compat/socket_util.rb +1 -1
  10. data/lib/fluent/config/element.rb +3 -3
  11. data/lib/fluent/config/literal_parser.rb +1 -1
  12. data/lib/fluent/config/section.rb +4 -1
  13. data/lib/fluent/error.rb +4 -0
  14. data/lib/fluent/event.rb +28 -24
  15. data/lib/fluent/event_router.rb +2 -1
  16. data/lib/fluent/log.rb +1 -1
  17. data/lib/fluent/msgpack_factory.rb +8 -0
  18. data/lib/fluent/plugin/bare_output.rb +4 -4
  19. data/lib/fluent/plugin/buf_file.rb +10 -1
  20. data/lib/fluent/plugin/buf_file_single.rb +219 -0
  21. data/lib/fluent/plugin/buffer.rb +62 -63
  22. data/lib/fluent/plugin/buffer/chunk.rb +21 -3
  23. data/lib/fluent/plugin/buffer/file_chunk.rb +44 -12
  24. data/lib/fluent/plugin/buffer/file_single_chunk.rb +314 -0
  25. data/lib/fluent/plugin/buffer/memory_chunk.rb +2 -1
  26. data/lib/fluent/plugin/compressable.rb +10 -6
  27. data/lib/fluent/plugin/filter_grep.rb +2 -2
  28. data/lib/fluent/plugin/formatter_csv.rb +10 -6
  29. data/lib/fluent/plugin/in_syslog.rb +10 -3
  30. data/lib/fluent/plugin/in_tail.rb +7 -2
  31. data/lib/fluent/plugin/in_tcp.rb +34 -7
  32. data/lib/fluent/plugin/multi_output.rb +4 -4
  33. data/lib/fluent/plugin/out_exec_filter.rb +1 -0
  34. data/lib/fluent/plugin/out_file.rb +13 -3
  35. data/lib/fluent/plugin/out_forward.rb +144 -588
  36. data/lib/fluent/plugin/out_forward/ack_handler.rb +161 -0
  37. data/lib/fluent/plugin/out_forward/connection_manager.rb +113 -0
  38. data/lib/fluent/plugin/out_forward/error.rb +28 -0
  39. data/lib/fluent/plugin/out_forward/failure_detector.rb +84 -0
  40. data/lib/fluent/plugin/out_forward/handshake_protocol.rb +121 -0
  41. data/lib/fluent/plugin/out_forward/load_balancer.rb +111 -0
  42. data/lib/fluent/plugin/out_forward/socket_cache.rb +138 -0
  43. data/lib/fluent/plugin/out_http.rb +231 -0
  44. data/lib/fluent/plugin/output.rb +29 -35
  45. data/lib/fluent/plugin/parser.rb +77 -0
  46. data/lib/fluent/plugin/parser_csv.rb +75 -0
  47. data/lib/fluent/plugin/parser_syslog.rb +106 -3
  48. data/lib/fluent/plugin_helper/server.rb +2 -2
  49. data/lib/fluent/plugin_helper/socket.rb +14 -1
  50. data/lib/fluent/plugin_helper/thread.rb +1 -0
  51. data/lib/fluent/root_agent.rb +1 -1
  52. data/lib/fluent/time.rb +4 -2
  53. data/lib/fluent/timezone.rb +21 -7
  54. data/lib/fluent/version.rb +1 -1
  55. data/test/command/test_fluentd.rb +1 -1
  56. data/test/command/test_plugin_generator.rb +18 -2
  57. data/test/config/test_configurable.rb +78 -40
  58. data/test/counter/test_store.rb +1 -1
  59. data/test/helper.rb +1 -0
  60. data/test/helpers/process_extenstion.rb +33 -0
  61. data/test/plugin/out_forward/test_ack_handler.rb +101 -0
  62. data/test/plugin/out_forward/test_connection_manager.rb +145 -0
  63. data/test/plugin/out_forward/test_handshake_protocol.rb +103 -0
  64. data/test/plugin/out_forward/test_load_balancer.rb +60 -0
  65. data/test/plugin/out_forward/test_socket_cache.rb +139 -0
  66. data/test/plugin/test_buf_file.rb +172 -2
  67. data/test/plugin/test_buf_file_single.rb +801 -0
  68. data/test/plugin/test_buffer.rb +4 -48
  69. data/test/plugin/test_buffer_file_chunk.rb +38 -1
  70. data/test/plugin/test_buffer_file_single_chunk.rb +621 -0
  71. data/test/plugin/test_buffer_memory_chunk.rb +1 -0
  72. data/test/plugin/test_formatter_csv.rb +16 -0
  73. data/test/plugin/test_in_syslog.rb +56 -6
  74. data/test/plugin/test_in_tail.rb +1 -1
  75. data/test/plugin/test_in_tcp.rb +25 -0
  76. data/test/plugin/test_out_forward.rb +150 -201
  77. data/test/plugin/test_out_http.rb +352 -0
  78. data/test/plugin/test_output_as_buffered.rb +27 -24
  79. data/test/plugin/test_parser.rb +40 -0
  80. data/test/plugin/test_parser_csv.rb +83 -0
  81. data/test/plugin/test_parser_syslog.rb +118 -19
  82. data/test/plugin_helper/test_record_accessor.rb +1 -1
  83. data/test/test_time_formatter.rb +140 -121
  84. metadata +35 -6
@@ -0,0 +1,139 @@
1
+ require_relative '../../helper'
2
+
3
+ require 'fluent/plugin/out_forward/socket_cache'
4
+ require 'timecop'
5
+
6
+ class SocketCacheTest < Test::Unit::TestCase
7
+ sub_test_case 'checkout_or' do
8
+ test 'when gived key does not exist' do
9
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
10
+ sock = mock!.open { 'socket' }.subject
11
+ assert_equal('socket', c.checkout_or('key') { sock.open })
12
+ end
13
+
14
+ test 'when given key exists' do
15
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
16
+ socket = 'socket'
17
+ assert_equal(socket, c.checkout_or('key') { socket })
18
+ c.checkin(socket)
19
+
20
+ sock = dont_allow(mock!).open
21
+ assert_equal(socket, c.checkout_or('key') { sock.open })
22
+ end
23
+
24
+ test 'when given key exists but used by other' do
25
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
26
+ assert_equal(@sock, c.checkout_or('key') { @sock })
27
+
28
+ new_sock = 'new sock'
29
+ sock = mock!.open { new_sock }.subject
30
+ assert_equal(new_sock, c.checkout_or('key') { sock.open })
31
+ end
32
+
33
+ test "when given key's value was expired" do
34
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(0, $log)
35
+ assert_equal(@sock, c.checkout_or('key') { @sock })
36
+
37
+ new_sock = 'new sock'
38
+ sock = mock!.open { new_sock }.subject
39
+ assert_equal(new_sock, c.checkout_or('key') { sock.open })
40
+ end
41
+ end
42
+
43
+ sub_test_case 'checkin' do
44
+ test 'when value exists' do
45
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
46
+ socket = 'socket'
47
+ c.checkout_or('key') { socket }
48
+ c.checkin(socket)
49
+
50
+ assert_equal(socket, c.instance_variable_get(:@available_sockets)['key'].first.sock)
51
+ assert_equal(1, c.instance_variable_get(:@available_sockets)['key'].size)
52
+ assert_equal(0, c.instance_variable_get(:@inflight_sockets).size)
53
+ end
54
+
55
+ test 'when value does not exist' do
56
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
57
+ c.checkout_or('key') { 'sock' }
58
+ c.checkin('other sock')
59
+
60
+ assert_equal(0, c.instance_variable_get(:@available_sockets)['key'].size)
61
+ assert_equal(1, c.instance_variable_get(:@inflight_sockets).size)
62
+ end
63
+ end
64
+
65
+ test 'revoke' do
66
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
67
+ socket = 'socket'
68
+ c.checkout_or('key') { socket }
69
+ c.revoke(socket)
70
+
71
+ assert_equal(1, c.instance_variable_get(:@inactive_sockets).size)
72
+ assert_equal(0, c.instance_variable_get(:@inflight_sockets).size)
73
+ assert_equal(0, c.instance_variable_get(:@available_sockets)['key'].size)
74
+
75
+ sock = mock!.open { 1 }.subject
76
+ assert_equal(1, c.checkout_or('key') { sock.open })
77
+ end
78
+
79
+ sub_test_case 'clear' do
80
+ test 'when value is in available_sockets' do
81
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
82
+ m = mock!.close { 'closed' }.subject
83
+ m2 = mock!.close { 'closed' }.subject
84
+ m3 = mock!.close { 'closed' }.subject
85
+ c.checkout_or('key') { m }
86
+ c.revoke(m)
87
+ c.checkout_or('key') { m2 }
88
+ c.checkin(m2)
89
+ c.checkout_or('key2') { m3 }
90
+
91
+ assert_equal(1, c.instance_variable_get(:@inflight_sockets).size)
92
+ assert_equal(1, c.instance_variable_get(:@available_sockets)['key'].size)
93
+ assert_equal(1, c.instance_variable_get(:@inactive_sockets).size)
94
+
95
+ c.clear
96
+ assert_equal(0, c.instance_variable_get(:@inflight_sockets).size)
97
+ assert_equal(0, c.instance_variable_get(:@available_sockets)['key'].size)
98
+ assert_equal(0, c.instance_variable_get(:@inactive_sockets).size)
99
+ end
100
+ end
101
+
102
+ sub_test_case 'purge_obsolete_socks' do
103
+ test 'delete key in inactive_socks' do
104
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
105
+ sock = mock!.close { 'closed' }.subject
106
+ c.checkout_or('key') { sock }
107
+ c.revoke(sock)
108
+ assert_false(c.instance_variable_get(:@inactive_sockets).empty?)
109
+
110
+ c.purge_obsolete_socks
111
+ assert_true(c.instance_variable_get(:@inactive_sockets).empty?)
112
+ end
113
+
114
+ test 'move key from available_sockets to inactive_sockets' do
115
+ Timecop.freeze(Time.parse('2016-04-13 14:00:00 +0900'))
116
+
117
+ c = Fluent::Plugin::ForwardOutput::SocketCache.new(10, $log)
118
+ sock = mock!.close { 'closed' }.subject
119
+ sock2 = dont_allow(mock!).close
120
+ stub(sock).inspect
121
+ stub(sock2).inspect
122
+
123
+ c.checkout_or('key') { sock }
124
+ c.checkin(sock)
125
+
126
+ # wait timeout
127
+ Timecop.freeze(Time.parse('2016-04-13 14:20:00 +0900'))
128
+ c.checkout_or('key') { sock2 }
129
+
130
+ assert_equal(1, c.instance_variable_get(:@inflight_sockets).size)
131
+ assert_equal(sock2, c.instance_variable_get(:@inflight_sockets).values.first.sock)
132
+
133
+ c.purge_obsolete_socks
134
+
135
+ assert_equal(1, c.instance_variable_get(:@inflight_sockets).size)
136
+ assert_equal(sock2, c.instance_variable_get(:@inflight_sockets).values.first.sock)
137
+ end
138
+ end
139
+ end
@@ -27,7 +27,7 @@ class FileBufferTest < Test::Unit::TestCase
27
27
  Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
28
28
  end
29
29
 
30
- def write_metadata(path, chunk_id, metadata, size, ctime, mtime)
30
+ def write_metadata_old(path, chunk_id, metadata, size, ctime, mtime)
31
31
  metadata = {
32
32
  timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
33
33
  id: chunk_id,
@@ -40,6 +40,22 @@ class FileBufferTest < Test::Unit::TestCase
40
40
  end
41
41
  end
42
42
 
43
+ def write_metadata(path, chunk_id, metadata, size, ctime, mtime)
44
+ metadata = {
45
+ timekey: metadata.timekey, tag: metadata.tag, variables: metadata.variables,
46
+ id: chunk_id,
47
+ s: size,
48
+ c: ctime,
49
+ m: mtime,
50
+ }
51
+
52
+ data = metadata.to_msgpack
53
+ size = [data.size].pack('N')
54
+ File.open(path, 'wb') do |f|
55
+ f.write(Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER + size + data)
56
+ end
57
+ end
58
+
43
59
  sub_test_case 'non configured buffer plugin instance' do
44
60
  setup do
45
61
  Fluent::Test.setup
@@ -521,6 +537,60 @@ class FileBufferTest < Test::Unit::TestCase
521
537
  end
522
538
  end
523
539
 
540
+ sub_test_case 'there are some existing file chunks with placeholders path' do
541
+ setup do
542
+ @bufdir = File.expand_path('../../tmp/buffer_${test}_file', __FILE__)
543
+ FileUtils.rm_rf(@bufdir)
544
+ FileUtils.mkdir_p(@bufdir)
545
+
546
+ @c1id = Fluent::UniqueId.generate
547
+ p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
548
+ File.open(p1, 'wb') do |f|
549
+ f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
550
+ end
551
+ write_metadata(
552
+ p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
553
+ 1, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
554
+ )
555
+
556
+ @c2id = Fluent::UniqueId.generate
557
+ p2 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c2id)}.log")
558
+ File.open(p2, 'wb') do |f|
559
+ f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
560
+ end
561
+ write_metadata(
562
+ p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
563
+ 1, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
564
+ )
565
+
566
+ @bufpath = File.join(@bufdir, 'etest.*.log')
567
+
568
+ Fluent::Test.setup
569
+ @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
570
+ @p = Fluent::Plugin::FileBuffer.new
571
+ @p.owner = @d
572
+ @p.configure(config_element('buffer', '', {'path' => @bufpath}))
573
+ @p.start
574
+ end
575
+
576
+ teardown do
577
+ if @p
578
+ @p.stop unless @p.stopped?
579
+ @p.before_shutdown unless @p.before_shutdown?
580
+ @p.shutdown unless @p.shutdown?
581
+ @p.after_shutdown unless @p.after_shutdown?
582
+ @p.close unless @p.closed?
583
+ @p.terminate unless @p.terminated?
584
+ end
585
+ FileUtils.rm_rf(@bufdir)
586
+ end
587
+
588
+ test '#resume returns staged/queued chunks with metadata' do
589
+ assert_equal 1, @p.stage.size
590
+ assert_equal 1, @p.queue.size
591
+ end
592
+ end
593
+
524
594
  sub_test_case 'there are some existing file chunks, both in specified path and per-worker directory under specified path, configured as multi workers' do
525
595
  setup do
526
596
  @bufdir = File.expand_path('../../tmp/buffer_file/path', __FILE__)
@@ -683,7 +753,107 @@ class FileBufferTest < Test::Unit::TestCase
683
753
  end
684
754
  end
685
755
 
686
- sub_test_case 'there are some existing file chunks without metadata file' do
756
+ sub_test_case 'there are some existing file chunks with old format metadta' do
757
+ setup do
758
+ @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
759
+ FileUtils.mkdir_p @bufdir unless File.exist?(@bufdir)
760
+
761
+ @c1id = Fluent::UniqueId.generate
762
+ p1 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c1id)}.log")
763
+ File.open(p1, 'wb') do |f|
764
+ f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
765
+ f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
766
+ f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
767
+ f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
768
+ end
769
+ write_metadata_old(
770
+ p1 + '.meta', @c1id, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
771
+ 4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
772
+ )
773
+
774
+ @c2id = Fluent::UniqueId.generate
775
+ p2 = File.join(@bufdir, "etest.q#{Fluent::UniqueId.hex(@c2id)}.log")
776
+ File.open(p2, 'wb') do |f|
777
+ f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
778
+ f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
779
+ f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
780
+ end
781
+ write_metadata_old(
782
+ p2 + '.meta', @c2id, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
783
+ 3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
784
+ )
785
+
786
+ @c3id = Fluent::UniqueId.generate
787
+ p3 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c3id)}.log")
788
+ File.open(p3, 'wb') do |f|
789
+ f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
790
+ f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
791
+ f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
792
+ f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
793
+ end
794
+ write_metadata_old(
795
+ p3 + '.meta', @c3id, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
796
+ 4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
797
+ )
798
+
799
+ @c4id = Fluent::UniqueId.generate
800
+ p4 = File.join(@bufdir, "etest.b#{Fluent::UniqueId.hex(@c4id)}.log")
801
+ File.open(p4, 'wb') do |f|
802
+ f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
803
+ f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
804
+ f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
805
+ end
806
+ write_metadata_old(
807
+ p4 + '.meta', @c4id, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
808
+ 3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
809
+ )
810
+
811
+ @bufpath = File.join(@bufdir, 'etest.*.log')
812
+
813
+ Fluent::Test.setup
814
+ @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
815
+ @p = Fluent::Plugin::FileBuffer.new
816
+ @p.owner = @d
817
+ @p.configure(config_element('buffer', '', {'path' => @bufpath}))
818
+ @p.start
819
+ end
820
+
821
+ teardown do
822
+ if @p
823
+ @p.stop unless @p.stopped?
824
+ @p.before_shutdown unless @p.before_shutdown?
825
+ @p.shutdown unless @p.shutdown?
826
+ @p.after_shutdown unless @p.after_shutdown?
827
+ @p.close unless @p.closed?
828
+ @p.terminate unless @p.terminated?
829
+ end
830
+ if @bufdir
831
+ Dir.glob(File.join(@bufdir, '*')).each do |path|
832
+ next if ['.', '..'].include?(File.basename(path))
833
+ File.delete(path)
834
+ end
835
+ end
836
+ end
837
+
838
+ test '#resume returns staged/queued chunks with metadata' do
839
+ assert_equal 2, @p.stage.size
840
+ assert_equal 2, @p.queue.size
841
+
842
+ stage = @p.stage
843
+
844
+ m3 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
845
+ assert_equal @c3id, stage[m3].unique_id
846
+ assert_equal 4, stage[m3].size
847
+ assert_equal :staged, stage[m3].state
848
+
849
+ m4 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
850
+ assert_equal @c4id, stage[m4].unique_id
851
+ assert_equal 3, stage[m4].size
852
+ assert_equal :staged, stage[m4].state
853
+ end
854
+ end
855
+
856
+ sub_test_case 'there are some existing file chunks with old format metadata file' do
687
857
  setup do
688
858
  @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
689
859
 
@@ -0,0 +1,801 @@
1
+ require_relative '../helper'
2
+ require 'fluent/plugin/buf_file_single'
3
+ require 'fluent/plugin/output'
4
+ require 'fluent/unique_id'
5
+ require 'fluent/system_config'
6
+ require 'fluent/env'
7
+ require 'fluent/test/driver/output'
8
+
9
+ require 'msgpack'
10
+
11
+ module FluentPluginFileSingleBufferTest
12
+ class DummyOutputPlugin < Fluent::Plugin::Output
13
+ Fluent::Plugin.register_output('buf_file_single_test', self)
14
+ config_section :buffer do
15
+ config_set_default :@type, 'file_single'
16
+ end
17
+ def multi_workers_ready?
18
+ true
19
+ end
20
+ def write(chunk)
21
+ # drop
22
+ end
23
+ end
24
+
25
+ class DummyOutputMPPlugin < Fluent::Plugin::Output
26
+ Fluent::Plugin.register_output('buf_file_single_mp_test', self)
27
+ config_section :buffer do
28
+ config_set_default :@type, 'file_single'
29
+ end
30
+ def formatted_to_msgpack_binary?
31
+ true
32
+ end
33
+ def multi_workers_ready?
34
+ true
35
+ end
36
+ def write(chunk)
37
+ # drop
38
+ end
39
+ end
40
+ end
41
+
42
+ class FileSingleBufferTest < Test::Unit::TestCase
43
+ def metadata(timekey: nil, tag: 'testing', variables: nil)
44
+ Fluent::Plugin::Buffer::Metadata.new(timekey, tag, variables)
45
+ end
46
+
47
+ PATH = File.expand_path('../../tmp/buffer_file_single_dir', __FILE__)
48
+ TAG_CONF = %[
49
+ <buffer tag>
50
+ @type file_single
51
+ path #{PATH}
52
+ </buffer>
53
+ ]
54
+ FIELD_CONF = %[
55
+ <buffer k>
56
+ @type file_single
57
+ path #{PATH}
58
+ </buffer>
59
+ ]
60
+
61
+ setup do
62
+ Fluent::Test.setup
63
+
64
+ @d = nil
65
+ @bufdir = PATH
66
+ FileUtils.rm_rf(@bufdir) rescue nil
67
+ FileUtils.mkdir_p(@bufdir)
68
+ end
69
+
70
+ teardown do
71
+ FileUtils.rm_rf(@bufdir) rescue nil
72
+ end
73
+
74
+ def create_driver(conf = TAG_CONF, klass = FluentPluginFileSingleBufferTest::DummyOutputPlugin)
75
+ Fluent::Test::Driver::Output.new(klass).configure(conf)
76
+ end
77
+
78
+ sub_test_case 'configuration' do
79
+ test 'path has "fsb" prefix and "buf" suffix by default' do
80
+ @d = create_driver
81
+ p = @d.instance.buffer
82
+ assert_equal File.join(@bufdir, 'fsb.*.buf'), p.path
83
+ end
84
+
85
+ data('text based chunk' => [FluentPluginFileSingleBufferTest::DummyOutputPlugin, :text],
86
+ 'msgpack based chunk' => [FluentPluginFileSingleBufferTest::DummyOutputMPPlugin, :msgpack])
87
+ test 'detect chunk_format' do |param|
88
+ klass, expected = param
89
+ @d = create_driver(TAG_CONF, klass)
90
+ p = @d.instance.buffer
91
+ assert_equal expected, p.chunk_format
92
+ end
93
+
94
+ test '"prefix.*.suffix" path will be replaced with default' do
95
+ @d = create_driver(%[
96
+ <buffer tag>
97
+ @type file_single
98
+ path #{@bufdir}/foo.*.bar
99
+ </buffer>
100
+ ])
101
+ p = @d.instance.buffer
102
+ assert_equal File.join(@bufdir, 'fsb.*.buf'), p.path
103
+ end
104
+ end
105
+
106
+ sub_test_case 'buffer configurations and workers' do
107
+ setup do
108
+ @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
109
+ end
110
+
111
+ test 'enables multi worker configuration with unexisting directory path' do
112
+ FileUtils.rm_rf(@bufdir)
113
+ buf_conf = config_element('buffer', '', {'path' => @bufdir})
114
+ assert_nothing_raised do
115
+ Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
116
+ @d.configure(config_element('ROOT', '', {}, [buf_conf]))
117
+ end
118
+ end
119
+ end
120
+
121
+ test 'enables multi worker configuration with existing directory path' do
122
+ FileUtils.mkdir_p @bufdir
123
+ buf_conf = config_element('buffer', '', {'path' => @bufdir})
124
+ assert_nothing_raised do
125
+ Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
126
+ @d.configure(config_element('ROOT', '', {}, [buf_conf]))
127
+ end
128
+ end
129
+ end
130
+
131
+ test 'enables multi worker configuration with root dir' do
132
+ buf_conf = config_element('buffer', '')
133
+ assert_nothing_raised do
134
+ Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
135
+ @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
136
+ end
137
+ end
138
+ end
139
+ end
140
+
141
+ test 'raise config error when using same file path' do
142
+ d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
143
+ d2 = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
144
+ Fluent::SystemConfig.overwrite_system_config({}) do
145
+ d.configure(config_element('ROOT', '', {}, [config_element('buffer', '', { 'path' => File.join(PATH, 'foo.*.bar') })]))
146
+ end
147
+
148
+ any_instance_of(Fluent::Plugin::FileSingleBuffer) do |klass|
149
+ stub(klass).called_in_test? { false }
150
+ end
151
+
152
+ err = assert_raise(Fluent::ConfigError) do
153
+ Fluent::SystemConfig.overwrite_system_config({}) do
154
+ d2.configure(config_element('ROOT', '', {}, [config_element('buffer', '', { 'path' => PATH })]))
155
+ end
156
+ end
157
+ assert_match(/plugin already uses same buffer path/, err.message)
158
+ end
159
+
160
+ sub_test_case 'buffer plugin configured only with path' do
161
+ setup do
162
+ @bufpath = File.join(@bufdir, 'testbuf.*.buf')
163
+ FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
164
+
165
+ @d = create_driver
166
+ @p = @d.instance.buffer
167
+ end
168
+
169
+ teardown do
170
+ if @p
171
+ @p.stop unless @p.stopped?
172
+ @p.before_shutdown unless @p.before_shutdown?
173
+ @p.shutdown unless @p.shutdown?
174
+ @p.after_shutdown unless @p.after_shutdown?
175
+ @p.close unless @p.closed?
176
+ @p.terminate unless @p.terminated?
177
+ end
178
+ end
179
+
180
+ test 'this is persistent plugin' do
181
+ assert @p.persistent?
182
+ end
183
+
184
+ test '#start creates directory for buffer chunks' do
185
+ @d = create_driver
186
+ @p = @d.instance.buffer
187
+
188
+ FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
189
+ assert !File.exist?(@bufdir)
190
+
191
+ @p.start
192
+ assert File.exist?(@bufdir)
193
+ assert { File.stat(@bufdir).mode.to_s(8).end_with?('755') }
194
+ end
195
+
196
+ test '#start creates directory for buffer chunks with specified permission' do
197
+ omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
198
+
199
+ @d = create_driver(%[
200
+ <buffer tag>
201
+ @type file_single
202
+ path #{PATH}
203
+ dir_permission 700
204
+ </buffer>
205
+ ])
206
+ @p = @d.instance.buffer
207
+
208
+ FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
209
+ assert !File.exist?(@bufdir)
210
+
211
+ @p.start
212
+ assert File.exist?(@bufdir)
213
+ assert { File.stat(@bufdir).mode.to_s(8).end_with?('700') }
214
+ end
215
+
216
+ test '#start creates directory for buffer chunks with specified permission via system config' do
217
+ omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
218
+
219
+ sysconf = {'dir_permission' => '700'}
220
+ Fluent::SystemConfig.overwrite_system_config(sysconf) do
221
+ @d = create_driver
222
+ @p = @d.instance.buffer
223
+
224
+ FileUtils.rm_r @bufdir if File.exist?(@bufdir)
225
+ assert !File.exist?(@bufdir)
226
+
227
+ @p.start
228
+ assert File.exist?(@bufdir)
229
+ assert { File.stat(@bufdir).mode.to_s(8).end_with?('700') }
230
+ end
231
+ end
232
+
233
+ test '#generate_chunk generates blank file chunk on path with unique_id and tag' do
234
+ FileUtils.mkdir_p(@bufdir) unless File.exist?(@bufdir)
235
+
236
+ m1 = metadata()
237
+ c1 = @p.generate_chunk(m1)
238
+ assert c1.is_a? Fluent::Plugin::Buffer::FileSingleChunk
239
+ assert_equal m1, c1.metadata
240
+ assert c1.empty?
241
+ assert_equal :unstaged, c1.state
242
+ assert_equal Fluent::Plugin::Buffer::FileSingleChunk::FILE_PERMISSION, c1.permission
243
+ assert_equal File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(c1.unique_id)}.buf"), c1.path
244
+ assert{ File.stat(c1.path).mode.to_s(8).end_with?('644') }
245
+
246
+ c1.purge
247
+ end
248
+
249
+ test '#generate_chunk generates blank file chunk on path with unique_id and field key' do
250
+ FileUtils.mkdir_p(@bufdir) unless File.exist?(@bufdir)
251
+
252
+ @d = create_driver(FIELD_CONF)
253
+ @p = @d.instance.buffer
254
+
255
+ m1 = metadata(tag: nil, variables: {:k => 'foo_bar'})
256
+ c1 = @p.generate_chunk(m1)
257
+ assert c1.is_a? Fluent::Plugin::Buffer::FileSingleChunk
258
+ assert_equal m1, c1.metadata
259
+ assert c1.empty?
260
+ assert_equal :unstaged, c1.state
261
+ assert_equal Fluent::Plugin::Buffer::FileSingleChunk::FILE_PERMISSION, c1.permission
262
+ assert_equal File.join(@bufdir, "fsb.foo_bar.b#{Fluent::UniqueId.hex(c1.unique_id)}.buf"), c1.path
263
+
264
+ c1.purge
265
+ end
266
+
267
+ test '#generate_chunk generates blank file chunk with specified permission' do
268
+ omit "NTFS doesn't support UNIX like permissions" if Fluent.windows?
269
+
270
+ @d = create_driver(%[
271
+ <buffer tag>
272
+ @type file_single
273
+ path #{PATH}
274
+ file_permission 600
275
+ </buffer>
276
+ ])
277
+ @p = @d.instance.buffer
278
+
279
+ FileUtils.rm_r @bufdir if File.exist?(@bufdir)
280
+ assert !File.exist?(@bufdir)
281
+
282
+ @p.start
283
+
284
+ m = metadata()
285
+ c = @p.generate_chunk(m)
286
+ assert c.is_a? Fluent::Plugin::Buffer::FileSingleChunk
287
+ assert_equal m, c.metadata
288
+ assert c.empty?
289
+ assert_equal :unstaged, c.state
290
+ assert_equal 0600, c.permission
291
+ assert_equal File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(c.unique_id)}.buf"), c.path
292
+ assert{ File.stat(c.path).mode.to_s(8).end_with?('600') }
293
+
294
+ c.purge
295
+ end
296
+ end
297
+
298
+ sub_test_case 'configured with system root directory and plugin @id' do
299
+ setup do
300
+ @root_dir = File.expand_path('../../tmp/buffer_file_single_root', __FILE__)
301
+ FileUtils.rm_rf(@root_dir)
302
+
303
+ @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
304
+ @p = nil
305
+ end
306
+
307
+ teardown do
308
+ if @p
309
+ @p.stop unless @p.stopped?
310
+ @p.before_shutdown unless @p.before_shutdown?
311
+ @p.shutdown unless @p.shutdown?
312
+ @p.after_shutdown unless @p.after_shutdown?
313
+ @p.close unless @p.closed?
314
+ @p.terminate unless @p.terminated?
315
+ end
316
+ end
317
+
318
+ test '#start creates directory for buffer chunks' do
319
+ Fluent::SystemConfig.overwrite_system_config('root_dir' => @root_dir) do
320
+ @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [config_element('buffer', '', {})]))
321
+ @p = @d.buffer
322
+ end
323
+
324
+ expected_buffer_path = File.join(@root_dir, 'worker0', 'dummy_output_with_buf', 'buffer', "fsb.*.buf")
325
+ expected_buffer_dir = File.dirname(expected_buffer_path)
326
+ assert_equal expected_buffer_path, @d.buffer.path
327
+ assert_false Dir.exist?(expected_buffer_dir)
328
+
329
+ @p.start
330
+ assert Dir.exist?(expected_buffer_dir)
331
+ end
332
+ end
333
+
334
+ sub_test_case 'buffer plugin configuration errors' do
335
+ data('tag and key' => 'tag,key',
336
+ 'multiple keys' => 'key1,key2')
337
+ test 'invalid chunk keys' do |param|
338
+ assert_raise Fluent::ConfigError do
339
+ @d = create_driver(%[
340
+ <buffer #{param}>
341
+ @type file_single
342
+ path #{PATH}
343
+ calc_num_records false
344
+ </buffer>
345
+ ])
346
+ end
347
+ end
348
+
349
+ test 'path is not specified' do
350
+ assert_raise Fluent::ConfigError do
351
+ @d = create_driver(%[
352
+ <buffer tag>
353
+ @type file_single
354
+ </buffer>
355
+ ])
356
+ end
357
+ end
358
+ end
359
+
360
+ sub_test_case 'there are no existing file chunks' do
361
+ setup do
362
+ FileUtils.rm_rf(@bufdir) if File.exist?(@bufdir)
363
+
364
+ @d = create_driver
365
+ @p = @d.instance.buffer
366
+ @p.start
367
+ end
368
+ teardown do
369
+ if @p
370
+ @p.stop unless @p.stopped?
371
+ @p.before_shutdown unless @p.before_shutdown?
372
+ @p.shutdown unless @p.shutdown?
373
+ @p.after_shutdown unless @p.after_shutdown?
374
+ @p.close unless @p.closed?
375
+ @p.terminate unless @p.terminated?
376
+ end
377
+ if @bufdir
378
+ Dir.glob(File.join(@bufdir, '*')).each do |path|
379
+ next if ['.', '..'].include?(File.basename(path))
380
+ File.delete(path)
381
+ end
382
+ end
383
+ end
384
+
385
+ test '#resume returns empty buffer state' do
386
+ ary = @p.resume
387
+ assert_equal({}, ary[0])
388
+ assert_equal([], ary[1])
389
+ end
390
+ end
391
+
392
# Chunk files already exist on disk before the buffer starts. File names
# encode identity and state as "fsb.<tag>.<b|q><hex unique_id>.buf":
# 'q' files must be resumed into the queue, 'b' files into the stage.
# Records are one JSON array per line ([tag, time, record]).
sub_test_case 'there are some existing file chunks' do
  setup do
    # Queued chunk for tag 'testing' with 4 records; mtime backdated the
    # furthest so it should come first in the resumed queue.
    @c1id = Fluent::UniqueId.generate
    p1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c1id)}.buf")
    File.open(p1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    t = Time.now - 50000
    File.utime(t, t, p1)

    # Second queued chunk for 'testing' with 3 records; newer mtime than p1.
    @c2id = Fluent::UniqueId.generate
    p2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c2id)}.buf")
    File.open(p2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    t = Time.now - 40000
    File.utime(t, t, p2)

    # Staged chunk ('b') for tag 'testing' with 4 records.
    @c3id = Fluent::UniqueId.generate
    p3 = File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(@c3id)}.buf")
    File.open(p3, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end

    # Staged chunk for a different tag ('foo') with 3 records.
    @c4id = Fluent::UniqueId.generate
    p4 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(@c4id)}.buf")
    File.open(p4, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
  end

  teardown do
    # Drive the plugin through its full shutdown sequence, skipping any
    # phase that has already run.
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    # Remove all chunk files left in the shared buffer directory.
    if @bufdir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        # NOTE(review): Dir.glob('*') never yields '.' or '..', so this
        # guard looks redundant; kept as-is.
        next if ['.', '..'].include?(File.basename(path))
        File.delete(path)
      end
    end
  end

  test '#resume returns staged/queued chunks with metadata' do
    @d = create_driver
    @p = @d.instance.buffer
    @p.start

    # Two 'b' files -> stage, two 'q' files -> queue.
    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size

    stage = @p.stage

    # Stage is keyed by metadata built via the `metadata` helper.
    m3 = metadata()
    assert_equal @c3id, stage[m3].unique_id
    assert_equal 4, stage[m3].size
    assert_equal :staged, stage[m3].state

    m4 = metadata(tag: 'foo')
    assert_equal @c4id, stage[m4].unique_id
    assert_equal 3, stage[m4].size
    assert_equal :staged, stage[m4].state
  end

  test '#resume returns queued chunks ordered by last modified time (FIFO)' do
    @d = create_driver
    @p = @d.instance.buffer
    @p.start

    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size

    queue = @p.queue

    # p1 was backdated 50000s, p2 only 40000s: older chunk must come first.
    assert{ queue[0].modified_at <= queue[1].modified_at }

    assert_equal @c1id, queue[0].unique_id
    assert_equal :queued, queue[0].state
    assert_equal 'testing', queue[0].metadata.tag
    assert_nil queue[0].metadata.variables
    assert_equal 4, queue[0].size

    assert_equal @c2id, queue[1].unique_id
    assert_equal :queued, queue[1].state
    assert_equal 'testing', queue[1].metadata.tag
    assert_nil queue[1].metadata.variables
    assert_equal 3, queue[1].size
  end

  test '#resume returns staged/queued chunks but skips size calculation by calc_num_records' do
    # With calc_num_records disabled, resumed chunks report size 0 instead
    # of counting records from the file contents.
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{PATH}
        calc_num_records false
      </buffer>
    ])
    @p = @d.instance.buffer
    @p.start

    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size

    stage = @p.stage

    m3 = metadata()
    assert_equal @c3id, stage[m3].unique_id
    assert_equal 0, stage[m3].size
    assert_equal :staged, stage[m3].state

    m4 = metadata(tag: 'foo')
    assert_equal @c4id, stage[m4].unique_id
    assert_equal 0, stage[m4].size
    assert_equal :staged, stage[m4].state
  end
end
523
+
524
# The buffer `path` contains a literal "${test}" placeholder string; chunk
# files written under that directory must still be discovered on resume.
sub_test_case 'there are some existing file chunks with placeholders path' do
  setup do
    # Directory name intentionally contains a literal "${test}" placeholder.
    @buf_ph_dir = File.expand_path('../../tmp/buffer_${test}_file_single_dir', __FILE__)
    FileUtils.rm_rf(@buf_ph_dir)
    FileUtils.mkdir_p(@buf_ph_dir)

    # One queued ('q') chunk with a backdated mtime...
    @c1id = Fluent::UniqueId.generate
    p1 = File.join(@buf_ph_dir, "fsb.testing.q#{Fluent::UniqueId.hex(@c1id)}.buf")
    File.open(p1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
    t = Time.now - 50000
    File.utime(t, t, p1)

    # ...and one staged ('b') chunk.
    @c2id = Fluent::UniqueId.generate
    p2 = File.join(@buf_ph_dir, "fsb.testing.b#{Fluent::UniqueId.hex(@c2id)}.buf")
    File.open(p2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end
  end

  teardown do
    # Full plugin shutdown sequence, then remove the placeholder directory.
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    FileUtils.rm_rf(@buf_ph_dir)
  end

  test '#resume returns staged/queued chunks with metadata' do
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{@buf_ph_dir}
      </buffer>
    ])
    @p = @d.instance.buffer
    @p.start

    # One 'b' file -> stage, one 'q' file -> queue.
    assert_equal 1, @p.stage.size
    assert_equal 1, @p.queue.size
  end
end
571
+
572
# Same resume scenario as the JSON case above, but the pre-created chunk
# files are written in msgpack format and the buffer is configured with
# `chunk_format msgpack`.
sub_test_case 'there are some existing msgpack file chunks' do
  setup do
    # NOTE(review): a single packer instance is shared across all four
    # files; presumably Packer#full_pack drains its internal buffer after
    # each use — confirm against msgpack-ruby if chunk contents ever look
    # duplicated across files.
    packer = Fluent::MessagePackFactory.packer
    # Queued chunk for 'testing' with 4 records, mtime backdated furthest.
    @c1id = Fluent::UniqueId.generate
    p1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c1id)}.buf")
    File.open(p1, 'wb') do |f|
      packer.write(["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}])
      packer.write(["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}])
      packer.write(["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}])
      packer.write(["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}])
      f.write packer.full_pack
    end
    t = Time.now - 50000
    File.utime(t, t, p1)

    # Second queued chunk for 'testing' with 3 records, newer mtime.
    @c2id = Fluent::UniqueId.generate
    p2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@c2id)}.buf")
    File.open(p2, 'wb') do |f|
      packer.write(["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}])
      packer.write(["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}])
      packer.write(["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}])
      f.write packer.full_pack
    end
    t = Time.now - 40000
    File.utime(t, t, p2)

    # Staged chunk ('b') for 'testing' with 4 records.
    @c3id = Fluent::UniqueId.generate
    p3 = File.join(@bufdir, "fsb.testing.b#{Fluent::UniqueId.hex(@c3id)}.buf")
    File.open(p3, 'wb') do |f|
      packer.write(["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}])
      packer.write(["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}])
      packer.write(["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}])
      packer.write(["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}])
      f.write packer.full_pack
    end

    # Staged chunk for tag 'foo' with 3 records.
    @c4id = Fluent::UniqueId.generate
    p4 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(@c4id)}.buf")
    File.open(p4, 'wb') do |f|
      packer.write(["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}])
      packer.write(["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}])
      packer.write(["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}])
      f.write packer.full_pack
    end
  end

  teardown do
    # Full plugin shutdown sequence, skipping phases that already ran.
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    # Remove all chunk files left in the shared buffer directory.
    if @bufdir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        # NOTE(review): Dir.glob('*') never yields '.' or '..'; guard is
        # redundant but kept as-is.
        next if ['.', '..'].include?(File.basename(path))
        File.delete(path)
      end
    end
  end

  test '#resume returns staged/queued chunks with msgpack format' do
    @d = create_driver(%[
      <buffer tag>
        @type file_single
        path #{PATH}
        chunk_format msgpack
      </buffer>
    ])
    @p = @d.instance.buffer
    @p.start

    # Two 'b' files -> stage, two 'q' files -> queue, as in the JSON case.
    assert_equal 2, @p.stage.size
    assert_equal 2, @p.queue.size

    stage = @p.stage

    m3 = metadata()
    assert_equal @c3id, stage[m3].unique_id
    assert_equal 4, stage[m3].size
    assert_equal :staged, stage[m3].state

    m4 = metadata(tag: 'foo')
    assert_equal @c4id, stage[m4].unique_id
    assert_equal 3, stage[m4].size
    assert_equal :staged, stage[m4].state
  end
end
662
+
663
# Multi-worker resume: chunk files may exist both directly under the
# configured `path` and under per-worker subdirectories ("worker0",
# "worker1"). Worker 0 must additionally adopt the chunks found directly in
# the root path; other workers resume only their own directory.
#
# Fixes over the previous version:
# - SERVERENGINE_WORKER_ID is saved and restored instead of being leaked
#   into every subsequent test in the process.
# - teardown now removes the worker directories and root chunk files;
#   sibling sub_test_cases glob @bufdir and call File.delete on each entry,
#   which raises if a "worker0"/"worker1" directory is left behind.
sub_test_case 'there are some existing file chunks, both in specified path and per-worker directory under specified path, configured as multi workers' do
  setup do
    @worker0_dir = File.join(@bufdir, "worker0")
    @worker1_dir = File.join(@bufdir, "worker1")
    FileUtils.rm_rf(@bufdir)
    FileUtils.mkdir_p(@worker0_dir)
    FileUtils.mkdir_p(@worker1_dir)

    # Two queued chunks directly under @bufdir: only worker 0 should
    # resume these.
    @bufdir_chunk_1 = Fluent::UniqueId.generate
    bc1 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@bufdir_chunk_1)}.buf")
    File.open(bc1, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end

    @bufdir_chunk_2 = Fluent::UniqueId.generate
    bc2 = File.join(@bufdir, "fsb.testing.q#{Fluent::UniqueId.hex(@bufdir_chunk_2)}.buf")
    File.open(bc2, 'wb') do |f|
      f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
    end

    # One queued chunk inside each worker directory (same unique_id, 3 records).
    @worker_dir_chunk_1 = Fluent::UniqueId.generate
    wc0_1 = File.join(@worker0_dir, "fsb.testing.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.buf")
    wc1_1 = File.join(@worker1_dir, "fsb.testing.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.buf")
    [wc0_1, wc1_1].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
    end

    # One staged chunk per worker directory; tags differ per worker
    # ('testing' for worker 0, 'foo' for worker 1).
    @worker_dir_chunk_2 = Fluent::UniqueId.generate
    wc0_2 = File.join(@worker0_dir, "fsb.testing.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.buf")
    wc1_2 = File.join(@worker1_dir, "fsb.foo.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.buf")
    [wc0_2, wc1_2].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
    end

    # A second staged chunk per worker ('bar' for worker 0, 'baz' for worker 1).
    @worker_dir_chunk_3 = Fluent::UniqueId.generate
    wc0_3 = File.join(@worker0_dir, "fsb.bar.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.buf")
    wc1_3 = File.join(@worker1_dir, "fsb.baz.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.buf")
    [wc0_3, wc1_3].each do |chunk_path|
      File.open(chunk_path, 'wb') do |f|
        f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
        f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
      end
    end
  end

  teardown do
    # Full plugin shutdown sequence, skipping phases that already ran.
    if @p
      @p.stop unless @p.stopped?
      @p.before_shutdown unless @p.before_shutdown?
      @p.shutdown unless @p.shutdown?
      @p.after_shutdown unless @p.after_shutdown?
      @p.close unless @p.closed?
      @p.terminate unless @p.terminated?
    end
    # Clean up worker subdirectories and leftover chunk files so that other
    # sub_test_cases (which File.delete every glob entry of @bufdir) do not
    # trip over directories this case created.
    if @bufdir
      FileUtils.rm_rf(@worker0_dir) if @worker0_dir
      FileUtils.rm_rf(@worker1_dir) if @worker1_dir
      Dir.glob(File.join(@bufdir, '*')).each do |path|
        File.delete(path) if File.file?(path)
      end
    end
  end

  test 'worker(id=0) #resume returns staged/queued chunks with metadata, not only in worker dir, including the directory specified by path' do
    # Save and restore the env var so it does not leak into later tests.
    saved_worker_id = ENV['SERVERENGINE_WORKER_ID']
    ENV['SERVERENGINE_WORKER_ID'] = '0'
    begin
      buf_conf = config_element('buffer', '', {'path' => @bufdir})
      @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
      with_worker_config(workers: 2, worker_id: 0) do
        @d.configure(config_element('output', '', {}, [buf_conf]))
      end

      @d.start
      @p = @d.buffer

      # Worker 0 sees its own 2 staged chunks, plus 3 queued chunks:
      # its own one and the two from the root directory.
      assert_equal 2, @p.stage.size
      assert_equal 3, @p.queue.size

      stage = @p.stage

      m1 = metadata(tag: 'testing')
      assert_equal @worker_dir_chunk_2, stage[m1].unique_id
      assert_equal 4, stage[m1].size
      assert_equal :staged, stage[m1].state

      m2 = metadata(tag: 'bar')
      assert_equal @worker_dir_chunk_3, stage[m2].unique_id
      assert_equal 3, stage[m2].size
      assert_equal :staged, stage[m2].state

      queue = @p.queue

      # Order-insensitive comparison: root-dir chunks plus this worker's own.
      assert_equal [@bufdir_chunk_1, @bufdir_chunk_2, @worker_dir_chunk_1].sort, queue.map(&:unique_id).sort
      assert_equal [3, 4, 4], queue.map(&:size).sort
      assert_equal [:queued, :queued, :queued], queue.map(&:state)
    ensure
      if saved_worker_id
        ENV['SERVERENGINE_WORKER_ID'] = saved_worker_id
      else
        ENV.delete('SERVERENGINE_WORKER_ID')
      end
    end
  end

  test 'worker(id=1) #resume returns staged/queued chunks with metadata, only in worker dir' do
    buf_conf = config_element('buffer', '', {'path' => @bufdir})
    @d = FluentPluginFileSingleBufferTest::DummyOutputPlugin.new
    with_worker_config(workers: 2, worker_id: 1) do
      @d.configure(config_element('output', '', {}, [buf_conf]))
    end

    @d.start
    @p = @d.buffer

    # Worker 1 must NOT adopt the root-dir chunks: only its own directory.
    assert_equal 2, @p.stage.size
    assert_equal 1, @p.queue.size

    stage = @p.stage

    m1 = metadata(tag: 'foo')
    assert_equal @worker_dir_chunk_2, stage[m1].unique_id
    assert_equal 4, stage[m1].size
    assert_equal :staged, stage[m1].state

    m2 = metadata(tag: 'baz')
    assert_equal @worker_dir_chunk_3, stage[m2].unique_id
    assert_equal 3, stage[m2].size
    assert_equal :staged, stage[m2].state

    queue = @p.queue

    assert_equal @worker_dir_chunk_1, queue[0].unique_id
    assert_equal 3, queue[0].size
    assert_equal :queued, queue[0].state
  end
end
801
+ end