fluentd 0.14.11-x86-mingw32 → 0.14.12-x86-mingw32

Potentially problematic release.

Files changed (119)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -5
  3. data/ChangeLog +54 -2
  4. data/example/in_dummy_blocks.conf +17 -0
  5. data/example/in_forward_tls.conf +14 -0
  6. data/example/in_forward_workers.conf +21 -0
  7. data/example/logevents.conf +25 -0
  8. data/example/out_forward_heartbeat_none.conf +16 -0
  9. data/example/out_forward_tls.conf +18 -0
  10. data/example/suppress_config_dump.conf +7 -0
  11. data/lib/fluent/agent.rb +3 -32
  12. data/lib/fluent/clock.rb +62 -0
  13. data/lib/fluent/command/fluentd.rb +12 -0
  14. data/lib/fluent/compat/input.rb +10 -1
  15. data/lib/fluent/compat/output.rb +40 -1
  16. data/lib/fluent/config/configure_proxy.rb +30 -7
  17. data/lib/fluent/config/section.rb +4 -0
  18. data/lib/fluent/config/types.rb +2 -2
  19. data/lib/fluent/configurable.rb +31 -5
  20. data/lib/fluent/engine.rb +61 -12
  21. data/lib/fluent/event_router.rb +6 -0
  22. data/lib/fluent/load.rb +0 -1
  23. data/lib/fluent/log.rb +118 -42
  24. data/lib/fluent/match.rb +37 -0
  25. data/lib/fluent/plugin.rb +25 -3
  26. data/lib/fluent/plugin/base.rb +4 -0
  27. data/lib/fluent/plugin/buf_file.rb +38 -14
  28. data/lib/fluent/plugin/buffer.rb +20 -20
  29. data/lib/fluent/plugin/buffer/file_chunk.rb +2 -2
  30. data/lib/fluent/plugin/compressable.rb +1 -0
  31. data/lib/fluent/plugin/filter_record_transformer.rb +3 -6
  32. data/lib/fluent/plugin/formatter_csv.rb +4 -1
  33. data/lib/fluent/plugin/formatter_hash.rb +5 -1
  34. data/lib/fluent/plugin/formatter_json.rb +10 -0
  35. data/lib/fluent/plugin/formatter_ltsv.rb +2 -1
  36. data/lib/fluent/plugin/in_dummy.rb +4 -0
  37. data/lib/fluent/plugin/in_exec.rb +4 -0
  38. data/lib/fluent/plugin/in_forward.rb +11 -3
  39. data/lib/fluent/plugin/in_gc_stat.rb +4 -0
  40. data/lib/fluent/plugin/in_http.rb +4 -0
  41. data/lib/fluent/plugin/in_monitor_agent.rb +29 -2
  42. data/lib/fluent/plugin/in_object_space.rb +4 -1
  43. data/lib/fluent/plugin/in_syslog.rb +4 -0
  44. data/lib/fluent/plugin/in_tail.rb +193 -116
  45. data/lib/fluent/plugin/in_tcp.rb +5 -1
  46. data/lib/fluent/plugin/in_udp.rb +4 -0
  47. data/lib/fluent/plugin/input.rb +4 -0
  48. data/lib/fluent/plugin/out_copy.rb +4 -0
  49. data/lib/fluent/plugin/out_exec.rb +4 -0
  50. data/lib/fluent/plugin/out_exec_filter.rb +4 -0
  51. data/lib/fluent/plugin/out_file.rb +70 -30
  52. data/lib/fluent/plugin/out_forward.rb +132 -28
  53. data/lib/fluent/plugin/out_null.rb +10 -0
  54. data/lib/fluent/plugin/out_relabel.rb +4 -0
  55. data/lib/fluent/plugin/out_roundrobin.rb +4 -0
  56. data/lib/fluent/plugin/out_secondary_file.rb +5 -0
  57. data/lib/fluent/plugin/out_stdout.rb +5 -0
  58. data/lib/fluent/plugin/output.rb +18 -9
  59. data/lib/fluent/plugin/storage_local.rb +25 -2
  60. data/lib/fluent/plugin_helper/cert_option.rb +159 -0
  61. data/lib/fluent/plugin_helper/child_process.rb +6 -6
  62. data/lib/fluent/plugin_helper/compat_parameters.rb +1 -1
  63. data/lib/fluent/plugin_helper/event_loop.rb +29 -4
  64. data/lib/fluent/plugin_helper/inject.rb +14 -1
  65. data/lib/fluent/plugin_helper/server.rb +275 -31
  66. data/lib/fluent/plugin_helper/socket.rb +144 -4
  67. data/lib/fluent/plugin_helper/socket_option.rb +2 -17
  68. data/lib/fluent/plugin_helper/storage.rb +7 -1
  69. data/lib/fluent/plugin_helper/thread.rb +16 -4
  70. data/lib/fluent/registry.rb +26 -9
  71. data/lib/fluent/root_agent.rb +7 -3
  72. data/lib/fluent/supervisor.rb +37 -15
  73. data/lib/fluent/system_config.rb +37 -10
  74. data/lib/fluent/test.rb +2 -0
  75. data/lib/fluent/test/driver/base.rb +24 -26
  76. data/lib/fluent/test/helpers.rb +21 -0
  77. data/lib/fluent/version.rb +1 -1
  78. data/test/command/test_fluentd.rb +274 -4
  79. data/test/config/test_configurable.rb +154 -0
  80. data/test/config/test_configure_proxy.rb +180 -1
  81. data/test/config/test_system_config.rb +10 -0
  82. data/test/config/test_types.rb +1 -0
  83. data/test/plugin/test_base.rb +4 -0
  84. data/test/plugin/test_buf_file.rb +241 -9
  85. data/test/plugin/test_buffer.rb +11 -11
  86. data/test/plugin/test_buffer_file_chunk.rb +6 -6
  87. data/test/plugin/test_compressable.rb +3 -0
  88. data/test/plugin/test_filter.rb +4 -0
  89. data/test/plugin/test_filter_record_transformer.rb +20 -0
  90. data/test/plugin/test_formatter_csv.rb +9 -0
  91. data/test/plugin/test_formatter_hash.rb +35 -0
  92. data/test/plugin/test_formatter_json.rb +8 -0
  93. data/test/plugin/test_formatter_ltsv.rb +7 -0
  94. data/test/plugin/test_in_dummy.rb +7 -3
  95. data/test/plugin/test_in_monitor_agent.rb +43 -5
  96. data/test/plugin/test_in_tail.rb +97 -4
  97. data/test/plugin/test_input.rb +4 -0
  98. data/test/plugin/test_out_file.rb +46 -7
  99. data/test/plugin/test_out_forward.rb +59 -7
  100. data/test/plugin/test_output.rb +10 -4
  101. data/test/plugin/test_output_as_buffered.rb +37 -25
  102. data/test/plugin/test_output_as_buffered_compress.rb +1 -1
  103. data/test/plugin/test_output_as_buffered_retries.rb +6 -6
  104. data/test/plugin/test_output_as_buffered_secondary.rb +91 -31
  105. data/test/plugin/test_storage_local.rb +40 -1
  106. data/test/plugin_helper/test_child_process.rb +29 -28
  107. data/test/plugin_helper/test_compat_parameters.rb +1 -1
  108. data/test/plugin_helper/test_inject.rb +27 -9
  109. data/test/plugin_helper/test_server.rb +822 -50
  110. data/test/plugin_helper/test_storage.rb +11 -0
  111. data/test/plugin_helper/test_timer.rb +1 -0
  112. data/test/test_clock.rb +164 -0
  113. data/test/test_log.rb +146 -15
  114. data/test/test_plugin.rb +251 -0
  115. data/test/test_supervisor.rb +65 -57
  116. data/test/test_test_drivers.rb +2 -2
  117. metadata +18 -7
  118. data/lib/fluent/process.rb +0 -504
  119. data/test/test_process.rb +0 -48
data/test/plugin/test_buf_file.rb
@@ -10,6 +10,12 @@ require 'msgpack'
 module FluentPluginFileBufferTest
   class DummyOutputPlugin < Fluent::Plugin::Output
     Fluent::Plugin.register_output('buffer_file_test_output', self)
+    config_section :buffer do
+      config_set_default :@type, 'file'
+    end
+    def multi_workers_ready?
+      true
+    end
     def write(chunk)
       # drop
     end
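
Two things to note in this hunk: the new buffer section default lets the dummy output run with a file buffer without an explicit @type in every test config, and multi_workers_ready? is the hook this release introduces for fluentd to consult before accepting a configuration with more than one worker. Judging from the error message asserted later in this file, the gate behaves roughly like the following (an illustrative sketch, not fluentd's actual code; the method name is an assumption):

    # Sketch: reject plugins that do not declare multi-worker readiness.
    def check_multi_workers_ready!(type, plugin, workers)
      if workers > 1 && !plugin.multi_workers_ready?
        raise Fluent::ConfigError,
              "Plugin '#{type}' does not support multi workers configuration (#{plugin.class})"
      end
    end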
@@ -39,13 +45,8 @@ class FileBufferTest < Test::Unit::TestCase
     Fluent::Test.setup
 
     @dir = File.expand_path('../../tmp/buffer_file_dir', __FILE__)
-    unless File.exist?(@dir)
-      FileUtils.mkdir_p @dir
-    end
-    Dir.glob(File.join(@dir, '*')).each do |path|
-      next if ['.', '..'].include?(File.basename(path))
-      File.delete(path)
-    end
+    FileUtils.rm_rf @dir
+    FileUtils.mkdir_p @dir
   end
 
   test 'path should include * normally' do
@@ -73,6 +74,72 @@ class FileBufferTest < Test::Unit::TestCase
     end
   end
 
+  sub_test_case 'buffer configurations and workers' do
+    setup do
+      @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
+      FileUtils.rm_rf @bufdir
+      Fluent::Test.setup
+
+      @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
+      @p = Fluent::Plugin::FileBuffer.new
+      @p.owner = @d
+    end
+
+    test 'raise error if configured path is of existing file' do
+      @bufpath = File.join(@bufdir, 'buf')
+      FileUtils.mkdir_p @bufdir
+      File.open(@bufpath, 'w'){|f| } # create and close the file
+      assert File.exist?(@bufpath)
+      assert File.file?(@bufpath)
+
+      buf_conf = config_element('buffer', '', {'path' => @bufpath})
+      assert_raise Fluent::ConfigError.new("Plugin 'file' does not support multi workers configuration (Fluent::Plugin::FileBuffer)") do
+        Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
+          @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
+        end
+      end
+    end
+
+    test 'raise error if fluentd is configured to use file path pattern and multi workers' do
+      @bufpath = File.join(@bufdir, 'testbuf.*.log')
+      buf_conf = config_element('buffer', '', {'path' => @bufpath})
+      assert_raise Fluent::ConfigError.new("Plugin 'file' does not support multi workers configuration (Fluent::Plugin::FileBuffer)") do
+        Fluent::SystemConfig.overwrite_system_config('workers' => 4) do
+          @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
+        end
+      end
+    end
+
+    test 'enables multi worker configuration with unexisting directory path' do
+      assert_false File.exist?(@bufdir)
+      buf_conf = config_element('buffer', '', {'path' => @bufdir})
+      assert_nothing_raised do
+        Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
+          @d.configure(config_element('ROOT', '', {}, [buf_conf]))
+        end
+      end
+    end
+
+    test 'enables multi worker configuration with existing directory path' do
+      FileUtils.mkdir_p @bufdir
+      buf_conf = config_element('buffer', '', {'path' => @bufdir})
+      assert_nothing_raised do
+        Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
+          @d.configure(config_element('ROOT', '', {}, [buf_conf]))
+        end
+      end
+    end
+
+    test 'enables multi worker configuration with root dir' do
+      buf_conf = config_element('buffer', '')
+      assert_nothing_raised do
+        Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir, 'workers' => 4) do
+          @d.configure(config_element('ROOT', '', {'@id' => 'dummy_output_with_buf'}, [buf_conf]))
+        end
+      end
+    end
+  end
+
   sub_test_case 'buffer plugin configured only with path' do
     setup do
       @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
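
Read together, these five cases pin down the contract: with workers > 1, a file buffer is accepted only when its path names a directory (existing or not) or is derived from root_dir, while a concrete file path or a *-pattern raises the ConfigError above. A minimal accepted shape, reusing the same test helpers (illustrative only; the paths are placeholders):

    # Accepted under workers > 1: 'path' names a directory, so chunks can live
    # in per-worker subdirectories (see the resume tests further down).
    buf_conf = config_element('buffer', '', {'path' => '/tmp/buffer_dir'})
    Fluent::SystemConfig.overwrite_system_config('root_dir' => '/tmp/buffer_dir', 'workers' => 4) do
      @d.configure(config_element('ROOT', '', {'@id' => 'out1'}, [buf_conf]))
    end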
@@ -141,7 +208,7 @@ class FileBufferTest < Test::Unit::TestCase
       FileUtils.rm_r bufdir if File.exist?(bufdir)
       assert !File.exist?(bufdir)
 
-      plugin.configure(config_element('buffer', '', {'path' => bufpath, 'dir_permission' => 0700}))
+      plugin.configure(config_element('buffer', '', {'path' => bufpath, 'dir_permission' => '0700'}))
       assert !File.exist?(bufdir)
 
       plugin.start
@@ -215,7 +282,7 @@ class FileBufferTest < Test::Unit::TestCase
       FileUtils.rm_r bufdir if File.exist?(bufdir)
       assert !File.exist?(bufdir)
 
-      plugin.configure(config_element('buffer', '', {'path' => bufpath, 'file_permission' => 0600}))
+      plugin.configure(config_element('buffer', '', {'path' => bufpath, 'file_permission' => '0600'}))
       assert !File.exist?(bufdir)
       plugin.start
 
@@ -438,6 +505,168 @@ class FileBufferTest < Test::Unit::TestCase
     end
   end
 
+  sub_test_case 'there are some existing file chunks, both in specified path and per-worker directory under specified path, configured as multi workers' do
+    setup do
+      @bufdir = File.expand_path('../../tmp/buffer_file/path', __FILE__)
+      @worker0_dir = File.join(@bufdir, "worker0")
+      @worker1_dir = File.join(@bufdir, "worker1")
+      FileUtils.rm_rf @bufdir
+      FileUtils.mkdir_p @worker0_dir
+      FileUtils.mkdir_p @worker1_dir
+
+      @bufdir_chunk_1 = Fluent::UniqueId.generate
+      bc1 = File.join(@bufdir, "buffer.q#{Fluent::UniqueId.hex(@bufdir_chunk_1)}.log")
+      File.open(bc1, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+      write_metadata(
+        bc1 + '.meta', @bufdir_chunk_1, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
+        4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
+      )
+
+      @bufdir_chunk_2 = Fluent::UniqueId.generate
+      bc2 = File.join(@bufdir, "buffer.q#{Fluent::UniqueId.hex(@bufdir_chunk_2)}.log")
+      File.open(bc2, 'wb') do |f|
+        f.write ["t1.test", event_time('2016-04-17 13:58:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t2.test", event_time('2016-04-17 13:58:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t3.test", event_time('2016-04-17 13:58:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        f.write ["t4.test", event_time('2016-04-17 13:58:22 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+      end
+      write_metadata(
+        bc2 + '.meta', @bufdir_chunk_2, metadata(timekey: event_time('2016-04-17 13:58:00 -0700').to_i),
+        4, event_time('2016-04-17 13:58:00 -0700').to_i, event_time('2016-04-17 13:58:22 -0700').to_i
+      )
+
+      @worker_dir_chunk_1 = Fluent::UniqueId.generate
+      wc0_1 = File.join(@worker0_dir, "buffer.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.log")
+      wc1_1 = File.join(@worker1_dir, "buffer.q#{Fluent::UniqueId.hex(@worker_dir_chunk_1)}.log")
+      [wc0_1, wc1_1].each do |chunk_path|
+        File.open(chunk_path, 'wb') do |f|
+          f.write ["t1.test", event_time('2016-04-17 13:59:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t2.test", event_time('2016-04-17 13:59:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t3.test", event_time('2016-04-17 13:59:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        end
+        write_metadata(
+          chunk_path + '.meta', @worker_dir_chunk_1, metadata(timekey: event_time('2016-04-17 13:59:00 -0700').to_i),
+          3, event_time('2016-04-17 13:59:00 -0700').to_i, event_time('2016-04-17 13:59:23 -0700').to_i
+        )
+      end
+
+      @worker_dir_chunk_2 = Fluent::UniqueId.generate
+      wc0_2 = File.join(@worker0_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.log")
+      wc1_2 = File.join(@worker1_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_2)}.log")
+      [wc0_2, wc1_2].each do |chunk_path|
+        File.open(chunk_path, 'wb') do |f|
+          f.write ["t1.test", event_time('2016-04-17 14:00:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t2.test", event_time('2016-04-17 14:00:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t3.test", event_time('2016-04-17 14:00:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t4.test", event_time('2016-04-17 14:00:28 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        end
+        write_metadata(
+          chunk_path + '.meta', @worker_dir_chunk_2, metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i),
+          4, event_time('2016-04-17 14:00:00 -0700').to_i, event_time('2016-04-17 14:00:28 -0700').to_i
+        )
+      end
+
+      @worker_dir_chunk_3 = Fluent::UniqueId.generate
+      wc0_3 = File.join(@worker0_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.log")
+      wc1_3 = File.join(@worker1_dir, "buffer.b#{Fluent::UniqueId.hex(@worker_dir_chunk_3)}.log")
+      [wc0_3, wc1_3].each do |chunk_path|
+        File.open(chunk_path, 'wb') do |f|
+          f.write ["t1.test", event_time('2016-04-17 14:01:15 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t2.test", event_time('2016-04-17 14:01:17 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+          f.write ["t3.test", event_time('2016-04-17 14:01:21 -0700').to_i, {"message" => "yay"}].to_json + "\n"
+        end
+        write_metadata(
+          chunk_path + '.meta', @worker_dir_chunk_3, metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i),
+          3, event_time('2016-04-17 14:01:00 -0700').to_i, event_time('2016-04-17 14:01:25 -0700').to_i
+        )
+      end
+
+      Fluent::Test.setup
+    end
+
+    teardown do
+      if @p
+        @p.stop unless @p.stopped?
+        @p.before_shutdown unless @p.before_shutdown?
+        @p.shutdown unless @p.shutdown?
+        @p.after_shutdown unless @p.after_shutdown?
+        @p.close unless @p.closed?
+        @p.terminate unless @p.terminated?
+      end
+    end
+
+    test 'worker(id=0) #resume returns staged/queued chunks with metadata, not only in worker dir, including the directory specified by path' do
+      ENV['SERVERENGINE_WORKER_ID'] = '0'
+
+      buf_conf = config_element('buffer', '', {'path' => @bufdir})
+      @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
+      with_worker_config(workers: 2, worker_id: 0) do
+        @d.configure(config_element('output', '', {}, [buf_conf]))
+      end
+
+      @d.start
+      @p = @d.buffer
+
+      assert_equal 2, @p.stage.size
+      assert_equal 3, @p.queue.size
+
+      stage = @p.stage
+
+      m1 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
+      assert_equal @worker_dir_chunk_2, stage[m1].unique_id
+      assert_equal 4, stage[m1].size
+      assert_equal :staged, stage[m1].state
+
+      m2 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
+      assert_equal @worker_dir_chunk_3, stage[m2].unique_id
+      assert_equal 3, stage[m2].size
+      assert_equal :staged, stage[m2].state
+
+      queue = @p.queue
+
+      assert_equal [@bufdir_chunk_1, @bufdir_chunk_2, @worker_dir_chunk_1].sort, queue.map(&:unique_id).sort
+      assert_equal [3, 4, 4], queue.map(&:size).sort
+      assert_equal [:queued, :queued, :queued], queue.map(&:state)
+    end
+
+    test 'worker(id=1) #resume returns staged/queued chunks with metadata, only in worker dir' do
+      buf_conf = config_element('buffer', '', {'path' => @bufdir})
+      @d = FluentPluginFileBufferTest::DummyOutputPlugin.new
+      with_worker_config(workers: 2, worker_id: 1) do
+        @d.configure(config_element('output', '', {}, [buf_conf]))
+      end
+
+      @d.start
+      @p = @d.buffer
+
+      assert_equal 2, @p.stage.size
+      assert_equal 1, @p.queue.size
+
+      stage = @p.stage
+
+      m1 = metadata(timekey: event_time('2016-04-17 14:00:00 -0700').to_i)
+      assert_equal @worker_dir_chunk_2, stage[m1].unique_id
+      assert_equal 4, stage[m1].size
+      assert_equal :staged, stage[m1].state
+
+      m2 = metadata(timekey: event_time('2016-04-17 14:01:00 -0700').to_i)
+      assert_equal @worker_dir_chunk_3, stage[m2].unique_id
+      assert_equal 3, stage[m2].size
+      assert_equal :staged, stage[m2].state
+
+      queue = @p.queue
+
+      assert_equal @worker_dir_chunk_1, queue[0].unique_id
+      assert_equal 3, queue[0].size
+      assert_equal :queued, queue[0].state
+    end
+  end
+
   sub_test_case 'there are some existing file chunks without metadata file' do
     setup do
       @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
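
The with_worker_config helper comes from the test helpers extended in this release (data/lib/fluent/test/helpers.rb, +21 lines); its body is not part of the hunks shown here, so the following reconstruction is an assumption based only on how the tests call it:

    # Hypothetical shape of the helper: set the worker count via system config
    # and the worker identity via ServerEngine's environment variable, then restore.
    def with_worker_config(workers: 1, worker_id: 0, &block)
      ENV['SERVERENGINE_WORKER_ID'] = worker_id.to_s
      Fluent::SystemConfig.overwrite_system_config('workers' => workers, &block)
    ensure
      ENV.delete('SERVERENGINE_WORKER_ID')
    end

Whatever the exact body, the two tests above fix the resume contract: worker 0 adopts legacy chunks lying directly under path in addition to its own worker0/ directory, while every other worker resumes only from its own worker<id>/ directory.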
@@ -545,6 +774,9 @@ class FileBufferTest < Test::Unit::TestCase
     setup do
       @bufdir = File.expand_path('../../tmp/buffer_file', __FILE__)
 
+      FileUtils.rm_rf @bufdir
+      FileUtils.mkdir_p @bufdir
+
       @c1id = Fluent::UniqueId.generate
       p1 = File.join(@bufdir, "etest.201604171358.q#{Fluent::UniqueId.hex(@c1id)}.log")
      File.open(p1, 'wb') do |f|
data/test/plugin/test_buffer.rb
@@ -138,7 +138,7 @@ class BufferTest < Test::Unit::TestCase
     end
 
     test 'chunk records limit is ignored in default' do
-      assert_nil @p.chunk_records_limit
+      assert_nil @p.chunk_limit_records
     end
 
     test '#storable? checks total size of staged and enqueued(includes dequeued chunks) against total_limit_size' do
@@ -631,7 +631,7 @@ class BufferTest < Test::Unit::TestCase
         define_method(:commit){ raise "yay" }
       end
 
-      assert_raise "yay" do
+      assert_raise RuntimeError.new("yay") do
         @p.write({m => [row]})
       end
 
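The move away from assert_raise "yay" is not cosmetic: in test-unit a bare string argument is taken as the assertion's failure message, not as an expected exception, so the old form only checked that something was raised. Passing an Exception instance pins down both class and message:

    # test-unit semantics (illustrative):
    assert_raise(RuntimeError.new("yay")) { raise "yay" }  # passes: class and message match
    assert_raise(RuntimeError.new("yay")) { raise "nay" }  # fails: message differs

The same fix recurs at several call sites below.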
@@ -810,7 +810,7 @@ class BufferTest < Test::Unit::TestCase
           [event_time('2016-04-11 16:40:04 +0000'), {"message" => "z" * 1024 * 128}],
         ]
       )
-      assert_raise "yay" do
+      assert_raise RuntimeError.new("yay") do
         @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
       end
 
@@ -1113,9 +1113,9 @@ class BufferTest < Test::Unit::TestCase
     end
   end
 
-  sub_test_case 'with configuration includes chunk_records_limit' do
+  sub_test_case 'with configuration includes chunk_limit_records' do
     setup do
-      @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "chunk_records_limit" => 6})
+      @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "chunk_limit_records" => 6})
       @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
       @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
       @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
@@ -1139,7 +1139,7 @@ class BufferTest < Test::Unit::TestCase
     end
 
     test '#chunk_size_over? returns true if too many records exists in a chunk even if its bytes is less than limit' do
-      assert_equal 6, @p.chunk_records_limit
+      assert_equal 6, @p.chunk_limit_records
 
       m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
 
@@ -1155,7 +1155,7 @@ class BufferTest < Test::Unit::TestCase
     end
 
     test '#chunk_size_full? returns true if enough many records exists in a chunk even if its bytes is less than limit' do
-      assert_equal 6, @p.chunk_records_limit
+      assert_equal 6, @p.chunk_limit_records
 
       m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
 
@@ -1171,9 +1171,9 @@ class BufferTest < Test::Unit::TestCase
     end
   end
 
-  sub_test_case 'with configuration includes queue_length_limit' do
+  sub_test_case 'with configuration includes queue_limit_length' do
     setup do
-      @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "queue_length_limit" => 5})
+      @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240, "queue_limit_length" => 5})
       @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
       @dm1 = dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
       @dm2 = dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
@@ -1196,9 +1196,9 @@ class BufferTest < Test::Unit::TestCase
       @p.start
     end
 
-    test '#configure will overwrite standard configuration if queue_length_limit' do
+    test '#configure will overwrite standard configuration if queue_limit_length' do
       assert_equal 1024, @p.chunk_limit_size
-      assert_equal 5, @p.queue_length_limit
+      assert_equal 5, @p.queue_limit_length
       assert_equal (1024*5), @p.total_limit_size
     end
   end
data/test/plugin/test_buffer_file_chunk.rb
@@ -63,13 +63,13 @@ class BufferFileChunkTest < Test::Unit::TestCase
 
   test '.generate_stage_chunk_path generates path with staged mark & chunk unique_id' do
     assert_equal gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), @klass.generate_stage_chunk_path(gen_path("mychunk.*.log"), gen_test_chunk_id)
-    assert_raise "BUG: buffer chunk path on stage MUST have '.*.'" do
+    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
       @klass.generate_stage_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
     end
-    assert_raise "BUG: buffer chunk path on stage MUST have '.*.'" do
+    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
       @klass.generate_stage_chunk_path(gen_path("mychunk.*"), gen_test_chunk_id)
     end
-    assert_raise "BUG: buffer chunk path on stage MUST have '.*.'" do
+    assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
       @klass.generate_stage_chunk_path(gen_path("*.log"), gen_test_chunk_id)
     end
   end
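
These assertions also document the chunk naming scheme visible throughout this diff: the * placeholder becomes b<unique_id in hex> while a chunk is staged, and q<hex> once it is enqueued (compare the buffer.b…/buffer.q… fixture files above). A substitution consistent with the assertions might look like this (a sketch; fluentd's real implementation may differ):

    # 'mychunk.*.log' + id => 'mychunk.b<hex>.log' (staged); enqueueing renames b to q.
    def generate_stage_chunk_path(path, unique_id)
      prefix, suffix = path.split('.*.', 2)
      raise "BUG: buffer chunk path on stage MUST have '.*.'" unless suffix
      "#{prefix}.b#{Fluent::UniqueId.hex(unique_id)}.#{suffix}"
    end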
@@ -679,7 +679,7 @@ class BufferFileChunkTest < Test::Unit::TestCase
       assert_equal @d.bytesize, @c.bytesize
       assert_equal @d, @c.read
 
-      assert_raise "BUG: appending to non-staged chunk, now 'queued'" do
+      assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
         @c.append(["queued chunk is read only"])
       end
       assert_raise IOError do
@@ -721,7 +721,7 @@ class BufferFileChunkTest < Test::Unit::TestCase
       assert_equal 0, @c.size
       assert_equal @d, @c.read
 
-      assert_raise "BUG: appending to non-staged chunk, now 'queued'" do
+      assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
         @c.append(["queued chunk is read only"])
       end
       assert_raise IOError do
@@ -763,7 +763,7 @@ class BufferFileChunkTest < Test::Unit::TestCase
       assert_equal 0, @c.size
       assert_equal @d, @c.read
 
-      assert_raise "BUG: appending to non-staged chunk, now 'queued'" do
+      assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
         @c.append(["queued chunk is read only"])
       end
       assert_raise IOError do
data/test/plugin/test_compressable.rb
@@ -18,6 +18,7 @@ class CompressableTest < Test::Unit::TestCase
     test 'write compressed data to IO with output_io option' do
       io = StringIO.new
       compress(@src, output_io: io)
+      waiting(10){ sleep 0.1 until @gzipped_src == io.string }
       assert_equal @gzipped_src, io.string
     end
   end
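
Here waiting is the suite's timeout guard around a polling loop: it tolerates the compressed bytes appearing in the StringIO slightly after compress returns, and still fails the test if they never arrive. Its likely shape (an assumption; the real helper lives in the shared test code):

    require 'timeout'

    # Run the polling block, failing the test if the deadline passes first.
    def waiting(seconds)
      Timeout.timeout(seconds) { yield }
    end

The same guard is added to the two decompress tests below.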
@@ -35,6 +36,7 @@ class CompressableTest < Test::Unit::TestCase
     test 'write decompressed data to IO with output_io option' do
       io = StringIO.new
       decompress(@gzipped_src, output_io: io)
+      waiting(10){ sleep 0.1 until @src == io.string }
       assert_equal @src, io.string
     end
 
@@ -56,6 +58,7 @@ class CompressableTest < Test::Unit::TestCase
       output_io = StringIO.new
 
       decompress(input_io: input_io, output_io: output_io)
+      waiting(10){ sleep 0.1 until @src == output_io.string }
       assert_equal @src, output_io.string
     end
 
data/test/plugin/test_filter.rb
@@ -165,6 +165,10 @@ class FilterPluginTest < Test::Unit::TestCase
     end
   end
 
+  test 'are available with multi worker configuration in default' do
+    assert @p.multi_workers_ready?
+  end
+
   test 'filters events correctly' do
     test_es = [
       [event_time('2016-04-19 13:01:00 -0700'), {"num" => "1", "message" => "Hello filters!"}],
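
Filters need no per-plugin opt-in here because the base class ships the permissive default (data/lib/fluent/plugin/base.rb gains four lines in this release, and test_base.rb asserts the same); presumably simply:

    # Default in Fluent::Plugin::Base (as the base tests suggest; an assumption):
    def multi_workers_ready?
      true
    end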