fluentd 0.14.1 → 0.14.2

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (129) hide show
  1. checksums.yaml +4 -4
  2. data/ChangeLog +110 -1
  3. data/Rakefile +5 -1
  4. data/appveyor.yml +7 -1
  5. data/example/in_forward.conf +4 -0
  6. data/lib/fluent/compat/exec_util.rb +129 -0
  7. data/lib/fluent/compat/file_util.rb +54 -0
  8. data/lib/fluent/compat/filter.rb +21 -3
  9. data/lib/fluent/compat/formatter.rb +4 -2
  10. data/lib/fluent/compat/formatter_utils.rb +85 -0
  11. data/lib/fluent/compat/handle_tag_and_time_mixin.rb +60 -0
  12. data/lib/fluent/compat/input.rb +1 -3
  13. data/lib/fluent/compat/output.rb +95 -39
  14. data/lib/fluent/compat/parser.rb +17 -0
  15. data/lib/fluent/compat/parser_utils.rb +40 -0
  16. data/lib/fluent/compat/socket_util.rb +165 -0
  17. data/lib/fluent/compat/string_util.rb +34 -0
  18. data/lib/fluent/{test/driver/owner.rb → compat/structured_format_mixin.rb} +5 -11
  19. data/lib/fluent/config/element.rb +2 -2
  20. data/lib/fluent/configurable.rb +2 -1
  21. data/lib/fluent/event.rb +61 -7
  22. data/lib/fluent/event_router.rb +1 -1
  23. data/lib/fluent/plugin.rb +7 -7
  24. data/lib/fluent/plugin/buf_file.rb +5 -2
  25. data/lib/fluent/plugin/buffer.rb +194 -64
  26. data/lib/fluent/plugin/buffer/chunk.rb +28 -3
  27. data/lib/fluent/plugin/buffer/file_chunk.rb +5 -21
  28. data/lib/fluent/plugin/buffer/memory_chunk.rb +1 -11
  29. data/lib/fluent/plugin/exec_util.rb +2 -112
  30. data/lib/fluent/plugin/file_util.rb +3 -38
  31. data/lib/fluent/plugin/file_wrapper.rb +1 -1
  32. data/lib/fluent/plugin/filter_grep.rb +3 -7
  33. data/lib/fluent/plugin/filter_record_transformer.rb +5 -5
  34. data/lib/fluent/plugin/filter_stdout.rb +18 -11
  35. data/lib/fluent/plugin/formatter.rb +0 -48
  36. data/lib/fluent/plugin/formatter_csv.rb +7 -8
  37. data/lib/fluent/plugin/formatter_hash.rb +1 -4
  38. data/lib/fluent/plugin/formatter_json.rb +1 -4
  39. data/lib/fluent/plugin/formatter_ltsv.rb +5 -6
  40. data/lib/fluent/plugin/formatter_msgpack.rb +1 -4
  41. data/lib/fluent/plugin/formatter_out_file.rb +36 -3
  42. data/lib/fluent/plugin/formatter_stdout.rb +36 -1
  43. data/lib/fluent/plugin/in_dummy.rb +9 -2
  44. data/lib/fluent/plugin/in_exec.rb +20 -57
  45. data/lib/fluent/plugin/in_forward.rb +4 -3
  46. data/lib/fluent/plugin/in_object_space.rb +8 -44
  47. data/lib/fluent/plugin/in_syslog.rb +13 -24
  48. data/lib/fluent/plugin/in_tail.rb +3 -0
  49. data/lib/fluent/plugin/out_buffered_stdout.rb +14 -4
  50. data/lib/fluent/plugin/out_exec.rb +7 -5
  51. data/lib/fluent/plugin/out_exec_filter.rb +10 -10
  52. data/lib/fluent/plugin/out_file.rb +1 -3
  53. data/lib/fluent/plugin/out_forward.rb +38 -57
  54. data/lib/fluent/plugin/out_stdout.rb +14 -5
  55. data/lib/fluent/plugin/out_stream.rb +3 -0
  56. data/lib/fluent/plugin/output.rb +31 -14
  57. data/lib/fluent/plugin/parser.rb +0 -69
  58. data/lib/fluent/plugin/parser_apache.rb +10 -6
  59. data/lib/fluent/plugin/parser_apache_error.rb +8 -3
  60. data/lib/fluent/plugin/parser_csv.rb +3 -1
  61. data/lib/fluent/plugin/parser_json.rb +1 -1
  62. data/lib/fluent/plugin/parser_multiline.rb +5 -3
  63. data/lib/fluent/plugin/parser_nginx.rb +10 -6
  64. data/lib/fluent/plugin/parser_regexp.rb +73 -0
  65. data/lib/fluent/plugin/socket_util.rb +2 -148
  66. data/lib/fluent/plugin/storage_local.rb +1 -1
  67. data/lib/fluent/plugin/string_util.rb +3 -18
  68. data/lib/fluent/plugin_helper.rb +1 -0
  69. data/lib/fluent/plugin_helper/compat_parameters.rb +166 -41
  70. data/lib/fluent/plugin_helper/formatter.rb +30 -19
  71. data/lib/fluent/plugin_helper/inject.rb +25 -12
  72. data/lib/fluent/plugin_helper/parser.rb +22 -13
  73. data/lib/fluent/plugin_helper/storage.rb +22 -13
  74. data/lib/fluent/registry.rb +19 -6
  75. data/lib/fluent/supervisor.rb +27 -1
  76. data/lib/fluent/test/driver/base.rb +16 -92
  77. data/lib/fluent/test/driver/base_owned.rb +17 -53
  78. data/lib/fluent/test/driver/base_owner.rb +125 -0
  79. data/lib/fluent/test/driver/filter.rb +24 -2
  80. data/lib/fluent/test/driver/input.rb +2 -2
  81. data/lib/fluent/test/driver/multi_output.rb +2 -2
  82. data/lib/fluent/test/driver/output.rb +3 -5
  83. data/lib/fluent/test/helpers.rb +25 -0
  84. data/lib/fluent/test/input_test.rb +4 -4
  85. data/lib/fluent/test/output_test.rb +3 -3
  86. data/lib/fluent/version.rb +1 -1
  87. data/test/config/test_element.rb +135 -6
  88. data/test/plugin/test_buf_file.rb +71 -3
  89. data/test/plugin/test_buffer.rb +305 -86
  90. data/test/plugin/test_buffer_chunk.rb +60 -2
  91. data/test/plugin/test_buffer_file_chunk.rb +4 -3
  92. data/test/plugin/test_filter_grep.rb +25 -21
  93. data/test/plugin/test_filter_record_transformer.rb +75 -67
  94. data/test/plugin/test_filter_stdout.rb +171 -74
  95. data/test/plugin/test_formatter_csv.rb +94 -0
  96. data/test/plugin/test_formatter_json.rb +30 -0
  97. data/test/plugin/test_formatter_ltsv.rb +52 -0
  98. data/test/plugin/test_formatter_msgpack.rb +28 -0
  99. data/test/plugin/test_formatter_out_file.rb +95 -0
  100. data/test/plugin/test_formatter_single_value.rb +38 -0
  101. data/test/plugin/test_in_dummy.rb +95 -0
  102. data/test/plugin/test_in_exec.rb +27 -31
  103. data/test/plugin/test_in_forward.rb +24 -0
  104. data/test/plugin/test_in_gc_stat.rb +5 -5
  105. data/test/plugin/test_in_object_space.rb +4 -4
  106. data/test/plugin/test_in_syslog.rb +60 -35
  107. data/test/plugin/test_out_buffered_stdout.rb +17 -3
  108. data/test/plugin/test_out_forward.rb +93 -5
  109. data/test/plugin/test_out_stdout.rb +14 -3
  110. data/test/plugin/test_output_as_buffered_retries.rb +20 -0
  111. data/test/plugin/test_output_as_buffered_secondary.rb +16 -0
  112. data/test/plugin/test_output_as_standard.rb +22 -22
  113. data/test/plugin/test_parser_apache.rb +13 -9
  114. data/test/plugin/test_parser_apache_error.rb +11 -6
  115. data/test/plugin/test_parser_csv.rb +35 -25
  116. data/test/plugin/test_parser_nginx.rb +11 -5
  117. data/test/plugin/test_parser_regexp.rb +235 -68
  118. data/test/plugin/test_parser_tsv.rb +54 -58
  119. data/test/plugin_helper/test_compat_parameters.rb +111 -46
  120. data/test/plugin_helper/test_formatter.rb +40 -0
  121. data/test/plugin_helper/test_inject.rb +101 -2
  122. data/test/plugin_helper/test_parser.rb +40 -0
  123. data/test/plugin_helper/test_storage.rb +43 -0
  124. data/test/test_event.rb +93 -0
  125. data/test/test_event_router.rb +13 -4
  126. data/test/test_event_time.rb +0 -3
  127. data/test/test_formatter.rb +7 -164
  128. data/test/test_plugin_classes.rb +28 -1
  129. metadata +24 -3
@@ -1,6 +1,7 @@
1
1
  require_relative '../helper'
2
2
  require 'fluent/plugin/buffer'
3
3
  require 'fluent/plugin/buffer/memory_chunk'
4
+ require 'fluent/event'
4
5
  require 'flexmock/test_unit'
5
6
 
6
7
  require 'fluent/log'
@@ -25,11 +26,6 @@ module FluentPluginBufferTest
25
26
  @purged = false
26
27
  @failing = false
27
28
  end
28
- def append(data)
29
- @append_count += 1
30
- raise DummyMemoryChunkError if @failing
31
- super
32
- end
33
29
  def concat(data, size)
34
30
  @append_count += 1
35
31
  raise DummyMemoryChunkError if @failing
@@ -58,19 +54,25 @@ module FluentPluginBufferTest
58
54
  c.commit
59
55
  c
60
56
  end
57
+ def create_chunk_es(metadata, es)
58
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
59
+ c.concat(es.to_msgpack_stream, es.size)
60
+ c.commit
61
+ c
62
+ end
61
63
  def resume
62
64
  dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
63
65
  dm1 = create_metadata(Time.parse('2016-04-11 16:10:00 +0000').to_i, nil, nil)
64
66
  dm2 = create_metadata(Time.parse('2016-04-11 16:20:00 +0000').to_i, nil, nil)
65
67
  dm3 = create_metadata(Time.parse('2016-04-11 16:30:00 +0000').to_i, nil, nil)
66
68
  staged = {
67
- dm2 => create_chunk(dm2, ["b" * 100]),
68
- dm3 => create_chunk(dm3, ["c" * 100]),
69
+ dm2 => create_chunk(dm2, ["b" * 100]).staged!,
70
+ dm3 => create_chunk(dm3, ["c" * 100]).staged!,
69
71
  }
70
72
  queued = [
71
- create_chunk(dm0, ["0" * 100]),
72
- create_chunk(dm1, ["a" * 100]),
73
- create_chunk(dm1, ["a" * 3]),
73
+ create_chunk(dm0, ["0" * 100]).enqueued!,
74
+ create_chunk(dm1, ["a" * 100]).enqueued!,
75
+ create_chunk(dm1, ["a" * 3]).enqueued!,
74
76
  ]
75
77
  return staged, queued
76
78
  end
@@ -102,6 +104,13 @@ class BufferTest < Test::Unit::TestCase
102
104
  c
103
105
  end
104
106
 
107
+ def create_chunk_es(metadata, es)
108
+ c = FluentPluginBufferTest::DummyMemoryChunk.new(metadata)
109
+ c.concat(es.to_msgpack_stream, es.size)
110
+ c.commit
111
+ c
112
+ end
113
+
105
114
  setup do
106
115
  Fluent::Test.setup
107
116
  end
@@ -512,6 +521,18 @@ class BufferTest < Test::Unit::TestCase
512
521
  assert_equal [@dm2,@dm3], @p.stage.keys
513
522
  end
514
523
 
524
+ test '#write returns immediately if argument data is empty event stream' do
525
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
526
+ assert_equal [@dm2,@dm3], @p.stage.keys
527
+
528
+ m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
529
+
530
+ @p.write({m => Fluent::ArrayEventStream.new([])})
531
+
532
+ assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
533
+ assert_equal [@dm2,@dm3], @p.stage.keys
534
+ end
535
+
515
536
  test '#write raises BufferOverflowError if buffer is not storable' do
516
537
  @p.stage_size = 256 * 1024 * 1024
517
538
  @p.queue_size = 256 * 1024 * 1024
@@ -620,31 +641,20 @@ class BufferTest < Test::Unit::TestCase
620
641
  assert_equal row * 8, target_chunk.read
621
642
  end
622
643
 
623
- test '#write w/ bulk returns immediately if argument data is nil or empty string' do
624
- assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
625
- assert_equal [@dm2,@dm3], @p.stage.keys
626
-
627
- m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
628
-
629
- @p.write({}, bulk: true)
630
- @p.write({m => ['', 0]}, bulk: true)
631
-
632
- assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
633
- assert_equal [@dm2,@dm3], @p.stage.keys
634
- end
635
-
636
- test '#write w/ bulk raises BufferOverflowError if buffer is not storable' do
644
+ test '#write w/ format raises BufferOverflowError if buffer is not storable' do
637
645
  @p.stage_size = 256 * 1024 * 1024
638
646
  @p.queue_size = 256 * 1024 * 1024
639
647
 
640
648
  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
641
649
 
650
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "xxxxxxxxxxxxxx"} ] ])
651
+
642
652
  assert_raise Fluent::Plugin::Buffer::BufferOverflowError do
643
- @p.write({m => ["x" * 256, 1]}, bulk: true)
653
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
644
654
  end
645
655
  end
646
656
 
647
- test '#write w/ bulk stores data into an existing chunk with metadata specified' do
657
+ test '#write w/ format stores data into an existing chunk with metadata specified' do
648
658
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
649
659
  assert_equal [@dm2,@dm3], @p.stage.keys
650
660
 
@@ -653,17 +663,25 @@ class BufferTest < Test::Unit::TestCase
653
663
 
654
664
  assert_equal 1, @p.stage[@dm3].append_count
655
665
 
656
- @p.write({@dm3 => [("x"*256 + "y"*256 + "z"*256), 3]}, bulk: true)
666
+ es = Fluent::ArrayEventStream.new(
667
+ [
668
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 128}],
669
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "y" * 128}],
670
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "z" * 128}],
671
+ ]
672
+ )
673
+
674
+ @p.write({@dm3 => es}, format: ->(e){e.to_msgpack_stream})
657
675
 
658
676
  assert_equal 2, @p.stage[@dm3].append_count
659
- assert_equal (dm3data + ("x" * 256) + ("y" * 256) + ("z" * 256)), @p.stage[@dm3].read
660
- assert_equal (prev_stage_size + 768), @p.stage_size
677
+ assert_equal (dm3data + es.to_msgpack_stream), @p.stage[@dm3].read
678
+ assert_equal (prev_stage_size + es.to_msgpack_stream.bytesize), @p.stage_size
661
679
 
662
680
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
663
681
  assert_equal [@dm2,@dm3], @p.stage.keys
664
682
  end
665
683
 
666
- test '#write w/ bulk creates new chunk and store data into it if there are not chunks for specified metadata' do
684
+ test '#write w/ format creates new chunk and store data into it if there are not chunks for specified metadata' do
667
685
  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
668
686
 
669
687
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
@@ -671,16 +689,26 @@ class BufferTest < Test::Unit::TestCase
671
689
 
672
690
  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
673
691
 
674
- row = "x" * 1024 * 1024
675
- row_half = "x" * 1024 * 512
676
- @p.write({m => [row*7 + row_half, 8]}, bulk: true)
692
+ es = Fluent::ArrayEventStream.new(
693
+ [
694
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
695
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
696
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
697
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
698
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
699
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
700
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
701
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
702
+ ]
703
+ )
704
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
677
705
 
678
706
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
679
707
  assert_equal [@dm2,@dm3,m], @p.stage.keys
680
708
  assert_equal 1, @p.stage[m].append_count
681
709
  end
682
710
 
683
- test '#write w/ bulk tries to enqueue and store data into a new chunk if existing chunk does not have space for bulk' do
711
+ test '#write w/ format tries to enqueue and store data into a new chunk if existing chunk does not have enough space' do
684
712
  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
685
713
 
686
714
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
@@ -688,49 +716,80 @@ class BufferTest < Test::Unit::TestCase
688
716
 
689
717
  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
690
718
 
691
- row = "x" * 1024 * 1024
692
- row_half = "x" * 1024 * 512
693
- @p.write({m => [row*7 + row_half, 8]}, bulk: true)
719
+ es = Fluent::ArrayEventStream.new(
720
+ [
721
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
722
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
723
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
724
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
725
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
726
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
727
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
728
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
729
+ ]
730
+ )
731
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
694
732
 
695
733
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
696
734
  assert_equal [@dm2,@dm3,m], @p.stage.keys
697
735
  assert_equal 1, @p.stage[m].append_count
698
736
 
699
- @p.write({m => [row, 1]}, bulk: true)
737
+ es2 = Fluent::OneEventStream.new(event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 1024})
738
+ @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
700
739
 
701
740
  assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
702
741
  assert_equal [@dm2,@dm3,m], @p.stage.keys
703
742
  assert_equal 1, @p.stage[m].append_count
704
- assert_equal 1024*1024, @p.stage[m].bytesize
743
+ assert_equal es2.to_msgpack_stream.bytesize, @p.stage[m].bytesize
705
744
  assert_equal 2, @p.queue.last.append_count # 1 -> write (2) -> rollback&enqueue
706
745
  assert @p.queue.last.rollbacked
707
746
  end
708
747
 
709
- test '#write w/ bulk enqueues chunk if it is already full after adding bulk data' do
748
+ test '#write w/ format enqueues chunk if it is already full after adding data' do
710
749
  assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
711
750
 
712
751
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
713
752
  assert_equal [@dm2,@dm3], @p.stage.keys
714
753
 
715
754
  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
716
-
717
- row = "x" * 1024 * 1024
718
- @p.write({m => [row * 8, 8]}, bulk: true)
755
+ es = Fluent::ArrayEventStream.new(
756
+ [
757
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}], # 1024 * 1024 bytes as msgpack stream
758
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
759
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
760
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
761
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
762
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
763
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
764
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * (1024 * 1024 - 25)}],
765
+ ]
766
+ )
767
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
719
768
 
720
769
  assert_equal [@dm0,@dm1,@dm1,m], @p.queue.map(&:metadata)
721
770
  assert_equal [@dm2,@dm3], @p.stage.keys
722
771
  assert_equal 1, @p.queue.last.append_count
723
772
  end
724
773
 
725
- test '#write w/ bulk rollbacks if commit raises errors' do
774
+ test '#write w/ format rollbacks if commit raises errors' do
726
775
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
727
776
  assert_equal [@dm2,@dm3], @p.stage.keys
728
777
 
729
778
  m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
730
779
 
731
- row = "x" * 1024
732
- row_half = "x" * 512
733
- @p.write({m => [row * 7 + row_half, 8]}, bulk: true)
780
+ es = Fluent::ArrayEventStream.new(
781
+ [
782
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
783
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
784
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
785
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
786
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
787
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
788
+ [event_time('2016-04-11 16:40:01 +0000'), {"message" => "x" * 1024 * 1024}],
789
+ [event_time('2016-04-11 16:40:03 +0000'), {"message" => "z" * 1024 * 512}],
790
+ ]
791
+ )
792
+ @p.write({m => es}, format: ->(e){e.to_msgpack_stream})
734
793
 
735
794
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
736
795
  assert_equal [@dm2,@dm3,m], @p.stage.keys
@@ -744,8 +803,13 @@ class BufferTest < Test::Unit::TestCase
744
803
  define_method(:commit){ raise "yay" }
745
804
  end
746
805
 
806
+ es2 = Fluent::ArrayEventStream.new(
807
+ [
808
+ [event_time('2016-04-11 16:40:04 +0000'), {"message" => "z" * 1024 * 128}],
809
+ ]
810
+ )
747
811
  assert_raise "yay" do
748
- @p.write({m => [row, 1]}, bulk: true)
812
+ @p.write({m => es2}, format: ->(e){e.to_msgpack_stream})
749
813
  end
750
814
 
751
815
  assert_equal [@dm0,@dm1,@dm1], @p.queue.map(&:metadata)
@@ -753,7 +817,7 @@ class BufferTest < Test::Unit::TestCase
753
817
 
754
818
  assert_equal 2, target_chunk.append_count
755
819
  assert target_chunk.rollbacked
756
- assert_equal row * 7 + row_half, target_chunk.read
820
+ assert_equal es.to_msgpack_stream, target_chunk.read
757
821
  end
758
822
 
759
823
  test '#write writes many metadata and data pairs at once' do
@@ -761,7 +825,7 @@ class BufferTest < Test::Unit::TestCase
761
825
  assert_equal [@dm2,@dm3], @p.stage.keys
762
826
 
763
827
  row = "x" * 1024
764
- @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] }, bulk: false)
828
+ @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })
765
829
 
766
830
  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
767
831
  end
@@ -771,7 +835,7 @@ class BufferTest < Test::Unit::TestCase
771
835
  assert_equal [@dm2,@dm3], @p.stage.keys
772
836
 
773
837
  row = "x" * 1024
774
- @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] }, bulk: false)
838
+ @p.write({ @dm0 => [row, row, row], @dm1 => [row, row] })
775
839
 
776
840
  assert_equal [@dm2,@dm3,@dm0,@dm1], @p.stage.keys
777
841
 
@@ -788,7 +852,7 @@ class BufferTest < Test::Unit::TestCase
788
852
  @p.stage[@dm1].failing = true
789
853
 
790
854
  assert_raise(FluentPluginBufferTest::DummyMemoryChunkError) do
791
- @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] }, bulk: false)
855
+ @p.write({ @dm2 => [row], @dm3 => [row], @dm0 => [row, row, row], @dm1 => [row, row] })
792
856
  end
793
857
 
794
858
  assert{ @p.stage[@dm2].size == dm2_size }
@@ -803,6 +867,172 @@ class BufferTest < Test::Unit::TestCase
803
867
  end
804
868
  end
805
869
 
870
+ sub_test_case 'standard format with configuration for test with lower chunk limit size' do
871
+ setup do
872
+ @p = create_buffer({"chunk_limit_size" => 1_280_000})
873
+ @format = ->(e){e.to_msgpack_stream}
874
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
875
+ # 1 record is 128bytes in msgpack stream
876
+ @es0 = es0 = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:01 +0000'), {"message" => "x" * (128 - 22)}] ] * 5000)
877
+ (class << @p; self; end).module_eval do
878
+ define_method(:resume) {
879
+ staged = {
880
+ dm0 => create_chunk_es(dm0, es0).staged!,
881
+ }
882
+ queued = []
883
+ return staged, queued
884
+ }
885
+ end
886
+ @p.start
887
+ end
888
+
889
+ test '#write appends event stream into staged chunk' do
890
+ assert_equal [@dm0], @p.stage.keys
891
+ assert_equal [], @p.queue.map(&:metadata)
892
+
893
+ assert_equal 1_280_000, @p.chunk_limit_size
894
+
895
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 1000)
896
+ @p.write({@dm0 => es}, format: @format)
897
+
898
+ assert_equal [@dm0], @p.stage.keys
899
+ assert_equal [], @p.queue.map(&:metadata)
900
+
901
+ assert_equal (@es0.to_msgpack_stream + es.to_msgpack_stream), @p.stage[@dm0].read
902
+ end
903
+
904
+ test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
905
+ assert_equal [@dm0], @p.stage.keys
906
+ assert_equal [], @p.queue.map(&:metadata)
907
+
908
+ assert_equal 1_280_000, @p.chunk_limit_size
909
+
910
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 8000)
911
+ @p.write({@dm0 => es}, format: @format)
912
+
913
+ assert_equal [@dm0], @p.stage.keys
914
+ assert_equal [@dm0], @p.queue.map(&:metadata)
915
+
916
+ assert_equal (es.to_msgpack_stream), @p.stage[@dm0].read
917
+ end
918
+
919
+ test '#write writes event stream into many chunks excluding staged chunk if event stream is larger than chunk limit size' do
920
+ assert_equal [@dm0], @p.stage.keys
921
+ assert_equal [], @p.queue.map(&:metadata)
922
+
923
+ assert_equal 1_280_000, @p.chunk_limit_size
924
+
925
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * (128 - 22)}] ] * 45000)
926
+ @p.write({@dm0 => es}, format: @format)
927
+
928
+ assert_equal [@dm0], @p.stage.keys
929
+ assert_equal 5400, @p.stage[@dm0].size
930
+ assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0], @p.queue.map(&:metadata)
931
+ assert_equal [5000, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
932
+ # 9900 * 4 + 5400 == 45000
933
+ end
934
+
935
+ test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
936
+ assert_equal [@dm0], @p.stage.keys
937
+ assert_equal [], @p.queue.map(&:metadata)
938
+
939
+ assert_equal 1_280_000, @p.chunk_limit_size
940
+
941
+ es = Fluent::ArrayEventStream.new([ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}] ])
942
+ assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
943
+ @p.write({@dm0 => es}, format: @format)
944
+ end
945
+ end
946
+ end
947
+
948
+ sub_test_case 'custom format with configuration for test with lower chunk limit size' do
949
+ setup do
950
+ @p = create_buffer({"chunk_limit_size" => 1_280_000})
951
+ @dm0 = dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
952
+ @row = "x" * 128
953
+ @data0 = data0 = [@row] * 5000
954
+ (class << @p; self; end).module_eval do
955
+ define_method(:resume) {
956
+ staged = {
957
+ dm0 => create_chunk(dm0, data0).staged!,
958
+ }
959
+ queued = []
960
+ return staged, queued
961
+ }
962
+ end
963
+ @p.start
964
+ end
965
+
966
+ test '#write appends event stream into staged chunk' do
967
+ assert_equal [@dm0], @p.stage.keys
968
+ assert_equal [], @p.queue.map(&:metadata)
969
+
970
+ assert_equal 1_280_000, @p.chunk_limit_size
971
+
972
+ data = [@row] * 1000
973
+ @p.write({@dm0 => data})
974
+
975
+ assert_equal [@dm0], @p.stage.keys
976
+ assert_equal [], @p.queue.map(&:metadata)
977
+
978
+ assert_equal (@row * 6000), @p.stage[@dm0].read
979
+ end
980
+
981
+ test '#write writes event stream into a new chunk with enqueueing existing chunk if event stream is larger than available space of existing chunk' do
982
+ assert_equal [@dm0], @p.stage.keys
983
+ assert_equal [], @p.queue.map(&:metadata)
984
+
985
+ staged_chunk_object_id = @p.stage[@dm0].object_id
986
+
987
+ assert_equal 1_280_000, @p.chunk_limit_size
988
+
989
+ data = [@row] * 8000
990
+ @p.write({@dm0 => data})
991
+
992
+ assert_equal [@dm0], @p.queue.map(&:metadata)
993
+ assert_equal [staged_chunk_object_id], @p.queue.map(&:object_id)
994
+ assert_equal [@dm0], @p.stage.keys
995
+
996
+ assert_equal [9800], @p.queue.map(&:size)
997
+ assert_equal 3200, @p.stage[@dm0].size
998
+ # 9800 + 3200 == 5000 + 8000
999
+ end
1000
+
1001
+ test '#write writes event stream into many chunks including staging chunk if event stream is larger than chunk limit size' do
1002
+ assert_equal [@dm0], @p.stage.keys
1003
+ assert_equal [], @p.queue.map(&:metadata)
1004
+
1005
+ staged_chunk_object_id = @p.stage[@dm0].object_id
1006
+
1007
+ assert_equal 1_280_000, @p.chunk_limit_size
1008
+
1009
+ assert_equal 5000, @p.stage[@dm0].size
1010
+
1011
+ data = [@row] * 45000
1012
+ @p.write({@dm0 => data})
1013
+
1014
+ assert_equal staged_chunk_object_id, @p.queue.first.object_id
1015
+
1016
+ assert_equal [@dm0], @p.stage.keys
1017
+ assert_equal 900, @p.stage[@dm0].size
1018
+ assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0], @p.queue.map(&:metadata)
1019
+ assert_equal [9500, 9900, 9900, 9900, 9900], @p.queue.map(&:size) # splits: 45000 / 100 => 450 * ...
1020
+ ##### 900 + 9500 + 9900 * 4 == 5000 + 45000
1021
+ end
1022
+
1023
+ test '#write raises BufferChunkOverflowError if a record is bigger than chunk limit size' do
1024
+ assert_equal [@dm0], @p.stage.keys
1025
+ assert_equal [], @p.queue.map(&:metadata)
1026
+
1027
+ assert_equal 1_280_000, @p.chunk_limit_size
1028
+
1029
+ es = ["x" * 1_280_000 + "x" * 300]
1030
+ assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
1031
+ @p.write({@dm0 => es})
1032
+ end
1033
+ end
1034
+ end
1035
+
806
1036
  sub_test_case 'with configuration for test with lower limits' do
807
1037
  setup do
808
1038
  @p = create_buffer({"chunk_limit_size" => 1024, "total_limit_size" => 10240})
@@ -813,19 +1043,19 @@ class BufferTest < Test::Unit::TestCase
813
1043
  (class << @p; self; end).module_eval do
814
1044
  define_method(:resume) {
815
1045
  staged = {
816
- dm2 => create_chunk(dm2, ["b" * 128] * 7),
817
- dm3 => create_chunk(dm3, ["c" * 128] * 5),
1046
+ dm2 => create_chunk(dm2, ["b" * 128] * 7).staged!,
1047
+ dm3 => create_chunk(dm3, ["c" * 128] * 5).staged!,
818
1048
  }
819
1049
  queued = [
820
- create_chunk(dm0, ["0" * 128] * 8),
821
- create_chunk(dm0, ["0" * 128] * 8),
822
- create_chunk(dm0, ["0" * 128] * 8),
823
- create_chunk(dm0, ["0" * 128] * 8),
824
- create_chunk(dm0, ["0" * 128] * 8),
825
- create_chunk(dm1, ["a" * 128] * 8),
826
- create_chunk(dm1, ["a" * 128] * 8),
827
- create_chunk(dm1, ["a" * 128] * 8), # 8
828
- create_chunk(dm1, ["a" * 128] * 3),
1050
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1051
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1052
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1053
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1054
+ create_chunk(dm0, ["0" * 128] * 8).enqueued!,
1055
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!,
1056
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!,
1057
+ create_chunk(dm1, ["a" * 128] * 8).enqueued!, # 8th queued chunk
1058
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
829
1059
  ]
830
1060
  return staged, queued
831
1061
  }
@@ -875,17 +1105,6 @@ class BufferTest < Test::Unit::TestCase
875
1105
  c3 = create_chunk(m, ["a" * 128] * 6 + ["a" * 64])
876
1106
  assert !@p.chunk_size_full?(c3)
877
1107
  end
878
-
879
- test '#write raises BufferChunkOverflowError if incoming data is bigger than chunk bytes limit' do
880
- assert_equal [@dm0,@dm0,@dm0,@dm0,@dm0,@dm1,@dm1,@dm1,@dm1], @p.queue.map(&:metadata)
881
- assert_equal [@dm2,@dm3], @p.stage.keys
882
-
883
- m = create_metadata(Time.parse('2016-04-11 16:40:00 +0000').to_i)
884
-
885
- assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError do
886
- @p.write({m => ["a" * 128] * 9})
887
- end
888
- end
889
1108
  end
890
1109
 
891
1110
  sub_test_case 'with configuration includes chunk_records_limit' do
@@ -898,14 +1117,14 @@ class BufferTest < Test::Unit::TestCase
898
1117
  (class << @p; self; end).module_eval do
899
1118
  define_method(:resume) {
900
1119
  staged = {
901
- dm2 => create_chunk(dm2, ["b" * 128] * 1),
902
- dm3 => create_chunk(dm3, ["c" * 128] * 2),
1120
+ dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
1121
+ dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
903
1122
  }
904
1123
  queued = [
905
- create_chunk(dm0, ["0" * 128] * 6),
906
- create_chunk(dm1, ["a" * 128] * 6),
907
- create_chunk(dm1, ["a" * 128] * 6),
908
- create_chunk(dm1, ["a" * 128] * 3),
1124
+ create_chunk(dm0, ["0" * 128] * 6).enqueued!,
1125
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
1126
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
1127
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
909
1128
  ]
910
1129
  return staged, queued
911
1130
  }
@@ -956,14 +1175,14 @@ class BufferTest < Test::Unit::TestCase
956
1175
  (class << @p; self; end).module_eval do
957
1176
  define_method(:resume) {
958
1177
  staged = {
959
- dm2 => create_chunk(dm2, ["b" * 128] * 1),
960
- dm3 => create_chunk(dm3, ["c" * 128] * 2),
1178
+ dm2 => create_chunk(dm2, ["b" * 128] * 1).staged!,
1179
+ dm3 => create_chunk(dm3, ["c" * 128] * 2).staged!,
961
1180
  }
962
1181
  queued = [
963
- create_chunk(dm0, ["0" * 128] * 6),
964
- create_chunk(dm1, ["a" * 128] * 6),
965
- create_chunk(dm1, ["a" * 128] * 6),
966
- create_chunk(dm1, ["a" * 128] * 3),
1182
+ create_chunk(dm0, ["0" * 128] * 6).enqueued!,
1183
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
1184
+ create_chunk(dm1, ["a" * 128] * 6).enqueued!,
1185
+ create_chunk(dm1, ["a" * 128] * 3).enqueued!,
967
1186
  ]
968
1187
  return staged, queued
969
1188
  }