fluentd 1.13.3 → 1.16.5

Files changed (179)
  1. checksums.yaml +4 -4
  2. data/.github/ISSUE_TEMPLATE/{bug_report.yaml → bug_report.yml} +2 -0
  3. data/.github/ISSUE_TEMPLATE/config.yml +2 -2
  4. data/.github/ISSUE_TEMPLATE/{feature_request.yaml → feature_request.yml} +1 -0
  5. data/.github/workflows/stale-actions.yml +11 -9
  6. data/.github/workflows/test.yml +32 -0
  7. data/CHANGELOG.md +490 -10
  8. data/CONTRIBUTING.md +2 -2
  9. data/MAINTAINERS.md +7 -5
  10. data/README.md +3 -23
  11. data/Rakefile +1 -1
  12. data/SECURITY.md +14 -0
  13. data/fluentd.gemspec +7 -8
  14. data/lib/fluent/command/cat.rb +13 -3
  15. data/lib/fluent/command/ctl.rb +6 -3
  16. data/lib/fluent/command/fluentd.rb +73 -65
  17. data/lib/fluent/command/plugin_config_formatter.rb +1 -1
  18. data/lib/fluent/compat/output.rb +9 -6
  19. data/lib/fluent/config/dsl.rb +1 -1
  20. data/lib/fluent/config/error.rb +12 -0
  21. data/lib/fluent/config/literal_parser.rb +2 -2
  22. data/lib/fluent/config/parser.rb +1 -1
  23. data/lib/fluent/config/v1_parser.rb +3 -3
  24. data/lib/fluent/config/yaml_parser/fluent_value.rb +47 -0
  25. data/lib/fluent/config/yaml_parser/loader.rb +108 -0
  26. data/lib/fluent/config/yaml_parser/parser.rb +166 -0
  27. data/lib/fluent/config/yaml_parser/section_builder.rb +107 -0
  28. data/lib/fluent/config/yaml_parser.rb +56 -0
  29. data/lib/fluent/config.rb +14 -1
  30. data/lib/fluent/counter/server.rb +1 -1
  31. data/lib/fluent/counter/validator.rb +3 -3
  32. data/lib/fluent/daemon.rb +2 -4
  33. data/lib/fluent/engine.rb +1 -1
  34. data/lib/fluent/env.rb +4 -0
  35. data/lib/fluent/error.rb +3 -0
  36. data/lib/fluent/event.rb +8 -4
  37. data/lib/fluent/event_router.rb +47 -2
  38. data/lib/fluent/file_wrapper.rb +137 -0
  39. data/lib/fluent/log/console_adapter.rb +66 -0
  40. data/lib/fluent/log.rb +44 -5
  41. data/lib/fluent/match.rb +1 -1
  42. data/lib/fluent/msgpack_factory.rb +6 -1
  43. data/lib/fluent/oj_options.rb +1 -2
  44. data/lib/fluent/plugin/bare_output.rb +49 -8
  45. data/lib/fluent/plugin/base.rb +26 -9
  46. data/lib/fluent/plugin/buf_file.rb +34 -5
  47. data/lib/fluent/plugin/buf_file_single.rb +32 -3
  48. data/lib/fluent/plugin/buffer/file_chunk.rb +1 -1
  49. data/lib/fluent/plugin/buffer.rb +216 -70
  50. data/lib/fluent/plugin/filter.rb +35 -1
  51. data/lib/fluent/plugin/filter_record_transformer.rb +1 -1
  52. data/lib/fluent/plugin/in_forward.rb +2 -2
  53. data/lib/fluent/plugin/in_http.rb +39 -10
  54. data/lib/fluent/plugin/in_monitor_agent.rb +4 -2
  55. data/lib/fluent/plugin/in_sample.rb +1 -1
  56. data/lib/fluent/plugin/in_syslog.rb +13 -1
  57. data/lib/fluent/plugin/in_tail/group_watch.rb +204 -0
  58. data/lib/fluent/plugin/in_tail/position_file.rb +33 -33
  59. data/lib/fluent/plugin/in_tail.rb +216 -84
  60. data/lib/fluent/plugin/in_tcp.rb +47 -2
  61. data/lib/fluent/plugin/input.rb +39 -1
  62. data/lib/fluent/plugin/metrics.rb +119 -0
  63. data/lib/fluent/plugin/metrics_local.rb +96 -0
  64. data/lib/fluent/plugin/multi_output.rb +43 -6
  65. data/lib/fluent/plugin/out_copy.rb +1 -1
  66. data/lib/fluent/plugin/out_exec_filter.rb +2 -2
  67. data/lib/fluent/plugin/out_file.rb +20 -2
  68. data/lib/fluent/plugin/out_forward/ack_handler.rb +19 -4
  69. data/lib/fluent/plugin/out_forward/socket_cache.rb +2 -0
  70. data/lib/fluent/plugin/out_forward.rb +17 -9
  71. data/lib/fluent/plugin/out_secondary_file.rb +39 -22
  72. data/lib/fluent/plugin/output.rb +167 -78
  73. data/lib/fluent/plugin/parser.rb +3 -4
  74. data/lib/fluent/plugin/parser_apache2.rb +1 -1
  75. data/lib/fluent/plugin/parser_json.rb +1 -1
  76. data/lib/fluent/plugin/parser_syslog.rb +1 -1
  77. data/lib/fluent/plugin/storage_local.rb +3 -5
  78. data/lib/fluent/plugin.rb +10 -1
  79. data/lib/fluent/plugin_helper/child_process.rb +3 -0
  80. data/lib/fluent/plugin_helper/event_emitter.rb +8 -1
  81. data/lib/fluent/plugin_helper/event_loop.rb +2 -2
  82. data/lib/fluent/plugin_helper/http_server/server.rb +2 -1
  83. data/lib/fluent/plugin_helper/metrics.rb +129 -0
  84. data/lib/fluent/plugin_helper/record_accessor.rb +1 -1
  85. data/lib/fluent/plugin_helper/retry_state.rb +14 -4
  86. data/lib/fluent/plugin_helper/server.rb +35 -6
  87. data/lib/fluent/plugin_helper/service_discovery.rb +2 -2
  88. data/lib/fluent/plugin_helper/socket.rb +13 -2
  89. data/lib/fluent/plugin_helper/thread.rb +3 -3
  90. data/lib/fluent/plugin_helper.rb +1 -0
  91. data/lib/fluent/plugin_id.rb +3 -2
  92. data/lib/fluent/registry.rb +2 -1
  93. data/lib/fluent/root_agent.rb +6 -0
  94. data/lib/fluent/rpc.rb +4 -3
  95. data/lib/fluent/supervisor.rb +283 -259
  96. data/lib/fluent/system_config.rb +13 -3
  97. data/lib/fluent/test/driver/base.rb +11 -5
  98. data/lib/fluent/test/driver/filter.rb +4 -0
  99. data/lib/fluent/test/startup_shutdown.rb +6 -8
  100. data/lib/fluent/time.rb +21 -20
  101. data/lib/fluent/version.rb +1 -1
  102. data/lib/fluent/win32api.rb +38 -0
  103. data/lib/fluent/winsvc.rb +5 -8
  104. data/templates/new_gem/test/helper.rb.erb +0 -1
  105. data/test/command/test_cat.rb +31 -2
  106. data/test/command/test_ctl.rb +1 -2
  107. data/test/command/test_fluentd.rb +209 -24
  108. data/test/command/test_plugin_config_formatter.rb +0 -1
  109. data/test/compat/test_parser.rb +6 -6
  110. data/test/config/test_system_config.rb +13 -11
  111. data/test/config/test_types.rb +1 -1
  112. data/test/log/test_console_adapter.rb +110 -0
  113. data/test/plugin/in_tail/test_io_handler.rb +26 -8
  114. data/test/plugin/in_tail/test_position_file.rb +48 -59
  115. data/test/plugin/out_forward/test_ack_handler.rb +39 -0
  116. data/test/plugin/out_forward/test_socket_cache.rb +26 -1
  117. data/test/plugin/test_bare_output.rb +14 -1
  118. data/test/plugin/test_base.rb +133 -1
  119. data/test/plugin/test_buf_file.rb +62 -23
  120. data/test/plugin/test_buf_file_single.rb +65 -0
  121. data/test/plugin/test_buffer.rb +267 -3
  122. data/test/plugin/test_buffer_chunk.rb +11 -0
  123. data/test/plugin/test_filter.rb +12 -1
  124. data/test/plugin/test_filter_parser.rb +1 -1
  125. data/test/plugin/test_filter_stdout.rb +2 -2
  126. data/test/plugin/test_in_forward.rb +9 -11
  127. data/test/plugin/test_in_http.rb +65 -3
  128. data/test/plugin/test_in_monitor_agent.rb +216 -11
  129. data/test/plugin/test_in_object_space.rb +9 -3
  130. data/test/plugin/test_in_syslog.rb +35 -0
  131. data/test/plugin/test_in_tail.rb +1393 -385
  132. data/test/plugin/test_in_tcp.rb +87 -2
  133. data/test/plugin/test_in_udp.rb +28 -0
  134. data/test/plugin/test_in_unix.rb +2 -2
  135. data/test/plugin/test_input.rb +12 -1
  136. data/test/plugin/test_metrics.rb +294 -0
  137. data/test/plugin/test_metrics_local.rb +96 -0
  138. data/test/plugin/test_multi_output.rb +25 -1
  139. data/test/plugin/test_out_exec.rb +6 -4
  140. data/test/plugin/test_out_exec_filter.rb +6 -2
  141. data/test/plugin/test_out_file.rb +34 -17
  142. data/test/plugin/test_out_forward.rb +78 -77
  143. data/test/plugin/test_out_http.rb +1 -0
  144. data/test/plugin/test_out_stdout.rb +2 -2
  145. data/test/plugin/test_output.rb +297 -12
  146. data/test/plugin/test_output_as_buffered.rb +44 -44
  147. data/test/plugin/test_output_as_buffered_compress.rb +32 -18
  148. data/test/plugin/test_output_as_buffered_retries.rb +54 -7
  149. data/test/plugin/test_output_as_buffered_secondary.rb +4 -4
  150. data/test/plugin/test_parser_regexp.rb +1 -6
  151. data/test/plugin/test_parser_syslog.rb +1 -1
  152. data/test/plugin_helper/test_cert_option.rb +1 -1
  153. data/test/plugin_helper/test_child_process.rb +38 -16
  154. data/test/plugin_helper/test_event_emitter.rb +29 -0
  155. data/test/plugin_helper/test_http_server_helper.rb +1 -1
  156. data/test/plugin_helper/test_metrics.rb +137 -0
  157. data/test/plugin_helper/test_retry_state.rb +602 -38
  158. data/test/plugin_helper/test_server.rb +78 -6
  159. data/test/plugin_helper/test_timer.rb +2 -2
  160. data/test/test_config.rb +191 -24
  161. data/test/test_event_router.rb +17 -0
  162. data/test/test_file_wrapper.rb +53 -0
  163. data/test/test_formatter.rb +24 -21
  164. data/test/test_log.rb +122 -40
  165. data/test/test_msgpack_factory.rb +32 -0
  166. data/test/test_plugin_classes.rb +102 -0
  167. data/test/test_root_agent.rb +30 -1
  168. data/test/test_supervisor.rb +477 -257
  169. data/test/test_time_parser.rb +22 -0
  170. metadata +55 -34
  171. data/.drone.yml +0 -35
  172. data/.github/workflows/issue-auto-closer.yml +0 -12
  173. data/.github/workflows/linux-test.yaml +0 -36
  174. data/.github/workflows/macos-test.yaml +0 -30
  175. data/.github/workflows/windows-test.yaml +0 -46
  176. data/.gitlab-ci.yml +0 -103
  177. data/lib/fluent/plugin/file_wrapper.rb +0 -187
  178. data/test/plugin/test_file_wrapper.rb +0 -126
  179. data/test/test_logger_initializer.rb +0 -46
data/test/plugin/test_buf_file_single.rb
@@ -830,4 +830,69 @@ class FileSingleBufferTest < Test::Unit::TestCase
       assert_equal :queued, queue[0].state
     end
   end
+
+  sub_test_case 'there are existing broken file chunks' do
+    setup do
+      FileUtils.rm_rf(@bufdir) rescue nil
+      FileUtils.mkdir_p(@bufdir)
+    end
+
+    teardown do
+      return unless @p
+
+      @p.stop unless @p.stopped?
+      @p.before_shutdown unless @p.before_shutdown?
+      @p.shutdown unless @p.shutdown?
+      @p.after_shutdown unless @p.after_shutdown?
+      @p.close unless @p.closed?
+      @p.terminate unless @p.terminated?
+    end
+
+    test '#resume backups empty chunk' do
+      id_output = 'backup_test'
+      @d = create_driver(%[
+        @id #{id_output}
+        <buffer tag>
+          @type file_single
+          path #{PATH}
+        </buffer>
+      ])
+      @p = @d.instance.buffer
+
+      c1id = Fluent::UniqueId.generate
+      p1 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(c1id)}.buf")
+      File.open(p1, 'wb') { |f| } # create empty chunk file
+
+      Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
+        @p.start
+      end
+
+      assert { not File.exist?(p1) }
+      assert { File.exist?("#{@bufdir}/backup/worker0/#{id_output}/#{@d.instance.dump_unique_id_hex(c1id)}.log") }
+    end
+
+    test '#resume throws away broken chunk with disable_chunk_backup' do
+      id_output = 'backup_test'
+      @d = create_driver(%[
+        @id #{id_output}
+        <buffer tag>
+          @type file_single
+          path #{PATH}
+          disable_chunk_backup true
+        </buffer>
+      ])
+      @p = @d.instance.buffer
+
+      c1id = Fluent::UniqueId.generate
+      p1 = File.join(@bufdir, "fsb.foo.b#{Fluent::UniqueId.hex(c1id)}.buf")
+      File.open(p1, 'wb') { |f| } # create empty chunk file
+
+      Fluent::SystemConfig.overwrite_system_config('root_dir' => @bufdir) do
+        @p.start
+      end
+
+      assert { not File.exist?(p1) }
+      assert { not File.exist?("#{@bufdir}/backup/worker0/#{id_output}/#{@d.instance.dump_unique_id_hex(c1id)}.log") }
+    end
+  end
 end
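A note on what these two tests pin down: when this buffer resumes and finds a chunk file it cannot load (here, an empty `.buf` file), the default behavior is to move the file to `<root_dir>/backup/worker0/<plugin_id>/<chunk_id>.log` rather than delete it; setting `disable_chunk_backup true` in the buffer section discards it instead. A minimal sketch of that buffer section in the tests' heredoc style, with a placeholder id and path rather than values from this diff:

# Sketch only; 'my_out' and the path are placeholder values. Without
# disable_chunk_backup, a broken chunk is moved under
# <root_dir>/backup/worker0/my_out/<chunk_id>.log instead of deleted.
conf = %[
  @id my_out
  <buffer tag>
    @type file_single
    path /var/log/fluent/buf
    disable_chunk_backup true
  </buffer>
]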
data/test/plugin/test_buffer.rb
@@ -18,7 +18,7 @@ module FluentPluginBufferTest
   end
   class DummyMemoryChunkError < StandardError; end
   class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
-    attr_reader :append_count, :rollbacked, :closed, :purged
+    attr_reader :append_count, :rollbacked, :closed, :purged, :chunk
     attr_accessor :failing
     def initialize(metadata, compress: :text)
       super
@@ -238,8 +238,14 @@ class BufferTest < Test::Unit::TestCase
       assert_nil @p.queue
       assert_nil @p.dequeued
       assert_nil @p.queued_num
-      assert_equal 0, @p.stage_size
-      assert_equal 0, @p.queue_size
+      assert_nil @p.stage_length_metrics
+      assert_nil @p.stage_size_metrics
+      assert_nil @p.queue_length_metrics
+      assert_nil @p.queue_size_metrics
+      assert_nil @p.available_buffer_space_ratios_metrics
+      assert_nil @p.total_queued_size_metrics
+      assert_nil @p.newest_timekey_metrics
+      assert_nil @p.oldest_timekey_metrics
       assert_equal [], @p.timekeys
     end
 
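These assertions reflect the metrics mechanism introduced in this range (see `data/lib/fluent/plugin/metrics.rb` and `data/lib/fluent/plugin_helper/metrics.rb` in the file index): the buffer's counters now live in metrics objects that do not exist on a bare instance. A rough sketch of the lifecycle implied by the assertions; the exact delegation is inferred, not quoted from the diff, and `config_element` is the helper from `fluent/test/helpers`:

# Rough sketch, inferred from the assertions above.
p = Fluent::Plugin::Buffer.new
p.stage_size_metrics             # => nil (metrics not created yet)

p.configure(config_element('buffer', '', {}))
p.stage_size_metrics             # => a Fluent::Plugin::Metrics instance
p.stage_size                     # => 0, now read through the metrics object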
@@ -844,6 +850,116 @@ class BufferTest < Test::Unit::TestCase
     test '#compress returns :text' do
       assert_equal :text, @p.compress
     end
+
+    # https://github.com/fluent/fluentd/issues/3089
+    test "closed chunk should not be committed" do
+      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
+      assert_equal 0.95, @p.chunk_full_threshold
+
+      purge_count = 0
+
+      stub.proxy(@p).generate_chunk(anything) do |chunk|
+        stub.proxy(chunk).purge do |result|
+          purge_count += 1
+          result
+        end
+        stub.proxy(chunk).commit do |result|
+          assert_false(chunk.closed?)
+          result
+        end
+        stub.proxy(chunk).rollback do |result|
+          assert_false(chunk.closed?)
+          result
+        end
+        chunk
+      end
+
+      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
+      small_row = "x" * 1024 * 400
+      big_row = "x" * 1024 * 1024 * 8 # exactly `chunk_limit_size`; it doesn't cause BufferOverflowError.
+
+      # Write 42 events in 1 event stream; the last one is for triggering `ShouldRetry`.
+      @p.write({m => [small_row] * 40 + [big_row] + ["x"]})
+
+      # The above event stream is split by `Buffer#write_step_by_step` as follows:
+      #
+      # 1. `write_once`: 42 [events] * 1 [stream]
+      # 2. `write_step_by_step`: 4 [events] * 10 [streams] + 2 [events] * 1 [stream]
+      # 3. `write_step_by_step` (by `ShouldRetry`): 1 [event] * 42 [streams]
+      #
+      # The problematic data is built in the 2nd stage.
+      # In the 2nd stage, 5 streams are packed in a chunk
+      # ((1024 * 400) [bytes] * 4 [events] * 5 [streams] = 8192000 [bytes] < `chunk_limit_size` (8MB)),
+      # so 3 chunks are used to store all data.
+      # The 1st chunk is already staged by `write_once`.
+      # The 2nd & 3rd chunks are newly created as unstaged.
+      # The 3rd chunk is purged before `ShouldRetry`; that is not a problem:
+      #   https://github.com/fluent/fluentd/blob/7e9eba736ff40ad985341be800ddc46558be75f2/lib/fluent/plugin/buffer.rb#L850
+      # The 2nd chunk is purged in `rescue ShouldRetry`:
+      #   https://github.com/fluent/fluentd/blob/7e9eba736ff40ad985341be800ddc46558be75f2/lib/fluent/plugin/buffer.rb#L862
+      # That caused the issue described in https://github.com/fluent/fluentd/issues/3089#issuecomment-1811839198
+
+      assert_equal 2, purge_count
+    end
+
+    # https://github.com/fluent/fluentd/issues/4446
+    test "#write_step_by_step keeps chunks kept in locked in entire #write process" do
+      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
+      assert_equal 0.95, @p.chunk_full_threshold
+
+      mon_enter_counts_by_chunk = {}
+      mon_exit_counts_by_chunk = {}
+
+      stub.proxy(@p).generate_chunk(anything) do |chunk|
+        stub(chunk).mon_enter do
+          enter_count = 1 + mon_enter_counts_by_chunk.fetch(chunk, 0)
+          exit_count = mon_exit_counts_by_chunk.fetch(chunk, 0)
+          mon_enter_counts_by_chunk[chunk] = enter_count
+
+          # Assert that the chunk is passed to &block of write_step_by_step before exiting the lock
+          # (i.e. the enter count must be 2 greater than the exit count).
+          # Since ShouldRetry occurs once, the staged chunk takes the lock 3 times when calling the block.
+          if chunk.staged?
+            lock_in_block = enter_count == 3
+            assert_equal(enter_count - 2, exit_count) if lock_in_block
+          else
+            lock_in_block = enter_count == 2
+            assert_equal(enter_count - 2, exit_count) if lock_in_block
+          end
+        end
+        stub(chunk).mon_exit do
+          exit_count = 1 + mon_exit_counts_by_chunk.fetch(chunk, 0)
+          mon_exit_counts_by_chunk[chunk] = exit_count
+        end
+        chunk
+      end
+
+      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
+      small_row = "x" * 1024 * 400
+      big_row = "x" * 1024 * 1024 * 8 # exactly `chunk_limit_size`; it doesn't cause BufferOverflowError.
+
+      # Write 42 events in 1 event stream; the last one is for triggering `ShouldRetry`.
+      @p.write({m => [small_row] * 40 + [big_row] + ["x"]})
+
+      # The above event stream is split by `Buffer#write_step_by_step` as follows:
+      #
+      # 1. `write_once`: 42 [events] * 1 [stream]
+      # 2. `write_step_by_step`: 4 [events] * 10 [streams] + 2 [events] * 1 [stream]
+      # 3. `write_step_by_step` (by `ShouldRetry`): 1 [event] * 42 [streams]
+      #
+      # Example of staged chunk lock behavior:
+      #
+      # 1. mon_enter in write_step_by_step
+      # 2. ShouldRetry occurs
+      # 3. mon_exit in write_step_by_step
+      # 4. mon_enter again in write_step_by_step (retry)
+      # 5. passed to &block of write_step_by_step
+      # 6. mon_enter in the block (write)
+      # 7. mon_exit in write_step_by_step
+      # 8. mon_exit in write
+
+      assert_equal(mon_enter_counts_by_chunk.values, mon_exit_counts_by_chunk.values)
+    end
   end
 
   sub_test_case 'standard format with configuration for test with lower chunk limit size' do
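For reference, the size arithmetic both regression tests above depend on, taken straight from their comments:

# Numbers from the comments above (stage 2 of the split), in plain Ruby:
small_row   = 1024 * 400        # 409_600 bytes per event
per_stream  = small_row * 4     # 4 events per intermediate stream
per_chunk   = per_stream * 5    # 5 streams packed into one chunk => 8_192_000
chunk_limit = 8 * 1024 * 1024   # default chunk_limit_size => 8_388_608
per_chunk < chunk_limit         # => true; a 6th stream (9_830_400 bytes) would not fit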
@@ -938,6 +1054,97 @@ class BufferTest < Test::Unit::TestCase
         @p.write({@dm0 => es}, format: @format)
       end
     end
+
+    data(
+      first_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+                                                 [event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+                                                 [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
+      intermediate_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+                                                        [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+                                                        [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
+      last_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+                                                [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
+                                                [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]]),
+      multiple_chunks: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
+                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]])
+    )
+    test '#write exceeds chunk_limit_size, raise BufferChunkOverflowError, but not lost whole messages' do |(es)|
+      assert_equal [@dm0], @p.stage.keys
+      assert_equal [], @p.queue.map(&:metadata)
+
+      assert_equal 1_280_000, @p.chunk_limit_size
+
+      nth = []
+      es.entries.each_with_index do |entry, index|
+        if entry.last["message"].size == @p.chunk_limit_size
+          nth << index
+        end
+      end
+      messages = []
+      nth.each do |n|
+        messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size (1280000)"
+      end
+
+      assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages.join(", ")) do
+        @p.write({@dm0 => es}, format: @format)
+      end
+      # messages a and b are concatenated and staged
+      staged_messages = Fluent::MessagePackFactory.msgpack_unpacker.feed_each(@p.stage[@dm0].chunk).collect do |record|
+        record.last
+      end
+      assert_equal([2, [{"message" => "a"}, {"message" => "b"}]],
+                   [@p.stage[@dm0].size, staged_messages])
+      # only the es0 message is queued
+      assert_equal [@dm0], @p.queue.map(&:metadata)
+      assert_equal [5000], @p.queue.map(&:size)
+    end
+
+    test "confirm that every message which is smaller than chunk threshold does not raise BufferChunkOverflowError" do
+      assert_equal [@dm0], @p.stage.keys
+      assert_equal [], @p.queue.map(&:metadata)
+      timestamp = event_time('2016-04-11 16:00:02 +0000')
+      es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "a" * 1_000_000}],
+                                         [timestamp, {"message" => "b" * 1_000_000}],
+                                         [timestamp, {"message" => "c" * 1_000_000}]])
+
+      # https://github.com/fluent/fluentd/issues/1849
+      # Even though 1_000_000 < 1_280_000 (chunk_limit_size), it raised BufferChunkOverflowError before.
+      # It should not be raised, and messages a, b and c should be stored into 3 chunks.
+      assert_nothing_raised do
+        @p.write({@dm0 => es}, format: @format)
+      end
+      messages = []
+      # pick up the first letter to check whether chunks are queued in the expected order
+      3.times do |index|
+        chunk = @p.queue[index]
+        es = Fluent::MessagePackEventStream.new(chunk.chunk)
+        es.ensure_unpacked!
+        records = es.instance_eval{ @unpacked_records }
+        records.each do |record|
+          messages << record["message"][0]
+        end
+      end
+      es = Fluent::MessagePackEventStream.new(@p.stage[@dm0].chunk)
+      es.ensure_unpacked!
+      staged_message = es.instance_eval{ @unpacked_records }.first["message"]
+      # messages a and b are queued, message c is staged
+      assert_equal([
+                     [@dm0],
+                     "c" * 1_000_000,
+                     [@dm0, @dm0, @dm0],
+                     [5000, 1, 1],
+                     [["x"] * 5000, "a", "b"].flatten
+                   ],
+                   [
+                     @p.stage.keys,
+                     staged_message,
+                     @p.queue.map(&:metadata),
+                     @p.queue.map(&:size),
+                     messages
+                   ])
+    end
   end
 
   sub_test_case 'custom format with configuration for test with lower chunk limit size' do
@@ -1026,6 +1233,38 @@ class BufferTest < Test::Unit::TestCase
         @p.write({@dm0 => es})
       end
     end
+
+    test 'confirm that every array message which is smaller than chunk threshold does not raise BufferChunkOverflowError' do
+      assert_equal [@dm0], @p.stage.keys
+      assert_equal [], @p.queue.map(&:metadata)
+
+      assert_equal 1_280_000, @p.chunk_limit_size
+
+      es = ["a" * 1_000_000, "b" * 1_000_000, "c" * 1_000_000]
+      assert_nothing_raised do
+        @p.write({@dm0 => es})
+      end
+      queue_messages = @p.queue.collect do |chunk|
+        # collect first character of each message
+        chunk.chunk[0]
+      end
+      assert_equal([
+                     [@dm0],
+                     1,
+                     "c",
+                     [@dm0, @dm0, @dm0],
+                     [5000, 1, 1],
+                     ["x", "a", "b"]
+                   ],
+                   [
+                     @p.stage.keys,
+                     @p.stage[@dm0].size,
+                     @p.stage[@dm0].chunk[0],
+                     @p.queue.map(&:metadata),
+                     @p.queue.map(&:size),
+                     queue_messages
+                   ])
+    end
   end
 
   sub_test_case 'with configuration for test with lower limits' do
@@ -1195,6 +1434,7 @@ class BufferTest < Test::Unit::TestCase
   sub_test_case 'when compress is gzip' do
     setup do
       @p = create_buffer({'compress' => 'gzip'})
+      @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
     end
 
     test '#compress returns :gzip' do
@@ -1205,6 +1445,30 @@ class BufferTest < Test::Unit::TestCase
       chunk = @p.generate_chunk(create_metadata)
      assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::Decompressable)
     end
+
+    test '#write compressed data which exceeds chunk_limit_size, it raises BufferChunkOverflowError' do
+      @p = create_buffer({'compress' => 'gzip', 'chunk_limit_size' => 70})
+      timestamp = event_time('2016-04-11 16:00:02 +0000')
+      es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}], # overflow
+                                         [timestamp, {"message" => "aaa"}],
+                                         [timestamp, {"message" => "bbb"}]])
+      assert_equal [], @p.queue.map(&:metadata)
+      assert_equal 70, @p.chunk_limit_size
+
+      # calculate the actual boundary value; it varies by machine
+      c = @p.generate_chunk(create_metadata)
+      c.append(Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}]]), compress: :gzip)
+      overflow_bytes = c.bytesize
+
+      messages = "concatenated/appended a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size (70)"
+      assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages) do
+        # test format == nil && compress == :gzip
+        @p.write({@dm0 => es})
+      end
+      # messages a and b each fill a chunk completely, so both are queued (no staged chunk)
+      assert_equal([2, [@dm0, @dm0], [1, 1], nil],
+                   [@p.queue.size, @p.queue.map(&:metadata), @p.queue.map(&:size), @p.stage[@dm0]])
+    end
   end
 
   sub_test_case '#statistics' do
data/test/plugin/test_buffer_chunk.rb
@@ -57,6 +57,17 @@ class BufferChunkTest < Test::Unit::TestCase
     assert chunk.respond_to?(:msgpack_each)
   end
 
+  test 'unpacker arg is not implemented for ChunkMessagePackEventStreamer' do
+    meta = Object.new
+    chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
+    chunk.extend Fluent::ChunkMessagePackEventStreamer
+
+    unpacker = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
+
+    assert_raise(NotImplementedError){ chunk.each(unpacker: unpacker) }
+    assert_raise(NotImplementedError){ chunk.msgpack_each(unpacker: unpacker) }
+  end
+
   test 'some methods raise ArgumentError with an option of `compressed: :gzip` and without extending Compressble`' do
     meta = Object.new
     chunk = Fluent::Plugin::Buffer::Chunk.new(meta)
data/test/plugin/test_filter.rb
@@ -153,7 +153,7 @@ class FilterPluginTest < Test::Unit::TestCase
 
     @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
 
-    assert{ @p.log.object_id != original_logger.object_id }
+    assert(@p.log.object_id != original_logger.object_id)
     assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
   end
 
@@ -165,6 +165,17 @@ class FilterPluginTest < Test::Unit::TestCase
     end
   end
 
+  test 'can use metrics plugins and fallback methods' do
+    @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
+
+    %w[emit_size_metrics emit_records_metrics].each do |metric_name|
+      assert_true @p.instance_variable_get(:"@#{metric_name}").is_a?(Fluent::Plugin::Metrics)
+    end
+
+    assert_equal 0, @p.emit_size
+    assert_equal 0, @p.emit_records
+  end
+
   test 'are available with multi worker configuration in default' do
     assert @p.multi_workers_ready?
   end
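The new test above is the filter-side counterpart of the metrics migration: `emit_size` and `emit_records` remain plain integer readers but are now backed by `Fluent::Plugin::Metrics` objects created through the `metrics` plugin-helper. A hypothetical sketch of that helper in a user plugin; the plugin name, metric name, and drop logic are invented for illustration, and `metrics_create`'s keyword arguments are as of the 1.16 plugin-helper:

# Hypothetical sketch; 'my_counting_filter' and the 'dropped' metric are made-up names.
require 'fluent/plugin/filter'

class MyCountingFilter < Fluent::Plugin::Filter
  Fluent::Plugin.register_filter('my_counting_filter', self)
  helpers :metrics

  def configure(conf)
    super
    @dropped = metrics_create(namespace: "fluentd", subsystem: "filter",
                              name: "dropped", help_text: "dropped records")
  end

  def filter(tag, time, record)
    if record["drop"]
      @dropped.inc
      nil               # returning nil drops the record
    else
      record
    end
  end
end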
data/test/plugin/test_filter_parser.rb
@@ -46,7 +46,7 @@ class ParserFilterTest < Test::Unit::TestCase
     assert_raise(Fluent::ConfigError) {
       create_driver('')
     }
-    assert_raise(Fluent::ConfigError) {
+    assert_raise(Fluent::NotFoundPluginError) {
       create_driver %[
         key_name foo
         <parse>
data/test/plugin/test_filter_stdout.rb
@@ -63,7 +63,7 @@ class StdoutFilterTest < Test::Unit::TestCase
   end
 
   def test_invalid_output_type
-    assert_raise(Fluent::ConfigError) do
+    assert_raise(Fluent::NotFoundPluginError) do
       d = create_driver(CONFIG + config_element("", "", { "output_type" => "foo" }))
       d.run {}
     end
@@ -139,7 +139,7 @@ class StdoutFilterTest < Test::Unit::TestCase
   def test_invalid_output_type
     conf = config_element
     conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => "foo" })
-    assert_raise(Fluent::ConfigError) do
+    assert_raise(Fluent::NotFoundPluginError) do
       d = create_driver(conf)
       d.run {}
     end
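These assertion changes follow from the new error classes added in `data/lib/fluent/config/error.rb` (+12 lines in the file index): an unknown plugin type now raises `Fluent::NotFoundPluginError`, a subclass of `Fluent::ConfigError`, so only tests asserting the exact class had to change. A small sketch of the relationship; the lookup call is illustrative:

# Sketch: rescuing ConfigError still catches the new, sharper class.
begin
  Fluent::Plugin.new_parser('no_such_parser')   # illustrative failing lookup
rescue Fluent::ConfigError => e
  e.class   # => Fluent::NotFoundPluginError (subclass of ConfigError)
end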
data/test/plugin/test_in_forward.rb
@@ -68,7 +68,6 @@ class ForwardInputTest < Test::Unit::TestCase
     @d = d = create_driver
     assert_equal @port, d.instance.port
     assert_equal '127.0.0.1', d.instance.bind
-    assert_equal 0, d.instance.linger_timeout
     assert_equal 0.5, d.instance.blocking_timeout
     assert !d.instance.backlog
   end
@@ -77,7 +76,6 @@ class ForwardInputTest < Test::Unit::TestCase
     @d = d = create_driver(config_auth)
     assert_equal @port, d.instance.port
     assert_equal '127.0.0.1', d.instance.bind
-    assert_equal 0, d.instance.linger_timeout
     assert !d.instance.backlog
 
     assert d.instance.security
@@ -369,7 +367,7 @@ class ForwardInputTest < Test::Unit::TestCase
     end
 
     logs = d.instance.log.out.logs
-    assert{ logs.select{|line| line =~ /skip invalid event/ }.size == 2 }
+    assert{ logs.count{|line| line =~ /skip invalid event/ } == 2 }
 
     d.instance_shutdown
   end
@@ -595,10 +593,10 @@ class ForwardInputTest < Test::Unit::TestCase
 
     # check log
     logs = d.instance.log.logs
-    assert_equal 1, logs.select{|line|
+    assert_equal 1, logs.count{|line|
      line =~ / \[warn\]: Input chunk size is larger than 'chunk_size_warn_limit':/ &&
      line =~ / tag="test.tag" host="#{LOCALHOST_HOSTNAME}" limit=16777216 size=16777501/
-    }.size, "large chunk warning is not logged"
+    }, "large chunk warning is not logged"
 
     d.instance_shutdown
   end
@@ -621,10 +619,10 @@ class ForwardInputTest < Test::Unit::TestCase
 
     # check log
     logs = d.instance.log.logs
-    assert_equal 1, logs.select{ |line|
+    assert_equal 1, logs.count{ |line|
      line =~ / \[warn\]: Input chunk size is larger than 'chunk_size_warn_limit':/ &&
      line =~ / tag="test.tag" host="#{LOCALHOST_HOSTNAME}" limit=16777216 size=16777501/
-    }.size, "large chunk warning is not logged"
+    }, "large chunk warning is not logged"
 
     d.instance_shutdown
   end
@@ -655,10 +653,10 @@ class ForwardInputTest < Test::Unit::TestCase
 
     # check log
     logs = d.instance.log.logs
-    assert_equal 1, logs.select{|line|
+    assert_equal 1, logs.count{|line|
      line =~ / \[warn\]: Input chunk size is larger than 'chunk_size_limit', dropped:/ &&
      line =~ / tag="test.tag" host="#{LOCALHOST_HOSTNAME}" limit=33554432 size=33554989/
-    }.size, "large chunk warning is not logged"
+    }, "large chunk warning is not logged"
 
     d.instance_shutdown
   end
@@ -678,9 +676,9 @@ class ForwardInputTest < Test::Unit::TestCase
 
     # check log
     logs = d.instance.log.logs
-    assert_equal 1, logs.select{|line|
+    assert_equal 1, logs.count{|line|
      line =~ / \[warn\]: incoming chunk is broken: host="#{LOCALHOST_HOSTNAME}" msg=#{data.inspect}/
-    }.size, "should not accept broken chunk"
+    }, "should not accept broken chunk"
 
     d.instance_shutdown
   end
data/test/plugin/test_in_http.rb
@@ -7,9 +7,8 @@ require 'timecop'
 class HttpInputTest < Test::Unit::TestCase
   class << self
     def startup
-      socket_manager_path = ServerEngine::SocketManager::Server.generate_path
-      @server = ServerEngine::SocketManager::Server.open(socket_manager_path)
-      ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = socket_manager_path.to_s
+      @server = ServerEngine::SocketManager::Server.open
+      ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = @server.path.to_s
     end
 
     def shutdown
@@ -540,6 +539,29 @@ class HttpInputTest < Test::Unit::TestCase
     assert_equal_event_time time, d.events[1][1]
   end
 
+  def test_application_ndjson
+    d = create_driver
+    events = [
+      ["tag1", 1643935663, "{\"a\":1}\n{\"b\":2}"],
+      ["tag2", 1643935664, "{\"a\":3}\r\n{\"b\":4}"]
+    ]
+
+    expected = [
+      ["tag1", 1643935663, {"a"=>1}],
+      ["tag1", 1643935663, {"b"=>2}],
+      ["tag2", 1643935664, {"a"=>3}],
+      ["tag2", 1643935664, {"b"=>4}]
+    ]
+
+    d.run(expect_records: 1) do
+      events.each do |tag, time, record|
+        res = post("/#{tag}?time=#{time}", record, {"Content-Type"=>"application/x-ndjson"})
+        assert_equal("200", res.code)
+      end
+    end
+    assert_equal(expected, d.events)
+  end
+
   def test_msgpack
     d = create_driver
     time = event_time("2011-01-02 13:14:15 UTC")
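The new `test_application_ndjson` covers the `application/x-ndjson` content type added to `in_http` in this range (see `data/lib/fluent/plugin/in_http.rb` in the file index): each newline- or CRLF-delimited JSON object in the body becomes a separate event under the tag taken from the request path. A standalone sketch of the same request; the host and port are the plugin defaults and are assumed here:

# Sketch: assumes in_http listening on 127.0.0.1:9880 (its default port).
require 'net/http'

uri = URI('http://127.0.0.1:9880/tag1?time=1643935663')
req = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/x-ndjson')
req.body = "{\"a\":1}\n{\"b\":2}"   # two records in one request body
res = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(req) }
res.code   # => "200"; fluentd emits tag1 {"a"=>1} and tag1 {"b"=>2}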
@@ -856,6 +878,46 @@ class HttpInputTest < Test::Unit::TestCase
     end
   end
 
+  def test_cors_allow_credentials
+    d = create_driver(config + %[
+      cors_allow_origins ["http://foo.com"]
+      cors_allow_credentials
+    ])
+    assert_equal true, d.instance.cors_allow_credentials
+
+    time = event_time("2011-01-02 13:14:15 UTC")
+    event = ["tag1", time, {"a"=>1}]
+    res_code = nil
+    res_header = nil
+
+    d.run do
+      res = post("/#{event[0]}", {"json"=>event[2].to_json, "time"=>time.to_i.to_s}, {"Origin"=>"http://foo.com"})
+      res_code = res.code
+      res_header = res["Access-Control-Allow-Credentials"]
+    end
+    assert_equal(
+      {
+        response_code: "200",
+        allow_credentials_header: "true",
+        events: [event]
+      },
+      {
+        response_code: res_code,
+        allow_credentials_header: res_header,
+        events: d.events
+      }
+    )
+  end
+
+  def test_cors_allow_credentials_for_wildcard_origins
+    assert_raise(Fluent::ConfigError) do
+      create_driver(config + %[
+        cors_allow_origins ["*"]
+        cors_allow_credentials
+      ])
+    end
+  end
+
   def test_content_encoding_gzip
     d = create_driver
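The two CORS tests map to the new `cors_allow_credentials` flag on `in_http`: when it is set, responses to an allowed origin carry `Access-Control-Allow-Credentials: true`, and combining it with `cors_allow_origins ["*"]` is rejected at configure time, matching the browser rule that wildcard origins cannot be used with credentials. A sketch of a valid source block in the tests' heredoc style; the origin and port are example values:

# Sketch; "http://foo.com" and port 9880 are example values.
conf = %[
  <source>
    @type http
    port 9880
    cors_allow_origins ["http://foo.com"]
    cors_allow_credentials
  </source>
]
# cors_allow_origins ["*"] together with cors_allow_credentials would
# raise Fluent::ConfigError at configure time, per the second test above.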