fluentd 1.14.0-x64-mingw32 → 1.14.4-x64-mingw32


Potentially problematic release: this version of fluentd might be problematic.

Files changed (39)
  1. checksums.yaml +4 -4
  2. data/.drone.yml +6 -6
  3. data/.github/ISSUE_TEMPLATE/bug_report.yaml +1 -0
  4. data/CHANGELOG.md +101 -0
  5. data/README.md +2 -0
  6. data/SECURITY.md +18 -0
  7. data/fluentd.gemspec +3 -3
  8. data/lib/fluent/command/cat.rb +13 -3
  9. data/lib/fluent/config/parser.rb +1 -1
  10. data/lib/fluent/config/v1_parser.rb +1 -1
  11. data/lib/fluent/env.rb +4 -0
  12. data/lib/fluent/plugin/buf_file.rb +2 -2
  13. data/lib/fluent/plugin/buffer.rb +62 -8
  14. data/lib/fluent/plugin/in_tail.rb +53 -7
  15. data/lib/fluent/plugin/out_copy.rb +1 -1
  16. data/lib/fluent/plugin/out_file.rb +13 -1
  17. data/lib/fluent/plugin/out_forward.rb +15 -7
  18. data/lib/fluent/plugin/output.rb +6 -5
  19. data/lib/fluent/plugin/parser_apache2.rb +1 -1
  20. data/lib/fluent/plugin/storage_local.rb +3 -5
  21. data/lib/fluent/plugin_id.rb +2 -1
  22. data/lib/fluent/supervisor.rb +2 -2
  23. data/lib/fluent/time.rb +21 -20
  24. data/lib/fluent/version.rb +1 -1
  25. data/test/command/test_cat.rb +31 -2
  26. data/test/plugin/in_tail/test_io_handler.rb +12 -4
  27. data/test/plugin/test_bare_output.rb +1 -1
  28. data/test/plugin/test_buffer.rb +149 -1
  29. data/test/plugin/test_filter.rb +1 -1
  30. data/test/plugin/test_in_tail.rb +109 -9
  31. data/test/plugin/test_input.rb +1 -1
  32. data/test/plugin/test_out_exec.rb +6 -4
  33. data/test/plugin/test_out_exec_filter.rb +4 -0
  34. data/test/plugin/test_out_file.rb +29 -13
  35. data/test/plugin/test_output_as_buffered_secondary.rb +1 -1
  36. data/test/plugin_helper/test_child_process.rb +9 -9
  37. data/test/plugin_helper/test_timer.rb +2 -2
  38. data/test/test_time_parser.rb +22 -0
  39. metadata +10 -9
data/lib/fluent/plugin/out_forward.rb CHANGED
@@ -167,6 +167,8 @@ module Fluent::Plugin
  @usock = nil
  @keep_alive_watcher_interval = 5 # TODO
  @suspend_flush = false
+ @healthy_nodes_count_metrics = nil
+ @registered_nodes_count_metrics = nil
  end

  def configure(conf)
@@ -265,6 +267,9 @@ module Fluent::Plugin
  end

  raise Fluent::ConfigError, "ack_response_timeout must be a positive integer" if @ack_response_timeout < 1
+ @healthy_nodes_count_metrics = metrics_create(namespace: "fluentd", subsystem: "output", name: "healthy_nodes_count", help_text: "Number of count healthy nodes", prefer_gauge: true)
+ @registered_nodes_count_metrics = metrics_create(namespace: "fluentd", subsystem: "output", name: "registered_nodes_count", help_text: "Number of count registered nodes", prefer_gauge: true)
+
  end

  def multi_workers_ready?
@@ -418,18 +423,21 @@ module Fluent::Plugin
  def statistics
  stats = super
  services = service_discovery_services
- healthy_nodes_count = 0
- registed_nodes_count = services.size
+ @healthy_nodes_count_metrics.set(0)
+ @registered_nodes_count_metrics.set(services.size)
  services.each do |s|
  if s.available?
- healthy_nodes_count += 1
+ @healthy_nodes_count_metrics.inc
  end
  end

- stats.merge(
- 'healthy_nodes_count' => healthy_nodes_count,
- 'registered_nodes_count' => registed_nodes_count,
- )
+ stats = {
+ 'output' => stats["output"].merge({
+ 'healthy_nodes_count' => @healthy_nodes_count_metrics.get,
+ 'registered_nodes_count' => @registered_nodes_count_metrics.get,
+ })
+ }
+ stats
  end

  # MessagePack FixArray length is 3
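Besides fixing the "registed" typo and moving the counters onto the new metrics objects, the statistics rewrite changes the shape of the returned hash: the node counts now live under the "output" key instead of being merged at the top level. A minimal sketch of the resulting structure, assuming `super` returns a hash of the form `{"output" => {...}}`; the "emit_records" key is illustrative only, not a guarantee of what the base stats contain:

    # Stand-ins for the real values:
    stats = { "output" => { "emit_records" => 10 } }  # from super
    healthy = 2      # @healthy_nodes_count_metrics.get
    registered = 3   # @registered_nodes_count_metrics.get

    stats = {
      "output" => stats["output"].merge(
        "healthy_nodes_count" => healthy,
        "registered_nodes_count" => registered
      )
    }
    # => {"output"=>{"emit_records"=>10, "healthy_nodes_count"=>2,
    #     "registered_nodes_count"=>3}}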
data/lib/fluent/plugin/output.rb CHANGED
@@ -14,6 +14,7 @@
  # limitations under the License.
  #

+ require 'fluent/env'
  require 'fluent/error'
  require 'fluent/plugin/base'
  require 'fluent/plugin/buffer'
@@ -1248,8 +1249,8 @@ module Fluent
  backup_dir = File.dirname(backup_file)

  log.warn "bad chunk is moved to #{backup_file}"
- FileUtils.mkdir_p(backup_dir, mode: system_config.dir_permission || 0755) unless Dir.exist?(backup_dir)
- File.open(backup_file, 'ab', system_config.file_permission || 0644) { |f|
+ FileUtils.mkdir_p(backup_dir, mode: system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION) unless Dir.exist?(backup_dir)
+ File.open(backup_file, 'ab', system_config.file_permission || Fluent::DEFAULT_FILE_PERMISSION) { |f|
  chunk.write_to(f)
  }
  end
@@ -1275,7 +1276,7 @@ module Fluent
  unless @retry
  @retry = retry_state(@buffer_config.retry_randomize)
  if error
- log.warn "failed to flush the buffer.", retry_time: @retry.steps, next_retry_seconds: @retry.next_time, chunk: chunk_id_hex, error: error
+ log.warn "failed to flush the buffer.", retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error
  log.warn_backtrace error.backtrace
  end
  return
@@ -1304,11 +1305,11 @@ module Fluent
  if error
  if using_secondary
  msg = "failed to flush the buffer with secondary output."
- log.warn msg, retry_time: @retry.steps, next_retry_seconds: @retry.next_time, chunk: chunk_id_hex, error: error
+ log.warn msg, retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error
  log.warn_backtrace error.backtrace
  else
  msg = "failed to flush the buffer."
- log.warn msg, retry_time: @retry.steps, next_retry_seconds: @retry.next_time, chunk: chunk_id_hex, error: error
+ log.warn msg, retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error
  log.warn_backtrace error.backtrace
  end
  end
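Two things change in these warn calls: the fields are renamed to the more accurate retry_times/next_retry_time, and the retry timestamp is passed through Time#round so the logged value carries no subsecond noise. A plain-Ruby illustration of the rounding (the epoch value is arbitrary):

    require 'time'

    t = Time.at(1638000000, 123456).utc  # 2021-11-27 08:00:00.123456 UTC
    t.iso8601(6)        # => "2021-11-27T08:00:00.123456Z"
    t.round.iso8601(6)  # => "2021-11-27T08:00:00.000000Z"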
data/lib/fluent/plugin/parser_apache2.rb CHANGED
@@ -21,7 +21,7 @@ module Fluent
  class Apache2Parser < Parser
  Plugin.register_parser('apache2', self)

- REGEXP = /^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>(?:[^\"]|\\.)*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>(?:[^\"]|\\.)*)" "(?<agent>(?:[^\"]|\\.)*)")?$/
+ REGEXP = /^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>(?:[^\"]|\\")*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>(?:[^\"]|\\")*)" "(?<agent>(?:[^\"]|\\")*)")?$/
  TIME_FORMAT = "%d/%b/%Y:%H:%M:%S %z"

  def initialize
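The alternation inside each quoted field narrows from \\. (backslash followed by any character) to \\" (backslash followed by a literal quote), which limits how the pattern can backtrack on malformed input while still accepting escaped quotes. A quick sanity check of the new pattern; the log line is fabricated for illustration:

    # regexp below copies the new REGEXP from the hunk above
    regexp = /^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>(?:[^\"]|\\")*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>(?:[^\"]|\\")*)" "(?<agent>(?:[^\"]|\\")*)")?$/

    line = '127.0.0.1 - frank [13/Jul/2021:10:00:00 +0900] ' \
           '"GET /index.html HTTP/1.1" 200 777 "-" "Mozilla/5.0"'
    m = regexp.match(line)
    m[:host]    # => "127.0.0.1"
    m[:method]  # => "GET"
    m[:path]    # => "/index.html"
    m[:code]    # => "200"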
data/lib/fluent/plugin/storage_local.rb CHANGED
@@ -14,6 +14,7 @@
  # limitations under the License.
  #

+ require 'fluent/env'
  require 'fluent/plugin'
  require 'fluent/plugin/storage'

@@ -25,14 +26,11 @@ module Fluent
  class LocalStorage < Storage
  Fluent::Plugin.register_storage('local', self)

- DEFAULT_DIR_MODE = 0755
- DEFAULT_FILE_MODE = 0644
-
  config_param :path, :string, default: nil
- config_param :mode, default: DEFAULT_FILE_MODE do |v|
+ config_param :mode, default: Fluent::DEFAULT_FILE_PERMISSION do |v|
  v.to_i(8)
  end
- config_param :dir_mode, default: DEFAULT_DIR_MODE do |v|
+ config_param :dir_mode, default: Fluent::DEFAULT_DIR_PERMISSION do |v|
  v.to_i(8)
  end
  config_param :pretty_print, :bool, default: false
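The plugin-local mode constants give way to shared defaults from fluent/env (the data/lib/fluent/env.rb +4 -0 entry in the file list). That hunk is not shown on this page, but to be consistent with the 0755/0644 literals swapped out here and in output.rb, plugin_id.rb, and supervisor.rb, the additions would amount to roughly:

    # Sketch of the constants presumably added to lib/fluent/env.rb,
    # inferred from the literals they replace; not copied from the file.
    module Fluent
      DEFAULT_FILE_PERMISSION = 0644
      DEFAULT_DIR_PERMISSION = 0755
    end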
data/lib/fluent/plugin_id.rb CHANGED
@@ -15,6 +15,7 @@
  #

  require 'set'
+ require 'fluent/env'
  require 'fluent/variable_store'

  module Fluent
@@ -76,7 +77,7 @@ module Fluent

  # Fluent::Plugin::Base#fluentd_worker_id
  dir = File.join(system_config.root_dir, "worker#{fluentd_worker_id}", plugin_id)
- FileUtils.mkdir_p(dir, mode: system_config.dir_permission || 0755) unless Dir.exist?(dir)
+ FileUtils.mkdir_p(dir, mode: system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION) unless Dir.exist?(dir)
  @_plugin_root_dir = dir.freeze
  dir
  end
data/lib/fluent/supervisor.rb CHANGED
@@ -544,7 +544,7 @@ module Fluent
  $log.ignore_same_log_interval = ignore_same_log_interval if ignore_same_log_interval

  if @path && log_dir_perm
- File.chmod(log_dir_perm || 0755, File.dirname(@path))
+ File.chmod(log_dir_perm || Fluent::DEFAULT_DIR_PERMISSION, File.dirname(@path))
  end
  end

@@ -651,7 +651,7 @@ module Fluent
  end
  else
  begin
- FileUtils.mkdir_p(root_dir, mode: @system_config.dir_permission || 0755)
+ FileUtils.mkdir_p(root_dir, mode: @system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION)
  rescue => e
  raise Fluent::InvalidRootDirectory, "failed to create root directory:#{root_dir}, #{e.inspect}"
  end
data/lib/fluent/time.rb CHANGED
@@ -226,19 +226,16 @@ module Fluent

  format_with_timezone = format && (format.include?("%z") || format.include?("%Z"))

- # unixtime_in_expected_tz = unixtime_in_localtime + offset_diff
- offset_diff = case
- when format_with_timezone then nil
- when timezone then
- offset = Fluent::Timezone.utc_offset(timezone)
- if offset.respond_to?(:call)
- ->(t) { Time.now.localtime.utc_offset - offset.call(t) }
- else
- Time.now.localtime.utc_offset - offset
- end
- when localtime then 0
- else Time.now.localtime.utc_offset # utc
- end
+ utc_offset = case
+ when format_with_timezone then
+ nil
+ when timezone then
+ Fluent::Timezone.utc_offset(timezone)
+ when localtime then
+ nil
+ else
+ 0 # utc
+ end

  strptime = format && (Strptime.new(format) rescue nil)

@@ -247,16 +244,20 @@ module Fluent
  when format_with_timezone then ->(v){ Fluent::EventTime.from_time(Time.strptime(v, format)) }
  when format == '%iso8601' then ->(v){ Fluent::EventTime.from_time(Time.iso8601(v)) }
  when strptime then
- if offset_diff.respond_to?(:call)
- ->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + offset_diff.call(t), t.nsec) }
+ if utc_offset.nil?
+ ->(v){ t = strptime.exec(v); Fluent::EventTime.new(t.to_i, t.nsec) }
+ elsif utc_offset.respond_to?(:call)
+ ->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset.call(t), t.nsec) }
  else
- ->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + offset_diff, t.nsec) }
+ ->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset, t.nsec) }
  end
- when format then
- if offset_diff.respond_to?(:call)
- ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + offset_diff.call(t), t.nsec) }
+ when format then
+ if utc_offset.nil?
+ ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i, t.nsec) }
+ elsif utc_offset.respond_to?(:call)
+ ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset.call(t), t.nsec) }
  else
- ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + offset_diff, t.nsec) }
+ ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset, t.nsec) }
  end
  else ->(v){ Fluent::EventTime.parse(v) }
  end
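The old code precomputed a single offset_diff against the machine's current local offset, which goes stale across DST transitions and mishandles zones whose offset varies. The new code keeps the target utc_offset and corrects each parsed value individually via t.to_i + t.utc_offset - utc_offset. A worked example with hypothetical values, assuming a timestamp parsed under a +09:00 local offset that should be interpreted as -05:00:

    # The parser yielded a Time in +09:00; the configured timezone
    # resolves to -05:00 (illustrative values).
    t = Time.new(2021, 11, 27, 8, 0, 0, "+09:00")
    target_utc_offset = -5 * 3600

    unixtime = t.to_i + t.utc_offset - target_utc_offset
    Time.at(unixtime).utc
    # => 2021-11-27 13:00:00 UTC, i.e. 08:00:00 in -05:00 as intended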
data/lib/fluent/version.rb CHANGED
@@ -16,6 +16,6 @@

  module Fluent

- VERSION = '1.14.0'
+ VERSION = '1.14.4'

  end
data/test/command/test_cat.rb CHANGED
@@ -69,7 +69,7 @@ class TestFluentCat < ::Test::Unit::TestCase
  def test_cat_json
  d = create_driver
  d.run(expect_records: 1) do
- Open3.pipeline_w("ruby #{FLUENT_CAT_COMMAND} --port #{@port} json") do |stdin|
+ Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} json") do |stdin|
  stdin.puts('{"key":"value"}')
  stdin.close
  end
@@ -86,7 +86,7 @@ class TestFluentCat < ::Test::Unit::TestCase
  path = d.instance.write(@chunk)
  d = create_driver
  d.run(expect_records: 1) do
- Open3.pipeline_w("ruby #{FLUENT_CAT_COMMAND} --port #{@port} --format msgpack secondary") do |stdin|
+ Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} --format msgpack secondary") do |stdin|
  stdin.write(File.read(path))
  stdin.close
  end
@@ -96,4 +96,33 @@ class TestFluentCat < ::Test::Unit::TestCase
  [d.events.size, event.first, event.last])
  end
  end
+
+ sub_test_case "send specific event time" do
+ def test_without_event_time
+ event_time = Fluent::EventTime.now
+ d = create_driver
+ d.run(expect_records: 1) do
+ Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} tag") do |stdin|
+ stdin.puts('{"key":"value"}')
+ stdin.close
+ end
+ end
+ event = d.events.first
+ assert_in_delta(event_time.to_f, event[1].to_f, 3.0) # expect command to be finished in 3 seconds
+ assert_equal([1, "tag", true, @record],
+ [d.events.size, event.first, event_time.to_f < event[1].to_f, event.last])
+ end
+
+ def test_with_event_time
+ event_time = "2021-01-02 13:14:15.0+00:00"
+ d = create_driver
+ d.run(expect_records: 1) do
+ Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} --event-time '#{event_time}' tag") do |stdin|
+ stdin.puts('{"key":"value"}')
+ stdin.close
+ end
+ end
+ assert_equal([["tag", Fluent::EventTime.parse(event_time), @record]], d.events)
+ end
+ end
  end
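The new sub-test exercises the --event-time option that data/lib/fluent/command/cat.rb (+13 -3) gains in this release: without it a record keeps the current time, with it the given timestamp is parsed and attached. A usage sketch mirroring the test, assuming a fluentd forward input listening on the default port 24224 and an illustrative tag:

    require 'open3'

    # Send one record tagged "app.log" with an explicit event time.
    Open3.pipeline_w("fluent-cat --port 24224 --event-time '2021-01-02 13:14:15.0+00:00' app.log") do |stdin|
      stdin.puts('{"key":"value"}')
      stdin.close
    end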
data/test/plugin/in_tail/test_io_handler.rb CHANGED
@@ -1,11 +1,19 @@
  require_relative '../../helper'

  require 'fluent/plugin/in_tail'
+ require 'fluent/plugin/metrics_local'
  require 'tempfile'

  class IntailIOHandlerTest < Test::Unit::TestCase
  setup do
  @file = Tempfile.new('intail_io_handler').binmode
+ opened_file_metrics = Fluent::Plugin::LocalMetrics.new
+ opened_file_metrics.configure(config_element('metrics', '', {}))
+ closed_file_metrics = Fluent::Plugin::LocalMetrics.new
+ closed_file_metrics.configure(config_element('metrics', '', {}))
+ rotated_file_metrics = Fluent::Plugin::LocalMetrics.new
+ rotated_file_metrics.configure(config_element('metrics', '', {}))
+ @metrics = Fluent::Plugin::TailInput::MetricsInfo.new(opened_file_metrics, closed_file_metrics, rotated_file_metrics)
  end

  teardown do
@@ -30,7 +38,7 @@ class IntailIOHandlerTest < Test::Unit::TestCase
  end

  returned_lines = ''
- r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 100, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false) do |lines, _watcher|
+ r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 100, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false, metrics: @metrics) do |lines, _watcher|
  returned_lines << lines.join
  true
  end
@@ -62,7 +70,7 @@ class IntailIOHandlerTest < Test::Unit::TestCase
  end

  returned_lines = ''
- r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 100, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: true) do |lines, _watcher|
+ r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 100, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: true, metrics: @metrics) do |lines, _watcher|
  returned_lines << lines.join
  true
  end
@@ -93,7 +101,7 @@ class IntailIOHandlerTest < Test::Unit::TestCase
  end

  returned_lines = []
- r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 5, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false) do |lines, _watcher|
+ r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 5, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false, metrics: @metrics) do |lines, _watcher|
  returned_lines << lines.dup
  true
  end
@@ -119,7 +127,7 @@ class IntailIOHandlerTest < Test::Unit::TestCase
  end

  returned_lines = []
- r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 5, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false) do |lines, _watcher|
+ r = Fluent::Plugin::TailInput::TailWatcher::IOHandler.new(watcher, path: @file.path, read_lines_limit: 5, read_bytes_limit_per_second: -1, log: $log, open_on_every_update: false, metrics: @metrics) do |lines, _watcher|
  returned_lines << lines.dup
  true
  end
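Every IOHandler is now constructed with a metrics: keyword so in_tail can count opened, closed, and rotated files; the setup block bundles three LocalMetrics counters into a TailInput::MetricsInfo. The counter API in play is the same set/inc/get already seen in the out_forward hunk. A minimal standalone sketch; outside the test helper, config_element corresponds roughly to Fluent::Config::Element.new:

    require 'fluent/plugin/metrics_local'
    require 'fluent/config/element'

    # A counter configured the same way as in the setup block above.
    metrics = Fluent::Plugin::LocalMetrics.new
    metrics.configure(Fluent::Config::Element.new('metrics', '', {}, []))

    metrics.inc
    metrics.get # => 1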
data/test/plugin/test_bare_output.rb CHANGED
@@ -83,7 +83,7 @@ class BareOutputTest < Test::Unit::TestCase

  @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))

- assert{ @p.log.object_id != original_logger.object_id }
+ assert(@p.log.object_id != original_logger.object_id)
  assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
  end

data/test/plugin/test_buffer.rb CHANGED
@@ -18,7 +18,7 @@ module FluentPluginBufferTest
  end
  class DummyMemoryChunkError < StandardError; end
  class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
- attr_reader :append_count, :rollbacked, :closed, :purged
+ attr_reader :append_count, :rollbacked, :closed, :purged, :chunk
  attr_accessor :failing
  def initialize(metadata, compress: :text)
  super
@@ -944,6 +944,97 @@ class BufferTest < Test::Unit::TestCase
  @p.write({@dm0 => es}, format: @format)
  end
  end
+
+ data(
+ first_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
+ intermediate_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
+ last_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]]),
+ multiple_chunks: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
+ [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]])
+ )
+ test '#write exceeds chunk_limit_size, raise BufferChunkOverflowError, but not lost whole messages' do |(es)|
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal [], @p.queue.map(&:metadata)
+
+ assert_equal 1_280_000, @p.chunk_limit_size
+
+ nth = []
+ es.entries.each_with_index do |entry, index|
+ if entry.last["message"].size == @p.chunk_limit_size
+ nth << index
+ end
+ end
+ messages = []
+ nth.each do |n|
+ messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size (1280000)"
+ end
+
+ assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages.join(", ")) do
+ @p.write({@dm0 => es}, format: @format)
+ end
+ # message a and b are concatenated and staged
+ staged_messages = Fluent::MessagePackFactory.msgpack_unpacker.feed_each(@p.stage[@dm0].chunk).collect do |record|
+ record.last
+ end
+ assert_equal([2, [{"message" => "a"}, {"message" => "b"}]],
+ [@p.stage[@dm0].size, staged_messages])
+ # only es0 message is queued
+ assert_equal [@dm0], @p.queue.map(&:metadata)
+ assert_equal [5000], @p.queue.map(&:size)
+ end
+
+ test "confirm that every message which is smaller than chunk threshold does not raise BufferChunkOverflowError" do
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal [], @p.queue.map(&:metadata)
+ timestamp = event_time('2016-04-11 16:00:02 +0000')
+ es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "a" * 1_000_000}],
+ [timestamp, {"message" => "b" * 1_000_000}],
+ [timestamp, {"message" => "c" * 1_000_000}]])
+
+ # https://github.com/fluent/fluentd/issues/1849
+ # Even though 1_000_000 < 1_280_000 (chunk_limit_size), it raised BufferChunkOverflowError before.
+ # It should not be raised and message a,b,c should be stored into 3 chunks.
+ assert_nothing_raised do
+ @p.write({@dm0 => es}, format: @format)
+ end
+ messages = []
+ # pick up first letter to check whether chunk is queued in expected order
+ 3.times do |index|
+ chunk = @p.queue[index]
+ es = Fluent::MessagePackEventStream.new(chunk.chunk)
+ es.ensure_unpacked!
+ records = es.instance_eval{ @unpacked_records }
+ records.each do |record|
+ messages << record["message"][0]
+ end
+ end
+ es = Fluent::MessagePackEventStream.new(@p.stage[@dm0].chunk)
+ es.ensure_unpacked!
+ staged_message = es.instance_eval{ @unpacked_records }.first["message"]
+ # message a and b are queued, message c is staged
+ assert_equal([
+ [@dm0],
+ "c" * 1_000_000,
+ [@dm0, @dm0, @dm0],
+ [5000, 1, 1],
+ [["x"] * 5000, "a", "b"].flatten
+ ],
+ [
+ @p.stage.keys,
+ staged_message,
+ @p.queue.map(&:metadata),
+ @p.queue.map(&:size),
+ messages
+ ])
+ end
  end

  sub_test_case 'custom format with configuration for test with lower chunk limit size' do
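The data-driven test above pins down the fixed behavior: a record larger than chunk_limit_size still raises BufferChunkOverflowError, but only after the remaining records in the stream have been committed to chunks, so one bad record no longer discards its whole event stream. Calling code that wants to keep going therefore only needs to rescue the error. A sketch with purely illustrative names; buffer, metadata, es, and log stand in for a configured Fluent::Plugin::Buffer, its metadata, an event stream, and a logger:

    def write_tolerating_overflow(buffer, metadata, es, log)
      buffer.write({metadata => es})
    rescue Fluent::Plugin::Buffer::BufferChunkOverflowError => e
      # Only the oversized record(s) were dropped; the rest were buffered.
      log.warn "dropped oversized record(s): #{e.message}"
    end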
@@ -1032,6 +1123,38 @@ class BufferTest < Test::Unit::TestCase
  @p.write({@dm0 => es})
  end
  end
+
+ test 'confirm that every array message which is smaller than chunk threshold does not raise BufferChunkOverflowError' do
+ assert_equal [@dm0], @p.stage.keys
+ assert_equal [], @p.queue.map(&:metadata)
+
+ assert_equal 1_280_000, @p.chunk_limit_size
+
+ es = ["a" * 1_000_000, "b" * 1_000_000, "c" * 1_000_000]
+ assert_nothing_raised do
+ @p.write({@dm0 => es})
+ end
+ queue_messages = @p.queue.collect do |chunk|
+ # collect first character of each message
+ chunk.chunk[0]
+ end
+ assert_equal([
+ [@dm0],
+ 1,
+ "c",
+ [@dm0, @dm0, @dm0],
+ [5000, 1, 1],
+ ["x", "a", "b"]
+ ],
+ [
+ @p.stage.keys,
+ @p.stage[@dm0].size,
+ @p.stage[@dm0].chunk[0],
+ @p.queue.map(&:metadata),
+ @p.queue.map(&:size),
+ queue_messages
+ ])
+ end
  end

  sub_test_case 'with configuration for test with lower limits' do
@@ -1201,6 +1324,7 @@ class BufferTest < Test::Unit::TestCase
  sub_test_case 'when compress is gzip' do
  setup do
  @p = create_buffer({'compress' => 'gzip'})
+ @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
  end

  test '#compress returns :gzip' do
@@ -1211,6 +1335,30 @@ class BufferTest < Test::Unit::TestCase
  chunk = @p.generate_chunk(create_metadata)
  assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::Decompressable)
  end
+
+ test '#write compressed data which exceeds chunk_limit_size, it raises BufferChunkOverflowError' do
+ @p = create_buffer({'compress' => 'gzip', 'chunk_limit_size' => 70})
+ timestamp = event_time('2016-04-11 16:00:02 +0000')
+ es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}], # overflow
+ [timestamp, {"message" => "aaa"}],
+ [timestamp, {"message" => "bbb"}]])
+ assert_equal [], @p.queue.map(&:metadata)
+ assert_equal 70, @p.chunk_limit_size
+
+ # calculate the actual boundary value. it varies on machine
+ c = @p.generate_chunk(create_metadata)
+ c.append(Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}]]), compress: :gzip)
+ overflow_bytes = c.bytesize
+
+ messages = "concatenated/appended a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size (70)"
+ assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages) do
+ # test format == nil && compress == :gzip
+ @p.write({@dm0 => es})
+ end
+ # message a and b occupies each chunks in full, so both of messages are queued (no staged chunk)
+ assert_equal([2, [@dm0, @dm0], [1, 1], nil],
+ [@p.queue.size, @p.queue.map(&:metadata), @p.queue.map(&:size), @p.stage[@dm0]])
+ end
  end

  sub_test_case '#statistics' do
data/test/plugin/test_filter.rb CHANGED
@@ -153,7 +153,7 @@ class FilterPluginTest < Test::Unit::TestCase

  @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))

- assert{ @p.log.object_id != original_logger.object_id }
+ assert(@p.log.object_id != original_logger.object_id)
  assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
  end