fluentd 1.18.0 → 1.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +116 -0
  3. data/CHANGELOG.md +235 -12
  4. data/MAINTAINERS.md +8 -2
  5. data/README.md +3 -7
  6. data/Rakefile +2 -0
  7. data/SECURITY.md +5 -3
  8. data/lib/fluent/command/cap_ctl.rb +2 -2
  9. data/lib/fluent/command/fluentd.rb +6 -2
  10. data/lib/fluent/compat/formatter.rb +6 -0
  11. data/lib/fluent/compat/socket_util.rb +2 -2
  12. data/lib/fluent/config/configure_proxy.rb +1 -1
  13. data/lib/fluent/config/element.rb +2 -2
  14. data/lib/fluent/config/literal_parser.rb +3 -3
  15. data/lib/fluent/config/parser.rb +15 -3
  16. data/lib/fluent/config/section.rb +2 -2
  17. data/lib/fluent/config/types.rb +1 -1
  18. data/lib/fluent/config/v1_parser.rb +3 -3
  19. data/lib/fluent/counter/store.rb +1 -1
  20. data/lib/fluent/engine.rb +1 -1
  21. data/lib/fluent/env.rb +3 -2
  22. data/lib/fluent/event.rb +7 -6
  23. data/lib/fluent/log/console_adapter.rb +5 -7
  24. data/lib/fluent/log.rb +23 -0
  25. data/lib/fluent/plugin/bare_output.rb +0 -16
  26. data/lib/fluent/plugin/base.rb +2 -2
  27. data/lib/fluent/plugin/buf_file.rb +15 -1
  28. data/lib/fluent/plugin/buf_file_single.rb +15 -1
  29. data/lib/fluent/plugin/buffer/chunk.rb +74 -10
  30. data/lib/fluent/plugin/buffer/file_chunk.rb +9 -5
  31. data/lib/fluent/plugin/buffer/file_single_chunk.rb +3 -3
  32. data/lib/fluent/plugin/buffer/memory_chunk.rb +2 -2
  33. data/lib/fluent/plugin/buffer.rb +34 -6
  34. data/lib/fluent/plugin/compressable.rb +68 -22
  35. data/lib/fluent/plugin/filter.rb +0 -8
  36. data/lib/fluent/plugin/filter_record_transformer.rb +1 -1
  37. data/lib/fluent/plugin/formatter_csv.rb +18 -4
  38. data/lib/fluent/plugin/formatter_json.rb +7 -4
  39. data/lib/fluent/plugin/formatter_out_file.rb +5 -2
  40. data/lib/fluent/plugin/in_forward.rb +9 -5
  41. data/lib/fluent/plugin/in_http.rb +9 -4
  42. data/lib/fluent/plugin/in_monitor_agent.rb +4 -8
  43. data/lib/fluent/plugin/in_tail/position_file.rb +1 -1
  44. data/lib/fluent/plugin/in_tail.rb +80 -57
  45. data/lib/fluent/plugin/in_tcp.rb +2 -2
  46. data/lib/fluent/plugin/in_udp.rb +1 -1
  47. data/lib/fluent/plugin/input.rb +0 -8
  48. data/lib/fluent/plugin/multi_output.rb +1 -17
  49. data/lib/fluent/plugin/out_exec_filter.rb +2 -2
  50. data/lib/fluent/plugin/out_file.rb +37 -30
  51. data/lib/fluent/plugin/out_forward/connection_manager.rb +2 -2
  52. data/lib/fluent/plugin/out_forward.rb +23 -13
  53. data/lib/fluent/plugin/out_http.rb +1 -1
  54. data/lib/fluent/plugin/out_secondary_file.rb +2 -2
  55. data/lib/fluent/plugin/out_stdout.rb +10 -3
  56. data/lib/fluent/plugin/out_stream.rb +3 -3
  57. data/lib/fluent/plugin/output.rb +24 -35
  58. data/lib/fluent/plugin/owned_by_mixin.rb +2 -2
  59. data/lib/fluent/plugin/parser.rb +3 -3
  60. data/lib/fluent/plugin/parser_json.rb +3 -3
  61. data/lib/fluent/plugin/sd_file.rb +2 -2
  62. data/lib/fluent/plugin/storage_local.rb +8 -4
  63. data/lib/fluent/plugin.rb +1 -1
  64. data/lib/fluent/plugin_helper/child_process.rb +2 -2
  65. data/lib/fluent/plugin_helper/http_server/request.rb +13 -2
  66. data/lib/fluent/plugin_helper/http_server/server.rb +4 -14
  67. data/lib/fluent/plugin_helper/http_server.rb +1 -8
  68. data/lib/fluent/plugin_helper/metrics.rb +7 -0
  69. data/lib/fluent/plugin_helper/server.rb +4 -1
  70. data/lib/fluent/plugin_helper/service_discovery.rb +1 -1
  71. data/lib/fluent/plugin_helper/socket_option.rb +2 -2
  72. data/lib/fluent/plugin_helper/storage.rb +1 -1
  73. data/lib/fluent/plugin_id.rb +3 -3
  74. data/lib/fluent/root_agent.rb +4 -3
  75. data/lib/fluent/static_config_analysis.rb +3 -2
  76. data/lib/fluent/supervisor.rb +51 -5
  77. data/lib/fluent/system_config.rb +13 -4
  78. data/lib/fluent/test/base.rb +1 -1
  79. data/lib/fluent/test/driver/base.rb +2 -2
  80. data/lib/fluent/test/filter_test.rb +2 -2
  81. data/lib/fluent/test/formatter_test.rb +1 -1
  82. data/lib/fluent/test/helpers.rb +4 -0
  83. data/lib/fluent/test/input_test.rb +2 -2
  84. data/lib/fluent/test/output_test.rb +4 -4
  85. data/lib/fluent/test/parser_test.rb +1 -1
  86. data/lib/fluent/tls.rb +24 -0
  87. data/lib/fluent/variable_store.rb +1 -1
  88. data/lib/fluent/version.rb +1 -1
  89. data/lib/fluent/winsvc.rb +38 -8
  90. metadata +85 -16
  91. data/lib/fluent/plugin_helper/http_server/compat/server.rb +0 -92
  92. data/lib/fluent/plugin_helper/http_server/compat/ssl_context_extractor.rb +0 -52
  93. data/lib/fluent/plugin_helper/http_server/compat/webrick_handler.rb +0 -58
@@ -36,7 +36,7 @@ module Fluent::Plugin
  helpers :timer, :event_loop, :parser, :compat_parameters

  RESERVED_CHARS = ['/', '*', '%'].freeze
- MetricsInfo = Struct.new(:opened, :closed, :rotated, :throttled)
+ MetricsInfo = Struct.new(:opened, :closed, :rotated, :throttled, :tracked)

  class WatcherSetupError < StandardError
  def initialize(msg)
@@ -63,7 +63,7 @@ module Fluent::Plugin

  desc 'The paths to read. Multiple paths can be specified, separated by comma.'
  config_param :path, :string
- desc 'path delimiter used for spliting path config'
+ desc 'path delimiter used for splitting path config'
  config_param :path_delimiter, :string, default: ','
  desc 'Choose using glob patterns. Adding capabilities to handle [] and ?, and {}.'
  config_param :glob_policy, :enum, list: [:backward_compatible, :extended, :always], default: :backward_compatible
@@ -96,9 +96,9 @@ module Fluent::Plugin
  config_param :enable_watch_timer, :bool, default: true
  desc 'Enable the stat watcher based on inotify.'
  config_param :enable_stat_watcher, :bool, default: true
- desc 'The encoding after conversion of the input.'
- config_param :encoding, :string, default: nil
  desc 'The encoding of the input.'
+ config_param :encoding, :string, default: nil
+ desc "The original encoding of the input. If set, in_tail tries to encode string from this to 'encoding'. Must be set with 'encoding'. "
  config_param :from_encoding, :string, default: nil
  desc 'Add the log path being tailed to records. Specify the field name to be used.'
  config_param :path_key, :string, default: nil
@@ -144,10 +144,10 @@ module Fluent::Plugin
  end

  if @glob_policy == :always && @path_delimiter == ','
- raise Fluent::ConfigError, "cannot use glob_policy as always with the default path_delimitor: `,\""
+ raise Fluent::ConfigError, "cannot use glob_policy as always with the default path_delimiter: `,\""
  end

- if @glob_policy == :extended && /\{.*,.*\}/.match(@path) && extended_glob_pattern(@path)
+ if @glob_policy == :extended && /\{.*,.*\}/.match?(@path) && extended_glob_pattern(@path)
  raise Fluent::ConfigError, "cannot include curly braces with glob patterns in `#{@path}\". Use glob_policy always instead."
  end
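Note: the two configure-time checks above pin down how glob_policy interacts with path_delimiter: `always` cannot be combined with the default `,` delimiter, and `extended` rejects `{}` patterns outright. A minimal sketch of a source block that satisfies both checks (the tag, paths, and pos_file location are hypothetical):

    <source>
      @type tail
      tag app.logs
      # Brace patterns contain commas, so the default path_delimiter ',' has to be replaced.
      path_delimiter "|"
      path /var/log/{app1,app2}/*.log
      glob_policy always
      pos_file /var/log/fluent/app.pos
      <parse>
        @type none
      </parse>
    </source>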

@@ -165,6 +165,7 @@ module Fluent::Plugin
  @path_formatters = @paths.map{|path| [path, Fluent::Timezone.formatter(@path_timezone, path)]}.to_h
  @exclude_path_formatters = @exclude_path.map{|path| [path, Fluent::Timezone.formatter(@path_timezone, path)]}.to_h
  end
+ check_dir_permission unless Fluent.windows?

  # TODO: Use plugin_root_dir and storage plugin to store positions if available
  if @pos_file
@@ -177,14 +178,14 @@ module Fluent::Plugin
  if @follow_inodes
  raise Fluent::ConfigError, "Can't follow inodes without pos_file configuration parameter"
  end
- $log.warn "'pos_file PATH' parameter is not set to a 'tail' source."
- $log.warn "this parameter is highly recommended to save the position to resume tailing."
+ log.warn "'pos_file PATH' parameter is not set to a 'tail' source."
+ log.warn "this parameter is highly recommended to save the position to resume tailing."
  end

  configure_tag
  configure_encoding

- @multiline_mode = parser_config["@type"] =~ /multiline/
+ @multiline_mode = parser_config["@type"].include?("multiline")
  @receive_handler = if @multiline_mode
  method(:parse_multilines)
  else
@@ -205,11 +206,29 @@ module Fluent::Plugin
  @read_bytes_limit_per_second = min_bytes
  end
  end
+
  opened_file_metrics = metrics_create(namespace: "fluentd", subsystem: "input", name: "files_opened_total", help_text: "Total number of opened files")
  closed_file_metrics = metrics_create(namespace: "fluentd", subsystem: "input", name: "files_closed_total", help_text: "Total number of closed files")
  rotated_file_metrics = metrics_create(namespace: "fluentd", subsystem: "input", name: "files_rotated_total", help_text: "Total number of rotated files")
  throttling_metrics = metrics_create(namespace: "fluentd", subsystem: "input", name: "files_throttled_total", help_text: "Total number of times throttling occurs per file when throttling enabled")
- @metrics = MetricsInfo.new(opened_file_metrics, closed_file_metrics, rotated_file_metrics, throttling_metrics)
+ # The metrics for currently tracking files. Since the value may decrease, it cannot be represented using the counter type, so 'prefer_gauge: true' is used instead.
+ tracked_file_metrics = metrics_create(namespace: "fluentd", subsystem: "input", name: "files_tracked_count", help_text: "Number of tracked files", prefer_gauge: true)
+
+ @metrics = MetricsInfo.new(opened_file_metrics, closed_file_metrics, rotated_file_metrics, throttling_metrics, tracked_file_metrics)
+ end
+
+ def check_dir_permission
+ expand_paths_raw.select { |path|
+ not File.exist?(path)
+ }.each { |path|
+ inaccessible_dir = Pathname.new(File.expand_path(path))
+ .ascend
+ .reverse_each
+ .find { |p| p.directory? && !p.executable? }
+ if inaccessible_dir
+ log.warn "Skip #{path} because '#{inaccessible_dir}' lacks execute permission."
+ end
+ }
  end

  def configure_tag
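Note: the new check_dir_permission helper takes each configured path that does not resolve to an existing file, walks its ancestor directories from the filesystem root downward, and warns about the first one that cannot be traversed. A standalone Ruby sketch of the same Pathname#ascend / reverse_each idea, with a hypothetical path:

    require 'pathname'

    # Find the outermost ancestor directory that exists but lacks the execute
    # (traverse) permission, which makes everything below it unreachable.
    path = "/var/log/secure/app.log" # hypothetical
    blocked = Pathname.new(File.expand_path(path))
      .ascend          # enumerates from the full path up to "/"
      .reverse_each    # re-orders it root-first, so the outermost offender is found
      .find { |p| p.directory? && !p.executable? }

    puts "cannot traverse #{blocked}" if blocked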
@@ -290,7 +309,7 @@ module Fluent::Plugin
  def close
  super
  # close file handles after all threads stopped (in #close of thread plugin helper)
- # It may be because we need to wait IOHanlder.ready_to_shutdown()
+ # It may be because we need to wait IOHandler.ready_to_shutdown()
  close_watcher_handles
  end

@@ -300,7 +319,7 @@ module Fluent::Plugin
  end

  def extended_glob_pattern(path)
- path.include?('*') || path.include?('?') || /\[.*\]/.match(path)
+ path.include?('*') || path.include?('?') || /\[.*\]/.match?(path)
  end

  # Curly braces is not supported with default path_delimiter
@@ -313,7 +332,7 @@ module Fluent::Plugin
  # regular expressions as much as possible.
  # This is because not using `true' as a returning value
  # when choosing :always here.
- extended_glob_pattern(path) || /\{.*,.*\}/.match(path)
+ extended_glob_pattern(path) || /\{.*,.*\}/.match?(path)
  elsif @glob_policy == :extended
  extended_glob_pattern(path)
  elsif @glob_policy == :backward_compatible
@@ -321,7 +340,7 @@ module Fluent::Plugin
  end
  end

- def expand_paths
+ def expand_paths_raw
  date = Fluent::EventTime.now
  paths = []
  @paths.each { |path|
@@ -350,7 +369,7 @@ module Fluent::Plugin
  false
  end
  rescue Errno::ENOENT, Errno::EACCES
- log.debug("#{p} is missing after refresh file list")
+ log.debug { "#{p} is missing after refresh file list" }
  false
  end
  }
@@ -367,10 +386,14 @@ module Fluent::Plugin
  end
  use_glob?(path) ? Dir.glob(path) : path
  }.flatten.uniq
+ paths - excluded
+ end
+
+ def expand_paths
  # filter out non existing files, so in case pattern is without '*' we don't do unnecessary work
  hash = {}
- (paths - excluded).select { |path|
- FileTest.exist?(path)
+ expand_paths_raw.select { |path|
+ File.exist?(path)
  }.each { |path|
  # Even we just checked for existence, there is a race condition here as
  # of which stat() might fail with ENOENT. See #3224.
@@ -382,7 +405,7 @@ module Fluent::Plugin
  hash[target_info.path] = target_info
  end
  rescue Errno::ENOENT, Errno::EACCES => e
- $log.warn "expand_paths: stat() for #{path} failed with #{e.class.name}. Skip file."
+ log.warn "expand_paths: stat() for #{path} failed with #{e.class.name}. Skip file."
  end
  }
  hash
@@ -431,7 +454,7 @@ module Fluent::Plugin
  removed_hash = existence_paths_hash.reject {|key, value| target_paths_hash.key?(key)}
  added_hash = target_paths_hash.reject {|key, value| existence_paths_hash.key?(key)}

- # If an exisiting TailWatcher already follows a target path with the different inode,
+ # If an existing TailWatcher already follows a target path with the different inode,
  # it means that the TailWatcher following the rotated file still exists. In this case,
  # `refresh_watcher` can't start the new TailWatcher for the new current file. So, we
  # should output a warning log in order to prevent silent collection stops.
@@ -455,6 +478,7 @@ module Fluent::Plugin

  stop_watchers(removed_hash, unwatched: need_unwatch_in_stop_watchers) unless removed_hash.empty?
  start_watchers(added_hash) unless added_hash.empty?
+ @metrics.tracked.set(@tails.size)
  @startup = false if @startup
  end
@@ -499,7 +523,7 @@ module Fluent::Plugin
  begin
  target_info.ino = Fluent::FileWrapper.stat(path).ino
  rescue Errno::ENOENT, Errno::EACCES
- $log.warn "stat() for #{path} failed. Continuing without tailing it."
+ log.warn "stat() for #{path} failed. Continuing without tailing it."
  return
  end

@@ -562,7 +586,7 @@ module Fluent::Plugin
  # refresh_watchers calls @tails.keys so we don't use stop_watcher -> start_watcher sequence for safety.
  def update_watcher(tail_watcher, pe, new_inode)
  # TODO we should use another callback for this.
- # To supress impact to existing logics, limit the case to `@follow_inodes`.
+ # To suppress impact to existing logics, limit the case to `@follow_inodes`.
  # We may not need `@follow_inodes` condition.
  if @follow_inodes && new_inode.nil?
  # nil inode means the file disappeared, so we only need to stop it.
@@ -574,7 +598,7 @@ module Fluent::Plugin
  # In that case, `refresh_watcher` will add the new TailWatcher to tail the same target,
  # and it causes the log duplication.
  # (Other `detach_watcher_after_rotate_wait` may have the same problem.
- # We need the mechanism not to add duplicated TailWathcer with detaching TailWatcher.)
+ # We need the mechanism not to add duplicated TailWatcher with detaching TailWatcher.)
  detach_watcher_after_rotate_wait(tail_watcher, pe.read_inode)
  return
  end
@@ -730,7 +754,7 @@ module Fluent::Plugin
  record[@path_key] ||= tail_watcher.path unless @path_key.nil?
  es.add(Fluent::EventTime.now, record)
  end
- log.warn "pattern not matched: #{line.inspect}"
+ log.warn { "pattern not matched: #{line.inspect}" }
  end
  }
  rescue => e
@@ -795,6 +819,7 @@ module Fluent::Plugin
  'closed_file_count' => @metrics.closed.get,
  'rotated_file_count' => @metrics.rotated.get,
  'throttled_log_count' => @metrics.throttled.get,
+ 'tracked_file_count' => @metrics.tracked.get,
  })
  }
  stats
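Note: with the gauge wired into `statistics`, in_tail's monitoring output gains `tracked_file_count` next to the existing opened/closed/rotated/throttled counters. These figures are typically read through in_monitor_agent; a sketch of enabling it with its conventional bind address and port:

    <source>
      @type monitor_agent
      bind 0.0.0.0
      port 24220
    </source>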
@@ -803,17 +828,27 @@ module Fluent::Plugin
  private

  def io_handler(watcher, path)
- TailWatcher::IOHandler.new(
- watcher,
+ opts = {
  path: path,
  log: log,
  read_lines_limit: @read_lines_limit,
  read_bytes_limit_per_second: @read_bytes_limit_per_second,
  open_on_every_update: @open_on_every_update,
- from_encoding: @from_encoding,
- encoding: @encoding,
  metrics: @metrics,
  max_line_size: @max_line_size,
+ }
+ unless @encoding.nil?
+ if @from_encoding.nil?
+ opts[:encoding] = @encoding
+ else
+ opts[:encoding] = @from_encoding
+ opts[:encoding_to_convert] = @encoding
+ end
+ end
+
+ TailWatcher::IOHandler.new(
+ watcher,
+ **opts,
  &method(:receive_lines)
  )
  end
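Note: the reworked io_handler makes the parameter semantics explicit: with only `encoding`, lines are read in that encoding and passed through unchanged; with both, lines are read as `from_encoding` and converted to `encoding` (the FIFO's `encoding_to_convert`). A hypothetical source block using both, with illustrative tag and paths:

    <source>
      @type tail
      tag legacy.app
      path /var/log/legacy/app.log
      pos_file /var/log/fluent/legacy-app.pos
      # Read raw lines as Windows-31J, emit records as UTF-8.
      from_encoding Windows-31J
      encoding utf-8
      <parse>
        @type none
      </parse>
    </source>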
@@ -1006,46 +1041,30 @@ module Fluent::Plugin
  end

  class FIFO
- def initialize(from_encoding, encoding, log, max_line_size=nil)
- @from_encoding = from_encoding
- @encoding = encoding
- @need_enc = from_encoding != encoding
- @buffer = ''.force_encoding(from_encoding)
- @eol = "\n".encode(from_encoding).freeze
+ def initialize(encoding, log, max_line_size=nil, encoding_to_convert=nil)
+ @buffer = ''.force_encoding(encoding)
+ @eol = "\n".encode(encoding).freeze
+ @encoding_to_convert = encoding_to_convert
  @max_line_size = max_line_size
  @skip_current_line = false
  @skipping_current_line_bytesize = 0
  @log = log
  end

- attr_reader :from_encoding, :encoding, :buffer, :max_line_size
+ attr_reader :buffer, :max_line_size

  def <<(chunk)
- # Although "chunk" is most likely transient besides String#force_encoding itself
- # won't affect the actual content of it, it is also probable that "chunk" is
- # a reused buffer and changing its encoding causes some problems on the caller side.
- #
- # Actually, the caller here is specific and "chunk" comes from IO#partial with
- # the second argument, which the function always returns as a return value.
- #
- # Feeding a string that has its encoding attribute set to any double-byte or
- # quad-byte encoding to IO#readpartial as the second arguments results in an
- # assertion failure on Ruby < 2.4.0 for unknown reasons.
- orig_encoding = chunk.encoding
- chunk.force_encoding(from_encoding)
  @buffer << chunk
- # Thus the encoding needs to be reverted back here
- chunk.force_encoding(orig_encoding)
  end

  def convert(s)
- if @need_enc
- s.encode!(@encoding, @from_encoding)
+ if @encoding_to_convert
+ s.encode!(@encoding_to_convert)
  else
  s
  end
  rescue
- s.encode!(@encoding, @from_encoding, :invalid => :replace, :undef => :replace)
+ s.encode!(@encoding_to_convert, :invalid => :replace, :undef => :replace)
  end

  def read_lines(lines)
@@ -1056,8 +1075,9 @@ module Fluent::Plugin
  # Using freeze and slice is faster than slice!
  # See https://github.com/fluent/fluentd/pull/2527
  @buffer.freeze
- rbuf = @buffer.slice(0, idx + 1)
- @buffer = @buffer.slice(idx + 1, @buffer.size)
+ slice_position = idx + 1
+ rbuf = @buffer.slice(0, slice_position)
+ @buffer = @buffer.slice(slice_position, @buffer.size - slice_position)
  idx = @buffer.index(@eol)

  is_long_line = @max_line_size && (
@@ -1110,15 +1130,15 @@ module Fluent::Plugin

  attr_accessor :shutdown_timeout

- def initialize(watcher, path:, read_lines_limit:, read_bytes_limit_per_second:, max_line_size: nil, log:, open_on_every_update:, from_encoding: nil, encoding: nil, metrics:, &receive_lines)
+ def initialize(watcher, path:, read_lines_limit:, read_bytes_limit_per_second:, max_line_size: nil, log:, open_on_every_update:, encoding: Encoding::ASCII_8BIT, encoding_to_convert: nil, metrics:, &receive_lines)
  @watcher = watcher
  @path = path
  @read_lines_limit = read_lines_limit
  @read_bytes_limit_per_second = read_bytes_limit_per_second
  @receive_lines = receive_lines
  @open_on_every_update = open_on_every_update
- @fifo = FIFO.new(from_encoding || Encoding::ASCII_8BIT, encoding || Encoding::ASCII_8BIT, log, max_line_size)
- @iobuf = ''.force_encoding('ASCII-8BIT')
+ @encoding = encoding
+ @fifo = FIFO.new(encoding, log, max_line_size, encoding_to_convert)
  @lines = []
  @io = nil
  @notify_mutex = Mutex.new
@@ -1200,6 +1220,7 @@ module Fluent::Plugin
  end

  with_io do |io|
+ iobuf = ''.force_encoding(@encoding)
  begin
  read_more = false
  has_skipped_line = false
@@ -1210,7 +1231,7 @@ module Fluent::Plugin
  @start_reading_time ||= Fluent::Clock.now
  group_watcher&.update_reading_time(@path)

- data = io.readpartial(BYTES_TO_READ, @iobuf)
+ data = io.readpartial(BYTES_TO_READ, iobuf)
  @eof = false
  @number_bytes_read += data.bytesize
  @fifo << data
@@ -1237,6 +1258,8 @@ module Fluent::Plugin
  end
  rescue EOFError
  @eof = true
+ ensure
+ iobuf.clear
  end
  end
@@ -137,7 +137,7 @@ module Fluent::Plugin

  @parser.parse(msg) do |time, record|
  unless time && record
- log.warn "pattern not matched", message: msg
+ log.on_warn { log.warn "pattern not matched", message: msg }
  next
  end

@@ -187,7 +187,7 @@ module Fluent::Plugin

  @parser.parse(msg) do |time, record|
  unless time && record
- log.warn "pattern not matched", message: msg
+ log.on_warn { log.warn "pattern not matched", message: msg }
  next
  end

@@ -83,7 +83,7 @@ module Fluent::Plugin
  begin
  @parser.parse(data) do |time, record|
  unless time && record
- log.warn "pattern not matched", data: data
+ log.on_warn { log.warn "pattern not matched", data: data }
  next
  end
@@ -37,14 +37,6 @@ module Fluent
  @enable_size_metrics = false
  end

- def emit_records
- @emit_records_metrics.get
- end
-
- def emit_size
- @emit_size_metrics.get
- end
-
  def configure(conf)
  super

@@ -56,22 +56,6 @@ module Fluent
  @enable_size_metrics = false
  end

- def num_errors
- @num_errors_metrics.get
- end
-
- def emit_count
- @emit_count_metrics.get
- end
-
- def emit_size
- @emit_size_metrics.get
- end
-
- def emit_records
- @emit_records_metrics.get
- end
-
  def statistics
  stats = {
  'num_errors' => @num_errors_metrics.get,
@@ -91,7 +75,7 @@ module Fluent
  super

  @num_errors_metrics = metrics_create(namespace: "fluentd", subsystem: "multi_output", name: "num_errors", help_text: "Number of count num errors")
- @emit_count_metrics = metrics_create(namespace: "fluentd", subsystem: "multi_output", name: "emit_records", help_text: "Number of count emits")
+ @emit_count_metrics = metrics_create(namespace: "fluentd", subsystem: "multi_output", name: "emit_count", help_text: "Number of count emits")
  @emit_records_metrics = metrics_create(namespace: "fluentd", subsystem: "multi_output", name: "emit_records", help_text: "Number of emit records")
  @emit_size_metrics = metrics_create(namespace: "fluentd", subsystem: "multi_output", name: "emit_size", help_text: "Total size of emit events")
  @enable_size_metrics = !!system_config.enable_size_metrics
@@ -193,8 +193,8 @@ module Fluent::Plugin
  log.warn "child process exits with error code", code: status.to_i, status: status.exitstatus, signal: status.termsig
  end
  c.mutex.synchronize do
- (c.writeio && c.writeio.close) rescue nil
- (c.readio && c.readio.close) rescue nil
+ c.writeio&.close rescue nil
+ c.readio&.close rescue nil
  c.pid = c.readio = c.writeio = nil
  end
  end
@@ -17,6 +17,7 @@
  require 'fileutils'
  require 'zlib'
  require 'time'
+ require 'pathname'

  require 'fluent/plugin/output'
  require 'fluent/config/error'
@@ -29,11 +30,12 @@ module Fluent::Plugin

  helpers :formatter, :inject, :compat_parameters

- SUPPORTED_COMPRESS = [:text, :gz, :gzip]
+ SUPPORTED_COMPRESS = [:text, :gz, :gzip, :zstd]
  SUPPORTED_COMPRESS_MAP = {
  text: nil,
  gz: :gzip,
  gzip: :gzip,
+ zstd: :zstd,
  }

  DEFAULT_TIMEKEY = 60 * 60 * 24
@@ -53,6 +55,8 @@ module Fluent::Plugin
  config_param :recompress, :bool, default: false
  desc "Create symlink to temporary buffered file when buffer_type is file (disabled on Windows)."
  config_param :symlink_path, :string, default: nil
+ desc "Use relative path for symlink target (default: false)"
+ config_param :symlink_path_use_relative, :bool, default: false

  config_section :format do
  config_set_default :@type, 'out_file'
@@ -96,7 +100,12 @@ module Fluent::Plugin
  if chunk.metadata == @latest_metadata
  sym_path = @_output_plugin_for_symlink.extract_placeholders(@_symlink_path, chunk)
  FileUtils.mkdir_p(File.dirname(sym_path), mode: @_output_plugin_for_symlink.dir_perm)
- FileUtils.ln_sf(chunk.path, sym_path)
+ if @_output_plugin_for_symlink.symlink_path_use_relative
+ relative_path = Pathname.new(chunk.path).relative_path_from(Pathname.new(File.dirname(sym_path)))
+ FileUtils.ln_sf(relative_path, sym_path)
+ else
+ FileUtils.ln_sf(chunk.path, sym_path)
+ end
  end
  chunk
  end
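Note: with symlink_path_use_relative enabled, the symlink target is computed relative to the symlink's own directory instead of pointing at the absolute buffer path, which can keep the link usable when the whole tree is mounted under a different root. A hypothetical out_file snippet with illustrative paths:

    <match app.**>
      @type file
      path /data/logs/app
      symlink_path /data/logs/app/current
      symlink_path_use_relative true
      <buffer time>
        @type file
        path /data/logs/buffer/app
      </buffer>
    </match>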
@@ -184,18 +193,15 @@ module Fluent::Plugin
  @buffer.symlink_path = @symlink_path
  @buffer.output_plugin_for_symlink = self
  end
+
+ if @compress != :text && @buffer.compress != :text && @buffer.compress != @compress_method
+ raise Fluent::ConfigError, "You cannot specify different compression formats for Buffer (Buffer: #{@buffer.compress}, Self: #{@compress})"
+ end
  end

  @dir_perm = system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION
  @file_perm = system_config.file_permission || Fluent::DEFAULT_FILE_PERMISSION
  @need_lock = system_config.workers > 1
-
- # https://github.com/fluent/fluentd/issues/3569
- @need_ruby_on_macos_workaround = false
- if @append && Fluent.macos?
- condition = Gem::Dependency.new('', [">= 2.7.0", "< 3.1.0"])
- @need_ruby_on_macos_workaround = true if condition.match?('', RUBY_VERSION)
- end
  end

  def multi_workers_ready?
@@ -212,17 +218,17 @@ module Fluent::Plugin
  FileUtils.mkdir_p File.dirname(path), mode: @dir_perm

  writer = case
- when @compress_method.nil?
- method(:write_without_compression)
- when @compress_method == :gzip
- if @buffer.compress != :gzip || @recompress
- method(:write_gzip_with_compression)
- else
- method(:write_gzip_from_gzipped_chunk)
- end
- else
- raise "BUG: unknown compression method #{@compress_method}"
- end
+ when @compress_method.nil?
+ method(:write_without_compression)
+ when @compress_method != :text
+ if @buffer.compress == :text || @recompress
+ method(:write_with_compression).curry.call(@compress_method)
+ else
+ method(:write_from_compressed_chunk).curry.call(@compress_method)
+ end
+ else
+ raise "BUG: unknown compression method #{@compress_method}"
+ end

  if @append
  if @need_lock
@@ -244,26 +250,26 @@ module Fluent::Plugin

  def write_without_compression(path, chunk)
  File.open(path, "ab", @file_perm) do |f|
- if @need_ruby_on_macos_workaround
- content = chunk.read()
- f.puts content
- else
- chunk.write_to(f)
- end
+ chunk.write_to(f)
  end
  end

- def write_gzip_with_compression(path, chunk)
+ def write_with_compression(type, path, chunk)
  File.open(path, "ab", @file_perm) do |f|
- gz = Zlib::GzipWriter.new(f)
+ gz = nil
+ if type == :gzip
+ gz = Zlib::GzipWriter.new(f)
+ elsif type == :zstd
+ gz = Zstd::StreamWriter.new(f)
+ end
  chunk.write_to(gz, compressed: :text)
  gz.close
  end
  end

- def write_gzip_from_gzipped_chunk(path, chunk)
+ def write_from_compressed_chunk(type, path, chunk)
  File.open(path, "ab", @file_perm) do |f|
- chunk.write_to(f, compressed: :gzip)
+ chunk.write_to(f, compressed: type)
  end
  end

@@ -280,6 +286,7 @@ module Fluent::Plugin
  def compression_suffix(compress)
  case compress
  when :gzip then '.gz'
+ when :zstd then '.zstd'
  when nil then ''
  else
  raise ArgumentError, "unknown compression type #{compress}"
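Note: taken together, the out_file hunks add zstd as a third compression format next to gzip and plain text, map it to a '.zstd' suffix, and (per the configure change above) refuse a buffer whose own `compress` names a different non-text format. A hypothetical match block, assuming the zstd dependency shipped with this release is installed and with illustrative paths:

    <match app.**>
      @type file
      path /data/logs/app
      compress zstd
      <buffer time>
        @type file
        path /data/logs/buffer/app
        # Leave the buffer's compress at its default, or align it with zstd;
        # mixing formats now raises a Fluent::ConfigError.
      </buffer>
    </match>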
@@ -24,7 +24,7 @@ module Fluent::Plugin
  # @param log [Logger]
  # @param secure [Boolean]
  # @param connection_factory [Proc]
- # @param SocketCache [Fluent::ForwardOutput::SocketCache]
+ # @param socket_cache [Fluent::ForwardOutput::SocketCache]
  def initialize(log:, secure:, connection_factory:, socket_cache:)
  @log = log
  @secure = secure
@@ -36,7 +36,7 @@ module Fluent::Plugin
  @socket_cache && @socket_cache.clear
  end

- # @param ack [Fluent::Plugin::ForwardOutput::AckHander::Ack|nil]
+ # @param ack [Fluent::Plugin::ForwardOutput::AckHandler::Ack|nil]
  def connect(host:, port:, hostname:, ack: nil, &block)
  if @socket_cache
  return connect_keepalive(host: host, port: port, hostname: hostname, ack: ack, &block)