fluentd 1.16.4-x86-mingw32 → 1.17.0-x86-mingw32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. checksums.yaml +4 -4
  2. data/.github/DISCUSSION_TEMPLATE/q-a-japanese.yml +50 -0
  3. data/.github/DISCUSSION_TEMPLATE/q-a.yml +47 -0
  4. data/.github/workflows/test-ruby-head.yml +31 -0
  5. data/.github/workflows/test.yml +3 -3
  6. data/CHANGELOG.md +50 -0
  7. data/README.md +1 -1
  8. data/Rakefile +1 -1
  9. data/fluentd.gemspec +9 -1
  10. data/lib/fluent/command/binlog_reader.rb +1 -1
  11. data/lib/fluent/config/configure_proxy.rb +2 -2
  12. data/lib/fluent/config/types.rb +1 -1
  13. data/lib/fluent/configurable.rb +2 -2
  14. data/lib/fluent/counter/mutex_hash.rb +1 -1
  15. data/lib/fluent/fluent_log_event_router.rb +0 -2
  16. data/lib/fluent/plugin/buf_file.rb +1 -1
  17. data/lib/fluent/plugin/buffer/file_chunk.rb +1 -1
  18. data/lib/fluent/plugin/buffer/file_single_chunk.rb +2 -3
  19. data/lib/fluent/plugin/buffer.rb +75 -68
  20. data/lib/fluent/plugin/filter_parser.rb +26 -8
  21. data/lib/fluent/plugin/in_http.rb +18 -53
  22. data/lib/fluent/plugin/in_tail.rb +34 -2
  23. data/lib/fluent/plugin/out_http.rb +125 -13
  24. data/lib/fluent/plugin/owned_by_mixin.rb +0 -1
  25. data/lib/fluent/plugin/parser_json.rb +22 -5
  26. data/lib/fluent/plugin/parser_msgpack.rb +24 -3
  27. data/lib/fluent/plugin_helper/metrics.rb +2 -2
  28. data/lib/fluent/registry.rb +6 -6
  29. data/lib/fluent/test/output_test.rb +1 -1
  30. data/lib/fluent/unique_id.rb +1 -1
  31. data/lib/fluent/version.rb +1 -1
  32. data/test/log/test_console_adapter.rb +10 -3
  33. data/test/plugin/data/log_numeric/01.log +0 -0
  34. data/test/plugin/data/log_numeric/02.log +0 -0
  35. data/test/plugin/data/log_numeric/12.log +0 -0
  36. data/test/plugin/data/log_numeric/14.log +0 -0
  37. data/test/plugin/test_buffer.rb +59 -0
  38. data/test/plugin/test_in_http.rb +23 -1
  39. data/test/plugin/test_in_tail.rb +141 -0
  40. data/test/plugin/test_out_http.rb +128 -0
  41. data/test/plugin/test_owned_by.rb +0 -1
  42. data/test/plugin/test_parser_json.rb +106 -0
  43. data/test/plugin/test_parser_msgpack.rb +127 -0
  44. data/test/plugin/test_storage.rb +0 -1
  45. data/test/plugin_helper/test_child_process.rb +4 -4
  46. metadata +101 -4
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 55d9dff2b2c0dc6754261babb59c10b05cb4990b04366a10b6c3ed531e939ef6
-  data.tar.gz: 3fe6adb800c7f359b4dae8ede6c38710549da0bccd6181cc27c95cfa4ec5f776
+  metadata.gz: cc7924012e09f9568cbda96bbb4d1449074a4db721795d74900261b984b04047
+  data.tar.gz: 8c3e8f5ef86237b5c18943ffbf393188338f9dbd3dbcac3d9fb321973ba3b43e
 SHA512:
-  metadata.gz: eed2be6c185fc49df73504535018a7eaf7c5142a5fcebbc08d6e56cba93edc94d1c9ed159456bc52a6925c7e929c8bef4a2f2ca03850798ff52d90054eae849b
-  data.tar.gz: 5c2d5c814e6eeffdbafd831f0fa04ab79dc0f4979c3e3fa35dae941d7cec35c800b735f6c2efafacf91ae265e65d7d1d171ca941d28c60309399f74e61aad554
+  metadata.gz: 7998a2f12349a767e7952dcf3a17a108cfb0f2e2063524862d97b83159012e8b753e46d2e4893e235e6a4bb1d43533f3068dc955601533fddf839f75edcca096
+  data.tar.gz: '09dac9716859404ddb3b563408cab32921c894300f2701b3e7aff89a608be154d0090e321adbbed9e0615459b06a4be3038f3e5b16206e624a0e1e770970c803'
data/.github/DISCUSSION_TEMPLATE/q-a-japanese.yml ADDED
@@ -0,0 +1,50 @@
+title: "[QA (Japanese)]"
+labels: ["Q&A (Japanese)"]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        This category is for asking questions casually in Japanese. If you find someone else having trouble, please go ahead and answer them.
+  - type: textarea
+    id: question
+    attributes:
+      label: What you want to achieve
+      description: |
+        Describe what you are having trouble with. Show what you tried and the actual results.
+        If the expected behavior differs from the actual results, we recommend describing that as well.
+      render: markdown
+    validations:
+      required: true
+  - type: textarea
+    id: configuration
+    attributes:
+      label: Your configuration
+      description: |
+        Describe the configuration you used to try to achieve the expected behavior. (e.g. paste the contents of fluentd.conf)
+      render: apache
+  - type: textarea
+    id: logs
+    attributes:
+      label: Logs
+      description: |
+        Provide the Fluentd logs. Error logs will help with answering. (e.g. paste the contents of fluentd.log)
+      render: shell
+  - type: textarea
+    id: environment
+    attributes:
+      label: Environment
+      description: |
+        - Fluentd or td-agent version: `fluentd --version` or `td-agent --version`
+        - Operating system: `cat /etc/os-release`
+        - Kernel version: `uname -r`
+
+        Without information about your environment, the problem may not be reproducible and nobody may be able to answer.
+        We recommend filling in the necessary information.
+      value: |
+        - Fluentd version:
+        - TD Agent version:
+        - Fluent Package version:
+        - Docker image (tag):
+        - Operating system:
+        - Kernel version:
+      render: markdown
data/.github/DISCUSSION_TEMPLATE/q-a.yml ADDED
@@ -0,0 +1,47 @@
+title: "[Q&A]"
+labels: ["Q&A"]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        It is recommended to support each other.
+  - type: textarea
+    id: question
+    attributes:
+      label: What is a problem?
+      description: |
+        A clear and concise description of what you want to happen.
+        What exactly did you do (or not do) that was effective (or ineffective)?
+      render: markdown
+    validations:
+      required: true
+  - type: textarea
+    id: configuration
+    attributes:
+      label: Describe the configuration of Fluentd
+      description: |
+        If there is the actual configuration of Fluentd, it will help.
+  - type: textarea
+    id: logs
+    attributes:
+      label: Describe the logs of Fluentd
+      description: |
+        If there are error logs of Fluentd, it will help.
+  - type: textarea
+    id: environment
+    attributes:
+      label: Environment
+      description: |
+        - Fluentd or td-agent version: `fluentd --version` or `td-agent --version`
+        - Operating system: `cat /etc/os-release`
+        - Kernel version: `uname -r`
+
+        Please describe your environment information. It will help with support.
+      value: |
+        - Fluentd version:
+        - TD Agent version:
+        - Fluent Package version:
+        - Docker image (tag):
+        - Operating system:
+        - Kernel version:
+      render: markdown
data/.github/workflows/test-ruby-head.yml ADDED
@@ -0,0 +1,31 @@
+name: Test with Ruby head
+
+on:
+  schedule:
+    - cron: '11 14 * * 0'
+  workflow_dispatch:
+
+jobs:
+  test:
+    runs-on: ${{ matrix.os }}
+    continue-on-error: true
+    strategy:
+      fail-fast: false
+      matrix:
+        os: ['ubuntu-latest', 'macos-latest', 'windows-latest']
+        ruby-version: ['head']
+
+    name: Ruby ${{ matrix.ruby-version }} on ${{ matrix.os }}
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Ruby
+      uses: ruby/setup-ruby@v1
+      with:
+        ruby-version: ${{ matrix.ruby-version }}
+    - name: Install addons
+      if: ${{ matrix.os == 'ubuntu-latest' }}
+      run: sudo apt-get install libgmp3-dev libcap-ng-dev
+    - name: Install dependencies
+      run: bundle install
+    - name: Run tests
+      run: bundle exec rake test TESTOPTS="-v --no-show-detail-immediately"
data/.github/workflows/test.yml CHANGED
@@ -2,9 +2,9 @@ name: Test
 
 on:
   push:
-    branches: [v1.16]
+    branches: [master]
   pull_request:
-    branches: [v1.16]
+    branches: [master]
 
 jobs:
   test:
@@ -29,4 +29,4 @@ jobs:
     - name: Install dependencies
       run: bundle install
     - name: Run tests
-      run: bundle exec rake test TESTOPTS=-v
+      run: bundle exec rake test TESTOPTS="-v --no-show-detail-immediately"
data/CHANGELOG.md CHANGED
@@ -1,5 +1,55 @@
+# v1.17
+
+## Release v1.17.0 - 2024/04/30
+
+### Enhancement
+
+* in_http: Recognize CSP reports as JSON data
+  https://github.com/fluent/fluentd/pull/4282
+* out_http: Add option to reuse connections
+  https://github.com/fluent/fluentd/pull/4330
+* in_tail: Expand glob capability for square brackets and one character matcher
+  https://github.com/fluent/fluentd/pull/4401
+* out_http: Support AWS Signature Version 4 authentication
+  https://github.com/fluent/fluentd/pull/4459
+
+### Bug Fix
+
+* Make sure `parser_json` and `parser_msgpack` return `Hash`.
+  Make `parser_json` and `parser_msgpack` accept only `Hash` or `Array` of `Hash`.
+  https://github.com/fluent/fluentd/pull/4474
+* filter_parser: Add error event for multiple parsed results
+  https://github.com/fluent/fluentd/pull/4478
+
+### Misc
+
+* Raise minimum required ruby version
+  https://github.com/fluent/fluentd/pull/4288
+* Require missing dependent gems as of Ruby 3.4-dev
+  https://github.com/fluent/fluentd/pull/4411
+* Minor code refactoring
+  https://github.com/fluent/fluentd/pull/4294
+  https://github.com/fluent/fluentd/pull/4299
+  https://github.com/fluent/fluentd/pull/4302
+  https://github.com/fluent/fluentd/pull/4320
+* CI fixes
+  https://github.com/fluent/fluentd/pull/4369
+  https://github.com/fluent/fluentd/pull/4433
+  https://github.com/fluent/fluentd/pull/4452
+  https://github.com/fluent/fluentd/pull/4477
+* github: unify YAML file extension to .yml
+  https://github.com/fluent/fluentd/pull/4429
+
 # v1.16
 
+## Release v1.16.5 - 2024/03/27
+
+### Bug Fix
+
+* Buffer: Fix emit error of v1.16.4 sometimes failing to process large data
+  exceeding chunk size limit
+  https://github.com/fluent/fluentd/pull/4447
+
 ## Release v1.16.4 - 2024/03/14
 
 ### Bug Fix
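The `parser_json` / `parser_msgpack` fix above (PR #4474) tightens what a parsed payload may be: a `Hash`, or an `Array` whose elements are all `Hash`. A minimal Ruby sketch of that contract; the helper name and local error class here are illustrative, not Fluentd's actual internals:

```ruby
require "json"

# Illustrative only; Fluentd's real checks live in
# lib/fluent/plugin/parser_json.rb and parser_msgpack.rb.
class ParserError < StandardError; end

def ensure_hash_records(value)
  return value if value.is_a?(Hash)
  return value if value.is_a?(Array) && value.all?(Hash)

  raise ParserError, "parsed result is neither Hash nor Array of Hash: #{value.class}"
end

ensure_hash_records(JSON.parse('{"key":"value"}'))    # => {"key"=>"value"}
ensure_hash_records(JSON.parse('[{"a":1},{"b":2}]'))  # => [{"a"=>1}, {"b"=>2}]
ensure_hash_records(JSON.parse('"just a string"'))    # raises ParserError
```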
data/README.md CHANGED
@@ -29,7 +29,7 @@ Fluentd: Open-Source Log Collector
 
 ### Prerequisites
 
-- Ruby 2.4 or later
+- Ruby 2.7 or later
 - git
 
 `git` should be in `PATH`. On Windows, you can use `Github for Windows` and `GitShell` for easy setup.
data/Rakefile CHANGED
@@ -13,7 +13,7 @@ task test: [:base_test]
 namespace :build do
   desc 'Build gems for all platforms'
   task :all do
-    Bundler.with_clean_env do
+    Bundler.with_original_env do
       %w[ruby x86-mingw32 x64-mingw32 x64-mingw-ucrt].each do |name|
         ENV['GEM_BUILD_FAKE_PLATFORM'] = name
         Rake::Task["build"].execute
data/fluentd.gemspec CHANGED
@@ -16,7 +16,7 @@ Gem::Specification.new do |gem|
   gem.require_paths = ["lib"]
   gem.license = "Apache-2.0"
 
-  gem.required_ruby_version = '>= 2.4'
+  gem.required_ruby_version = '>= 2.7'
 
   gem.add_runtime_dependency("bundler")
   gem.add_runtime_dependency("msgpack", [">= 1.3.1", "< 2.0.0"])
@@ -30,6 +30,11 @@ Gem::Specification.new do |gem|
   gem.add_runtime_dependency("strptime", [">= 0.2.4", "< 1.0.0"])
   gem.add_runtime_dependency("webrick", ["~> 1.4"])
 
+  # gems that aren't default gems as of Ruby 3.4
+  gem.add_runtime_dependency("base64", ["~> 0.2"])
+  gem.add_runtime_dependency("csv", ["~> 3.2"])
+  gem.add_runtime_dependency("drb", ["~> 2.2"])
+
   # build gem for a certain platform. see also Rakefile
   fake_platform = ENV['GEM_BUILD_FAKE_PLATFORM'].to_s
   gem.platform = fake_platform unless fake_platform.empty?
@@ -51,4 +56,7 @@ Gem::Specification.new do |gem|
   gem.add_development_dependency("oj", [">= 2.14", "< 4"])
   gem.add_development_dependency("async", "~> 1.23")
   gem.add_development_dependency("async-http", ">= 0.50.0")
+  gem.add_development_dependency("aws-sigv4", ["~> 1.8"])
+  gem.add_development_dependency("aws-sdk-core", ["~> 3.191"])
+  gem.add_development_dependency("rexml", ["~> 3.2"])
 end
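Background on the three new runtime dependencies: base64, csv, and drb leave Ruby's default gem set in 3.4, so a bare `require` starts warning on Ruby 3.3 and fails on 3.4 unless the gem is declared, which is exactly what the gemspec lines above do. A quick way to see they still load once declared (illustrative, not from the fluentd source):

```ruby
# With the gemspec entries above in place, these requires keep working on
# Ruby 3.4; without them, Ruby 3.3 prints "will no longer be part of the
# default gems" warnings and 3.4 raises LoadError if the gems are absent.
require "base64"
require "csv"
require "drb"

puts Base64.strict_encode64("fluentd") # => "Zmx1ZW50ZA=="
```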
data/lib/fluent/command/binlog_reader.rb CHANGED
@@ -130,7 +130,7 @@ module BinlogReaderCommand
     private
 
     def configure_option_parser
-      @options.merge!(config_params: {})
+      @options[:config_params] = {}
 
       @opt_parser.banner = "Usage: fluent-binlog-reader #{self.class.to_s.split('::').last.downcase} [options] file"
 
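The one-line change above is a micro-simplification: assigning the key directly reaches the same end state without allocating the temporary Hash that `merge!` consumes. A quick illustration in plain Ruby (not from the fluentd source):

```ruby
options = {}
options.merge!(config_params: {})  # builds {config_params: {}} just to merge it
options[:config_params] = {}       # same end state, no intermediate Hash
options                            # => {:config_params=>{}}
```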
data/lib/fluent/config/configure_proxy.rb CHANGED
@@ -201,7 +201,7 @@ module Fluent
 
     def overwrite_defaults(other) # other is owner plugin's corresponding proxy
       self.defaults = self.defaults.merge(other.defaults)
-      self.sections.keys.each do |section_key|
+      self.sections.each_key do |section_key|
         if other.sections.has_key?(section_key)
           self.sections[section_key].overwrite_defaults(other.sections[section_key])
         end
@@ -274,7 +274,7 @@ module Fluent
       option_value_type!(name, opts, :deprecated, String)
       option_value_type!(name, opts, :obsoleted, String)
       if type == :enum
-        if !opts.has_key?(:list) || !opts[:list].is_a?(Array) || opts[:list].empty? || !opts[:list].all?{|v| v.is_a?(Symbol) }
+        if !opts.has_key?(:list) || !opts[:list].is_a?(Array) || opts[:list].empty? || !opts[:list].all?(Symbol)
           raise ArgumentError, "#{name}: enum parameter requires :list of Symbols"
         end
       end
data/lib/fluent/config/types.rb CHANGED
@@ -123,7 +123,7 @@ module Fluent
 
       s = val.to_sym
       list = opts[:list]
-      raise "Plugin BUG: config type 'enum' requires :list of symbols" unless list.is_a?(Array) && list.all?{|v| v.is_a? Symbol }
+      raise "Plugin BUG: config type 'enum' requires :list of symbols" unless list.is_a?(Array) && list.all?(Symbol)
       unless list.include?(s)
         raise ConfigError, "valid options are #{list.join(',')} but got #{val}"
       end
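Both `all?` changes above (in configure_proxy.rb and types.rb) rely on `Enumerable#all?` accepting a pattern argument since Ruby 2.5; the pattern is matched against each element with `===`, so `all?(Symbol)` is equivalent to the block it replaces:

```ruby
[:a, :b].all?(Symbol)   # => true, same as all? { |v| v.is_a?(Symbol) }
[:a, "b"].all?(Symbol)  # => false
[1, 2, 3].all?(1..5)    # => true; any ===-capable pattern works
```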
data/lib/fluent/configurable.rb CHANGED
@@ -31,13 +31,13 @@ module Fluent
     super
     # to simulate implicit 'attr_accessor' by config_param / config_section and its value by config_set_default
     proxy = self.class.merged_configure_proxy
-    proxy.params.keys.each do |name|
+    proxy.params.each_key do |name|
       next if name.to_s.start_with?('@')
       if proxy.defaults.has_key?(name)
         instance_variable_set("@#{name}".to_sym, proxy.defaults[name])
       end
     end
-    proxy.sections.keys.each do |name|
+    proxy.sections.each_key do |name|
       next if name.to_s.start_with?('@')
       subproxy = proxy.sections[name]
       if subproxy.multi?
data/lib/fluent/counter/mutex_hash.rb CHANGED
@@ -54,7 +54,7 @@ module Fluent
         if mutex.try_lock
           locks[key] = mutex
         else
-          locks.values.each(&:unlock)
+          locks.each_value(&:unlock)
           locks = {} # flush locked keys
           break
         end
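The `each_key` / `each_value` swaps above (configure_proxy.rb, configurable.rb, and this mutex_hash.rb hunk) share one motivation: those methods iterate over the Hash directly instead of materializing an intermediate Array first:

```ruby
h = { a: 1, b: 2 }
h.keys.each  { |k| puts k } # allocates h.keys as a new Array, then iterates
h.each_key   { |k| puts k } # iterates in place, no intermediate Array
h.each_value { |v| puts v } # same idea for values
```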
data/lib/fluent/fluent_log_event_router.rb CHANGED
@@ -47,8 +47,6 @@ module Fluent
       # it's not suppressed in default event router for non-log-event events
       log_event_router.suppress_missing_match!
 
-      log_event_router = log_event_router
-
       unmatched_tags = Fluent::Log.event_tags.select { |t| !log_event_router.match?(t) }
       unless unmatched_tags.empty?
         $log.warn "match for some tags of log events are not defined in @FLUENT_LOG label (to be ignored)", tags: unmatched_tags
data/lib/fluent/plugin/buf_file.rb CHANGED
@@ -56,7 +56,7 @@ module Fluent
 
       @variable_store = Fluent::VariableStore.fetch_or_build(:buf_file)
 
-      multi_workers_configured = owner.system_config.workers > 1 ? true : false
+      multi_workers_configured = owner.system_config.workers > 1
 
       using_plugin_root_dir = false
       unless @path
data/lib/fluent/plugin/buffer/file_chunk.rb CHANGED
@@ -399,7 +399,7 @@ module Fluent
       end
 
       if chunk.slice(0, 2) == BUFFER_HEADER
-        size = chunk.slice(2, 4).unpack('N').first
+        size = chunk.slice(2, 4).unpack1('N')
         if size
           return Fluent::MessagePackFactory.msgpack_unpacker(symbolize_keys: true).feed(chunk.slice(6, size)).read rescue nil
         end
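`String#unpack1` (Ruby 2.4+) returns the first unpacked value directly, so the change above drops the single-element Array that `unpack(...).first` built:

```ruby
# 'N' reads a 32-bit big-endian unsigned integer, as in the chunk header above.
"\x00\x00\x00\x2A".unpack("N")        # => [42]
"\x00\x00\x00\x2A".unpack("N").first  # => 42, via a throwaway Array
"\x00\x00\x00\x2A".unpack1("N")       # => 42, no intermediate Array
```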
data/lib/fluent/plugin/buffer/file_single_chunk.rb CHANGED
@@ -238,17 +238,16 @@ module Fluent
        callback.call(file) if callback
      end
 
-      URI_PARSER = URI::Parser.new
      ESCAPE_REGEXP = /[^-_.a-zA-Z0-9]/n
 
      def encode_key(metadata)
        k = @key ? metadata.variables[@key] : metadata.tag
        k ||= ''
-        URI_PARSER.escape(k, ESCAPE_REGEXP)
+        URI::DEFAULT_PARSER.escape(k, ESCAPE_REGEXP)
      end
 
      def decode_key(key)
-        URI_PARSER.unescape(key)
+        URI::DEFAULT_PARSER.unescape(key)
      end
 
      def create_new_chunk(path, metadata, perm)
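`URI::DEFAULT_PARSER` is a ready-made parser instance shipped with the uri library, so the plugin above no longer needs to construct its own `URI::Parser`. Its escape/unescape behave the same way; for example, with the plugin's `ESCAPE_REGEXP` pattern:

```ruby
require "uri"

escape_regexp = /[^-_.a-zA-Z0-9]/n # same pattern as ESCAPE_REGEXP above

URI::DEFAULT_PARSER.escape("tag/with:chars", escape_regexp) # => "tag%2Fwith%3Achars"
URI::DEFAULT_PARSER.unescape("tag%2Fwith%3Achars")          # => "tag/with:chars"
```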
data/lib/fluent/plugin/buffer.rb CHANGED
@@ -764,94 +764,95 @@ module Fluent
         while writing_splits_index < splits.size
           chunk = get_next_chunk.call
           errors = []
+          # The chunk must be locked until being passed to &block.
+          chunk.mon_enter
           modified_chunks << {chunk: chunk, adding_bytesize: 0, errors: errors}
-          chunk.synchronize do
-            raise ShouldRetry unless chunk.writable?
-            staged_chunk_used = true if chunk.staged?
-
-            original_bytesize = committed_bytesize = chunk.bytesize
-            begin
-              while writing_splits_index < splits.size
-                split = splits[writing_splits_index]
-                formatted_split = format ? format.call(split) : nil
 
-                if split.size == 1 # Check BufferChunkOverflowError
-                  determined_bytesize = nil
-                  if @compress != :text
-                    determined_bytesize = nil
-                  elsif formatted_split
-                    determined_bytesize = formatted_split.bytesize
-                  elsif split.first.respond_to?(:bytesize)
-                    determined_bytesize = split.first.bytesize
-                  end
+          raise ShouldRetry unless chunk.writable?
+          staged_chunk_used = true if chunk.staged?
 
-                  if determined_bytesize && determined_bytesize > @chunk_limit_size
-                    # It is a obvious case that BufferChunkOverflowError should be raised here.
-                    # But if it raises here, already processed 'split' or
-                    # the proceeding 'split' will be lost completely.
-                    # So it is a last resort to delay raising such a exception
-                    errors << "a #{determined_bytesize} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
-                    writing_splits_index += 1
-                    next
-                  end
+          original_bytesize = committed_bytesize = chunk.bytesize
+          begin
+            while writing_splits_index < splits.size
+              split = splits[writing_splits_index]
+              formatted_split = format ? format.call(split) : nil
 
-                  if determined_bytesize.nil? || chunk.bytesize + determined_bytesize > @chunk_limit_size
-                    # The split will (might) cause size over so keep already processed
-                    # 'split' content here (allow performance regression a bit).
-                    chunk.commit
-                    committed_bytesize = chunk.bytesize
-                  end
+              if split.size == 1 # Check BufferChunkOverflowError
+                determined_bytesize = nil
+                if @compress != :text
+                  determined_bytesize = nil
+                elsif formatted_split
+                  determined_bytesize = formatted_split.bytesize
+                elsif split.first.respond_to?(:bytesize)
+                  determined_bytesize = split.first.bytesize
                 end
 
-                if format
-                  chunk.concat(formatted_split, split.size)
-                else
-                  chunk.append(split, compress: @compress)
+                if determined_bytesize && determined_bytesize > @chunk_limit_size
+                  # It is a obvious case that BufferChunkOverflowError should be raised here.
+                  # But if it raises here, already processed 'split' or
+                  # the proceeding 'split' will be lost completely.
+                  # So it is a last resort to delay raising such a exception
+                  errors << "a #{determined_bytesize} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
+                  writing_splits_index += 1
+                  next
                 end
-                adding_bytes = chunk.bytesize - committed_bytesize
 
-                if chunk_size_over?(chunk) # split size is larger than difference between size_full? and size_over?
-                  chunk.rollback
+                if determined_bytesize.nil? || chunk.bytesize + determined_bytesize > @chunk_limit_size
+                  # The split will (might) cause size over so keep already processed
+                  # 'split' content here (allow performance regression a bit).
+                  chunk.commit
                   committed_bytesize = chunk.bytesize
+                end
+              end
 
-                  if split.size == 1 # Check BufferChunkOverflowError again
-                    if adding_bytes > @chunk_limit_size
-                      errors << "concatenated/appended a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
-                      writing_splits_index += 1
-                      next
-                    else
-                      # As already processed content is kept after rollback, then unstaged chunk should be queued.
-                      # After that, re-process current split again.
-                      # New chunk should be allocated, to do it, modify @stage and so on.
-                      synchronize { @stage.delete(modified_metadata) }
-                      staged_chunk_used = false
-                      chunk.unstaged!
-                      break
-                    end
-                  end
+              if format
+                chunk.concat(formatted_split, split.size)
+              else
+                chunk.append(split, compress: @compress)
+              end
+              adding_bytes = chunk.bytesize - committed_bytesize
 
-                  if chunk_size_full?(chunk) || split.size == 1
-                    enqueue_chunk_before_retry = true
+              if chunk_size_over?(chunk) # split size is larger than difference between size_full? and size_over?
+                chunk.rollback
+                committed_bytesize = chunk.bytesize
+
+                if split.size == 1 # Check BufferChunkOverflowError again
+                  if adding_bytes > @chunk_limit_size
+                    errors << "concatenated/appended a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
+                    writing_splits_index += 1
+                    next
                   else
-                    splits_count *= 10
+                    # As already processed content is kept after rollback, then unstaged chunk should be queued.
+                    # After that, re-process current split again.
+                    # New chunk should be allocated, to do it, modify @stage and so on.
+                    synchronize { @stage.delete(modified_metadata) }
+                    staged_chunk_used = false
+                    chunk.unstaged!
+                    break
                   end
+                end
 
-                  raise ShouldRetry
+                if chunk_size_full?(chunk) || split.size == 1
+                  enqueue_chunk_before_retry = true
+                else
+                  splits_count *= 10
                 end
 
-                writing_splits_index += 1
+                raise ShouldRetry
+              end
 
-                if chunk_size_full?(chunk)
-                  break
-                end
+              writing_splits_index += 1
+
+              if chunk_size_full?(chunk)
+                break
              end
-            rescue
-              chunk.purge if chunk.unstaged? # unstaged chunk will leak unless purge it
-              raise
            end
-
-            modified_chunks.last[:adding_bytesize] = chunk.bytesize - original_bytesize
+          rescue
+            chunk.purge if chunk.unstaged? # unstaged chunk will leak unless purge it
+            raise
          end
+
+          modified_chunks.last[:adding_bytesize] = chunk.bytesize - original_bytesize
        end
        modified_chunks.each do |data|
          block.call(data[:chunk], data[:adding_bytesize], data[:errors])
@@ -863,9 +864,15 @@ module Fluent
          if chunk.unstaged?
            chunk.purge rescue nil
          end
+          chunk.mon_exit rescue nil
        end
        enqueue_chunk(metadata) if enqueue_chunk_before_retry
        retry
+      ensure
+        modified_chunks.each do |data|
+          chunk = data[:chunk]
+          chunk.mon_exit
+        end
      end
 
      STATS_KEYS = [
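The buffer.rb restructuring above replaces `chunk.synchronize do ... end` with an explicit `chunk.mon_enter`, so the lock is still held when the chunk is later handed to `&block`; the new `ensure` clause guarantees a matching `mon_exit` on every path. A minimal sketch of that lock-across-scope pattern (not Fluentd's actual buffer code), assuming chunks include `MonitorMixin` as Fluentd's chunks do:

```ruby
require "monitor"

class Chunk
  include MonitorMixin # provides mon_enter / mon_exit / synchronize
end

locked_chunks = []
begin
  3.times do
    chunk = Chunk.new
    chunk.mon_enter        # acquire; deliberately NOT released in this scope
    locked_chunks << chunk
    # ... write splits into the chunk while it stays locked ...
  end

  locked_chunks.each do |chunk|
    # hand each still-locked chunk to the caller's block here
  end
ensure
  locked_chunks.each(&:mon_exit) # release every acquired lock exactly once
end
```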
data/lib/fluent/plugin/filter_parser.rb CHANGED
@@ -70,6 +70,13 @@ module Fluent::Plugin
        end
      end
      begin
+        # Note: https://github.com/fluent/fluentd/issues/4100
+        # If the parser returns multiple records from one raw_value,
+        # this returns only the first one record.
+        # This should be fixed in the future version.
+        result_time = nil
+        result_record = nil
+
        @parser.parse(raw_value) do |t, values|
          if values
            t = if @reserve_time
@@ -79,20 +86,31 @@ module Fluent::Plugin
                end
            @accessor.delete(record) if @remove_key_name_field
            r = handle_parsed(tag, record, t, values)
-            return t, r
+
+            if result_record.nil?
+              result_time = t
+              result_record = r
+            else
+              if @emit_invalid_record_to_error
+                router.emit_error_event(tag, t, r, Fluent::Plugin::Parser::ParserError.new(
+                  "Could not emit the event. The parser returned multiple results, but currently filter_parser plugin only returns the first parsed result. Raw data: '#{raw_value}'"
+                ))
+              end
+            end
          else
            if @emit_invalid_record_to_error
              router.emit_error_event(tag, time, record, Fluent::Plugin::Parser::ParserError.new("pattern not matched with data '#{raw_value}'"))
            end
-            if @reserve_data
-              t = time
-              r = handle_parsed(tag, record, time, {})
-              return t, r
-            else
-              return FAILED_RESULT
-            end
+
+            next unless @reserve_data
+            next unless result_record.nil?
+
+            result_time = time
+            result_record = handle_parsed(tag, record, time, {})
          end
        end
+
+        return result_time, result_record
      rescue Fluent::Plugin::Parser::ParserError => e
        if @emit_invalid_record_to_error
          raise e
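The rewritten flow above collects at most one parsed result: the first `(t, values)` pair wins, later pairs are routed to the error stream (see the note referencing issue #4100), and a single `return result_time, result_record` replaces the early `return`s inside the parse block. A condensed sketch of that first-result-wins control flow, with names simplified from the hunk:

```ruby
# Simplified sketch; the real logic lives in filter_parser's filter_with_time.
def first_result_only(parsed_pairs)
  result = nil
  overflow = [] # stand-in for the error events emitted for extra results

  parsed_pairs.each do |time, record|
    if result.nil?
      result = [time, record]
    else
      overflow << [time, record]
    end
  end

  [result, overflow]
end

first_result_only([[100, { "a" => 1 }], [101, { "b" => 2 }]])
# => [[100, {"a"=>1}], [[101, {"b"=>2}]]]
```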