fluentd 1.16.4-x86-mingw32 → 1.16.6-x86-mingw32

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 55d9dff2b2c0dc6754261babb59c10b05cb4990b04366a10b6c3ed531e939ef6
-  data.tar.gz: 3fe6adb800c7f359b4dae8ede6c38710549da0bccd6181cc27c95cfa4ec5f776
+  metadata.gz: bf2c26992ab4316c7fdc3b1244a3cdcb80f071f0761d036946672478f480370d
+  data.tar.gz: 27690f3555b7e3c936b3f31a8e5994e52430360dd0a3239c04028e59fff684aa
 SHA512:
-  metadata.gz: eed2be6c185fc49df73504535018a7eaf7c5142a5fcebbc08d6e56cba93edc94d1c9ed159456bc52a6925c7e929c8bef4a2f2ca03850798ff52d90054eae849b
-  data.tar.gz: 5c2d5c814e6eeffdbafd831f0fa04ab79dc0f4979c3e3fa35dae941d7cec35c800b735f6c2efafacf91ae265e65d7d1d171ca941d28c60309399f74e61aad554
+  metadata.gz: ebc7ad56450ef9e1ecad23361c2863a56b2024afaea528a3b709d25b26077dde3c30b10dee44bbdf4c74e7d2ef6f58dd2136a3655e2904d55f16a01c206be8e9
+  data.tar.gz: c7a760d3db858034fb4d959538e852fab07221e732f4a49c4976b4d8382d2713ea8beba6628713291becc3060cde34f178fa54635fafa1745e347f35d307a877
@@ -3,8 +3,14 @@ name: Test
 on:
   push:
     branches: [v1.16]
+    paths-ignore:
+      - '*.md'
+      - 'lib/fluent/version.rb'
   pull_request:
     branches: [v1.16]
+    paths-ignore:
+      - '*.md'
+      - 'lib/fluent/version.rb'
 
 jobs:
   test:
@@ -29,4 +35,4 @@ jobs:
       - name: Install dependencies
         run: bundle install
       - name: Run tests
-        run: bundle exec rake test TESTOPTS=-v
+        run: bundle exec rake test TESTOPTS="-v --no-show-detail-immediately"
data/CHANGELOG.md CHANGED
@@ -1,5 +1,31 @@
 # v1.16
 
+## Release v1.16.6 - 2024/08/16
+
+### Bug Fix
+
+* YAML config syntax: Fix issue where `$log_level` element was not supported correctly
+  https://github.com/fluent/fluentd/pull/4486
+* parser_json: Fix wrong LoadError warning
+  https://github.com/fluent/fluentd/pull/4592
+* `fluentd` command: Fix `--plugin` (`-p`) option not to overwrite default value
+  https://github.com/fluent/fluentd/pull/4605
+
+### Misc
+
+* out_file: Add warn message for symlink_path setting
+  https://github.com/fluent/fluentd/pull/4512
+* Keep console gem v1.23 to avoid LoadError
+  https://github.com/fluent/fluentd/pull/4510
+
+## Release v1.16.5 - 2024/03/27
+
+### Bug Fix
+
+* Buffer: Fix emit error of v1.16.4 sometimes failing to process large data
+  exceeding chunk size limit
+  https://github.com/fluent/fluentd/pull/4447
+
 ## Release v1.16.4 - 2024/03/14
 
 ### Bug Fix
data/fluentd.gemspec CHANGED
@@ -29,6 +29,7 @@ Gem::Specification.new do |gem|
   gem.add_runtime_dependency("tzinfo-data", ["~> 1.0"])
   gem.add_runtime_dependency("strptime", [">= 0.2.4", "< 1.0.0"])
   gem.add_runtime_dependency("webrick", ["~> 1.4"])
+  gem.add_runtime_dependency("console", ["< 1.24"])
 
   # build gem for a certain platform. see also Rakefile
   fake_platform = ENV['GEM_BUILD_FAKE_PLATFORM'].to_s
@@ -45,7 +46,9 @@ Gem::Specification.new do |gem|
   gem.add_development_dependency("parallel_tests", ["~> 0.15.3"])
   gem.add_development_dependency("simplecov", ["~> 0.7"])
   gem.add_development_dependency("rr", ["~> 3.0"])
-  gem.add_development_dependency("timecop", ["~> 0.9"])
+  # timecop v0.9.9 supports `Process.clock_gettime`. It breaks some tests.
+  # (https://github.com/fluent/fluentd/pull/4521)
+  gem.add_development_dependency("timecop", ["< 0.9.9"])
   gem.add_development_dependency("test-unit", ["~> 3.3"])
   gem.add_development_dependency("test-unit-rr", ["~> 1.0"])
   gem.add_development_dependency("oj", [">= 2.14", "< 4"])
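The two pins above implement the "Keep console gem v1.23 to avoid LoadError" and timecop items from the changelog. For consumers who vendor fluentd through Bundler, a minimal sketch of equivalent Gemfile pins (illustrative only; the released gemspec already enforces the `console` bound):

```ruby
# Gemfile — sketch; fluentd 1.16.6's own gemspec already carries the console pin
gem "fluentd", "~> 1.16.6"
gem "console", "< 1.24"   # mirrors the new runtime dependency bound
gem "timecop", "< 0.9.9"  # only relevant when running fluentd's own test suite
```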
@@ -46,7 +46,7 @@ op.on('--show-plugin-config=PLUGIN', "[DEPRECATED] Show PLUGIN configuration and
 }
 
 op.on('-p', '--plugin DIR', "add plugin directory") {|s|
-  (cmd_opts[:plugin_dirs] ||= []) << s
+  (cmd_opts[:plugin_dirs] ||= default_opts[:plugin_dirs]) << s
 }
 
 op.on('-I PATH', "add library path") {|s|
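This one-line change is the whole `--plugin` fix: with `||= []`, the first `-p` seeded an empty array that then shadowed the default plugin directories when command-line options were merged over the defaults; seeding from `default_opts[:plugin_dirs]` makes `-p` append instead. A minimal sketch of the difference (the default path and the merge step are illustrative, not taken from the diff):

```ruby
default_opts = { plugin_dirs: ["/etc/fluent/plugin"] }  # hypothetical default

# Old behavior: -p starts from [], so merging cmd_opts over default_opts
# replaces the default directories entirely.
cmd_opts = {}
(cmd_opts[:plugin_dirs] ||= []) << "/my/plugins"
default_opts.merge(cmd_opts)[:plugin_dirs]  # => ["/my/plugins"]

# Fixed behavior: -p starts from the default list, so the option appends.
cmd_opts = {}
(cmd_opts[:plugin_dirs] ||= default_opts[:plugin_dirs]) << "/my/plugins"
default_opts.merge(cmd_opts)[:plugin_dirs]  # => ["/etc/fluent/plugin", "/my/plugins"]
```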
@@ -138,6 +138,10 @@ module Fluent
       sb.add_line('@id', v)
     end
 
+    if (v = config.delete('$log_level'))
+      sb.add_line('@log_level', v)
+    end
+
     config.each do |key, val|
       if val.is_a?(Array)
         val.each do |v|
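With this addition, `$log_level` in YAML configs is emitted as the `@log_level` system directive, mirroring the existing `$type` → `@type` and `$id` → `@id` handling. A minimal sketch of a YAML config that the fix enables (values are illustrative; see the test_config.rb changes below for the exact fixture):

```yaml
config:
  - source:
      $type: tcp
      $log_level: info    # now translated to @log_level
      tag: tag.tcp
  - match:
      $tag: tag.*
      $type: stdout
      $log_level: debug
```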
@@ -764,94 +764,95 @@ module Fluent
           while writing_splits_index < splits.size
             chunk = get_next_chunk.call
             errors = []
+            # The chunk must be locked until being passed to &block.
+            chunk.mon_enter
             modified_chunks << {chunk: chunk, adding_bytesize: 0, errors: errors}
-            chunk.synchronize do
-              raise ShouldRetry unless chunk.writable?
-              staged_chunk_used = true if chunk.staged?
-
-              original_bytesize = committed_bytesize = chunk.bytesize
-              begin
-                while writing_splits_index < splits.size
-                  split = splits[writing_splits_index]
-                  formatted_split = format ? format.call(split) : nil
 
-                  if split.size == 1 # Check BufferChunkOverflowError
-                    determined_bytesize = nil
-                    if @compress != :text
-                      determined_bytesize = nil
-                    elsif formatted_split
-                      determined_bytesize = formatted_split.bytesize
-                    elsif split.first.respond_to?(:bytesize)
-                      determined_bytesize = split.first.bytesize
-                    end
+            raise ShouldRetry unless chunk.writable?
+            staged_chunk_used = true if chunk.staged?
 
-                    if determined_bytesize && determined_bytesize > @chunk_limit_size
-                      # It is a obvious case that BufferChunkOverflowError should be raised here.
-                      # But if it raises here, already processed 'split' or
-                      # the proceeding 'split' will be lost completely.
-                      # So it is a last resort to delay raising such a exception
-                      errors << "a #{determined_bytesize} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
-                      writing_splits_index += 1
-                      next
-                    end
+            original_bytesize = committed_bytesize = chunk.bytesize
+            begin
+              while writing_splits_index < splits.size
+                split = splits[writing_splits_index]
+                formatted_split = format ? format.call(split) : nil
 
-                    if determined_bytesize.nil? || chunk.bytesize + determined_bytesize > @chunk_limit_size
-                      # The split will (might) cause size over so keep already processed
-                      # 'split' content here (allow performance regression a bit).
-                      chunk.commit
-                      committed_bytesize = chunk.bytesize
-                    end
+                if split.size == 1 # Check BufferChunkOverflowError
+                  determined_bytesize = nil
+                  if @compress != :text
+                    determined_bytesize = nil
+                  elsif formatted_split
+                    determined_bytesize = formatted_split.bytesize
+                  elsif split.first.respond_to?(:bytesize)
+                    determined_bytesize = split.first.bytesize
                   end
 
-                  if format
-                    chunk.concat(formatted_split, split.size)
-                  else
-                    chunk.append(split, compress: @compress)
+                  if determined_bytesize && determined_bytesize > @chunk_limit_size
+                    # It is a obvious case that BufferChunkOverflowError should be raised here.
+                    # But if it raises here, already processed 'split' or
+                    # the proceeding 'split' will be lost completely.
+                    # So it is a last resort to delay raising such a exception
+                    errors << "a #{determined_bytesize} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
+                    writing_splits_index += 1
+                    next
                   end
-                  adding_bytes = chunk.bytesize - committed_bytesize
 
-                  if chunk_size_over?(chunk) # split size is larger than difference between size_full? and size_over?
-                    chunk.rollback
+                  if determined_bytesize.nil? || chunk.bytesize + determined_bytesize > @chunk_limit_size
+                    # The split will (might) cause size over so keep already processed
+                    # 'split' content here (allow performance regression a bit).
+                    chunk.commit
                     committed_bytesize = chunk.bytesize
+                  end
+                end
 
-                    if split.size == 1 # Check BufferChunkOverflowError again
-                      if adding_bytes > @chunk_limit_size
-                        errors << "concatenated/appended a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
-                        writing_splits_index += 1
-                        next
-                      else
-                        # As already processed content is kept after rollback, then unstaged chunk should be queued.
-                        # After that, re-process current split again.
-                        # New chunk should be allocated, to do it, modify @stage and so on.
-                        synchronize { @stage.delete(modified_metadata) }
-                        staged_chunk_used = false
-                        chunk.unstaged!
-                        break
-                      end
-                    end
+                if format
+                  chunk.concat(formatted_split, split.size)
+                else
+                  chunk.append(split, compress: @compress)
+                end
+                adding_bytes = chunk.bytesize - committed_bytesize
 
-                    if chunk_size_full?(chunk) || split.size == 1
-                      enqueue_chunk_before_retry = true
+                if chunk_size_over?(chunk) # split size is larger than difference between size_full? and size_over?
+                  chunk.rollback
+                  committed_bytesize = chunk.bytesize
+
+                  if split.size == 1 # Check BufferChunkOverflowError again
+                    if adding_bytes > @chunk_limit_size
+                      errors << "concatenated/appended a #{adding_bytes} bytes record (nth: #{writing_splits_index}) is larger than buffer chunk limit size (#{@chunk_limit_size})"
+                      writing_splits_index += 1
+                      next
                     else
-                      splits_count *= 10
+                      # As already processed content is kept after rollback, then unstaged chunk should be queued.
+                      # After that, re-process current split again.
+                      # New chunk should be allocated, to do it, modify @stage and so on.
+                      synchronize { @stage.delete(modified_metadata) }
+                      staged_chunk_used = false
+                      chunk.unstaged!
+                      break
                     end
+                  end
 
-                    raise ShouldRetry
+                  if chunk_size_full?(chunk) || split.size == 1
+                    enqueue_chunk_before_retry = true
+                  else
+                    splits_count *= 10
                   end
 
-                  writing_splits_index += 1
+                  raise ShouldRetry
+                end
 
-                  if chunk_size_full?(chunk)
-                    break
-                  end
+                writing_splits_index += 1
+
+                if chunk_size_full?(chunk)
+                  break
                 end
-              rescue
-                chunk.purge if chunk.unstaged? # unstaged chunk will leak unless purge it
-                raise
               end
-
-              modified_chunks.last[:adding_bytesize] = chunk.bytesize - original_bytesize
+            rescue
+              chunk.purge if chunk.unstaged? # unstaged chunk will leak unless purge it
+              raise
             end
+
+            modified_chunks.last[:adding_bytesize] = chunk.bytesize - original_bytesize
           end
           modified_chunks.each do |data|
             block.call(data[:chunk], data[:adding_bytesize], data[:errors])
@@ -863,9 +864,15 @@ module Fluent
           if chunk.unstaged?
             chunk.purge rescue nil
           end
+          chunk.mon_exit rescue nil
         end
         enqueue_chunk(metadata) if enqueue_chunk_before_retry
         retry
+      ensure
+        modified_chunks.each do |data|
+          chunk = data[:chunk]
+          chunk.mon_exit
+        end
       end
 
       STATS_KEYS = [
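Taken together, the two hunks above replace the per-iteration `chunk.synchronize do ... end` with an explicit `mon_enter`/`mon_exit` pair, so each chunk stays locked from its first append until after it has been handed to `&block`, and the new `ensure` releases every lock on the way out. A minimal sketch of the lock lifecycle using Ruby's stdlib MonitorMixin (which provides `synchronize`, `mon_enter`, and `mon_exit`):

```ruby
require 'monitor'

chunk = Object.new.extend(MonitorMixin)  # stand-in for a buffer chunk

modified = []
begin
  chunk.mon_enter            # lock before the first append, not per iteration
  modified << chunk
  # ... append splits; ShouldRetry paths call mon_exit before retrying ...
  # ... block.call(chunk, ...) runs while the lock is still held ...
ensure
  modified.each(&:mon_exit)  # every acquired lock is released exactly once
end
```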
@@ -172,6 +172,14 @@ module Fluent::Plugin
         log.warn "symlink_path is unavailable on Windows platform. disabled."
         @symlink_path = nil
       else
+        placeholder_validators(:symlink_path, @symlink_path).reject{ |v| v.type == :time }.each do |v|
+          begin
+            v.validate!
+          rescue Fluent::ConfigError => e
+            log.warn "#{e}. This means multiple chunks are competing for a single symlink_path, so some logs may not be taken from the symlink."
+          end
+        end
+
         @buffer.extend SymlinkBufferMixin
         @buffer.symlink_path = @symlink_path
         @buffer.output_plugin_for_symlink = self
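The added validation warns when `symlink_path` omits non-time placeholders that appear in the buffer chunk keys, since multiple chunks would then compete to point a single symlink at themselves. A hedged sketch of a configuration that satisfies the check (paths are illustrative):

```
<match **>
  @type file
  path /var/log/fluent/${tag}/mylog.%Y%m%d.log
  # symlink_path repeats ${tag}, so each chunk key resolves to its own symlink;
  # omitting the placeholder would trigger the new warning.
  symlink_path /var/log/fluent/${tag}/mylog.current.log
  <buffer time,tag>
    timekey 1d
  </buffer>
</match>
```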
@@ -50,23 +50,15 @@ module Fluent
       def configure_json_parser(name)
         case name
         when :oj
-          raise LoadError unless Fluent::OjOptions.available?
-          [Oj.method(:load), Oj::ParseError]
+          return [Oj.method(:load), Oj::ParseError] if Fluent::OjOptions.available?
+
+          log&.info "Oj is not installed, and failing back to Yajl for json parser"
+          configure_json_parser(:yajl)
         when :json then [JSON.method(:load), JSON::ParserError]
         when :yajl then [Yajl.method(:load), Yajl::ParseError]
         else
           raise "BUG: unknown json parser specified: #{name}"
         end
-      rescue LoadError => ex
-        name = :yajl
-        if log
-          if /\boj\z/.match?(ex.message)
-            log.info "Oj is not installed, and failing back to Yajl for json parser"
-          else
-            log.warn ex.message
-          end
-        end
-        retry
       end
 
       def parse(text)
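The rewrite drops the `raise LoadError`/`rescue`/`retry` round trip, which also logged a misleading warning for unrelated LoadErrors, in favor of a direct recursive call into the `:yajl` branch. A usage sketch of the new fallback path (return shape taken from the branches above):

```ruby
# When Fluent::OjOptions.available? is false:
load_proc, error_class = configure_json_parser(:oj)
# logs: "Oj is not installed, and failing back to Yajl for json parser"
# => [Yajl.method(:load), Yajl::ParseError]
```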
@@ -16,6 +16,6 @@
 
 module Fluent
 
-  VERSION = '1.16.4'
+  VERSION = '1.16.6'
 
 end
@@ -128,11 +128,14 @@ class TestFluentdCommand < ::Test::Unit::TestCase
 
   # ATTENTION: This stops taking logs when all `pattern_list` match or timeout,
   # so `patterns_not_match` can test only logs up to that point.
+  # You can pass a block to assert something after log matching.
   def assert_log_matches(cmdline, *pattern_list, patterns_not_match: [], timeout: 20, env: {})
     matched = false
     matched_wrongly = false
-    assert_error_msg = ""
+    error_msg_match = ""
     stdio_buf = ""
+    succeeded_block = true
+    error_msg_block = ""
     begin
       execute_command(cmdline, @tmp_dir, env) do |pid, stdout|
         begin
@@ -163,6 +166,13 @@ class TestFluentdCommand < ::Test::Unit::TestCase
             end
           end
         end
+
+        begin
+          yield if block_given?
+        rescue => e
+          succeeded_block = false
+          error_msg_block = "failed block execution after matching: #{e}"
+        end
       ensure
         if SUPERVISOR_PID_PATTERN =~ stdio_buf
           @supervisor_pid = $1.to_i
@@ -173,19 +183,19 @@ class TestFluentdCommand < ::Test::Unit::TestCase
        end
      end
    rescue Timeout::Error
-      assert_error_msg = "execution timeout"
+      error_msg_match = "execution timeout"
      # https://github.com/fluent/fluentd/issues/4095
      # On Windows, timeout without `@supervisor_pid` means that the test is invalid,
      # since the supervisor process will survive without being killed correctly.
      flunk("Invalid test: The pid of supervisor could not be taken, which is necessary on Windows.") if Fluent.windows? && @supervisor_pid.nil?
    rescue => e
-      assert_error_msg = "unexpected error in launching fluentd: #{e.inspect}"
+      error_msg_match = "unexpected error in launching fluentd: #{e.inspect}"
    else
-      assert_error_msg = "log doesn't match" unless matched
+      error_msg_match = "log doesn't match" unless matched
    end
 
    if patterns_not_match.empty?
-      assert_error_msg = build_message(assert_error_msg,
+      error_msg_match = build_message(error_msg_match,
                                        "<?>\nwas expected to include:\n<?>",
                                        stdio_buf, pattern_list)
    else
@@ -197,16 +207,17 @@ class TestFluentdCommand < ::Test::Unit::TestCase
          lines.any?{|line| line.include?(ptn) }
        end
        if matched_wrongly
-          assert_error_msg << "\n" unless assert_error_msg.empty?
-          assert_error_msg << "pattern exists in logs wrongly: #{ptn}"
+          error_msg_match << "\n" unless error_msg_match.empty?
+          error_msg_match << "pattern exists in logs wrongly: #{ptn}"
        end
      end
-      assert_error_msg = build_message(assert_error_msg,
+      error_msg_match = build_message(error_msg_match,
                                        "<?>\nwas expected to include:\n<?>\nand not include:\n<?>",
                                        stdio_buf, pattern_list, patterns_not_match)
    end
 
-    assert matched && !matched_wrongly, assert_error_msg
+    assert matched && !matched_wrongly, error_msg_match
+    assert succeeded_block, error_msg_block if block_given?
  end
 
  def assert_fluentd_fails_to_start(cmdline, *pattern_list, timeout: 20)
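`assert_log_matches` can now take a block that runs once the expected log lines have appeared, letting tests probe the live fluentd process before shutdown; block failures are captured into `error_msg_block` and asserted after the log checks. A usage sketch (mirroring the new plugin-option tests below; `expected_plugin_dirs` is a placeholder):

```ruby
assert_log_matches(cmdline, "fluentd worker is now running") do
  # Runs after the patterns matched, while fluentd is still running.
  response = Net::HTTP.get(URI.parse("http://localhost:24220/api/config.json"))
  assert_equal expected_plugin_dirs, JSON.parse(response)["plugin_dirs"]
end
```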
@@ -1288,4 +1299,40 @@ CONF
                        "[debug]")
     end
   end
+
+  sub_test_case "plugin option" do
+    test "should be the default value when not specifying" do
+      conf_path = create_conf_file('test.conf', <<~CONF)
+        <source>
+          @type monitor_agent
+        </source>
+      CONF
+      assert File.exist?(conf_path)
+      cmdline = create_cmdline(conf_path)
+
+      assert_log_matches(cmdline, "fluentd worker is now running") do
+        response = Net::HTTP.get(URI.parse("http://localhost:24220/api/config.json"))
+        actual_conf = JSON.parse(response)
+        assert_equal Fluent::Supervisor.default_options[:plugin_dirs], actual_conf["plugin_dirs"]
+      end
+    end
+
+    data(short: "-p")
+    data(long: "--plugin")
+    test "can be added by specifying the option" do |option_name|
+      conf_path = create_conf_file('test.conf', <<~CONF)
+        <source>
+          @type monitor_agent
+        </source>
+      CONF
+      assert File.exist?(conf_path)
+      cmdline = create_cmdline(conf_path, option_name, @tmp_dir, option_name, @tmp_dir)
+
+      assert_log_matches(cmdline, "fluentd worker is now running") do
+        response = Net::HTTP.get(URI.parse("http://localhost:24220/api/config.json"))
+        actual_conf = JSON.parse(response)
+        assert_equal Fluent::Supervisor.default_options[:plugin_dirs] + [@tmp_dir, @tmp_dir], actual_conf["plugin_dirs"]
+      end
+    end
+  end
 end
@@ -5,20 +5,19 @@ require 'fluent/plugin/metrics_local'
 require 'tempfile'
 
 class IntailIOHandlerTest < Test::Unit::TestCase
-  setup do
-    @file = Tempfile.new('intail_io_handler').binmode
-    opened_file_metrics = Fluent::Plugin::LocalMetrics.new
-    opened_file_metrics.configure(config_element('metrics', '', {}))
-    closed_file_metrics = Fluent::Plugin::LocalMetrics.new
-    closed_file_metrics.configure(config_element('metrics', '', {}))
-    rotated_file_metrics = Fluent::Plugin::LocalMetrics.new
-    rotated_file_metrics.configure(config_element('metrics', '', {}))
-    @metrics = Fluent::Plugin::TailInput::MetricsInfo.new(opened_file_metrics, closed_file_metrics, rotated_file_metrics)
-  end
-
-  teardown do
-    @file.close rescue nil
-    @file.unlink rescue nil
+  def setup
+    Tempfile.create('intail_io_handler') do |file|
+      file.binmode
+      @file = file
+      opened_file_metrics = Fluent::Plugin::LocalMetrics.new
+      opened_file_metrics.configure(config_element('metrics', '', {}))
+      closed_file_metrics = Fluent::Plugin::LocalMetrics.new
+      closed_file_metrics.configure(config_element('metrics', '', {}))
+      rotated_file_metrics = Fluent::Plugin::LocalMetrics.new
+      rotated_file_metrics.configure(config_element('metrics', '', {}))
+      @metrics = Fluent::Plugin::TailInput::MetricsInfo.new(opened_file_metrics, closed_file_metrics, rotated_file_metrics)
+      yield
+    end
   end
 
   def create_target_info
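This refactor (and the identical one in the position-file test below) swaps `Tempfile.new` plus manual teardown for `Tempfile.create` with a block, leaning on test-unit's support for a `setup` method that yields to the test body; the tempfile is closed and unlinked automatically when the block exits. A minimal self-contained sketch of the pattern (class and file names are illustrative):

```ruby
require 'test/unit'
require 'tempfile'

class ExampleTest < Test::Unit::TestCase
  def setup
    Tempfile.create('example') do |file|  # auto close + unlink on block exit
      @file = file
      yield  # test-unit runs the test body here
    end
  end

  def test_tempfile_is_usable
    @file.write("hello")
    assert_false @file.closed?
  end
end
```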
@@ -6,13 +6,12 @@ require 'fileutils'
 require 'tempfile'
 
 class IntailPositionFileTest < Test::Unit::TestCase
-  setup do
-    @file = Tempfile.new('intail_position_file_test').binmode
-  end
-
-  teardown do
-    @file.close rescue nil
-    @file.unlink rescue nil
+  def setup
+    Tempfile.create('intail_position_file_test') do |file|
+      file.binmode
+      @file = file
+      yield
+    end
   end
 
   UNWATCHED_STR = '%016x' % Fluent::Plugin::TailInput::PositionFile::UNWATCHED_POSITION
@@ -901,6 +901,65 @@ class BufferTest < Test::Unit::TestCase
 
       assert_equal 2, purge_count
     end
+
+    # https://github.com/fluent/fluentd/issues/4446
+    test "#write_step_by_step keeps chunks kept in locked in entire #write process" do
+      assert_equal 8 * 1024 * 1024, @p.chunk_limit_size
+      assert_equal 0.95, @p.chunk_full_threshold
+
+      mon_enter_counts_by_chunk = {}
+      mon_exit_counts_by_chunk = {}
+
+      stub.proxy(@p).generate_chunk(anything) do |chunk|
+        stub(chunk).mon_enter do
+          enter_count = 1 + mon_enter_counts_by_chunk.fetch(chunk, 0)
+          exit_count = mon_exit_counts_by_chunk.fetch(chunk, 0)
+          mon_enter_counts_by_chunk[chunk] = enter_count
+
+          # Assert that chunk is passed to &block of write_step_by_step before exiting the lock.
+          # (i.e. The lock count must be 2 greater than the exit count).
+          # Since ShouldRetry occurs once, the staged chunk takes the lock 3 times when calling the block.
+          if chunk.staged?
+            lock_in_block = enter_count == 3
+            assert_equal(enter_count - 2, exit_count) if lock_in_block
+          else
+            lock_in_block = enter_count == 2
+            assert_equal(enter_count - 2, exit_count) if lock_in_block
+          end
+        end
+        stub(chunk).mon_exit do
+          exit_count = 1 + mon_exit_counts_by_chunk.fetch(chunk, 0)
+          mon_exit_counts_by_chunk[chunk] = exit_count
+        end
+        chunk
+      end
+
+      m = @p.metadata(timekey: Time.parse('2016-04-11 16:40:00 +0000').to_i)
+      small_row = "x" * 1024 * 400
+      big_row = "x" * 1024 * 1024 * 8 # just `chunk_size_limit`, it does't cause BufferOverFlowError.
+
+      # Write 42 events in 1 event stream, last one is for triggering `ShouldRetry`
+      @p.write({m => [small_row] * 40 + [big_row] + ["x"]})
+
+      # Above event strem will be splitted twice by `Buffer#write_step_by_step`
+      #
+      # 1. `write_once`: 42 [events] * 1 [stream]
+      # 2. `write_step_by_step`: 4 [events] * 10 [streams] + 2 [events] * 1 [stream]
+      # 3. `write_step_by_step` (by `ShouldRetry`): 1 [event] * 42 [streams]
+      #
+      # Example of staged chunk lock behavior:
+      #
+      # 1. mon_enter in write_step_by_step
+      # 2. ShouldRetry occurs
+      # 3. mon_exit in write_step_by_step
+      # 4. mon_enter again in write_step_by_step (retry)
+      # 5. passed to &block of write_step_by_step
+      # 6. mon_enter in the block (write)
+      # 7. mon_exit in write_step_by_step
+      # 8. mon_exit in write
+
+      assert_equal(mon_enter_counts_by_chunk.values, mon_exit_counts_by_chunk.values)
+    end
   end
 
   sub_test_case 'standard format with configuration for test with lower chunk limit size' do
@@ -130,7 +130,7 @@ class FileOutputTest < Test::Unit::TestCase
       'path' => "#{TMP_DIR}/${tag}/${type}/conf_test.%Y%m%d.%H%M.log",
       'add_path_suffix' => 'false',
       'append' => "true",
-      'symlink_path' => "#{TMP_DIR}/conf_test.current.log",
+      'symlink_path' => "#{TMP_DIR}/${tag}/conf_test.current.log",
       'compress' => 'gzip',
       'recompress' => 'true',
     }, [
@@ -183,6 +183,26 @@
         Fluent::Test::Driver::Output.new(Fluent::Plugin::NullOutput).configure(conf)
       end
     end
+
+    test 'warning for symlink_path not including correct placeholders corresponding to chunk keys' do
+      omit "Windows doesn't support symlink" if Fluent.windows?
+      conf = config_element('match', '**', {
+        'path' => "#{TMP_DIR}/${tag}/${key1}/${key2}/conf_test.%Y%m%d.%H%M.log",
+        'symlink_path' => "#{TMP_DIR}/conf_test.current.log",
+      }, [
+        config_element('buffer', 'time,tag,key1,key2', {
+          '@type' => 'file',
+          'timekey' => '1d',
+          'path' => "#{TMP_DIR}/buf_conf_test",
+        }),
+      ])
+      assert_nothing_raised do
+        d = create_driver(conf)
+        assert do
+          d.logs.count { |log| log.include?("multiple chunks are competing for a single symlink_path") } == 2
+        end
+      end
+    end
   end
 
   sub_test_case 'fully configured output' do
@@ -8,6 +8,37 @@ class JsonParserTest < ::Test::Unit::TestCase
     @parser = Fluent::Test::Driver::Parser.new(Fluent::Plugin::JSONParser)
   end
 
+  sub_test_case "configure_json_parser" do
+    data("oj", [:oj, [Oj.method(:load), Oj::ParseError]])
+    data("json", [:json, [JSON.method(:load), JSON::ParserError]])
+    data("yajl", [:yajl, [Yajl.method(:load), Yajl::ParseError]])
+    def test_return_each_loader((input, expected_return))
+      result = @parser.instance.configure_json_parser(input)
+      assert_equal expected_return, result
+    end
+
+    def test_raise_exception_for_unknown_input
+      assert_raise RuntimeError do
+        @parser.instance.configure_json_parser(:unknown)
+      end
+    end
+
+    def test_fall_back_oj_to_yajl_if_oj_not_available
+      stub(Fluent::OjOptions).available? { false }
+
+      result = @parser.instance.configure_json_parser(:oj)
+
+      assert_equal [Yajl.method(:load), Yajl::ParseError], result
+      logs = @parser.logs.collect do |log|
+        log.gsub(/\A\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4} /, "")
+      end
+      assert_equal(
+        ["[info]: Oj is not installed, and failing back to Yajl for json parser\n"],
+        logs
+      )
+    end
+  end
+
   data('oj' => 'oj', 'yajl' => 'yajl')
   def test_parse(data)
     @parser.configure('json_parser' => data)
data/test/test_config.rb CHANGED
@@ -167,6 +167,7 @@ class ConfigTest < Test::Unit::TestCase
           tag: tag.dummy
         - source:
             $type: tcp
+            $log_level: info
             tag: tag.tcp
             parse:
               $arg:
@@ -176,6 +177,7 @@ class ConfigTest < Test::Unit::TestCase
         - match:
             $tag: tag.*
             $type: stdout
+            $log_level: debug
             buffer:
               $type: memory
               flush_interval: 1s
@@ -208,10 +210,12 @@ class ConfigTest < Test::Unit::TestCase
         'tag.dummy',
         'tcp',
         'tag.tcp',
+        'info',
         'none',
         'why.parse.section.doesnot.have.arg,huh',
         'stdout',
         'tag.*',
+        'debug',
         'null',
         '**',
         '@FLUENT_LOG',
@@ -224,10 +228,12 @@ class ConfigTest < Test::Unit::TestCase
         dummy_source_conf['tag'],
         tcp_source_conf['@type'],
         tcp_source_conf['tag'],
+        tcp_source_conf['@log_level'],
         parse_tcp_conf['@type'],
         parse_tcp_conf.arg,
         match_conf['@type'],
         match_conf.arg,
+        match_conf['@log_level'],
         fluent_log_conf['@type'],
         fluent_log_conf.arg,
         label_conf.arg,
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluentd
 version: !ruby/object:Gem::Version
-  version: 1.16.4
+  version: 1.16.6
 platform: x86-mingw32
 authors:
 - Sadayuki Furuhashi
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-03-14 00:00:00.000000000 Z
+date: 2024-08-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -200,6 +200,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.4'
+- !ruby/object:Gem::Dependency
+  name: console
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '1.24'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '1.24'
 - !ruby/object:Gem::Dependency
   name: win32-service
   requirement: !ruby/object:Gem::Requirement
@@ -330,16 +344,16 @@ dependencies:
   name: timecop
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - "<"
       - !ruby/object:Gem::Version
-        version: '0.9'
+        version: 0.9.9
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - "<"
      - !ruby/object:Gem::Version
-        version: '0.9'
+        version: 0.9.9
 - !ruby/object:Gem::Dependency
   name: test-unit
   requirement: !ruby/object:Gem::Requirement
@@ -997,7 +1011,7 @@ homepage: https://www.fluentd.org/
 licenses:
 - Apache-2.0
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -1013,7 +1027,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubygems_version: 3.4.19
-signing_key:
+signing_key:
 specification_version: 4
 summary: Fluentd event collector
 test_files: