fluentd 1.16.0-x64-mingw32 → 1.16.2-x64-mingw32

Files changed (61)
  1. checksums.yaml +4 -4
  2. data/.github/ISSUE_TEMPLATE/bug_report.yaml +1 -0
  3. data/.github/ISSUE_TEMPLATE/feature_request.yaml +1 -0
  4. data/.github/workflows/stale-actions.yml +24 -0
  5. data/CHANGELOG.md +74 -0
  6. data/CONTRIBUTING.md +1 -1
  7. data/MAINTAINERS.md +3 -3
  8. data/SECURITY.md +5 -9
  9. data/fluentd.gemspec +1 -1
  10. data/lib/fluent/command/ctl.rb +2 -2
  11. data/lib/fluent/command/plugin_config_formatter.rb +1 -1
  12. data/lib/fluent/config/dsl.rb +1 -1
  13. data/lib/fluent/config/v1_parser.rb +2 -2
  14. data/lib/fluent/counter/server.rb +1 -1
  15. data/lib/fluent/counter/validator.rb +3 -3
  16. data/lib/fluent/engine.rb +1 -1
  17. data/lib/fluent/event.rb +8 -4
  18. data/lib/fluent/log.rb +9 -0
  19. data/lib/fluent/match.rb +1 -1
  20. data/lib/fluent/msgpack_factory.rb +6 -1
  21. data/lib/fluent/plugin/base.rb +1 -1
  22. data/lib/fluent/plugin/filter_record_transformer.rb +1 -1
  23. data/lib/fluent/plugin/in_forward.rb +1 -1
  24. data/lib/fluent/plugin/in_http.rb +8 -8
  25. data/lib/fluent/plugin/in_sample.rb +1 -1
  26. data/lib/fluent/plugin/in_tail/position_file.rb +32 -18
  27. data/lib/fluent/plugin/in_tail.rb +58 -24
  28. data/lib/fluent/plugin/in_tcp.rb +43 -0
  29. data/lib/fluent/plugin/out_exec_filter.rb +2 -2
  30. data/lib/fluent/plugin/output.rb +2 -2
  31. data/lib/fluent/plugin/parser_json.rb +1 -1
  32. data/lib/fluent/plugin_helper/event_loop.rb +2 -2
  33. data/lib/fluent/plugin_helper/record_accessor.rb +1 -1
  34. data/lib/fluent/plugin_helper/server.rb +8 -0
  35. data/lib/fluent/plugin_helper/thread.rb +3 -3
  36. data/lib/fluent/plugin_id.rb +1 -1
  37. data/lib/fluent/supervisor.rb +1 -1
  38. data/lib/fluent/version.rb +1 -1
  39. data/templates/new_gem/test/helper.rb.erb +0 -1
  40. data/test/plugin/in_tail/test_position_file.rb +31 -1
  41. data/test/plugin/test_base.rb +1 -1
  42. data/test/plugin/test_buffer_chunk.rb +11 -0
  43. data/test/plugin/test_in_forward.rb +9 -9
  44. data/test/plugin/test_in_tail.rb +379 -0
  45. data/test/plugin/test_in_tcp.rb +74 -4
  46. data/test/plugin/test_in_udp.rb +28 -0
  47. data/test/plugin/test_in_unix.rb +2 -2
  48. data/test/plugin/test_multi_output.rb +1 -1
  49. data/test/plugin/test_out_exec_filter.rb +2 -2
  50. data/test/plugin/test_out_file.rb +2 -2
  51. data/test/plugin/test_output.rb +12 -12
  52. data/test/plugin/test_output_as_buffered.rb +44 -44
  53. data/test/plugin/test_output_as_buffered_compress.rb +32 -18
  54. data/test/plugin/test_output_as_buffered_retries.rb +1 -1
  55. data/test/plugin/test_output_as_buffered_secondary.rb +2 -2
  56. data/test/plugin_helper/test_child_process.rb +2 -2
  57. data/test/plugin_helper/test_server.rb +50 -1
  58. data/test/test_log.rb +38 -1
  59. data/test/test_msgpack_factory.rb +32 -0
  60. data/test/test_supervisor.rb +13 -0
  61. metadata +5 -4
@@ -510,7 +510,7 @@ class BufferedOutputTest < Test::Unit::TestCase
  logs = @i.log.out.logs.dup
  @i.start
  @i.after_start
- assert{ logs.select{|log| log.include?('[warn]') }.size == 0 }
+ assert{ logs.count{|log| log.include?('[warn]') } == 0 }
  end
 
  test 'a warning reported with 4 chunk keys' do
@@ -522,7 +522,7 @@ class BufferedOutputTest < Test::Unit::TestCase
  @i.after_start
  assert_equal ['key1', 'key2', 'key3', 'key4'], @i.chunk_keys
 
- assert{ logs.select{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') }.size == 1 }
+ assert{ logs.count{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') } == 1 }
  end
 
  test 'a warning reported with 4 chunk keys including "tag"' do
@@ -531,7 +531,7 @@ class BufferedOutputTest < Test::Unit::TestCase
  logs = @i.log.out.logs.dup
  @i.start # this calls `log.reset`... capturing logs about configure must be done before this line
  @i.after_start
- assert{ logs.select{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') }.size == 1 }
+ assert{ logs.count{|log| log.include?('[warn]: many chunk keys specified, and it may cause too many chunks on your system.') } == 1 }
  end
 
  test 'time key is not included for warned chunk keys' do
@@ -540,7 +540,7 @@ class BufferedOutputTest < Test::Unit::TestCase
  logs = @i.log.out.logs.dup
  @i.start
  @i.after_start
- assert{ logs.select{|log| log.include?('[warn]') }.size == 0 }
+ assert{ logs.count{|log| log.include?('[warn]') } == 0 }
  end
  end
 
@@ -968,8 +968,8 @@ class BufferedOutputTest < Test::Unit::TestCase
  waiting(4){ sleep 0.1 until ary.size == 3 }
 
  assert_equal 3, ary.size
- assert_equal 2, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 1, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 2, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 1, ary.count{|e| e[0] == "test.tag.2" }
 
  Timecop.freeze( Time.parse('2016-04-13 14:04:04 +0900') )
 
@@ -985,8 +985,8 @@ class BufferedOutputTest < Test::Unit::TestCase
  assert{ @i.buffer.stage.size == 1 && @i.write_count == 2 }
 
  assert_equal 9, ary.size
- assert_equal 7, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 2, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 7, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 2, ary.count{|e| e[0] == "test.tag.2" }
 
  assert metachecks.all?{|e| e }
  end
@@ -1224,8 +1224,8 @@ class BufferedOutputTest < Test::Unit::TestCase
 
  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
- assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 5, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 0, ary.count{|e| e[0] == "test.tag.2" }
 
  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )
 
@@ -1249,8 +1249,8 @@ class BufferedOutputTest < Test::Unit::TestCase
  assert{ @i.buffer.stage.size == 0 && @i.write_count == 3 }
 
  assert_equal 11, ary.size
- assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 8, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 3, ary.count{|e| e[0] == "test.tag.2" }
 
  assert metachecks.all?{|e| e }
  end
@@ -1315,8 +1315,8 @@ class BufferedOutputTest < Test::Unit::TestCase
 
  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
- assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 5, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 0, ary.count{|e| e[0] == "test.tag.2" }
 
  @i.stop
  @i.before_shutdown
@@ -1330,8 +1330,8 @@ class BufferedOutputTest < Test::Unit::TestCase
  assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 3 }
 
  assert_equal 11, ary.size
- assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 8, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 3, ary.count{|e| e[0] == "test.tag.2" }
 
  assert metachecks.all?{|e| e }
  end
@@ -1435,8 +1435,8 @@ class BufferedOutputTest < Test::Unit::TestCase
 
  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
- assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 5, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 0, ary.count{|e| e[0] == "test.tag.2" }
  assert ary[0...5].all?{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }
 
  Timecop.freeze( Time.parse('2016-04-13 14:04:09 +0900') )
@@ -1465,11 +1465,11 @@ class BufferedOutputTest < Test::Unit::TestCase
  assert{ @i.buffer.stage.size == 0 && @i.write_count == 4 }
 
  assert_equal 11, ary.size
- assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
- assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
- assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
- assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size
+ assert_equal 8, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 3, ary.count{|e| e[0] == "test.tag.2" }
+ assert_equal 6, ary.count{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }
+ assert_equal 3, ary.count{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }
+ assert_equal 2, ary.count{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }
 
  assert metachecks.all?{|e| e }
  end
@@ -1525,8 +1525,8 @@ class BufferedOutputTest < Test::Unit::TestCase
 
  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
- assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 5, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 0, ary.count{|e| e[0] == "test.tag.2" }
 
  @i.stop
  @i.before_shutdown
@@ -1540,11 +1540,11 @@ class BufferedOutputTest < Test::Unit::TestCase
  assert{ @i.buffer.stage.size == 0 && @i.buffer.queue.size == 0 && @i.write_count == 4 }
 
  assert_equal 11, ary.size
- assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
- assert_equal 6, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }.size
- assert_equal 3, ary.select{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }.size
- assert_equal 2, ary.select{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }.size
+ assert_equal 8, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 3, ary.count{|e| e[0] == "test.tag.2" }
+ assert_equal 6, ary.count{|e| e[2]["name"] == "xxx" && e[2]["service"] == "a" }
+ assert_equal 3, ary.count{|e| e[2]["name"] == "yyy" && e[2]["service"] == "a" }
+ assert_equal 2, ary.count{|e| e[2]["name"] == "xxx" && e[2]["service"] == "b" }
 
  assert metachecks.all?{|e| e }
  end
@@ -1683,8 +1683,8 @@ class BufferedOutputTest < Test::Unit::TestCase
 
  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
- assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 5, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 0, ary.count{|e| e[0] == "test.tag.2" }
 
  assert_equal 1, chunks.size
  assert !chunks.first.empty?
@@ -1716,8 +1716,8 @@ class BufferedOutputTest < Test::Unit::TestCase
  assert{ @i.buffer.dequeued.size == 3 }
 
  assert_equal 11, ary.size
- assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 8, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 3, ary.count{|e| e[0] == "test.tag.2" }
 
  assert_equal 3, chunks.size
  assert chunks.all?{|c| !c.empty? }
@@ -1802,8 +1802,8 @@ class BufferedOutputTest < Test::Unit::TestCase
 
  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
- assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 5, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 0, ary.count{|e| e[0] == "test.tag.2" }
 
  assert_equal 1, chunks.size
  assert !chunks.first.empty?
@@ -1835,8 +1835,8 @@ class BufferedOutputTest < Test::Unit::TestCase
  assert{ @i.buffer.dequeued.size == 3 }
 
  assert_equal 11, ary.size
- assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 8, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 3, ary.count{|e| e[0] == "test.tag.2" }
 
  assert_equal 3, chunks.size
  assert chunks.all?{|c| !c.empty? }
@@ -1892,8 +1892,8 @@ class BufferedOutputTest < Test::Unit::TestCase
 
  assert{ @i.write_count == 7 }
  assert_equal 11, ary.size
- assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 8, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 3, ary.count{|e| e[0] == "test.tag.2" }
  assert{ chunks.size == 3 }
  assert{ chunks.all?{|c| !c.empty? } }
 
@@ -1963,8 +1963,8 @@ class BufferedOutputTest < Test::Unit::TestCase
 
  # events fulfills a chunk (and queued immediately)
  assert_equal 5, ary.size
- assert_equal 5, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 0, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 5, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 0, ary.count{|e| e[0] == "test.tag.2" }
 
  assert_equal 1, chunks.size
  assert !chunks.first.empty?
@@ -1999,8 +1999,8 @@ class BufferedOutputTest < Test::Unit::TestCase
  assert{ @i.rollback_count == 0 }
 
  assert_equal 11, ary.size
- assert_equal 8, ary.select{|e| e[0] == "test.tag.1" }.size
- assert_equal 3, ary.select{|e| e[0] == "test.tag.2" }.size
+ assert_equal 8, ary.count{|e| e[0] == "test.tag.1" }
+ assert_equal 3, ary.count{|e| e[0] == "test.tag.2" }
 
  assert{ chunks.size == 3 }
  assert{ chunks.all?{|c| !c.empty? } }
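The bulk of the test changes above swap `select { ... }.size` for `count { ... }` (and, in later hunks, `select { ... }.first` for `find { ... }`). Both rewrites keep the assertions identical while doing a single pass over the collection and skipping the intermediate array. A standalone Ruby sketch of the idiom (the sample log lines are illustrative only):

```ruby
logs = [
  "2023-07-14 00:00:00 +0000 [warn]: chunk flush failed",
  "2023-07-14 00:00:01 +0000 [info]: flush succeeded",
  "2023-07-14 00:00:02 +0000 [warn]: chunk flush failed",
]

# Old pattern: materialize a filtered array just to measure it or take its head.
logs.select { |log| log.include?('[warn]') }.size   # => 2
logs.select { |log| log.include?('[warn]') }.first  # => "2023-07-14 00:00:00 +0000 [warn]: chunk flush failed"

# New pattern: single-pass Enumerable calls with the same results.
logs.count { |log| log.include?('[warn]') }         # => 2
logs.find  { |log| log.include?('[warn]') }         # => "2023-07-14 00:00:00 +0000 [warn]: chunk flush failed"
```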
data/test/plugin/test_output_as_buffered_compress.rb CHANGED
@@ -35,6 +35,16 @@ module FluentPluginOutputAsBufferedCompressTest
  @format ? @format.call(tag, time, record) : [tag, time, record].to_json
  end
  end
+
+ def self.dummy_event_stream
+ Fluent::ArrayEventStream.new(
+ [
+ [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
+ [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
+ [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
+ ]
+ )
+ end
  end
 
  class BufferedOutputCompressTest < Test::Unit::TestCase
@@ -60,16 +70,6 @@ class BufferedOutputCompressTest < Test::Unit::TestCase
  end
  end
 
- def dummy_event_stream
- Fluent::ArrayEventStream.new(
- [
- [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
- [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
- [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
- ]
- )
- end
-
  TMP_DIR = File.expand_path('../../tmp/test_output_as_buffered_compress', __FILE__)
 
  setup do
@@ -89,20 +89,34 @@ class BufferedOutputCompressTest < Test::Unit::TestCase
  end
 
  data(
- handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
- handle_stream_with_standard_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
- handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
- handle_stream_with_standard_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ :buffer_config,
+ [
+ config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+ config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+ config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+ ],
  )
- test 'call a standard format when output plugin adds data to chunk' do |buffer_config|
+ data(
+ :input_es,
+ [
+ FluentPluginOutputAsBufferedCompressTest.dummy_event_stream,
+ # If already compressed data is incoming, it must be written as is (i.e. without decompressed).
+ # https://github.com/fluent/fluentd/issues/4146
+ Fluent::CompressedMessagePackEventStream.new(FluentPluginOutputAsBufferedCompressTest.dummy_event_stream.to_compressed_msgpack_stream),
+ ],
+ )
+ test 'call a standard format when output plugin adds data to chunk' do |data|
+ buffer_config = data[:buffer_config]
+ es = data[:input_es].dup # Note: the data matrix is shared in all patterns, so we need `dup` here.
+
  @i = create_output(:async)
  @i.configure(config_element('ROOT','', {}, [buffer_config]))
  @i.start
  @i.after_start
 
  io = StringIO.new
- es = dummy_event_stream
- expected = es.map { |e| e }
+ expected = es.dup.map { |t, r| [t, r] }
  compressed_data = ''
 
  assert_equal :gzip, @i.buffer.compress
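Besides the fix for issue 4146 referenced above, this hunk switches test-unit's data() from a single labeled hash to the two-axis matrix form, so every buffer configuration is now exercised with both a plain and an already-compressed event stream. A minimal, self-contained sketch of that matrix form (the axis names and values here are made up for illustration):

```ruby
require 'test-unit'

class MatrixDataTest < Test::Unit::TestCase
  # Each data(:axis, [...]) call adds one axis; test-unit runs the test once per
  # combination and passes a Hash keyed by the axis names into the block.
  data(:x, [1, 2])
  data(:y, [10, 20])
  test 'runs once per (x, y) combination' do |data|
    x = data[:x]
    y = data[:y]
    assert { x * y > 0 }
  end
end
```

Because every combination receives the same fixture objects, a mutable fixture (like the event stream in the real test) needs a dup per run, which is exactly why the new test calls `data[:input_es].dup`.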
@@ -138,7 +152,7 @@ class BufferedOutputCompressTest < Test::Unit::TestCase
  @i.after_start
 
  io = StringIO.new
- es = dummy_event_stream
+ es = FluentPluginOutputAsBufferedCompressTest.dummy_event_stream
  expected = es.map { |e| "#{e[1]}\n" }.join # e[1] is record
  compressed_data = ''
 
data/test/plugin/test_output_as_buffered_retries.rb CHANGED
@@ -93,7 +93,7 @@ class BufferedOutputRetryTest < Test::Unit::TestCase
  end
  def get_log_time(msg, logs)
  log_time = nil
- log = logs.select{|l| l.include?(msg) }.first
+ log = logs.find{|l| l.include?(msg) }
  if log && /^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [-+]\d{4}) \[error\]/ =~ log
  log_time = Time.parse($1)
  end
data/test/plugin/test_output_as_buffered_secondary.rb CHANGED
@@ -634,8 +634,8 @@ class BufferedOutputSecondaryTest < Test::Unit::TestCase
 
  assert @i.retry
  logs = @i.log.out.logs
- waiting(4){ sleep 0.1 until logs.select{|l| l.include?("[warn]: failed to flush the buffer chunk, timeout to commit.") }.size == 2 }
- assert{ logs.select{|l| l.include?("[warn]: failed to flush the buffer chunk, timeout to commit.") }.size == 2 }
+ waiting(4){ sleep 0.1 until logs.count{|l| l.include?("[warn]: failed to flush the buffer chunk, timeout to commit.") } == 2 }
+ assert{ logs.count{|l| l.include?("[warn]: failed to flush the buffer chunk, timeout to commit.") } == 2 }
  end
 
  test 'retry_wait for secondary is same with one for primary' do
data/test/plugin_helper/test_child_process.rb CHANGED
@@ -559,7 +559,7 @@ class ChildProcessTest < Test::Unit::TestCase
  unless Fluent.windows?
  test 'can specify subprocess name' do
  io = IO.popen([["cat", "caaaaaaaaaaat"], '-'])
- process_naming_enabled = (open("|ps opid,cmd"){|_io| _io.readlines }.select{|line| line.include?("caaaaaaaaaaat") }.size > 0)
+ process_naming_enabled = (open("|ps opid,cmd"){|_io| _io.readlines }.count{|line| line.include?("caaaaaaaaaaat") } > 0)
  Process.kill(:TERM, io.pid) rescue nil
  io.close rescue nil
 
@@ -584,7 +584,7 @@ class ChildProcessTest < Test::Unit::TestCase
  m.lock
  pid = pids.first
  # 16357 sleeeeeeeeeper -e sleep 10; puts "hello"
- assert{ proc_lines.select{|line| line =~ /^\s*#{pid}\s/ }.first.strip.split(/\s+/)[1] == "sleeeeeeeeeper" }
+ assert{ proc_lines.find{|line| line =~ /^\s*#{pid}\s/ }.strip.split(/\s+/)[1] == "sleeeeeeeeeper" }
  @d.stop; @d.shutdown; @d.close; @d.terminate
  end
  end
data/test/plugin_helper/test_server.rb CHANGED
@@ -29,6 +29,7 @@ class ServerPluginHelperTest < Test::Unit::TestCase
  ENV['SERVERENGINE_SOCKETMANAGER_PATH'] = @socket_manager_path.to_s
 
  @d = Dummy.new
+ @d.under_plugin_development = true
  @d.start
  @d.after_start
  end
@@ -794,6 +795,50 @@ class ServerPluginHelperTest < Test::Unit::TestCase
  end
  end
  end
+
+ sub_test_case 'over max_bytes' do
+ data("cut off on Non-Windows", { max_bytes: 32, records: ["a" * 40], expected: ["a" * 32] }, keep: true) unless Fluent.windows?
+ data("drop on Windows", { max_bytes: 32, records: ["a" * 40], expected: [] }, keep: true) if Fluent.windows?
+ test 'with sock' do |data|
+ max_bytes, records, expected = data.values
+
+ actual_records = []
+ @d.server_create_udp(:myserver, @port, max_bytes: max_bytes) do |data, sock|
+ actual_records << data
+ end
+
+ open_client(:udp, "127.0.0.1", @port) do |sock|
+ records.each do |record|
+ sock.send(record, 0)
+ end
+ end
+
+ waiting(10) { sleep 0.1 until actual_records.size >= expected.size }
+ sleep 1 if expected.size == 0 # To confirm no record recieved.
+
+ assert_equal expected, actual_records
+ end
+
+ test 'without sock' do |data|
+ max_bytes, records, expected = data.values
+
+ actual_records = []
+ @d.server_create_udp(:myserver, @port, max_bytes: max_bytes) do |data|
+ actual_records << data
+ end
+
+ open_client(:udp, "127.0.0.1", @port) do |sock|
+ records.each do |record|
+ sock.send(record, 0)
+ end
+ end
+
+ waiting(10) { sleep 0.1 until actual_records.size >= expected.size }
+ sleep 1 if expected.size == 0 # To confirm no record recieved.
+
+ assert_equal expected, actual_records
+ end
+ end
  end
 
  module CertUtil
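The new 'over max_bytes' cases pin down behaviour that server_create_udp largely inherits from the operating system: when a UDP datagram exceeds the read size, Linux and macOS deliver a truncated payload, while Windows drops the oversized datagram. A plain-Ruby sketch of the same effect outside fluentd (names and sizes here are illustrative):

```ruby
require 'socket'

receiver = UDPSocket.new
receiver.bind("127.0.0.1", 0)
port = receiver.addr[1]

sender = UDPSocket.new
sender.connect("127.0.0.1", port)
sender.send("a" * 40, 0)            # one 40-byte datagram

# Read with a 32-byte limit: on Linux/macOS the excess bytes are cut off and a
# 32-byte payload is returned; on Windows the oversized datagram is not delivered
# (the receive fails instead), which is why the test above expects [] there.
data, _addr = receiver.recvfrom(32)
puts data.bytesize                   # => 32 on non-Windows
```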
@@ -1284,7 +1329,7 @@ class ServerPluginHelperTest < Test::Unit::TestCase
  # OpenSSL 1.1.1: "TLSv1.2"
  if tls_version == "TLSv1/SSLv3" || tls_version == "TLSv1.2"
  matched = true
- unless cipher_name.match(/#{conf.ciphers}/)
+ unless cipher_name.match?(/#{conf.ciphers}/)
  matched = false
  break
  end
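The match-to-match? change follows the same single-pass theme: String#match builds a MatchData object even when the caller only needs a yes/no answer, whereas String#match? (Ruby 2.4+) returns a boolean without that allocation. A quick standalone illustration:

```ruby
cipher_name = "TLS_AES_256_GCM_SHA384"

cipher_name.match(/AES_256/)    # => #<MatchData "AES_256">  (allocates a MatchData)
cipher_name.match?(/AES_256/)   # => true                    (boolean only, no allocation)
```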
@@ -1575,6 +1620,10 @@ class ServerPluginHelperTest < Test::Unit::TestCase
 
  def open_client(proto, addr, port)
  client = case proto
+ when :udp
+ c = UDPSocket.open
+ c.connect(addr, port)
+ c
  when :tcp
  TCPSocket.open(addr, port)
  when :tls
data/test/test_log.rb CHANGED
@@ -472,6 +472,43 @@ class LogTest < Test::Unit::TestCase
  ]
  assert_equal(expected, log.out.logs)
  end
+
+ def test_reject_on_max_size
+ ignore_same_log_interval = 10
+
+ logger = Fluent::Log.new(
+ ServerEngine::DaemonLogger.new(@log_device, log_level: ServerEngine::DaemonLogger::INFO),
+ ignore_same_log_interval: ignore_same_log_interval,
+ )
+
+ # Output unique log every second.
+ Fluent::Log::IGNORE_SAME_LOG_MAX_CACHE_SIZE.times do |i|
+ logger.info "Test #{i}"
+ Timecop.freeze(@timestamp + i)
+ end
+ logger.info "Over max size!"
+
+ # The newest cache and the latest caches in `ignore_same_log_interval` should exist.
+ assert { Thread.current[:last_same_log].size == ignore_same_log_interval + 1 }
+ end
+
+ def test_clear_on_max_size
+ ignore_same_log_interval = 10
+
+ logger = Fluent::Log.new(
+ ServerEngine::DaemonLogger.new(@log_device, log_level: ServerEngine::DaemonLogger::INFO),
+ ignore_same_log_interval: ignore_same_log_interval,
+ )
+
+ # Output unique log at the same time.
+ Fluent::Log::IGNORE_SAME_LOG_MAX_CACHE_SIZE.times do |i|
+ logger.info "Test #{i}"
+ end
+ logger.info "Over max size!"
+
+ # Can't reject old logs, so all cache should be cleared and only the newest should exist.
+ assert { Thread.current[:last_same_log].size == 1 }
+ end
  end
 
  def test_dup
@@ -660,7 +697,7 @@ class LogTest < Test::Unit::TestCase
  log.reopen!
  log.info message
 
- assert { path.read.lines.select{ |line| line.include?(message) }.size == 2 }
+ assert { path.read.lines.count{ |line| line.include?(message) } == 2 }
  # Assert reopening the same file.
  # Especially, on Windows, the filepath is fixed for each process with rotate,
  # so we need to care about this.
data/test/test_msgpack_factory.rb CHANGED
@@ -15,4 +15,36 @@ class MessagePackFactoryTest < Test::Unit::TestCase
  assert mp.msgpack_factory
  assert mp.msgpack_factory
  end
+
+ sub_test_case 'thread_local_msgpack_packer' do
+ test 'packer is cached' do
+ packer1 = Fluent::MessagePackFactory.thread_local_msgpack_packer
+ packer2 = Fluent::MessagePackFactory.thread_local_msgpack_packer
+ assert_equal packer1, packer2
+ end
+ end
+
+ sub_test_case 'thread_local_msgpack_unpacker' do
+ test 'unpacker is cached' do
+ unpacker1 = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
+ unpacker2 = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
+ assert_equal unpacker1, unpacker2
+ end
+
+ # We need to reset the buffer every time so that received incomplete data
+ # must not affect data from other senders.
+ test 'reset the internal buffer of unpacker every time' do
+ unpacker1 = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
+ unpacker1.feed_each("\xA6foo") do |result|
+ flunk("This callback must not be called since the data is uncomplete.")
+ end
+
+ records = []
+ unpacker2 = Fluent::MessagePackFactory.thread_local_msgpack_unpacker
+ unpacker2.feed_each("\xA3foo") do |result|
+ records.append(result)
+ end
+ assert_equal ["foo"], records
+ end
+ end
  end
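The reasoning in the new test's comment is worth spelling out: msgpack's Unpacker keeps partially received bytes in an internal buffer, so a thread-local unpacker reused across connections must be reset between independent feeds, or one sender's truncated frame corrupts the next sender's data. A standalone sketch using the msgpack gem directly (this shows the failure mode being guarded against, not fluentd's internal fix):

```ruby
require 'msgpack'

unpacker = MessagePack::Unpacker.new

# "\xA6" announces a 6-byte string but only 3 bytes follow, so nothing is yielded
# and the partial bytes stay buffered inside the unpacker.
unpacker.feed_each("\xA6foo".b) { |obj| raise "unreachable" }

# Without a reset, the next feed would be parsed as the tail of that incomplete
# string. Resetting discards the stale partial data first.
unpacker.reset

unpacker.feed_each("\xA3foo".b) { |obj| p obj }  # prints "foo"
```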
data/test/test_supervisor.rb CHANGED
@@ -621,6 +621,19 @@ class SupervisorTest < ::Test::Unit::TestCase
  assert_equal 10, $log.out.instance_variable_get(:@shift_size)
  end
 
+ def test_can_start_with_rotate_but_no_log_path
+ config_path = "#{@tmp_dir}/empty.conf"
+ write_config config_path, ""
+
+ sv = Fluent::Supervisor.new(
+ config_path: config_path,
+ log_rotate_age: 5,
+ )
+ sv.__send__(:setup_global_logger)
+
+ assert_true $log.stdout?
+ end
+
  sub_test_case "system log rotation" do
  def parse_text(text)
  basepath = File.expand_path(File.dirname(__FILE__) + '/../../')
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluentd
  version: !ruby/object:Gem::Version
- version: 1.16.0
+ version: 1.16.2
  platform: x64-mingw32
  authors:
  - Sadayuki Furuhashi
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-03-29 00:00:00.000000000 Z
+ date: 2023-07-14 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -124,14 +124,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.2.2
+ version: 0.2.5
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.2.2
+ version: 0.2.5
  - !ruby/object:Gem::Dependency
  name: tzinfo
  requirement: !ruby/object:Gem::Requirement
@@ -442,6 +442,7 @@ files:
  - ".github/PULL_REQUEST_TEMPLATE.md"
  - ".github/workflows/linux-test.yaml"
  - ".github/workflows/macos-test.yaml"
+ - ".github/workflows/stale-actions.yml"
  - ".github/workflows/windows-test.yaml"
  - ".gitignore"
  - ADOPTERS.md