fluentd 1.14.3-x64-mingw32 → 1.14.6-x64-mingw32

Potentially problematic release: this version of fluentd might be problematic.

Files changed (53)
  1. checksums.yaml +4 -4
  2. data/.github/ISSUE_TEMPLATE/config.yml +2 -2
  3. data/.github/workflows/linux-test.yaml +1 -1
  4. data/.github/workflows/macos-test.yaml +5 -1
  5. data/.github/workflows/windows-test.yaml +6 -6
  6. data/CHANGELOG.md +79 -16
  7. data/CONTRIBUTING.md +1 -1
  8. data/MAINTAINERS.md +2 -2
  9. data/README.md +2 -23
  10. data/Rakefile +1 -1
  11. data/fluentd.gemspec +2 -1
  12. data/lib/fluent/command/fluentd.rb +4 -0
  13. data/lib/fluent/config/error.rb +12 -0
  14. data/lib/fluent/env.rb +4 -0
  15. data/lib/fluent/event_router.rb +19 -1
  16. data/lib/fluent/plugin/bare_output.rb +1 -1
  17. data/lib/fluent/plugin/base.rb +1 -1
  18. data/lib/fluent/plugin/buffer.rb +43 -23
  19. data/lib/fluent/plugin/in_forward.rb +1 -1
  20. data/lib/fluent/plugin/in_http.rb +11 -1
  21. data/lib/fluent/plugin/in_tail.rb +10 -0
  22. data/lib/fluent/plugin/out_file.rb +13 -1
  23. data/lib/fluent/plugin/output.rb +41 -32
  24. data/lib/fluent/plugin/parser.rb +3 -4
  25. data/lib/fluent/plugin_helper/retry_state.rb +14 -4
  26. data/lib/fluent/plugin_helper/server.rb +21 -4
  27. data/lib/fluent/plugin_helper/socket.rb +13 -2
  28. data/lib/fluent/registry.rb +2 -1
  29. data/lib/fluent/rpc.rb +4 -3
  30. data/lib/fluent/supervisor.rb +5 -2
  31. data/lib/fluent/version.rb +1 -1
  32. data/test/compat/test_parser.rb +1 -1
  33. data/test/plugin/test_bare_output.rb +1 -1
  34. data/test/plugin/test_buffer.rb +77 -0
  35. data/test/plugin/test_filter.rb +1 -1
  36. data/test/plugin/test_filter_parser.rb +1 -1
  37. data/test/plugin/test_filter_stdout.rb +2 -2
  38. data/test/plugin/test_in_forward.rb +0 -2
  39. data/test/plugin/test_in_http.rb +23 -0
  40. data/test/plugin/test_in_tail.rb +35 -0
  41. data/test/plugin/test_input.rb +1 -1
  42. data/test/plugin/test_out_exec.rb +6 -4
  43. data/test/plugin/test_out_file.rb +29 -13
  44. data/test/plugin/test_out_stdout.rb +2 -2
  45. data/test/plugin/test_output_as_buffered_retries.rb +53 -6
  46. data/test/plugin/test_output_as_buffered_secondary.rb +2 -2
  47. data/test/plugin_helper/test_retry_state.rb +602 -38
  48. data/test/plugin_helper/test_server.rb +18 -0
  49. data/test/plugin_helper/test_timer.rb +2 -2
  50. data/test/test_event_router.rb +17 -0
  51. data/test/test_formatter.rb +1 -1
  52. data/test/test_supervisor.rb +41 -6
  53. metadata +19 -5
data/lib/fluent/plugin/output.rb CHANGED
@@ -273,7 +273,7 @@ module Fluent
       super
 
       @num_errors_metrics = metrics_create(namespace: "fluentd", subsystem: "output", name: "num_errors", help_text: "Number of count num errors")
-      @emit_count_metrics = metrics_create(namespace: "fluentd", subsystem: "output", name: "emit_records", help_text: "Number of count emits")
+      @emit_count_metrics = metrics_create(namespace: "fluentd", subsystem: "output", name: "emit_count", help_text: "Number of count emits")
       @emit_records_metrics = metrics_create(namespace: "fluentd", subsystem: "output", name: "emit_records", help_text: "Number of emit records")
       @emit_size_metrics = metrics_create(namespace: "fluentd", subsystem: "output", name: "emit_size", help_text: "Total size of emit events")
       @write_count_metrics = metrics_create(namespace: "fluentd", subsystem: "output", name: "write_count", help_text: "Number of writing events")
@@ -1275,46 +1275,55 @@ module Fluent
 
         unless @retry
           @retry = retry_state(@buffer_config.retry_randomize)
-          if error
-            log.warn "failed to flush the buffer.", retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error
-            log.warn_backtrace error.backtrace
+
+          if @retry.limit?
+            handle_limit_reached(error)
+          elsif error
+            log_retry_error(error, chunk_id_hex, using_secondary)
           end
+
           return
         end
 
         # @retry exists
 
-        if @retry.limit?
-          if error
-            records = @buffer.queued_records
-            msg = "failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue."
-            log.error msg, retry_times: @retry.steps, records: records, error: error
-            log.error_backtrace error.backtrace
-          end
-          @buffer.clear_queue!
-          log.debug "buffer queue cleared"
-          @retry = nil
+        # Ensure that the current time is greater than or equal to @retry.next_time to avoid the situation when
+        # @retry.step is called almost as many times as the number of flush threads in a short time.
+        if Time.now >= @retry.next_time
+          @retry.step
         else
-          # Ensure that the current time is greater than or equal to @retry.next_time to avoid the situation when
-          # @retry.step is called almost as many times as the number of flush threads in a short time.
-          if Time.now >= @retry.next_time
-            @retry.step
-          else
-            @retry.recalc_next_time # to prevent all flush threads from retrying at the same time
-          end
-          if error
-            if using_secondary
-              msg = "failed to flush the buffer with secondary output."
-              log.warn msg, retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error
-              log.warn_backtrace error.backtrace
-            else
-              msg = "failed to flush the buffer."
-              log.warn msg, retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error
-              log.warn_backtrace error.backtrace
-            end
-          end
+          @retry.recalc_next_time # to prevent all flush threads from retrying at the same time
         end
+
+        if @retry.limit?
+          handle_limit_reached(error)
+        elsif error
+          log_retry_error(error, chunk_id_hex, using_secondary)
+        end
+      end
+    end
+
+    def log_retry_error(error, chunk_id_hex, using_secondary)
+      return unless error
+      if using_secondary
+        msg = "failed to flush the buffer with secondary output."
+      else
+        msg = "failed to flush the buffer."
       end
+      log.warn(msg, retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error)
+      log.warn_backtrace(error.backtrace)
+    end
+
+    def handle_limit_reached(error)
+      if error
+        records = @buffer.queued_records
+        msg = "Hit limit for retries. dropping all chunks in the buffer queue."
+        log.error msg, retry_times: @retry.steps, records: records, error: error
+        log.error_backtrace error.backtrace
+      end
+      @buffer.clear_queue!
+      log.debug "buffer queue cleared"
+      @retry = nil
     end
 
     def retry_state(randomize)
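The comment carried over into the refactored code explains the motivation: when many flush threads fail in the same window, each of them used to advance the retry state, inflating the backoff counter. Below is a minimal toy sketch (a made-up `ToyRetryState`, not fluentd's `Output` or retry classes) of how the `Time.now >= @retry.next_time` guard keeps only one thread stepping the backoff while the others merely recompute the next retry time:

```ruby
require 'thread'

# Toy stand-in for a retry state: step() advances the backoff,
# recalc_next_time() only recomputes when the next attempt should happen.
class ToyRetryState
  attr_reader :steps, :next_time

  def initialize(wait = 1.0)
    @wait = wait
    @steps = 0
    @next_time = Time.now # already due, so the first failing thread may step
  end

  def step
    @steps += 1
    @next_time = Time.now + @wait * (2**@steps)
  end

  def recalc_next_time
    @next_time = Time.now + @wait * (2**@steps)
  end
end

retry_state = ToyRetryState.new
mutex = Mutex.new

threads = 8.times.map do
  Thread.new do
    mutex.synchronize do
      if Time.now >= retry_state.next_time
        retry_state.step              # only the first overdue failure advances the backoff
      else
        retry_state.recalc_next_time  # later failures in the same window just reschedule
      end
    end
  end
end
threads.each(&:join)

puts retry_state.steps # => 1, instead of 8 without the guard
```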
data/lib/fluent/plugin/parser.rb CHANGED
@@ -89,7 +89,7 @@ module Fluent
     # : format[, timezone]
 
     config_param :time_key, :string, default: nil
-    config_param :null_value_pattern, :string, default: nil
+    config_param :null_value_pattern, :regexp, default: nil
     config_param :null_empty_string, :bool, default: false
     config_param :estimate_current_event, :bool, default: true
     config_param :keep_time_key, :bool, default: false
@@ -115,9 +115,8 @@ module Fluent
       super
 
       @time_parser = time_parser_create
-      @null_value_regexp = @null_value_pattern && Regexp.new(@null_value_pattern)
       @type_converters = build_type_converters(@types)
-      @execute_convert_values = @type_converters || @null_value_regexp || @null_empty_string
+      @execute_convert_values = @type_converters || @null_value_pattern || @null_empty_string
       @timeout_checker = if @timeout
                            class << self
                              alias_method :parse_orig, :parse
@@ -220,7 +219,7 @@ module Fluent
       return time, record
     end
 
-    def string_like_null(value, null_empty_string = @null_empty_string, null_value_regexp = @null_value_regexp)
+    def string_like_null(value, null_empty_string = @null_empty_string, null_value_regexp = @null_value_pattern)
       null_empty_string && value.empty? || null_value_regexp && string_safe_encoding(value){|s| null_value_regexp.match(s) }
     end
 
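With `null_value_pattern` now declared as a `:regexp` parameter, the configured value arrives as a `Regexp` and no longer needs the manual `Regexp.new` compilation step removed above. A standalone sketch of the null check that `string_like_null` performs (the pattern and inputs below are illustrative, not fluentd defaults):

```ruby
# Illustrative pattern and inputs; the real values come from <parse> configuration.
null_value_pattern = /\A(-|null|NULL)\z/
null_empty_string  = true

string_like_null = lambda do |value|
  (null_empty_string && value.empty?) ||
    !!(null_value_pattern && null_value_pattern.match(value))
end

p string_like_null.call("")     # => true  (empty string counts as null)
p string_like_null.call("null") # => true  (matches the pattern)
p string_like_null.call("42")   # => false
```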
data/lib/fluent/plugin_helper/retry_state.rb CHANGED
@@ -44,6 +44,8 @@ module Fluent
 
         @timeout = timeout
         @timeout_at = @start + timeout
+        @has_reached_timeout = false
+        @has_timed_out = false
         @current = :primary
 
         if randomize_width < 0 || randomize_width > 0.5
@@ -98,7 +100,7 @@ module Fluent
             naive
           end
         elsif @current == :secondary
-          naive = naive_next_time(@steps - @secondary_transition_steps + 1)
+          naive = naive_next_time(@steps - @secondary_transition_steps)
           if naive >= @timeout_at
             @timeout_at
           else
@@ -123,7 +125,15 @@ module Fluent
           @current = :secondary
           @secondary_transition_steps = @steps
         end
+
         @next_time = calc_next_time
+
+        if @has_reached_timeout
+          @has_timed_out = @next_time >= @timeout_at
+        else
+          @has_reached_timeout = @next_time >= @timeout_at
+        end
+
         nil
       end
 
@@ -135,7 +145,7 @@ module Fluent
         if @forever
          false
        else
-          @next_time >= @timeout_at || !!(@max_steps && @steps >= @max_steps)
+          @has_timed_out || !!(@max_steps && @steps >= @max_steps)
        end
      end
    end
@@ -165,7 +175,7 @@ module Fluent
      end
 
      def calc_interval(num)
-        interval = raw_interval(num - 1)
+        interval = raw_interval(num)
        if @max_interval && interval > @max_interval
          @max_interval
        else
@@ -175,7 +185,7 @@ module Fluent
        # Calculate previous finite value to avoid inf related errors. If this re-computing is heavy, use cache.
        until interval.finite?
          num -= 1
-          interval = raw_interval(num - 1)
+          interval = raw_interval(num)
        end
        interval
      end
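The `calc_interval` change above removes an off-by-one, so each retry now uses the interval for its own step rather than the previous one. A standalone sketch of the effect, assuming `raw_interval` is a plain exponential backoff (the constants below are illustrative, not fluentd's defaults):

```ruby
retry_wait   = 1.0   # base wait in seconds (illustrative)
backoff_base = 2.0

raw_interval = ->(num) { retry_wait * (backoff_base**num) }

(1..5).each do |num|
  before = raw_interval.call(num - 1) # old: raw_interval(num - 1)
  after  = raw_interval.call(num)     # new: raw_interval(num)
  puts format("step %d: before=%.1fs after=%.1fs", num, before, after)
end
# step 1: before=1.0s after=2.0s
# step 2: before=2.0s after=4.0s ... each interval moves one exponent later
```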
data/lib/fluent/plugin_helper/server.rb CHANGED
@@ -80,8 +80,8 @@ module Fluent
       raise ArgumentError, "BUG: block not specified which handles connection" unless block_given?
       raise ArgumentError, "BUG: block must have just one argument" unless block.arity == 1
 
-      if proto == :tcp || proto == :tls # default linger_timeout only for server
-        socket_options[:linger_timeout] ||= 0
+      if proto == :tcp || proto == :tls
+        socket_options[:linger_timeout] ||= @transport_config&.linger_timeout || 0
       end
 
       socket_option_validate!(proto, **socket_options)
@@ -132,8 +132,8 @@ module Fluent
       raise ArgumentError, "BUG: block not specified which handles received data" unless block_given?
       raise ArgumentError, "BUG: block must have 1 or 2 arguments" unless callback.arity == 1 || callback.arity == 2
 
-      if proto == :tcp || proto == :tls # default linger_timeout only for server
-        socket_options[:linger_timeout] ||= 0
+      if proto == :tcp || proto == :tls
+        socket_options[:linger_timeout] ||= @transport_config&.linger_timeout || 0
       end
 
       unless socket
@@ -263,6 +263,23 @@ module Fluent
       include Fluent::Configurable
       config_section :transport, required: false, multi: false, init: true, param_name: :transport_config do
         config_argument :protocol, :enum, list: [:tcp, :tls], default: :tcp
+
+        ### Socket Params ###
+
+        # SO_LINGER 0 to send RST rather than FIN to avoid lots of connections sitting in TIME_WAIT at src.
+        # Set positive value if needing to send FIN on closing.
+        # NOTE:
+        # Socket-options can be specified from each plugin as needed, so most of them is not defined here for now.
+        # This is because there is no positive reason to do so.
+        # `linger_timeout` option in particular needs to be defined here
+        # although it can be specified from each plugin as well.
+        # This is because this helper fixes the default value to `0` for its own reason
+        # and it has a critical effect on the behavior.
+        desc 'The timeout time used to set linger option.'
+        config_param :linger_timeout, :integer, default: 0
+
+        ### TLS Params ###
+
         config_param :version, :enum, list: Fluent::TLS::SUPPORTED_VERSIONS, default: Fluent::TLS::DEFAULT_VERSION
         config_param :min_version, :enum, list: Fluent::TLS::SUPPORTED_VERSIONS, default: nil
         config_param :max_version, :enum, list: Fluent::TLS::SUPPORTED_VERSIONS, default: nil
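The comment above explains the default: `SO_LINGER` with a 0-second timeout makes `close` send RST instead of FIN, so a busy server does not accumulate sockets in TIME_WAIT, and the new `linger_timeout` transport parameter lets deployments that need a normal FIN shutdown override that default. What the option means at the plain-socket level, in a generic Ruby sketch (not fluentd code):

```ruby
require 'socket'

sock = Socket.new(Socket::AF_INET, Socket::SOCK_STREAM)

# SO_LINGER enabled with a 0-second timeout: close() resets the connection (RST)
# instead of the normal FIN handshake that leaves TIME_WAIT behind.
sock.setsockopt(Socket::Option.linger(true, 0))

p sock.getsockopt(Socket::SOL_SOCKET, Socket::SO_LINGER).linger # => [true, 0]
sock.close
```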
data/lib/fluent/plugin_helper/socket.rb CHANGED
@@ -96,6 +96,7 @@ module Fluent
                           enable_system_cert_store: true, allow_self_signed_cert: false, cert_paths: nil,
                           cert_path: nil, private_key_path: nil, private_key_passphrase: nil,
                           cert_thumbprint: nil, cert_logical_store_name: nil, cert_use_enterprise_store: true,
+                          connect_timeout: nil,
                           **kwargs, &block)
 
       host_is_ipaddress = IPAddr.new(host) rescue false
@@ -158,13 +159,23 @@ module Fluent
       end
       Fluent::TLS.set_version_to_context(context, version, min_version, max_version)
 
-      tcpsock = socket_create_tcp(host, port, **kwargs)
+      tcpsock = socket_create_tcp(host, port, connect_timeout: connect_timeout, **kwargs)
       sock = WrappedSocket::TLS.new(tcpsock, context)
       sock.sync_close = true
       sock.hostname = fqdn if verify_fqdn && fqdn && sock.respond_to?(:hostname=)
 
       log.trace "entering TLS handshake"
-      sock.connect
+      if connect_timeout
+        begin
+          Timeout.timeout(connect_timeout) { sock.connect }
+        rescue Timeout::Error
+          log.warn "timeout while connecting tls session", host: host
+          sock.close rescue nil
+          raise
+        end
+      else
+        sock.connect
+      end
 
       begin
         if verify_fqdn
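With the new `connect_timeout` keyword, the otherwise blocking TLS handshake is bounded by `Timeout.timeout` and the half-open socket is closed before the error is re-raised. A hypothetical caller-side sketch (host, port, and values are examples only, for a plugin that mixes in this socket helper):

```ruby
# Assumed usage sketch, not an exact fluentd API reference: bound the TLS
# connection so a stalled handshake cannot hang the plugin indefinitely.
socket_create_tls('logs.example.com', 24224, verify_fqdn: true, connect_timeout: 10) do |sock|
  sock.write("ping\n")
end
```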
data/lib/fluent/registry.rb CHANGED
@@ -45,7 +45,8 @@ module Fluent
       if value = @map[type]
         return value
       end
-      raise ConfigError, "Unknown #{@kind} plugin '#{type}'. Run 'gem search -rd fluent-plugin' to find plugins" # TODO error class
+      raise NotFoundPluginError.new("Unknown #{@kind} plugin '#{type}'. Run 'gem search -rd fluent-plugin' to find plugins",
+                                    kind: @kind, type: type)
     end
 
     def reverse_lookup(value)
data/lib/fluent/rpc.rb CHANGED
@@ -20,9 +20,10 @@ module Fluent
   module RPC
     class Server
       def initialize(endpoint, log)
-        bind, port = endpoint.split(':')
-        @bind = bind
-        @port = port
+        m = endpoint.match(/^\[?(?<host>[0-9a-zA-Z:\-\.]+)\]?:(?<port>[0-9]+)$/)
+        raise Fluent::ConfigError, "Invalid rpc_endpoint: #{endpoint}" unless m
+        @bind = m[:host]
+        @port = m[:port]
         @log = log
 
         @server = WEBrick::HTTPServer.new(
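The previous `endpoint.split(':')` broke on IPv6 addresses such as `[::1]:24444`, because the address itself contains colons. A quick standalone check of the new pattern against typical `rpc_endpoint` values (illustration only):

```ruby
PATTERN = /^\[?(?<host>[0-9a-zA-Z:\-\.]+)\]?:(?<port>[0-9]+)$/

["127.0.0.1:24444", "[::1]:24444", "localhost:24444", "no-port-here"].each do |endpoint|
  if (m = endpoint.match(PATTERN))
    puts "#{endpoint} -> bind=#{m[:host]} port=#{m[:port]}"
  else
    puts "#{endpoint} -> rejected (raises Fluent::ConfigError)"
  end
end
# 127.0.0.1:24444 -> bind=127.0.0.1 port=24444
# [::1]:24444     -> bind=::1 port=24444
# localhost:24444 -> bind=localhost port=24444
# no-port-here    -> rejected (raises Fluent::ConfigError)
```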
data/lib/fluent/supervisor.rb CHANGED
@@ -397,6 +397,7 @@ module Fluent
       log_path = params['log_path']
       chuser = params['chuser']
       chgroup = params['chgroup']
+      chumask = params['chumask']
       log_rotate_age = params['log_rotate_age']
       log_rotate_size = params['log_rotate_size']
 
@@ -436,7 +437,7 @@ module Fluent
         logger_initializer: logger_initializer,
         chuser: chuser,
         chgroup: chgroup,
-        chumask: 0,
+        chumask: chumask,
         suppress_repeated_stacktrace: suppress_repeated_stacktrace,
         ignore_repeated_log_interval: ignore_repeated_log_interval,
         ignore_same_log_interval: ignore_same_log_interval,
@@ -565,6 +566,7 @@ module Fluent
       setup_path: nil,
       chuser: nil,
       chgroup: nil,
+      chumask: "0",
       root_dir: nil,
       suppress_interval: 0,
       suppress_repeated_stacktrace: true,
@@ -603,6 +605,7 @@ module Fluent
       @plugin_dirs = opt[:plugin_dirs]
       @chgroup = opt[:chgroup]
       @chuser = opt[:chuser]
+      @chumask = opt[:chumask]
 
       @log_rotate_age = opt[:log_rotate_age]
       @log_rotate_size = opt[:log_rotate_size]
@@ -709,7 +712,7 @@ module Fluent
       create_socket_manager if @standalone_worker
       if @standalone_worker
         ServerEngine::Privilege.change(@chuser, @chgroup)
-        File.umask(0)
+        File.umask(@chumask.to_i(8))
       end
       MessagePackFactory.init(enable_time_support: @system_config.enable_msgpack_time_support)
       Fluent::Engine.init(@system_config)
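The umask now comes from the `chumask` parameter, kept as an octal string with a default of `"0"`, and is converted with `to_i(8)` before `File.umask` applies it instead of being hard-coded to 0. A standalone illustration of that conversion (values are examples only):

```ruby
p "0".to_i(8)    # => 0   (the default keeps the old behavior)
p "0022".to_i(8) # => 18  (022 in octal)

File.umask("0022".to_i(8))
p File.umask.to_s(8) # => "22"
```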
data/lib/fluent/version.rb CHANGED
@@ -16,6 +16,6 @@
 
 module Fluent
 
-  VERSION = '1.14.3'
+  VERSION = '1.14.6'
 
 end
data/test/compat/test_parser.rb CHANGED
@@ -22,7 +22,7 @@ class TextParserTest < ::Test::Unit::TestCase
   Fluent::TextParser.register_template('multi_event_test', Proc.new { MultiEventTestParser.new })
 
   def test_lookup_unknown_format
-    assert_raise Fluent::ConfigError do
+    assert_raise Fluent::NotFoundPluginError do
       Fluent::Plugin.new_parser('unknown')
     end
   end
data/test/plugin/test_bare_output.rb CHANGED
@@ -83,7 +83,7 @@ class BareOutputTest < Test::Unit::TestCase
 
     @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
 
-    assert{ @p.log.object_id != original_logger.object_id }
+    assert(@p.log.object_id != original_logger.object_id)
     assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
   end
 
data/test/plugin/test_buffer.rb CHANGED
@@ -990,6 +990,51 @@ class BufferTest < Test::Unit::TestCase
       assert_equal [@dm0], @p.queue.map(&:metadata)
       assert_equal [5000], @p.queue.map(&:size)
     end
+
+    test "confirm that every message which is smaller than chunk threshold does not raise BufferChunkOverflowError" do
+      assert_equal [@dm0], @p.stage.keys
+      assert_equal [], @p.queue.map(&:metadata)
+      timestamp = event_time('2016-04-11 16:00:02 +0000')
+      es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "a" * 1_000_000}],
+                                         [timestamp, {"message" => "b" * 1_000_000}],
+                                         [timestamp, {"message" => "c" * 1_000_000}]])
+
+      # https://github.com/fluent/fluentd/issues/1849
+      # Even though 1_000_000 < 1_280_000 (chunk_limit_size), it raised BufferChunkOverflowError before.
+      # It should not be raised and message a,b,c should be stored into 3 chunks.
+      assert_nothing_raised do
+        @p.write({@dm0 => es}, format: @format)
+      end
+      messages = []
+      # pick up first letter to check whether chunk is queued in expected order
+      3.times do |index|
+        chunk = @p.queue[index]
+        es = Fluent::MessagePackEventStream.new(chunk.chunk)
+        es.ensure_unpacked!
+        records = es.instance_eval{ @unpacked_records }
+        records.each do |record|
+          messages << record["message"][0]
+        end
+      end
+      es = Fluent::MessagePackEventStream.new(@p.stage[@dm0].chunk)
+      es.ensure_unpacked!
+      staged_message = es.instance_eval{ @unpacked_records }.first["message"]
+      # message a and b are queued, message c is staged
+      assert_equal([
+                     [@dm0],
+                     "c" * 1_000_000,
+                     [@dm0, @dm0, @dm0],
+                     [5000, 1, 1],
+                     [["x"] * 5000, "a", "b"].flatten
+                   ],
+                   [
+                     @p.stage.keys,
+                     staged_message,
+                     @p.queue.map(&:metadata),
+                     @p.queue.map(&:size),
+                     messages
+                   ])
+    end
   end
 
   sub_test_case 'custom format with configuration for test with lower chunk limit size' do
@@ -1078,6 +1123,38 @@ class BufferTest < Test::Unit::TestCase
         @p.write({@dm0 => es})
       end
     end
+
+    test 'confirm that every array message which is smaller than chunk threshold does not raise BufferChunkOverflowError' do
+      assert_equal [@dm0], @p.stage.keys
+      assert_equal [], @p.queue.map(&:metadata)
+
+      assert_equal 1_280_000, @p.chunk_limit_size
+
+      es = ["a" * 1_000_000, "b" * 1_000_000, "c" * 1_000_000]
+      assert_nothing_raised do
+        @p.write({@dm0 => es})
+      end
+      queue_messages = @p.queue.collect do |chunk|
+        # collect first character of each message
+        chunk.chunk[0]
+      end
+      assert_equal([
+                     [@dm0],
+                     1,
+                     "c",
+                     [@dm0, @dm0, @dm0],
+                     [5000, 1, 1],
+                     ["x", "a", "b"]
+                   ],
+                   [
+                     @p.stage.keys,
+                     @p.stage[@dm0].size,
+                     @p.stage[@dm0].chunk[0],
+                     @p.queue.map(&:metadata),
+                     @p.queue.map(&:size),
+                     queue_messages
+                   ])
+    end
   end
 
   sub_test_case 'with configuration for test with lower limits' do
data/test/plugin/test_filter.rb CHANGED
@@ -153,7 +153,7 @@ class FilterPluginTest < Test::Unit::TestCase
 
     @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
 
-    assert{ @p.log.object_id != original_logger.object_id }
+    assert(@p.log.object_id != original_logger.object_id)
     assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
   end
 
data/test/plugin/test_filter_parser.rb CHANGED
@@ -46,7 +46,7 @@ class ParserFilterTest < Test::Unit::TestCase
     assert_raise(Fluent::ConfigError) {
       create_driver('')
     }
-    assert_raise(Fluent::ConfigError) {
+    assert_raise(Fluent::NotFoundPluginError) {
       create_driver %[
         key_name foo
         <parse>
data/test/plugin/test_filter_stdout.rb CHANGED
@@ -63,7 +63,7 @@ class StdoutFilterTest < Test::Unit::TestCase
   end
 
   def test_invalid_output_type
-    assert_raise(Fluent::ConfigError) do
+    assert_raise(Fluent::NotFoundPluginError) do
      d = create_driver(CONFIG + config_element("", "", { "output_type" => "foo" }))
      d.run {}
    end
@@ -139,7 +139,7 @@ class StdoutFilterTest < Test::Unit::TestCase
   def test_invalid_output_type
     conf = config_element
     conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => "foo" })
-    assert_raise(Fluent::ConfigError) do
+    assert_raise(Fluent::NotFoundPluginError) do
      d = create_driver(conf)
      d.run {}
    end
data/test/plugin/test_in_forward.rb CHANGED
@@ -68,7 +68,6 @@ class ForwardInputTest < Test::Unit::TestCase
     @d = d = create_driver
     assert_equal @port, d.instance.port
     assert_equal '127.0.0.1', d.instance.bind
-    assert_equal 0, d.instance.linger_timeout
     assert_equal 0.5, d.instance.blocking_timeout
     assert !d.instance.backlog
   end
@@ -77,7 +76,6 @@ class ForwardInputTest < Test::Unit::TestCase
     @d = d = create_driver(config_auth)
     assert_equal @port, d.instance.port
     assert_equal '127.0.0.1', d.instance.bind
-    assert_equal 0, d.instance.linger_timeout
     assert !d.instance.backlog
 
     assert d.instance.security
data/test/plugin/test_in_http.rb CHANGED
@@ -540,6 +540,29 @@ class HttpInputTest < Test::Unit::TestCase
     assert_equal_event_time time, d.events[1][1]
   end
 
+  def test_application_ndjson
+    d = create_driver
+    events = [
+      ["tag1", 1643935663, "{\"a\":1}\n{\"b\":2}"],
+      ["tag2", 1643935664, "{\"a\":3}\r\n{\"b\":4}"]
+    ]
+
+    expected = [
+      ["tag1", 1643935663, {"a"=>1}],
+      ["tag1", 1643935663, {"b"=>2}],
+      ["tag2", 1643935664, {"a"=>3}],
+      ["tag2", 1643935664, {"b"=>4}]
+    ]
+
+    d.run(expect_records: 1) do
+      events.each do |tag, time, record|
+        res = post("/#{tag}?time=#{time}", record, {"Content-Type"=>"application/x-ndjson"})
+        assert_equal("200", res.code)
+      end
+    end
+    assert_equal(expected, d.events)
+  end
+
   def test_msgpack
     d = create_driver
     time = event_time("2011-01-02 13:14:15 UTC")
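The new test documents the `application/x-ndjson` handling: a single request body may carry several newline-delimited JSON records (separated by either `\n` or `\r\n`), and each line becomes its own event. A standalone sketch of that splitting (illustration only, not the in_http implementation):

```ruby
require 'json'

body = "{\"a\":1}\n{\"b\":2}\r\n"
records = body.split(/\r?\n/).reject(&:empty?).map { |line| JSON.parse(line) }
p records # => [{"a"=>1}, {"b"=>2}]
```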
data/test/plugin/test_in_tail.rb CHANGED
@@ -1707,6 +1707,41 @@ class TailInputTest < Test::Unit::TestCase
       mock(plugin.router).emit_stream('pre.foo.bar.log.post', anything).once
       plugin.receive_lines(['foo', 'bar'], DummyWatcher.new('foo.bar.log'))
     end
+
+    data(
+      small: ["128", 128],
+      KiB: ["1k", 1024]
+    )
+    test 'max_line_size' do |(label, size)|
+      config = config_element("", "", {
+                                "tag" => "max_line_size",
+                                "path" => "#{TMP_DIR}/with_long_lines.txt",
+                                "format" => "none",
+                                "read_from_head" => true,
+                                "max_line_size" => label,
+                                "log_level" => "debug"
+                              })
+      File.open("#{TMP_DIR}/with_long_lines.txt", "w+") do |f|
+        f.puts "foo"
+        f.puts "x" * size # 'x' * size + \n > @max_line_size
+        f.puts "bar"
+      end
+      d = create_driver(config, false)
+      timestamp = Time.parse("Mon Nov 29 11:22:33 UTC 2021")
+      Timecop.freeze(timestamp)
+      d.run(expect_records: 2)
+      assert_equal([
+                     [{"message" => "foo"},{"message" => "bar"}],
+                     [
+                       "2021-11-29 11:22:33 +0000 [warn]: received line length is longer than #{size}\n",
+                       "2021-11-29 11:22:33 +0000 [debug]: skipped line: #{'x' * size}\n"
+                     ]
+                   ],
+                   [
+                     d.events.collect { |event| event.last },
+                     d.logs[-2..]
+                   ])
+    end
   end
 
   # Ensure that no fatal exception is raised when a file is missing and that
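What the new test asserts, in miniature: with `max_line_size` set, lines longer than the limit are skipped (and logged) rather than emitted as events. A standalone illustration of that filtering (not the in_tail implementation itself):

```ruby
max_line_size = 128
lines = ["foo", "x" * 200, "bar"]

kept, skipped = lines.partition { |line| line.bytesize <= max_line_size }
p kept                    # => ["foo", "bar"]
p skipped.map(&:bytesize) # => [200]
```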
data/test/plugin/test_input.rb CHANGED
@@ -73,7 +73,7 @@ class InputTest < Test::Unit::TestCase
 
     @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))
 
-    assert{ @p.log.object_id != original_logger.object_id }
+    assert(@p.log.object_id != original_logger.object_id)
     assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
   end
 
data/test/plugin/test_out_exec.rb CHANGED
@@ -243,7 +243,7 @@ class ExecOutputTest < Test::Unit::TestCase
   sub_test_case 'when executed process dies unexpectedly' do
     setup do
       @gen_config = ->(num){ <<EOC
-command ruby -e "ARGV.first.to_i == 0 ? open(ARGV[1]){|f| STDOUT.write f.read} : (sleep 1 ; exit ARGV.first.to_i)" #{num} >#{TMP_DIR}/fail_out
+command ruby -e "ARGV.first.to_i == 0 ? open(ARGV[1]){|f| STDOUT.write(f.read); STDOUT.flush} : (sleep 1 ; exit ARGV.first.to_i)" #{num} >#{TMP_DIR}/fail_out
 <inject>
   tag_key tag
   time_key time
@@ -265,7 +265,7 @@ EOC
       expect_path = "#{TMP_DIR}/fail_out"
 
       d.end_if{ File.exist?(expect_path) }
-      d.run(default_tag: 'test', flush: true, wait_flush_completion: false, shutdown: false) do
+      d.run(default_tag: 'test', flush: true, wait_flush_completion: true, shutdown: false) do
        d.feed(time, records[0])
        d.feed(time, records[1])
      end
@@ -281,7 +281,8 @@ EOC
       assert{ d.instance.buffer.queue.empty? }
       assert{ d.instance.dequeued_chunks.empty? }
 
-      d.instance_shutdown
+    ensure
+      d.instance_shutdown if d && d.instance
     end
 
     test 'flushed chunk will be taken back after child process unexpectedly exits' do
@@ -304,7 +305,8 @@ EOC
 
       assert{ File.exist?(expect_path) && File.size(expect_path) == 0 }
 
-      d.instance_shutdown
+    ensure
+      d.instance_shutdown if d && d.instance
     end
   end
 end