fluentd 1.14.1-x64-mingw32 → 1.14.5-x64-mingw32
Potentially problematic release: this version of fluentd might be problematic.
- checksums.yaml +4 -4
- data/.drone.yml +6 -6
- data/.github/ISSUE_TEMPLATE/bug_report.yaml +1 -0
- data/CHANGELOG.md +94 -0
- data/README.md +3 -22
- data/Rakefile +1 -1
- data/SECURITY.md +18 -0
- data/fluentd.gemspec +5 -4
- data/lib/fluent/command/cat.rb +13 -3
- data/lib/fluent/config/error.rb +12 -0
- data/lib/fluent/env.rb +4 -0
- data/lib/fluent/plugin/base.rb +1 -1
- data/lib/fluent/plugin/buf_file.rb +2 -2
- data/lib/fluent/plugin/buffer.rb +62 -8
- data/lib/fluent/plugin/in_http.rb +11 -1
- data/lib/fluent/plugin/in_tail.rb +21 -4
- data/lib/fluent/plugin/out_file.rb +13 -1
- data/lib/fluent/plugin/output.rb +11 -6
- data/lib/fluent/plugin/parser_apache2.rb +1 -1
- data/lib/fluent/plugin/storage_local.rb +3 -5
- data/lib/fluent/plugin_helper/socket.rb +13 -2
- data/lib/fluent/plugin_id.rb +2 -1
- data/lib/fluent/registry.rb +2 -1
- data/lib/fluent/supervisor.rb +2 -2
- data/lib/fluent/time.rb +21 -20
- data/lib/fluent/version.rb +1 -1
- data/test/command/test_cat.rb +31 -2
- data/test/compat/test_parser.rb +1 -1
- data/test/plugin/test_bare_output.rb +1 -1
- data/test/plugin/test_buffer.rb +149 -1
- data/test/plugin/test_filter.rb +1 -1
- data/test/plugin/test_filter_parser.rb +1 -1
- data/test/plugin/test_filter_stdout.rb +2 -2
- data/test/plugin/test_in_http.rb +23 -0
- data/test/plugin/test_in_tail.rb +90 -6
- data/test/plugin/test_input.rb +1 -1
- data/test/plugin/test_out_exec.rb +6 -4
- data/test/plugin/test_out_exec_filter.rb +4 -0
- data/test/plugin/test_out_file.rb +29 -13
- data/test/plugin/test_out_stdout.rb +2 -2
- data/test/plugin/test_output_as_buffered_retries.rb +47 -0
- data/test/plugin/test_output_as_buffered_secondary.rb +1 -1
- data/test/plugin_helper/test_child_process.rb +9 -9
- data/test/plugin_helper/test_timer.rb +2 -2
- data/test/test_formatter.rb +1 -1
- data/test/test_time_parser.rb +22 -0
- metadata +26 -11
data/lib/fluent/plugin/output.rb
CHANGED
@@ -14,6 +14,7 @@
 # limitations under the License.
 #

+require 'fluent/env'
 require 'fluent/error'
 require 'fluent/plugin/base'
 require 'fluent/plugin/buffer'
@@ -1248,8 +1249,8 @@ module Fluent
       backup_dir = File.dirname(backup_file)

       log.warn "bad chunk is moved to #{backup_file}"
-      FileUtils.mkdir_p(backup_dir, mode: system_config.dir_permission || …
-      File.open(backup_file, 'ab', system_config.file_permission || …
+      FileUtils.mkdir_p(backup_dir, mode: system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION) unless Dir.exist?(backup_dir)
+      File.open(backup_file, 'ab', system_config.file_permission || Fluent::DEFAULT_FILE_PERMISSION) { |f|
         chunk.write_to(f)
       }
     end
@@ -1274,11 +1275,15 @@ module Fluent

       unless @retry
         @retry = retry_state(@buffer_config.retry_randomize)
-        if error
-          log.warn "failed to flush the buffer.", retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error
-          log.warn_backtrace error.backtrace
+        if @retry.limit?
+          # @retry_max_times == 0, fail immediately by the following block
+        else
+          if error
+            log.warn "failed to flush the buffer.", retry_times: @retry.steps, next_retry_time: @retry.next_time.round, chunk: chunk_id_hex, error: error
+            log.warn_backtrace error.backtrace
+          end
+          return
         end
-        return
       end

       # @retry exists
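The retry hunk changes what happens when the retry state is already at its limit on creation, i.e. when retry_max_times is 0: instead of logging a "failed to flush" warning and scheduling a retry, the first failure falls straight through to the retry-limit handling. A minimal standalone sketch of that control flow (RetryState here is a stand-in, not fluentd's actual class):

    class RetryState
      def initialize(max_times)
        @steps = 0
        @max_times = max_times
      end

      def limit?
        @steps >= @max_times  # true immediately when max_times == 0
      end
    end

    retry_state = RetryState.new(0)
    if retry_state.limit?
      puts "retry_max_times == 0: fail immediately, no retry scheduled"
    else
      puts "log a warning and schedule the next retry"
    end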
data/lib/fluent/plugin/parser_apache2.rb
CHANGED
@@ -21,7 +21,7 @@ module Fluent
   class Apache2Parser < Parser
     Plugin.register_parser('apache2', self)

-    REGEXP = /^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>(?:[^\"] …
+    REGEXP = /^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>(?:[^\"]|\\")*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>(?:[^\"]|\\")*)" "(?<agent>(?:[^\"]|\\")*)")?$/
     TIME_FORMAT = "%d/%b/%Y:%H:%M:%S %z"

     def initialize
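The (?:[^\"]|\\")* alternation lets the path, referer and agent fields contain escaped quotes. A quick standalone check of the new pattern against a log line with an escaped quote in the agent field (the sample line is invented for illustration):

    REGEXP = /^(?<host>[^ ]*) [^ ]* (?<user>[^ ]*) \[(?<time>[^\]]*)\] "(?<method>\S+)(?: +(?<path>(?:[^\"]|\\")*?)(?: +\S*)?)?" (?<code>[^ ]*) (?<size>[^ ]*)(?: "(?<referer>(?:[^\"]|\\")*)" "(?<agent>(?:[^\"]|\\")*)")?$/
    line = '192.168.0.1 - alice [11/Apr/2016:16:00:02 +0000] ' \
           '"GET /index.html HTTP/1.1" 200 777 "-" "Mozilla \"compatible\" agent"'
    m = REGEXP.match(line)
    puts m[:agent]  # => Mozilla \"compatible\" agent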
data/lib/fluent/plugin/storage_local.rb
CHANGED
@@ -14,6 +14,7 @@
 # limitations under the License.
 #

+require 'fluent/env'
 require 'fluent/plugin'
 require 'fluent/plugin/storage'

@@ -25,14 +26,11 @@ module Fluent
     class LocalStorage < Storage
       Fluent::Plugin.register_storage('local', self)

-      DEFAULT_DIR_MODE = 0755
-      DEFAULT_FILE_MODE = 0644
-
       config_param :path, :string, default: nil
-      config_param :mode, default: DEFAULT_FILE_MODE do |v|
+      config_param :mode, default: Fluent::DEFAULT_FILE_PERMISSION do |v|
         v.to_i(8)
       end
-      config_param :dir_mode, default: DEFAULT_DIR_MODE do |v|
+      config_param :dir_mode, default: Fluent::DEFAULT_DIR_PERMISSION do |v|
         v.to_i(8)
       end
       config_param :pretty_print, :bool, default: false
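Both params still parse their configured value as an octal string via v.to_i(8); only the defaults moved from the plugin-local DEFAULT_*_MODE constants to the shared ones in fluent/env. For reference:

    "0644".to_i(8)  # => 420, i.e. 0644
    "755".to_i(8)   # => 493, i.e. 0755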
data/lib/fluent/plugin_helper/socket.rb
CHANGED
@@ -96,6 +96,7 @@ module Fluent
                            enable_system_cert_store: true, allow_self_signed_cert: false, cert_paths: nil,
                            cert_path: nil, private_key_path: nil, private_key_passphrase: nil,
                            cert_thumbprint: nil, cert_logical_store_name: nil, cert_use_enterprise_store: true,
+                           connect_timeout: nil,
                            **kwargs, &block)

        host_is_ipaddress = IPAddr.new(host) rescue false
@@ -158,13 +159,23 @@ module Fluent
        end
        Fluent::TLS.set_version_to_context(context, version, min_version, max_version)

-       tcpsock = socket_create_tcp(host, port, **kwargs)
+       tcpsock = socket_create_tcp(host, port, connect_timeout: connect_timeout, **kwargs)
        sock = WrappedSocket::TLS.new(tcpsock, context)
        sock.sync_close = true
        sock.hostname = fqdn if verify_fqdn && fqdn && sock.respond_to?(:hostname=)

        log.trace "entering TLS handshake"
-       sock.connect
+       if connect_timeout
+         begin
+           Timeout.timeout(connect_timeout) { sock.connect }
+         rescue Timeout::Error
+           log.warn "timeout while connecting tls session", host: host
+           sock.close rescue nil
+           raise
+         end
+       else
+         sock.connect
+       end

        begin
          if verify_fqdn
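A hedged usage sketch of the new keyword from a plugin that enables the :socket plugin helper; the host, port and timeout values are illustrative, and the Timeout::Error re-raised above propagates to the caller:

    helpers :socket

    def try_ping
      socket_create_tls('logs.example.com', 24224, connect_timeout: 5) do |sock|
        sock.write("ping\n")
      end
    rescue Timeout::Error
      log.warn "TLS connect/handshake exceeded connect_timeout"
    end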
data/lib/fluent/plugin_id.rb
CHANGED
@@ -15,6 +15,7 @@
 #

 require 'set'
+require 'fluent/env'
 require 'fluent/variable_store'

 module Fluent
@@ -76,7 +77,7 @@ module Fluent

       # Fluent::Plugin::Base#fluentd_worker_id
       dir = File.join(system_config.root_dir, "worker#{fluentd_worker_id}", plugin_id)
-      FileUtils.mkdir_p(dir, mode: system_config.dir_permission || …
+      FileUtils.mkdir_p(dir, mode: system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION) unless Dir.exist?(dir)
       @_plugin_root_dir = dir.freeze
       dir
     end
data/lib/fluent/registry.rb
CHANGED
@@ -45,7 +45,8 @@ module Fluent
      if value = @map[type]
        return value
      end
-     raise ConfigError, "Unknown #{@kind} plugin '#{type}'. Run 'gem search -rd fluent-plugin' to find plugins"
+     raise NotFoundPluginError.new("Unknown #{@kind} plugin '#{type}'. Run 'gem search -rd fluent-plugin' to find plugins",
+                                   kind: @kind, type: type)
    end

    def reverse_lookup(value)
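Callers can now distinguish a missing plugin from other configuration problems. A sketch, assuming NotFoundPluginError is the class added in the config/error.rb change listed above and remains rescuable as a config error:

    begin
      Fluent::Plugin.new_parser('no_such_parser')
    rescue Fluent::NotFoundPluginError => e
      # e.message => "Unknown parser plugin 'no_such_parser'. Run 'gem search -rd fluent-plugin' to find plugins"
      puts e.message
    end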
data/lib/fluent/supervisor.rb
CHANGED
@@ -544,7 +544,7 @@ module Fluent
       $log.ignore_same_log_interval = ignore_same_log_interval if ignore_same_log_interval

       if @path && log_dir_perm
-        File.chmod(log_dir_perm || …
+        File.chmod(log_dir_perm || Fluent::DEFAULT_DIR_PERMISSION, File.dirname(@path))
       end
     end

@@ -651,7 +651,7 @@ module Fluent
       end
     else
       begin
-        FileUtils.mkdir_p(root_dir, mode: @system_config.dir_permission || …
+        FileUtils.mkdir_p(root_dir, mode: @system_config.dir_permission || Fluent::DEFAULT_DIR_PERMISSION)
       rescue => e
         raise Fluent::InvalidRootDirectory, "failed to create root directory:#{root_dir}, #{e.inspect}"
       end
data/lib/fluent/time.rb
CHANGED
@@ -226,19 +226,16 @@ module Fluent

     format_with_timezone = format && (format.include?("%z") || format.include?("%Z"))

-    […earlier lines of the old utc_offset case expression were not captured in this extract…]
-                 when localtime then 0
-                 else Time.now.localtime.utc_offset # utc
-                 end
+    utc_offset = case
+                 when format_with_timezone then
+                   nil
+                 when timezone then
+                   Fluent::Timezone.utc_offset(timezone)
+                 when localtime then
+                   nil
+                 else
+                   0 # utc
+                 end

     strptime = format && (Strptime.new(format) rescue nil)

@@ -247,16 +244,20 @@ module Fluent
    when format_with_timezone then ->(v){ Fluent::EventTime.from_time(Time.strptime(v, format)) }
    when format == '%iso8601' then ->(v){ Fluent::EventTime.from_time(Time.iso8601(v)) }
    when strptime then
-      if …
-        ->(v) …
+      if utc_offset.nil?
+        ->(v){ t = strptime.exec(v); Fluent::EventTime.new(t.to_i, t.nsec) }
+      elsif utc_offset.respond_to?(:call)
+        ->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset.call(t), t.nsec) }
      else
-        ->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + …
+        ->(v) { t = strptime.exec(v); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset, t.nsec) }
      end
-    when format …
-      if …
-        ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i …
+    when format then
+      if utc_offset.nil?
+        ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i, t.nsec) }
+      elsif utc_offset.respond_to?(:call)
+        ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset.call(t), t.nsec) }
      else
-        ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + …
+        ->(v){ t = Time.strptime(v, format); Fluent::EventTime.new(t.to_i + t.utc_offset - utc_offset, t.nsec) }
      end
    else ->(v){ Fluent::EventTime.parse(v) }
    end
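The t.to_i + t.utc_offset - utc_offset adjustment re-bases a timestamp that Time.strptime parsed in the machine's local zone onto the configured zone. A worked example, assuming a format without %z and a configured offset of +09:00 (so utc_offset is the integer 32400):

    require 'time'

    format = '%Y-%m-%d %H:%M:%S'
    utc_offset = 9 * 3600  # e.g. what Fluent::Timezone.utc_offset("+09:00") yields

    t = Time.strptime('2021-01-02 13:14:15', format)  # parsed in the machine's local zone
    epoch = t.to_i + t.utc_offset - utc_offset        # the local-zone bias cancels out
    puts Time.at(epoch).utc                           # => 2021-01-02 04:14:15 UTC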
data/lib/fluent/version.rb
CHANGED
data/test/command/test_cat.rb
CHANGED
@@ -69,7 +69,7 @@ class TestFluentCat < ::Test::Unit::TestCase
   def test_cat_json
     d = create_driver
     d.run(expect_records: 1) do
-      Open3.pipeline_w("#{FLUENT_CAT_COMMAND} --port #{@port} json") do |stdin|
+      Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} json") do |stdin|
        stdin.puts('{"key":"value"}')
        stdin.close
      end
@@ -86,7 +86,7 @@ class TestFluentCat < ::Test::Unit::TestCase
     path = d.instance.write(@chunk)
     d = create_driver
     d.run(expect_records: 1) do
-      Open3.pipeline_w("#{FLUENT_CAT_COMMAND} --port #{@port} --format msgpack secondary") do |stdin|
+      Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} --format msgpack secondary") do |stdin|
        stdin.write(File.read(path))
        stdin.close
      end
@@ -96,4 +96,33 @@ class TestFluentCat < ::Test::Unit::TestCase
                    [d.events.size, event.first, event.last])
     end
   end
+
+  sub_test_case "send specific event time" do
+    def test_without_event_time
+      event_time = Fluent::EventTime.now
+      d = create_driver
+      d.run(expect_records: 1) do
+        Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} tag") do |stdin|
+          stdin.puts('{"key":"value"}')
+          stdin.close
+        end
+      end
+      event = d.events.first
+      assert_in_delta(event_time.to_f, event[1].to_f, 3.0) # expect command to be finished in 3 seconds
+      assert_equal([1, "tag", true, @record],
+                   [d.events.size, event.first, event_time.to_f < event[1].to_f, event.last])
+    end
+
+    def test_with_event_time
+      event_time = "2021-01-02 13:14:15.0+00:00"
+      d = create_driver
+      d.run(expect_records: 1) do
+        Open3.pipeline_w("#{ServerEngine.ruby_bin_path} #{FLUENT_CAT_COMMAND} --port #{@port} --event-time '#{event_time}' tag") do |stdin|
+          stdin.puts('{"key":"value"}')
+          stdin.close
+        end
+      end
+      assert_equal([["tag", Fluent::EventTime.parse(event_time), @record]], d.events)
+    end
+  end
 end
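These tests cover a fluent-cat option for overriding the record's event time. A hedged command-line sketch mirroring the test invocation (port and tag are illustrative; the tests prefix the command with ServerEngine.ruby_bin_path only for portability):

    require 'open3'

    Open3.pipeline_w("fluent-cat --port 24224 --event-time '2021-01-02 13:14:15.0+00:00' app.events") do |stdin|
      stdin.puts('{"key":"value"}')
      stdin.close
    end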
data/test/compat/test_parser.rb
CHANGED
@@ -22,7 +22,7 @@ class TextParserTest < ::Test::Unit::TestCase
   Fluent::TextParser.register_template('multi_event_test', Proc.new { MultiEventTestParser.new })

   def test_lookup_unknown_format
-    assert_raise Fluent::ConfigError do
+    assert_raise Fluent::NotFoundPluginError do
       Fluent::Plugin.new_parser('unknown')
     end
   end
data/test/plugin/test_bare_output.rb
CHANGED
@@ -83,7 +83,7 @@ class BareOutputTest < Test::Unit::TestCase

     @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))

-    assert @p.log.object_id != original_logger.object_id
+    assert(@p.log.object_id != original_logger.object_id)
     assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
   end

data/test/plugin/test_buffer.rb
CHANGED
@@ -18,7 +18,7 @@ module FluentPluginBufferTest
   end
   class DummyMemoryChunkError < StandardError; end
   class DummyMemoryChunk < Fluent::Plugin::Buffer::MemoryChunk
-    attr_reader :append_count, :rollbacked, :closed, :purged
+    attr_reader :append_count, :rollbacked, :closed, :purged, :chunk
     attr_accessor :failing
     def initialize(metadata, compress: :text)
       super
@@ -944,6 +944,97 @@ class BufferTest < Test::Unit::TestCase
         @p.write({@dm0 => es}, format: @format)
       end
     end
+
+    data(
+      first_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+                                                 [event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+                                                 [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
+      intermediate_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+                                                        [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+                                                        [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}]]),
+      last_chunk: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+                                                [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
+                                                [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]]),
+      multiple_chunks: Fluent::ArrayEventStream.new([[event_time('2016-04-11 16:00:02 +0000'), {"message" => "a"}],
+                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}],
+                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "b"}],
+                                                     [event_time('2016-04-11 16:00:02 +0000'), {"message" => "x" * 1_280_000}]])
+    )
+    test '#write exceeds chunk_limit_size, raise BufferChunkOverflowError, but not lost whole messages' do |(es)|
+      assert_equal [@dm0], @p.stage.keys
+      assert_equal [], @p.queue.map(&:metadata)
+
+      assert_equal 1_280_000, @p.chunk_limit_size
+
+      nth = []
+      es.entries.each_with_index do |entry, index|
+        if entry.last["message"].size == @p.chunk_limit_size
+          nth << index
+        end
+      end
+      messages = []
+      nth.each do |n|
+        messages << "a 1280025 bytes record (nth: #{n}) is larger than buffer chunk limit size (1280000)"
+      end
+
+      assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages.join(", ")) do
+        @p.write({@dm0 => es}, format: @format)
+      end
+      # message a and b are concatenated and staged
+      staged_messages = Fluent::MessagePackFactory.msgpack_unpacker.feed_each(@p.stage[@dm0].chunk).collect do |record|
+        record.last
+      end
+      assert_equal([2, [{"message" => "a"}, {"message" => "b"}]],
+                   [@p.stage[@dm0].size, staged_messages])
+      # only es0 message is queued
+      assert_equal [@dm0], @p.queue.map(&:metadata)
+      assert_equal [5000], @p.queue.map(&:size)
+    end
+
+    test "confirm that every message which is smaller than chunk threshold does not raise BufferChunkOverflowError" do
+      assert_equal [@dm0], @p.stage.keys
+      assert_equal [], @p.queue.map(&:metadata)
+      timestamp = event_time('2016-04-11 16:00:02 +0000')
+      es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "a" * 1_000_000}],
+                                         [timestamp, {"message" => "b" * 1_000_000}],
+                                         [timestamp, {"message" => "c" * 1_000_000}]])
+
+      # https://github.com/fluent/fluentd/issues/1849
+      # Even though 1_000_000 < 1_280_000 (chunk_limit_size), it raised BufferChunkOverflowError before.
+      # It should not be raised and message a,b,c should be stored into 3 chunks.
+      assert_nothing_raised do
+        @p.write({@dm0 => es}, format: @format)
+      end
+      messages = []
+      # pick up first letter to check whether chunk is queued in expected order
+      3.times do |index|
+        chunk = @p.queue[index]
+        es = Fluent::MessagePackEventStream.new(chunk.chunk)
+        es.ensure_unpacked!
+        records = es.instance_eval{ @unpacked_records }
+        records.each do |record|
+          messages << record["message"][0]
+        end
+      end
+      es = Fluent::MessagePackEventStream.new(@p.stage[@dm0].chunk)
+      es.ensure_unpacked!
+      staged_message = es.instance_eval{ @unpacked_records }.first["message"]
+      # message a and b are queued, message c is staged
+      assert_equal([
+                     [@dm0],
+                     "c" * 1_000_000,
+                     [@dm0, @dm0, @dm0],
+                     [5000, 1, 1],
+                     [["x"] * 5000, "a", "b"].flatten
+                   ],
+                   [
+                     @p.stage.keys,
+                     staged_message,
+                     @p.queue.map(&:metadata),
+                     @p.queue.map(&:size),
+                     messages
+                   ])
+    end
   end

   sub_test_case 'custom format with configuration for test with lower chunk limit size' do
@@ -1032,6 +1123,38 @@ class BufferTest < Test::Unit::TestCase
         @p.write({@dm0 => es})
       end
     end
+
+    test 'confirm that every array message which is smaller than chunk threshold does not raise BufferChunkOverflowError' do
+      assert_equal [@dm0], @p.stage.keys
+      assert_equal [], @p.queue.map(&:metadata)
+
+      assert_equal 1_280_000, @p.chunk_limit_size
+
+      es = ["a" * 1_000_000, "b" * 1_000_000, "c" * 1_000_000]
+      assert_nothing_raised do
+        @p.write({@dm0 => es})
+      end
+      queue_messages = @p.queue.collect do |chunk|
+        # collect first character of each message
+        chunk.chunk[0]
+      end
+      assert_equal([
+                     [@dm0],
+                     1,
+                     "c",
+                     [@dm0, @dm0, @dm0],
+                     [5000, 1, 1],
+                     ["x", "a", "b"]
+                   ],
+                   [
+                     @p.stage.keys,
+                     @p.stage[@dm0].size,
+                     @p.stage[@dm0].chunk[0],
+                     @p.queue.map(&:metadata),
+                     @p.queue.map(&:size),
+                     queue_messages
+                   ])
+    end
   end

   sub_test_case 'with configuration for test with lower limits' do
@@ -1201,6 +1324,7 @@ class BufferTest < Test::Unit::TestCase
   sub_test_case 'when compress is gzip' do
     setup do
       @p = create_buffer({'compress' => 'gzip'})
+      @dm0 = create_metadata(Time.parse('2016-04-11 16:00:00 +0000').to_i, nil, nil)
     end

     test '#compress returns :gzip' do
@@ -1211,6 +1335,30 @@ class BufferTest < Test::Unit::TestCase
       chunk = @p.generate_chunk(create_metadata)
       assert chunk.singleton_class.ancestors.include?(Fluent::Plugin::Buffer::Chunk::Decompressable)
     end
+
+    test '#write compressed data which exceeds chunk_limit_size, it raises BufferChunkOverflowError' do
+      @p = create_buffer({'compress' => 'gzip', 'chunk_limit_size' => 70})
+      timestamp = event_time('2016-04-11 16:00:02 +0000')
+      es = Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}], # overflow
+                                         [timestamp, {"message" => "aaa"}],
+                                         [timestamp, {"message" => "bbb"}]])
+      assert_equal [], @p.queue.map(&:metadata)
+      assert_equal 70, @p.chunk_limit_size
+
+      # calculate the actual boundary value. it varies on machine
+      c = @p.generate_chunk(create_metadata)
+      c.append(Fluent::ArrayEventStream.new([[timestamp, {"message" => "012345"}]]), compress: :gzip)
+      overflow_bytes = c.bytesize
+
+      messages = "concatenated/appended a #{overflow_bytes} bytes record (nth: 0) is larger than buffer chunk limit size (70)"
+      assert_raise Fluent::Plugin::Buffer::BufferChunkOverflowError.new(messages) do
+        # test format == nil && compress == :gzip
+        @p.write({@dm0 => es})
+      end
+      # message a and b occupies each chunks in full, so both of messages are queued (no staged chunk)
+      assert_equal([2, [@dm0, @dm0], [1, 1], nil],
+                   [@p.queue.size, @p.queue.map(&:metadata), @p.queue.map(&:size), @p.stage[@dm0]])
+    end
   end

   sub_test_case '#statistics' do
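Taken together, these tests pin down the overflow behavior the buffer changes aim for: only an individual record that exceeds chunk_limit_size raises BufferChunkOverflowError (with one message per offending record), while the smaller records around it are still staged or queued instead of being dropped with it. A hedged caller-side sketch (buffer, metadata, es, format and log are placeholders, not a fixed API):

    begin
      buffer.write({metadata => es}, format: format)
    rescue Fluent::Plugin::Buffer::BufferChunkOverflowError => e
      # e.g. "a 1280025 bytes record (nth: 0) is larger than buffer chunk limit size (1280000)"
      log.warn "dropped only the oversized record(s)", error: e.message
    end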
data/test/plugin/test_filter.rb
CHANGED
@@ -153,7 +153,7 @@ class FilterPluginTest < Test::Unit::TestCase

     @p.configure(config_element('ROOT', '', {'@log_level' => 'debug'}))

-    assert @p.log.object_id != original_logger.object_id
+    assert(@p.log.object_id != original_logger.object_id)
     assert_equal Fluent::Log::LEVEL_DEBUG, @p.log.level
   end

data/test/plugin/test_filter_stdout.rb
CHANGED
@@ -63,7 +63,7 @@ class StdoutFilterTest < Test::Unit::TestCase
   end

   def test_invalid_output_type
-    assert_raise(Fluent::ConfigError) do
+    assert_raise(Fluent::NotFoundPluginError) do
       d = create_driver(CONFIG + config_element("", "", { "output_type" => "foo" }))
       d.run {}
     end
@@ -139,7 +139,7 @@ class StdoutFilterTest < Test::Unit::TestCase
   def test_invalid_output_type
     conf = config_element
     conf.elements << config_element("format", "", { "@type" => "stdout", "output_type" => "foo" })
-    assert_raise(Fluent::ConfigError) do
+    assert_raise(Fluent::NotFoundPluginError) do
       d = create_driver(conf)
       d.run {}
     end
data/test/plugin/test_in_http.rb
CHANGED
@@ -540,6 +540,29 @@ class HttpInputTest < Test::Unit::TestCase
     assert_equal_event_time time, d.events[1][1]
   end

+  def test_application_ndjson
+    d = create_driver
+    events = [
+      ["tag1", 1643935663, "{\"a\":1}\n{\"b\":2}"],
+      ["tag2", 1643935664, "{\"a\":3}\r\n{\"b\":4}"]
+    ]
+
+    expected = [
+      ["tag1", 1643935663, {"a"=>1}],
+      ["tag1", 1643935663, {"b"=>2}],
+      ["tag2", 1643935664, {"a"=>3}],
+      ["tag2", 1643935664, {"b"=>4}]
+    ]
+
+    d.run(expect_records: 1) do
+      events.each do |tag, time, record|
+        res = post("/#{tag}?time=#{time}", record, {"Content-Type"=>"application/x-ndjson"})
+        assert_equal("200", res.code)
+      end
+    end
+    assert_equal(expected, d.events)
+  end
+
   def test_msgpack
     d = create_driver
     time = event_time("2011-01-02 13:14:15 UTC")
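The new test shows in_http splitting an application/x-ndjson body on newlines (\n or \r\n), one event per line. A hedged client-side sketch against a default in_http endpoint (host, port and tag are illustrative):

    require 'net/http'

    uri = URI('http://localhost:9880/tag1?time=1643935663')
    req = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/x-ndjson')
    req.body = "{\"a\":1}\n{\"b\":2}"  # becomes two events under tag1
    res = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(req) }
    puts res.code  # => 200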