fluentd 1.18.0 → 1.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +116 -0
- data/CHANGELOG.md +235 -12
- data/MAINTAINERS.md +8 -2
- data/README.md +3 -7
- data/Rakefile +2 -0
- data/SECURITY.md +5 -3
- data/lib/fluent/command/cap_ctl.rb +2 -2
- data/lib/fluent/command/fluentd.rb +6 -2
- data/lib/fluent/compat/formatter.rb +6 -0
- data/lib/fluent/compat/socket_util.rb +2 -2
- data/lib/fluent/config/configure_proxy.rb +1 -1
- data/lib/fluent/config/element.rb +2 -2
- data/lib/fluent/config/literal_parser.rb +3 -3
- data/lib/fluent/config/parser.rb +15 -3
- data/lib/fluent/config/section.rb +2 -2
- data/lib/fluent/config/types.rb +1 -1
- data/lib/fluent/config/v1_parser.rb +3 -3
- data/lib/fluent/counter/store.rb +1 -1
- data/lib/fluent/engine.rb +1 -1
- data/lib/fluent/env.rb +3 -2
- data/lib/fluent/event.rb +7 -6
- data/lib/fluent/log/console_adapter.rb +5 -7
- data/lib/fluent/log.rb +23 -0
- data/lib/fluent/plugin/bare_output.rb +0 -16
- data/lib/fluent/plugin/base.rb +2 -2
- data/lib/fluent/plugin/buf_file.rb +15 -1
- data/lib/fluent/plugin/buf_file_single.rb +15 -1
- data/lib/fluent/plugin/buffer/chunk.rb +74 -10
- data/lib/fluent/plugin/buffer/file_chunk.rb +9 -5
- data/lib/fluent/plugin/buffer/file_single_chunk.rb +3 -3
- data/lib/fluent/plugin/buffer/memory_chunk.rb +2 -2
- data/lib/fluent/plugin/buffer.rb +34 -6
- data/lib/fluent/plugin/compressable.rb +68 -22
- data/lib/fluent/plugin/filter.rb +0 -8
- data/lib/fluent/plugin/filter_record_transformer.rb +1 -1
- data/lib/fluent/plugin/formatter_csv.rb +18 -4
- data/lib/fluent/plugin/formatter_json.rb +7 -4
- data/lib/fluent/plugin/formatter_out_file.rb +5 -2
- data/lib/fluent/plugin/in_forward.rb +9 -5
- data/lib/fluent/plugin/in_http.rb +9 -4
- data/lib/fluent/plugin/in_monitor_agent.rb +4 -8
- data/lib/fluent/plugin/in_tail/position_file.rb +1 -1
- data/lib/fluent/plugin/in_tail.rb +80 -57
- data/lib/fluent/plugin/in_tcp.rb +2 -2
- data/lib/fluent/plugin/in_udp.rb +1 -1
- data/lib/fluent/plugin/input.rb +0 -8
- data/lib/fluent/plugin/multi_output.rb +1 -17
- data/lib/fluent/plugin/out_exec_filter.rb +2 -2
- data/lib/fluent/plugin/out_file.rb +37 -30
- data/lib/fluent/plugin/out_forward/connection_manager.rb +2 -2
- data/lib/fluent/plugin/out_forward.rb +23 -13
- data/lib/fluent/plugin/out_http.rb +1 -1
- data/lib/fluent/plugin/out_secondary_file.rb +2 -2
- data/lib/fluent/plugin/out_stdout.rb +10 -3
- data/lib/fluent/plugin/out_stream.rb +3 -3
- data/lib/fluent/plugin/output.rb +24 -35
- data/lib/fluent/plugin/owned_by_mixin.rb +2 -2
- data/lib/fluent/plugin/parser.rb +3 -3
- data/lib/fluent/plugin/parser_json.rb +3 -3
- data/lib/fluent/plugin/sd_file.rb +2 -2
- data/lib/fluent/plugin/storage_local.rb +8 -4
- data/lib/fluent/plugin.rb +1 -1
- data/lib/fluent/plugin_helper/child_process.rb +2 -2
- data/lib/fluent/plugin_helper/http_server/request.rb +13 -2
- data/lib/fluent/plugin_helper/http_server/server.rb +4 -14
- data/lib/fluent/plugin_helper/http_server.rb +1 -8
- data/lib/fluent/plugin_helper/metrics.rb +7 -0
- data/lib/fluent/plugin_helper/server.rb +4 -1
- data/lib/fluent/plugin_helper/service_discovery.rb +1 -1
- data/lib/fluent/plugin_helper/socket_option.rb +2 -2
- data/lib/fluent/plugin_helper/storage.rb +1 -1
- data/lib/fluent/plugin_id.rb +3 -3
- data/lib/fluent/root_agent.rb +4 -3
- data/lib/fluent/static_config_analysis.rb +3 -2
- data/lib/fluent/supervisor.rb +51 -5
- data/lib/fluent/system_config.rb +13 -4
- data/lib/fluent/test/base.rb +1 -1
- data/lib/fluent/test/driver/base.rb +2 -2
- data/lib/fluent/test/filter_test.rb +2 -2
- data/lib/fluent/test/formatter_test.rb +1 -1
- data/lib/fluent/test/helpers.rb +4 -0
- data/lib/fluent/test/input_test.rb +2 -2
- data/lib/fluent/test/output_test.rb +4 -4
- data/lib/fluent/test/parser_test.rb +1 -1
- data/lib/fluent/tls.rb +24 -0
- data/lib/fluent/variable_store.rb +1 -1
- data/lib/fluent/version.rb +1 -1
- data/lib/fluent/winsvc.rb +38 -8
- metadata +85 -16
- data/lib/fluent/plugin_helper/http_server/compat/server.rb +0 -92
- data/lib/fluent/plugin_helper/http_server/compat/ssl_context_extractor.rb +0 -52
- data/lib/fluent/plugin_helper/http_server/compat/webrick_handler.rb +0 -58
data/lib/fluent/plugin/compressable.rb
CHANGED
@@ -16,29 +16,35 @@
 
 require 'stringio'
 require 'zlib'
+require 'zstd-ruby'
 
 module Fluent
   module Plugin
     module Compressable
-      def compress(data, **kwargs)
+      def compress(data, type: :gzip, **kwargs)
         output_io = kwargs[:output_io]
         io = output_io || StringIO.new
-
-
+        if type == :gzip
+          writer = Zlib::GzipWriter.new(io)
+        elsif type == :zstd
+          writer = Zstd::StreamWriter.new(io)
+        else
+          raise ArgumentError, "Unknown compression type: #{type}"
         end
-
+        writer.write(data)
+        writer.finish
         output_io || io.string
       end
 
       # compressed_data is String like `compress(data1) + compress(data2) + ... + compress(dataN)`
       # https://www.ruby-forum.com/topic/971591#979503
-      def decompress(compressed_data = nil, output_io: nil, input_io: nil)
+      def decompress(compressed_data = nil, output_io: nil, input_io: nil, type: :gzip)
         case
         when input_io && output_io
-          io_decompress(input_io, output_io)
+          io_decompress(input_io, output_io, type)
         when input_io
           output_io = StringIO.new
-          io = io_decompress(input_io, output_io)
+          io = io_decompress(input_io, output_io, type)
           io.string
         when compressed_data.nil? || compressed_data.empty?
           # check compressed_data(String) is 0 length
@@ -46,51 +52,91 @@ module Fluent
         when output_io
           # execute after checking compressed_data is empty or not
           io = StringIO.new(compressed_data)
-          io_decompress(io, output_io)
+          io_decompress(io, output_io, type)
         else
-          string_decompress(compressed_data)
+          string_decompress(compressed_data, type)
         end
       end
 
       private
 
-      def
+      def string_decompress_gzip(compressed_data)
         io = StringIO.new(compressed_data)
-
         out = ''
         loop do
-
-          out <<
-          unused =
-
-
+          reader = Zlib::GzipReader.new(io)
+          out << reader.read
+          unused = reader.unused
+          reader.finish
           unless unused.nil?
             adjust = unused.length
             io.pos -= adjust
           end
           break if io.eof?
         end
+        out
+      end
 
+      def string_decompress_zstd(compressed_data)
+        io = StringIO.new(compressed_data)
+        reader = Zstd::StreamReader.new(io)
+        out = ''
+        loop do
+          # Zstd::StreamReader needs to specify the size of the buffer
+          out << reader.read(1024)
+          # Zstd::StreamReader doesn't provide unused data, so we have to manually adjust the position
+          break if io.eof?
+        end
         out
       end
 
-      def
+      def string_decompress(compressed_data, type = :gzip)
+        if type == :gzip
+          string_decompress_gzip(compressed_data)
+        elsif type == :zstd
+          string_decompress_zstd(compressed_data)
+        else
+          raise ArgumentError, "Unknown compression type: #{type}"
+        end
+      end
+
+      def io_decompress_gzip(input, output)
         loop do
-
-          v =
+          reader = Zlib::GzipReader.new(input)
+          v = reader.read
           output.write(v)
-          unused =
-
-
+          unused = reader.unused
+          reader.finish
           unless unused.nil?
             adjust = unused.length
             input.pos -= adjust
           end
           break if input.eof?
         end
+        output
+      end
 
+      def io_decompress_zstd(input, output)
+        reader = Zstd::StreamReader.new(input)
+        loop do
+          # Zstd::StreamReader needs to specify the size of the buffer
+          v = reader.read(1024)
+          output.write(v)
+          # Zstd::StreamReader doesn't provide unused data, so we have to manually adjust the position
+          break if input.eof?
+        end
         output
       end
+
+      def io_decompress(input, output, type = :gzip)
+        if type == :gzip
+          io_decompress_gzip(input, output)
+        elsif type == :zstd
+          io_decompress_zstd(input, output)
+        else
+          raise ArgumentError, "Unknown compression type: #{type}"
+        end
+      end
     end
   end
 end
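Note: the new type: keyword threads through compress, decompress, string_decompress, and io_decompress, so callers choose the codec per call while gzip stays the default. A minimal usage sketch (not from the diff; it assumes the zstd-ruby gem is installed and mixes the module in the way Fluentd does internally):

  require 'fluent/plugin/compressable'
  include Fluent::Plugin::Compressable

  data = "hello fluentd\n" * 100
  gz = compress(data)                  # gzip remains the default codec
  zs = compress(data, type: :zstd)     # new in 1.19.0
  decompress(gz) == data               # => true
  decompress(zs, type: :zstd) == data  # => true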
data/lib/fluent/plugin/filter_record_transformer.rb
CHANGED
@@ -207,7 +207,7 @@ module Fluent::Plugin
           value.each do |k, v|
             placeholders.store(%Q[${#{key}["#{k}"]}], v) # record["foo"]
           end
-        else # string,
+        else # string, integer, float, and others?
           placeholders.store("${#{key}}", value)
         end
       end
data/lib/fluent/plugin/formatter_csv.rb
CHANGED
@@ -35,6 +35,22 @@ module Fluent
       config_param :fields, :array, value_type: :string
       config_param :add_newline, :bool, default: true
 
+      def csv_cacheable?
+        !!owner
+      end
+
+      def csv_thread_key
+        csv_cacheable? ? "#{owner.plugin_id}_csv_formatter_#{@usage}_csv" : nil
+      end
+
+      def csv_for_thread
+        if csv_cacheable?
+          Thread.current[csv_thread_key] ||= CSV.new("".force_encoding(Encoding::ASCII_8BIT), **@generate_opts)
+        else
+          CSV.new("".force_encoding(Encoding::ASCII_8BIT), **@generate_opts)
+        end
+      end
+
       def configure(conf)
         super
 
@@ -51,12 +67,10 @@ module Fluent
 
         @generate_opts = {col_sep: @delimiter, force_quotes: @force_quotes, headers: @fields,
                           row_sep: @add_newline ? :auto : "".force_encoding(Encoding::ASCII_8BIT)}
-        # Cache CSV object per thread to avoid internal state sharing
-        @cache = {}
       end
 
       def format(tag, time, record)
-        csv =
+        csv = csv_for_thread
         line = (csv << record).string.dup
         # Need manual cleanup because CSV writer doesn't provide such method.
         csv.rewind
@@ -65,7 +79,7 @@ module Fluent
       end
 
       def format_with_nested_fields(tag, time, record)
-        csv =
+        csv = csv_for_thread
         values = @accessors.map { |a| a.call(record) }
         line = (csv << values).string.dup
         # Need manual cleanup because CSV writer doesn't provide such method.
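Note: the per-thread CSV object replaces the old per-instance @cache hash. A CSV writer carries internal state (its output buffer), so sharing one instance across output threads could interleave rows; keying it on Thread.current plus the owner's plugin_id keeps formatter instances in the same process from colliding. An illustrative sketch of the pattern (the names here are ours, not the plugin's):

  require 'csv'

  # One CSV writer per thread and per key, so concurrent writers never
  # share the same internal buffer.
  def writer_for_current_thread(key, generate_opts)
    Thread.current[key] ||= CSV.new(''.force_encoding(Encoding::ASCII_8BIT), **generate_opts)
  end

  a = writer_for_current_thread(:demo_csv, col_sep: ',')
  b = Thread.new { writer_for_current_thread(:demo_csv, col_sep: ',') }.value
  a.equal?(b)  # => false: each thread holds its own instance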
data/lib/fluent/plugin/formatter_json.rb
CHANGED
@@ -34,11 +34,11 @@ module Fluent
         if Fluent::OjOptions.available?
           @dump_proc = Oj.method(:dump)
         else
-          log.info "Oj isn't installed, fallback to
-          @dump_proc =
+          log.info "Oj isn't installed, fallback to JSON as json parser"
+          @dump_proc = JSON.method(:generate)
         end
       else
-        @dump_proc =
+        @dump_proc = JSON.method(:generate)
       end
 
       # format json is used on various highload environment, so re-define method to skip if check
@@ -48,7 +48,10 @@ module Fluent
       end
 
       def format(tag, time, record)
-
+        json_str = @dump_proc.call(record)
+        "#{json_str}#{@newline}"
+      ensure
+        json_str&.clear
       end
 
       def format_without_nl(tag, time, record)
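Note: the formatter now resolves the dump method once (Oj when available, otherwise the stdlib JSON) and clears the intermediate string in an ensure block; the returned value is a freshly interpolated string, so the intermediate buffer can be discarded. A standalone sketch of the fallback pattern (assumes Oj may or may not be installed):

  require 'json'

  dump_proc =
    begin
      require 'oj'
      Oj.method(:dump)
    rescue LoadError
      JSON.method(:generate)
    end

  dump_proc.call({ 'message' => 'hello' })  # => '{"message":"hello"}' with JSON.generate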
data/lib/fluent/plugin/formatter_out_file.rb
CHANGED
@@ -16,7 +16,7 @@
 
 require 'fluent/plugin/formatter'
 require 'fluent/time'
-require '
+require 'json'
 
 module Fluent
   module Plugin
@@ -43,10 +43,13 @@ module Fluent
       end
 
       def format(tag, time, record)
+        json_str = JSON.generate(record)
         header = ''
         header << "#{@timef.format(time)}#{@delimiter}" if @output_time
         header << "#{tag}#{@delimiter}" if @output_tag
-        "#{header}#{
+        "#{header}#{json_str}#{@newline}"
+      ensure
+        json_str&.clear
       end
     end
   end
data/lib/fluent/plugin/in_forward.rb
CHANGED
@@ -55,7 +55,7 @@ module Fluent::Plugin
     desc 'Received chunk is dropped if it is larger than this value.'
     config_param :chunk_size_limit, :size, default: nil
     desc 'Skip an event if incoming event is invalid.'
-    config_param :skip_invalid_event, :bool, default:
+    config_param :skip_invalid_event, :bool, default: true
 
     desc "The field name of the client's source address."
     config_param :source_address_key, :string, default: nil
@@ -307,10 +307,14 @@ module Fluent::Plugin
       case entries
       when String
         # PackedForward
-        option = msg[2]
-        size =
-
-
+        option = msg[2] || {}
+        size = option['size'] || 0
+
+        if option['compressed'] && option['compressed'] != 'text'
+          es = Fluent::CompressedMessagePackEventStream.new(entries, nil, size.to_i, compress: option['compressed'].to_sym)
+        else
+          es = Fluent::MessagePackEventStream.new(entries, nil, size.to_i)
+        end
         es = check_and_skip_invalid_event(tag, es, conn.remote_host) if @skip_invalid_event
         if @enable_field_injection
           es = add_source_info(es, conn)
data/lib/fluent/plugin/in_http.rb
CHANGED
@@ -504,8 +504,9 @@ module Fluent::Plugin
       # ==========
       # For every incoming request, we check if we have some CORS
       # restrictions and allow listed origins through @cors_allow_origins.
+      # If origin is empty, it's likely a server-to-server request and considered safe.
       unless @cors_allow_origins.nil?
-        unless @cors_allow_origins.include?('*') || include_cors_allow_origin
+        unless @cors_allow_origins.include?('*') || include_cors_allow_origin || @origin.nil?
           send_response_and_close(RES_403_STATUS, {'Connection' => 'close'}, "")
           return
         end
@@ -530,12 +531,12 @@ module Fluent::Plugin
       @env['REMOTE_ADDR'] = @remote_addr if @remote_addr
 
       uri = URI.parse(@parser.request_url)
-      params =
+      params = parse_query(uri.query)
 
       if @format_name != 'default'
         params[EVENT_RECORD_PARAMETER] = @body
       elsif /^application\/x-www-form-urlencoded/.match?(@content_type)
-        params.update
+        params.update parse_query(@body)
       elsif @content_type =~ /^multipart\/form-data; boundary=(.+)/
         boundary = WEBrick::HTTPUtils.dequote($1)
         params.update WEBrick::HTTPUtils.parse_form_data(@body, boundary)
@@ -552,7 +553,7 @@ module Fluent::Plugin
 
       if (@add_query_params)
 
-        query_params =
+        query_params = parse_query(uri.query)
 
         query_params.each_pair {|k,v|
           params["QUERY_#{k.tr('-','_').upcase}"] = v
@@ -642,6 +643,10 @@ module Fluent::Plugin
 
       !r.nil?
     end
+
+    def parse_query(query)
+      query.nil? ? {} : Hash[URI.decode_www_form(query, Encoding::ASCII_8BIT)]
+    end
   end
 end
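Note: query-string parsing now goes through the new private parse_query helper, built on the stdlib form decoder. A quick illustration of what it produces (URI.decode_www_form returns [key, value] pairs, and Hash[] keeps the last value for a repeated key):

  require 'uri'

  Hash[URI.decode_www_form('tag=app.log&time=123', Encoding::ASCII_8BIT)]
  # => {"tag"=>"app.log", "time"=>"123"}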
data/lib/fluent/plugin/in_monitor_agent.rb
CHANGED
@@ -15,8 +15,6 @@
 #
 
 require 'json'
-require 'webrick'
-require 'cgi'
 
 require 'fluent/config/types'
 require 'fluent/plugin/input'
@@ -151,9 +149,7 @@ module Fluent::Plugin
     def build_option(req)
       qs = Hash.new { |_, _| [] }
       # parse ?=query string
-
-      qs.merge!(CGI.parse(req.query_string))
-      end
+      qs.merge!(req.query || {})
 
       # if ?debug=1 is set, set :with_debug_info for get_monitor_info
       # and :pretty_json for render_json_error
@@ -208,7 +204,7 @@ module Fluent::Plugin
     def start
       super
 
-      log.debug "listening monitoring http server on http://#{@bind}:#{@port}/api/plugins for worker#{fluentd_worker_id}"
+      log.debug { "listening monitoring http server on http://#{@bind}:#{@port}/api/plugins for worker#{fluentd_worker_id}" }
       api_handler = APIHandler.new(self)
       http_server_create_http_server(:in_monitor_http_server_helper, addr: @bind, port: @port, logger: log, default_app: NotFoundJson) do |serv|
         serv.get('/api/plugins') { |req| api_handler.plugins_ltsv(req) }
@@ -218,7 +214,7 @@ module Fluent::Plugin
       end
 
       if @tag
-        log.debug "tag parameter is specified. Emit plugins info to '#{@tag}'"
+        log.debug { "tag parameter is specified. Emit plugins info to '#{@tag}'" }
 
         opts = {with_config: false, with_retry: false}
         timer_execute(:in_monitor_agent_emit, @emit_interval, repeat: true) {
@@ -254,7 +250,7 @@ module Fluent::Plugin
       array.concat Fluent::Engine.root_agent.filters
 
       Fluent::Engine.root_agent.labels.each { |name, l|
-        # TODO: Add label name to outputs / filters for
+        # TODO: Add label name to outputs / filters for identifying plugins
        array.concat l.outputs
        array.concat l.filters
      }
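Note: switching log.debug to the block form is a small hot-path change: the block, and the string interpolation inside it, only runs when debug logging is actually enabled. Ruby's standard Logger shows the same behaviour:

  require 'logger'

  log = Logger.new($stdout)
  log.level = Logger::INFO

  log.debug("built even though it is discarded: #{Time.now}")  # argument evaluated anyway
  log.debug { "never built at INFO level: #{Time.now}" }       # block is skipped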
data/lib/fluent/plugin/in_tail/position_file.rb
CHANGED
@@ -91,7 +91,7 @@ module Fluent::Plugin
         @map = map
       end
 
-      # This method is
+      # This method is similar to #compact but it tries to get less lock to avoid a lock contention
       def try_compact
         last_modified = nil
         size = nil