fluentd 1.11.1-x86-mingw32 → 1.12.0-x86-mingw32
- checksums.yaml +4 -4
- data/.github/ISSUE_TEMPLATE/bug_report.md +1 -1
- data/.github/ISSUE_TEMPLATE/config.yml +5 -0
- data/.github/workflows/stale-actions.yml +22 -0
- data/.travis.yml +22 -2
- data/CHANGELOG.md +111 -0
- data/README.md +1 -1
- data/appveyor.yml +3 -0
- data/bin/fluent-cap-ctl +7 -0
- data/bin/fluent-ctl +7 -0
- data/example/copy_roundrobin.conf +3 -3
- data/example/counter.conf +1 -1
- data/example/filter_stdout.conf +2 -2
- data/example/{in_dummy_blocks.conf → in_sample_blocks.conf} +4 -4
- data/example/{in_dummy_with_compression.conf → in_sample_with_compression.conf} +3 -3
- data/example/logevents.conf +5 -5
- data/example/multi_filters.conf +1 -1
- data/example/out_exec_filter.conf +2 -2
- data/example/out_forward.conf +1 -1
- data/example/out_forward_buf_file.conf +1 -1
- data/example/out_forward_client.conf +5 -5
- data/example/out_forward_heartbeat_none.conf +1 -1
- data/example/out_forward_sd.conf +1 -1
- data/example/out_forward_shared_key.conf +2 -2
- data/example/out_forward_tls.conf +1 -1
- data/example/out_forward_users.conf +3 -3
- data/example/out_null.conf +4 -4
- data/example/secondary_file.conf +1 -1
- data/fluentd.gemspec +7 -6
- data/lib/fluent/capability.rb +87 -0
- data/lib/fluent/command/cap_ctl.rb +174 -0
- data/lib/fluent/command/ctl.rb +177 -0
- data/lib/fluent/command/plugin_config_formatter.rb +2 -1
- data/lib/fluent/env.rb +4 -0
- data/lib/fluent/log.rb +33 -3
- data/lib/fluent/match.rb +9 -0
- data/lib/fluent/plugin.rb +5 -0
- data/lib/fluent/plugin/buffer.rb +32 -42
- data/lib/fluent/plugin/buffer/chunk.rb +2 -1
- data/lib/fluent/plugin/formatter.rb +24 -0
- data/lib/fluent/plugin/formatter_csv.rb +1 -1
- data/lib/fluent/plugin/formatter_hash.rb +3 -1
- data/lib/fluent/plugin/formatter_json.rb +3 -1
- data/lib/fluent/plugin/formatter_ltsv.rb +5 -3
- data/lib/fluent/plugin/formatter_out_file.rb +6 -4
- data/lib/fluent/plugin/formatter_single_value.rb +4 -2
- data/lib/fluent/plugin/formatter_tsv.rb +4 -2
- data/lib/fluent/plugin/in_dummy.rb +2 -123
- data/lib/fluent/plugin/in_exec.rb +4 -2
- data/lib/fluent/plugin/in_http.rb +25 -4
- data/lib/fluent/plugin/in_sample.rb +141 -0
- data/lib/fluent/plugin/in_tail.rb +109 -41
- data/lib/fluent/plugin/in_tail/position_file.rb +39 -14
- data/lib/fluent/plugin/in_tcp.rb +1 -0
- data/lib/fluent/plugin/out_http.rb +20 -2
- data/lib/fluent/plugin/output.rb +15 -6
- data/lib/fluent/plugin/parser_json.rb +5 -2
- data/lib/fluent/plugin_helper/cert_option.rb +5 -8
- data/lib/fluent/plugin_helper/http_server/compat/server.rb +1 -1
- data/lib/fluent/plugin_helper/inject.rb +4 -1
- data/lib/fluent/plugin_helper/retry_state.rb +4 -0
- data/lib/fluent/plugin_helper/socket.rb +1 -1
- data/lib/fluent/supervisor.rb +151 -48
- data/lib/fluent/system_config.rb +2 -1
- data/lib/fluent/time.rb +1 -0
- data/lib/fluent/version.rb +1 -1
- data/lib/fluent/winsvc.rb +22 -4
- data/test/command/test_binlog_reader.rb +22 -6
- data/test/command/test_cap_ctl.rb +100 -0
- data/test/command/test_ctl.rb +57 -0
- data/test/command/test_plugin_config_formatter.rb +57 -2
- data/test/plugin/in_tail/test_position_file.rb +45 -25
- data/test/plugin/test_buffer.rb +4 -0
- data/test/plugin/test_filter_stdout.rb +6 -1
- data/test/plugin/test_formatter_hash.rb +6 -3
- data/test/plugin/test_formatter_json.rb +14 -4
- data/test/plugin/test_formatter_ltsv.rb +13 -5
- data/test/plugin/test_formatter_out_file.rb +35 -14
- data/test/plugin/test_formatter_single_value.rb +12 -6
- data/test/plugin/test_formatter_tsv.rb +12 -4
- data/test/plugin/test_in_exec.rb +18 -0
- data/test/plugin/test_in_http.rb +25 -0
- data/test/plugin/{test_in_dummy.rb → test_in_sample.rb} +25 -25
- data/test/plugin/test_in_tail.rb +433 -30
- data/test/plugin/test_out_file.rb +23 -18
- data/test/plugin/test_output.rb +12 -0
- data/test/plugin/test_parser_syslog.rb +2 -2
- data/test/plugin_helper/data/cert/empty.pem +0 -0
- data/test/plugin_helper/test_cert_option.rb +7 -0
- data/test/plugin_helper/test_compat_parameters.rb +7 -2
- data/test/plugin_helper/test_http_server_helper.rb +5 -0
- data/test/plugin_helper/test_inject.rb +42 -0
- data/test/plugin_helper/test_server.rb +34 -0
- data/test/plugin_helper/test_socket.rb +8 -0
- data/test/test_capability.rb +74 -0
- data/test/test_formatter.rb +34 -10
- data/test/test_log.rb +44 -0
- data/test/test_match.rb +11 -0
- data/test/test_output.rb +6 -1
- data/test/test_static_config_analysis.rb +2 -2
- data/test/test_supervisor.rb +119 -1
- metadata +50 -18
data/lib/fluent/command/plugin_config_formatter.rb
CHANGED
@@ -29,7 +29,8 @@ class FluentPluginConfigFormatter
   AVAILABLE_FORMATS = [:markdown, :txt, :json]
   SUPPORTED_TYPES = [
     "input", "output", "filter",
-    "buffer", "parser", "formatter", "storage"
+    "buffer", "parser", "formatter", "storage",
+    "service_discovery"
   ]
 
   DOCS_BASE_URL = "https://docs.fluentd.org/v/1.0"
data/lib/fluent/env.rb
CHANGED
data/lib/fluent/log.rb
CHANGED
@@ -113,6 +113,7 @@ module Fluent
 
       @suppress_repeated_stacktrace = opts[:suppress_repeated_stacktrace]
       @ignore_repeated_log_interval = opts[:ignore_repeated_log_interval]
+      @ignore_same_log_interval = opts[:ignore_same_log_interval]
 
       @process_type = opts[:process_type] # :supervisor, :worker0, :workers Or :standalone
       @process_type ||= :standalone # to keep behavior of existing code
@@ -141,7 +142,8 @@ module Fluent
       dl_opts[:log_level] = @level - 1
       logger = ServerEngine::DaemonLogger.new(@out, dl_opts)
       clone = self.class.new(logger, suppress_repeated_stacktrace: @suppress_repeated_stacktrace, process_type: @process_type,
-                             worker_id: @worker_id, ignore_repeated_log_interval: @ignore_repeated_log_interval)
+                             worker_id: @worker_id, ignore_repeated_log_interval: @ignore_repeated_log_interval,
+                             ignore_same_log_interval: @ignore_same_log_interval)
       clone.format = @format
       clone.time_format = @time_format
       clone.log_event_enabled = @log_event_enabled
@@ -151,7 +153,7 @@ module Fluent
 
     attr_reader :format
     attr_reader :time_format
-    attr_accessor :log_event_enabled, :ignore_repeated_log_interval
+    attr_accessor :log_event_enabled, :ignore_repeated_log_interval, :ignore_same_log_interval
     attr_accessor :out
     attr_accessor :level
     attr_accessor :optional_header, :optional_attrs
@@ -428,6 +430,27 @@ module Fluent
       (cached_log.msg == message) && (time - cached_log.time <= @ignore_repeated_log_interval)
     end
 
+    def ignore_same_log?(time, message)
+      cached_log = Thread.current[:last_same_log]
+      if cached_log.nil?
+        Thread.current[:last_same_log] = {message => time}
+        return false
+      end
+
+      prev_time = cached_log[message]
+      if prev_time
+        if (time - prev_time) <= @ignore_same_log_interval
+          true
+        else
+          cached_log[message] = time
+          false
+        end
+      else
+        cached_log[message] = time
+        false
+      end
+    end
+
     def suppress_stacktrace?(backtrace)
       cached_log = Thread.current[:last_repeated_stacktrace]
       return false if cached_log.nil?
@@ -507,7 +530,11 @@ module Fluent
       end
     }
 
-    if @ignore_repeated_log_interval
+    if @ignore_same_log_interval
+      if ignore_same_log?(time, message)
+        return nil, nil
+      end
+    elsif @ignore_repeated_log_interval
       if ignore_repeated_log?(:last_repeated_log, time, message)
         return nil, nil
       else
@@ -569,6 +596,9 @@ module Fluent
       if logger.instance_variable_defined?(:@ignore_repeated_log_interval)
        @ignore_repeated_log_interval = logger.instance_variable_get(:@ignore_repeated_log_interval)
      end
+     if logger.instance_variable_defined?(:@ignore_same_log_interval)
+       @ignore_same_log_interval = logger.instance_variable_get(:@ignore_same_log_interval)
+     end
 
      self.format = @logger.format
      self.time_format = @logger.time_format
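The new ignore_same_log_interval caches the last emission time per message (Thread.current[:last_same_log]) and drops any identical line that reappears within the interval; as the if/elsif above shows, it takes precedence over ignore_repeated_log_interval when both are set. A minimal <system> configuration sketch, assuming the option is exposed as a system parameter alongside the existing ignore_repeated_log_interval (system_config.rb and supervisor.rb are also touched in this release); the interval values are illustrative:

<system>
  # drop identical log lines that reappear within 60 seconds
  ignore_same_log_interval 60s
  # pre-existing option; only consulted when ignore_same_log_interval is unset
  ignore_repeated_log_interval 2s
</system>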
data/lib/fluent/match.rb
CHANGED
@@ -33,6 +33,15 @@ module Fluent
 
   class GlobMatchPattern < MatchPattern
     def initialize(pat)
+      if pat.start_with?('/')
+        if pat.end_with?('/')
+          @regex = Regexp.new("\\A"+pat[1..-2]+"\\Z")
+          return
+        else
+          raise Fluent::ConfigError, "invalid match - regex"
+        end
+      end
+
       stack = []
       regex = ['']
       escape = false
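With this change, a match pattern that both starts and ends with '/' is compiled directly into a Regexp (anchored with \A and \Z) instead of being expanded as a glob, and a pattern that opens with '/' but lacks a closing '/' raises a ConfigError. A hedged configuration sketch, assuming the directive argument is passed to GlobMatchPattern unchanged; the tags are illustrative:

# matches app.web and app.worker, but not app.web.access
<match /^app\.(web|worker)$/>
  @type stdout
</match>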
data/lib/fluent/plugin.rb
CHANGED
@@ -121,6 +121,11 @@ module Fluent
     new_impl('sd', SD_REGISTRY, type, parent)
   end
 
+  class << self
+    # This should be defined for fluent-plugin-config-formatter type arguments.
+    alias_method :new_service_discovery, :new_sd
+  end
+
   def self.new_parser(type, parent: nil)
     if type[0] == '/' && type[-1] == '/'
       # This usage is not recommended for new API... create RegexpParser directly
data/lib/fluent/plugin/buffer.rb
CHANGED
@@ -148,12 +148,9 @@ module Fluent
       # Actually this overhead is very small but this class is generated *per chunk* (and used in hash object).
       # This means that this class is one of the most called object in Fluentd.
       # See https://github.com/fluent/fluentd/pull/2560
-      # But, this optimization has a side effect on Windows due to differing object_id.
-      # This difference causes flood of buffer files.
-      # So, this optimization should be enabled on non-Windows platform.
       def hash
-        timekey.object_id
-      end
+        timekey.hash
+      end
     end
 
     # for tests
@@ -176,6 +173,7 @@ module Fluent
 
       @stage_size = @queue_size = 0
       @timekeys = Hash.new(0)
+      @enable_update_timekeys = false
       @mutex = Mutex.new
     end
 
@@ -191,24 +189,23 @@ module Fluent
       end
     end
 
+    def enable_update_timekeys
+      @enable_update_timekeys = true
+    end
+
     def start
       super
 
       @stage, @queue = resume
       @stage.each_pair do |metadata, chunk|
         @stage_size += chunk.bytesize
-        if chunk.metadata && chunk.metadata.timekey
-          add_timekey(metadata.timekey)
-        end
       end
       @queue.each do |chunk|
         @queued_num[chunk.metadata] ||= 0
         @queued_num[chunk.metadata] += 1
         @queue_size += chunk.bytesize
-        if chunk.metadata && chunk.metadata.timekey
-          add_timekey(chunk.metadata.timekey)
-        end
       end
+      update_timekeys
       log.debug "buffer started", instance: self.object_id, stage_size: @stage_size, queue_size: @queue_size
     end
 
@@ -257,12 +254,9 @@ module Fluent
       Metadata.new(timekey, tag, variables)
     end
 
+    # Keep this method for existing code
     def metadata(timekey: nil, tag: nil, variables: nil)
-      meta = Metadata.new(timekey, tag, variables)
-      if (t = meta.timekey)
-        add_timekey(t)
-      end
-      meta
+      Metadata.new(timekey, tag, variables)
     end
 
     def timekeys
@@ -456,9 +450,23 @@ module Fluent
       end
     end
 
+    def update_timekeys
+      synchronize do
+        chunks = @stage.values
+        chunks.concat(@queue)
+        @timekeys = chunks.each_with_object({}) do |chunk, keys|
+          if chunk.metadata && chunk.metadata.timekey
+            t = chunk.metadata.timekey
+            keys[t] = keys.fetch(t, 0) + 1
+          end
+        end
+      end
+    end
+
     # At flush_at_shutdown, all staged chunks should be enqueued for buffer flush. Set true to force_enqueue for it.
     def enqueue_all(force_enqueue = false)
       log.on_trace { log.trace "enqueueing all chunks in buffer", instance: self.object_id }
+      update_timekeys if @enable_update_timekeys
 
       if block_given?
         synchronize{ @stage.keys }.each do |metadata|
@@ -537,10 +545,6 @@ module Fluent
       log.trace "chunk purged", instance: self.object_id, chunk_id: dump_unique_id_hex(chunk_id), metadata: metadata
       end
 
-      if metadata && metadata.timekey
-        del_timekey(metadata.timekey)
-      end
-
       nil
     end
 
@@ -604,7 +608,11 @@ module Fluent
 
       if chunk_size_over?(chunk)
         if format && empty_chunk
-          log.warn "chunk bytes limit exceeds for an emitted event stream: #{adding_bytesize}bytes"
+          if chunk.bytesize > @chunk_limit_size
+            log.warn "chunk bytes limit exceeds for an emitted event stream: #{adding_bytesize}bytes"
+          else
+            log.warn "chunk size limit exceeds for an emitted event stream: #{chunk.size}records"
+          end
         end
         chunk.rollback
 
@@ -768,11 +776,11 @@ module Fluent
         'total_queued_size' => stage_size + queue_size,
       }
 
-      if (m = timekeys.min)
+      tkeys = timekeys
+      if (m = tkeys.min)
         stats['oldest_timekey'] = m
       end
-
-      if (m = timekeys.max)
+      if (m = tkeys.max)
         stats['newest_timekey'] = m
       end
 
@@ -789,24 +797,6 @@ module Fluent
         !@queue.empty?
       end
     end
-
-    def add_timekey(t)
-      @mutex.synchronize do
-        @timekeys[t] += 1
-      end
-      nil
-    end
-
-    def del_timekey(t)
-      @mutex.synchronize do
-        if @timekeys[t] <= 1
-          @timekeys.delete(t)
-        else
-          @timekeys[t] -= 1
-        end
-      end
-      nil
-    end
   end
 end
 end
data/lib/fluent/plugin/buffer/chunk.rb
CHANGED
@@ -206,8 +206,9 @@ module Fluent
       output_io = if chunk_io.is_a?(StringIO)
                     StringIO.new
                   else
-                    Tempfile.new('decompressed-data')
+                    Tempfile.new('decompressed-data')
                   end
+      output_io.binmode if output_io.is_a?(Tempfile)
       decompress(input_io: chunk_io, output_io: output_io)
       output_io.seek(0, IO::SEEK_SET)
       yield output_io
data/lib/fluent/plugin/formatter.rb
CHANGED
@@ -46,5 +46,29 @@ module Fluent
         @proc.call(tag, time, record)
       end
     end
+
+    module Newline
+      module Mixin
+        include Fluent::Configurable
+
+        DEFAULT_NEWLINE = if Fluent.windows?
+                            :crlf
+                          else
+                            :lf
+                          end
+
+        config_param :newline, :enum, list: [:lf, :crlf], default: DEFAULT_NEWLINE
+
+        def configure(conf)
+          super
+          @newline = case newline
+                     when :lf
+                       "\n".freeze
+                     when :crlf
+                       "\r\n".freeze
+                     end
+        end
+      end
+    end
   end
 end
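The new Newline::Mixin gives the formatters below a shared newline enum parameter (:lf or :crlf, defaulting to :crlf on Windows and :lf elsewhere) in place of the previously hard-coded "\n". A minimal sketch of selecting CRLF output with the out_file formatter, which includes the mixin in this release; the tag and path are illustrative:

<match app.**>
  @type file
  path /var/log/fluent/app
  <format>
    @type out_file
    # new parameter provided by Newline::Mixin; accepts lf or crlf
    newline crlf
  </format>
</match>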
data/lib/fluent/plugin/formatter_csv.rb
CHANGED
@@ -27,7 +27,7 @@ module Fluent
     helpers :record_accessor
 
     config_param :delimiter, default: ',' do |val|
-      ['\t', 'TAB'].include?(val) ? "\t" : val
+      ['\t', 'TAB'].include?(val) ? "\t".freeze : val.freeze
     end
     config_param :force_quotes, :bool, default: true
     # "array" looks good for type of :fields, but this implementation removes tailing comma
data/lib/fluent/plugin/formatter_hash.rb
CHANGED
@@ -19,13 +19,15 @@ require 'fluent/plugin/formatter'
 module Fluent
   module Plugin
     class HashFormatter < Formatter
+      include Fluent::Plugin::Newline::Mixin
+
       Plugin.register_formatter('hash', self)
 
       config_param :add_newline, :bool, default: true
 
       def format(tag, time, record)
         line = record.to_s
-        line << "\n" if @add_newline
+        line << @newline if @add_newline
         line
       end
     end
data/lib/fluent/plugin/formatter_json.rb
CHANGED
@@ -20,6 +20,8 @@ require 'fluent/env'
 module Fluent
   module Plugin
     class JSONFormatter < Formatter
+      include Fluent::Plugin::Newline::Mixin
+
       Plugin.register_formatter('json', self)
 
       config_param :json_parser, :string, default: 'oj'
@@ -44,7 +46,7 @@ module Fluent
       end
 
       def format(tag, time, record)
-        "#{@dump_proc.call(record)}\n"
+        "#{@dump_proc.call(record)}#{@newline}"
       end
 
       def format_without_nl(tag, time, record)
data/lib/fluent/plugin/formatter_ltsv.rb
CHANGED
@@ -19,12 +19,14 @@ require 'fluent/plugin/formatter'
 module Fluent
   module Plugin
     class LabeledTSVFormatter < Formatter
+      include Fluent::Plugin::Newline::Mixin
+
       Plugin.register_formatter('ltsv', self)
 
       # http://ltsv.org/
 
-      config_param :delimiter, :string, default: "\t"
-      config_param :label_delimiter, :string, default: ":"
+      config_param :delimiter, :string, default: "\t".freeze
+      config_param :label_delimiter, :string, default: ":".freeze
       config_param :add_newline, :bool, default: true
 
       # TODO: escaping for \t in values
@@ -34,7 +36,7 @@ module Fluent
         formatted << @delimiter if formatted.length.nonzero?
         formatted << "#{label}#{@label_delimiter}#{value}"
       end
-      formatted << "\n" if @add_newline
+      formatted << @newline if @add_newline
       formatted
     end
   end
data/lib/fluent/plugin/formatter_out_file.rb
CHANGED
@@ -21,15 +21,17 @@ require 'yajl'
 module Fluent
   module Plugin
     class OutFileFormatter < Formatter
+      include Fluent::Plugin::Newline::Mixin
+
       Plugin.register_formatter('out_file', self)
 
       config_param :output_time, :bool, default: true
       config_param :output_tag, :bool, default: true
       config_param :delimiter, default: "\t" do |val|
         case val
-        when /SPACE/i then ' '
-        when /COMMA/i then ','
-        else "\t"
+        when /SPACE/i then ' '.freeze
+        when /COMMA/i then ','.freeze
+        else "\t".freeze
         end
       end
       config_set_default :time_type, :string
@@ -44,7 +46,7 @@ module Fluent
       header = ''
       header << "#{@timef.format(time)}#{@delimiter}" if @output_time
       header << "#{tag}#{@delimiter}" if @output_tag
-      "#{header}#{Yajl.dump(record)}\n"
+      "#{header}#{Yajl.dump(record)}#{@newline}"
     end
   end
 end
data/lib/fluent/plugin/formatter_single_value.rb
CHANGED
@@ -19,14 +19,16 @@ require 'fluent/plugin/formatter'
 module Fluent
   module Plugin
     class SingleValueFormatter < Formatter
+      include Fluent::Plugin::Newline::Mixin
+
       Plugin.register_formatter('single_value', self)
 
-      config_param :message_key, :string, default: 'message'
+      config_param :message_key, :string, default: 'message'.freeze
       config_param :add_newline, :bool, default: true
 
       def format(tag, time, record)
         text = record[@message_key].to_s.dup
-        text << "\n" if @add_newline
+        text << @newline if @add_newline
         text
       end
     end
data/lib/fluent/plugin/formatter_tsv.rb
CHANGED
@@ -19,18 +19,20 @@ require 'fluent/plugin/formatter'
 module Fluent
   module Plugin
     class TSVFormatter < Formatter
+      include Fluent::Plugin::Newline::Mixin
+
       Plugin.register_formatter('tsv', self)
 
       desc 'Field names included in each lines'
       config_param :keys, :array, value_type: :string
       desc 'The delimiter character (or string) of TSV values'
-      config_param :delimiter, :string, default: "\t"
+      config_param :delimiter, :string, default: "\t".freeze
       desc 'The parameter to enable writing to new lines'
       config_param :add_newline, :bool, default: true
 
       def format(tag, time, record)
         formatted = @keys.map{|k| record[k].to_s }.join(@delimiter)
-        formatted << "\n" if @add_newline
+        formatted << @newline if @add_newline
         formatted
       end
     end