fluentd 0.14.7-x86-mingw32 → 0.14.10-x86-mingw32
Potentially problematic release: this version of fluentd might be problematic.
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/.travis.yml +2 -0
- data/CONTRIBUTING.md +6 -1
- data/ChangeLog +95 -0
- data/Rakefile +21 -0
- data/appveyor.yml +1 -0
- data/code-of-conduct.md +3 -0
- data/example/out_exec_filter.conf +42 -0
- data/fluentd.gemspec +1 -1
- data/lib/fluent/agent.rb +2 -2
- data/lib/fluent/command/binlog_reader.rb +1 -1
- data/lib/fluent/command/cat.rb +15 -4
- data/lib/fluent/compat/output.rb +14 -9
- data/lib/fluent/compat/parser.rb +141 -11
- data/lib/fluent/config/configure_proxy.rb +2 -11
- data/lib/fluent/config/section.rb +8 -1
- data/lib/fluent/configurable.rb +1 -3
- data/lib/fluent/env.rb +1 -1
- data/lib/fluent/log.rb +1 -1
- data/lib/fluent/plugin/base.rb +17 -0
- data/lib/fluent/plugin/filter_parser.rb +108 -0
- data/lib/fluent/plugin/filter_record_transformer.rb +14 -35
- data/lib/fluent/plugin/filter_stdout.rb +1 -1
- data/lib/fluent/plugin/formatter.rb +5 -0
- data/lib/fluent/plugin/formatter_msgpack.rb +4 -0
- data/lib/fluent/plugin/formatter_stdout.rb +3 -2
- data/lib/fluent/plugin/formatter_tsv.rb +34 -0
- data/lib/fluent/plugin/in_exec.rb +48 -93
- data/lib/fluent/plugin/in_forward.rb +66 -265
- data/lib/fluent/plugin/in_http.rb +68 -65
- data/lib/fluent/plugin/in_monitor_agent.rb +8 -4
- data/lib/fluent/plugin/in_syslog.rb +42 -58
- data/lib/fluent/plugin/in_tail.rb +29 -14
- data/lib/fluent/plugin/in_tcp.rb +54 -14
- data/lib/fluent/plugin/in_udp.rb +49 -13
- data/lib/fluent/plugin/multi_output.rb +1 -3
- data/lib/fluent/plugin/out_exec.rb +58 -71
- data/lib/fluent/plugin/out_exec_filter.rb +199 -279
- data/lib/fluent/plugin/out_file.rb +172 -81
- data/lib/fluent/plugin/out_forward.rb +229 -206
- data/lib/fluent/plugin/out_stdout.rb +6 -21
- data/lib/fluent/plugin/output.rb +90 -59
- data/lib/fluent/plugin/parser.rb +121 -61
- data/lib/fluent/plugin/parser_csv.rb +9 -3
- data/lib/fluent/plugin/parser_json.rb +37 -35
- data/lib/fluent/plugin/parser_ltsv.rb +11 -19
- data/lib/fluent/plugin/parser_msgpack.rb +50 -0
- data/lib/fluent/plugin/parser_regexp.rb +15 -42
- data/lib/fluent/plugin/parser_tsv.rb +8 -3
- data/lib/fluent/plugin_helper.rb +10 -1
- data/lib/fluent/plugin_helper/child_process.rb +139 -73
- data/lib/fluent/plugin_helper/compat_parameters.rb +93 -4
- data/lib/fluent/plugin_helper/event_emitter.rb +14 -1
- data/lib/fluent/plugin_helper/event_loop.rb +24 -6
- data/lib/fluent/plugin_helper/extract.rb +16 -4
- data/lib/fluent/plugin_helper/formatter.rb +9 -11
- data/lib/fluent/plugin_helper/inject.rb +16 -1
- data/lib/fluent/plugin_helper/parser.rb +3 -3
- data/lib/fluent/plugin_helper/server.rb +494 -0
- data/lib/fluent/plugin_helper/socket.rb +101 -0
- data/lib/fluent/plugin_helper/socket_option.rb +84 -0
- data/lib/fluent/plugin_helper/timer.rb +1 -0
- data/lib/fluent/root_agent.rb +1 -1
- data/lib/fluent/test/driver/base.rb +95 -49
- data/lib/fluent/test/driver/base_owner.rb +18 -8
- data/lib/fluent/test/driver/multi_output.rb +2 -1
- data/lib/fluent/test/driver/output.rb +29 -6
- data/lib/fluent/test/helpers.rb +3 -1
- data/lib/fluent/test/log.rb +4 -0
- data/lib/fluent/test/startup_shutdown.rb +13 -0
- data/lib/fluent/time.rb +14 -8
- data/lib/fluent/version.rb +1 -1
- data/lib/fluent/winsvc.rb +1 -1
- data/test/command/test_binlog_reader.rb +5 -1
- data/test/compat/test_parser.rb +10 -0
- data/test/config/test_configurable.rb +193 -0
- data/test/config/test_configure_proxy.rb +0 -43
- data/test/helper.rb +36 -1
- data/test/plugin/test_base.rb +16 -0
- data/test/plugin/test_filter_parser.rb +665 -0
- data/test/plugin/test_filter_record_transformer.rb +36 -100
- data/test/plugin/test_filter_stdout.rb +18 -27
- data/test/plugin/test_in_dummy.rb +1 -1
- data/test/plugin/test_in_exec.rb +206 -94
- data/test/plugin/test_in_forward.rb +268 -347
- data/test/plugin/test_in_http.rb +310 -186
- data/test/plugin/test_in_monitor_agent.rb +65 -35
- data/test/plugin/test_in_syslog.rb +39 -3
- data/test/plugin/test_in_tcp.rb +78 -62
- data/test/plugin/test_in_udp.rb +101 -80
- data/test/plugin/test_out_exec.rb +223 -68
- data/test/plugin/test_out_exec_filter.rb +520 -169
- data/test/plugin/test_out_file.rb +637 -177
- data/test/plugin/test_out_forward.rb +242 -234
- data/test/plugin/test_out_null.rb +1 -1
- data/test/plugin/test_out_secondary_file.rb +4 -2
- data/test/plugin/test_out_stdout.rb +14 -35
- data/test/plugin/test_output_as_buffered.rb +60 -2
- data/test/plugin/test_parser.rb +359 -0
- data/test/plugin/test_parser_csv.rb +1 -2
- data/test/plugin/test_parser_json.rb +3 -4
- data/test/plugin/test_parser_labeled_tsv.rb +1 -2
- data/test/plugin/test_parser_none.rb +1 -2
- data/test/plugin/test_parser_regexp.rb +8 -4
- data/test/plugin/test_parser_tsv.rb +4 -3
- data/test/plugin_helper/test_child_process.rb +184 -0
- data/test/plugin_helper/test_compat_parameters.rb +88 -1
- data/test/plugin_helper/test_extract.rb +0 -1
- data/test/plugin_helper/test_formatter.rb +5 -2
- data/test/plugin_helper/test_inject.rb +21 -0
- data/test/plugin_helper/test_parser.rb +6 -5
- data/test/plugin_helper/test_server.rb +905 -0
- data/test/test_event_time.rb +3 -1
- data/test/test_output.rb +53 -2
- data/test/test_plugin_classes.rb +20 -0
- data/test/test_root_agent.rb +139 -0
- data/test/test_test_drivers.rb +135 -0
- metadata +28 -8
- data/test/plugin/test_parser_base.rb +0 -32
data/lib/fluent/plugin/out_stdout.rb
CHANGED
@@ -22,6 +22,7 @@ module Fluent::Plugin
 
     helpers :inject, :formatter, :compat_parameters
 
+    DEFAULT_LINE_FORMAT_TYPE = 'stdout'
     DEFAULT_FORMAT_TYPE = 'json'
     TIME_FORMAT = '%Y-%m-%d %H:%M:%S.%9N %z'
 
@@ -32,33 +33,22 @@ module Fluent::Plugin
     end
 
     config_section :format do
-      config_set_default :@type, DEFAULT_FORMAT_TYPE
+      config_set_default :@type, DEFAULT_LINE_FORMAT_TYPE
+      config_set_default :output_type, DEFAULT_FORMAT_TYPE
     end
 
     def prefer_buffered_processing
      false
    end
 
-    def prefer_delayed_commit
-      @delayed
-    end
-
-    attr_accessor :delayed
-
-    def initialize
-      super
-      @delayed = false
-    end
+    attr_accessor :formatter
 
     def configure(conf)
-      if conf['output_type'] && !conf['format']
-        conf['format'] = conf['output_type']
-      end
      compat_parameters_convert(conf, :inject, :formatter)
 
      super
 
-      @formatter = formatter_create
+      @formatter = formatter_create
    end
 
    def process(tag, es)
@@ -70,16 +60,11 @@ module Fluent::Plugin
 
    def format(tag, time, record)
      record = inject_values_to_record(tag, time, record)
-
+      @formatter.format(tag, time, record).chomp + "\n"
    end
 
    def write(chunk)
      chunk.write_to($log)
    end
-
-    def try_write(chunk)
-      chunk.write_to($log)
-      commit_write(chunk.unique_id)
-    end
  end
end
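A note on the new #format body: `.chomp + "\n"` guarantees exactly one trailing newline per record, whether or not the configured formatter already appends one. A tiny standalone illustration (not part of the release):

```ruby
# Minimal sketch: '.chomp + "\n"' yields exactly one trailing newline whether or
# not the formatter already added one, so stdout lines are never doubled or bare.
["{\"k\":1}", "{\"k\":1}\n"].each do |formatted|
  line = formatted.chomp + "\n"
  p line  # => "{\"k\":1}\n" in both cases
end
```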
data/lib/fluent/plugin/output.rb
CHANGED
@@ -40,6 +40,8 @@ module Fluent
 
      CHUNKING_FIELD_WARN_NUM = 4
 
+      PROCESS_CLOCK_ID = Process::CLOCK_MONOTONIC_RAW rescue Process::CLOCK_MONOTONIC
+
      config_param :time_as_integer, :bool, default: false
 
      # `<buffer>` and `<secondary>` sections are available only when '#format' and '#write' are implemented
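The new PROCESS_CLOCK_ID constant picks a monotonic clock once: CLOCK_MONOTONIC_RAW where the platform provides it, otherwise CLOCK_MONOTONIC. A standalone sketch of the same pattern, assuming the platform supports at least one of the two clocks:

```ruby
# Prefer CLOCK_MONOTONIC_RAW, fall back to CLOCK_MONOTONIC where it is missing.
# Both are unaffected by wall-clock adjustments (NTP, manual changes), which is
# what a flush scheduler needs when measuring intervals.
clock_id = Process::CLOCK_MONOTONIC_RAW rescue Process::CLOCK_MONOTONIC
t0 = Process.clock_gettime(clock_id)
sleep 0.1
puts Process.clock_gettime(clock_id) - t0  # ~0.1 seconds, independent of system time
```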
@@ -138,7 +140,7 @@ module Fluent
      end
 
      # Internal states
-      FlushThreadState = Struct.new(:thread, :next_time)
+      FlushThreadState = Struct.new(:thread, :next_clock)
      DequeuedChunkInfo = Struct.new(:chunk_id, :time, :timeout) do
        def expired?
          time + timeout < Time.now
@@ -150,10 +152,9 @@ module Fluent
 
      # for tests
      attr_reader :buffer, :retry, :secondary, :chunk_keys, :chunk_key_time, :chunk_key_tag
-      attr_accessor :output_enqueue_thread_waiting, :
-
+      attr_accessor :output_enqueue_thread_waiting, :dequeued_chunks, :dequeued_chunks_mutex
      # output_enqueue_thread_waiting: for test of output.rb itself
-
+      attr_accessor :retry_for_error_chunk # if true, error flush will be retried even if under_plugin_development is true
 
      def initialize
        super
@@ -161,7 +162,6 @@ module Fluent
        @buffering = false
        @delayed_commit = false
        @as_secondary = false
-        @in_tests = false
        @primary_instance = nil
 
        # TODO: well organized counters
@@ -188,6 +188,7 @@ module Fluent
        @secondary = nil
        @retry = nil
        @dequeued_chunks = nil
+        @dequeued_chunks_mutex = nil
        @output_enqueue_thread = nil
        @output_flush_threads = nil
 
@@ -195,6 +196,8 @@ module Fluent
        @chunk_keys = @chunk_key_time = @chunk_key_tag = nil
        @flush_mode = nil
        @timekey_zone = nil
+
+        @retry_for_error_chunk = false
      end
 
      def acts_as_secondary(primary)
@@ -207,6 +210,7 @@ module Fluent
          @timekey_zone = @primary_instance.timekey_zone
          @output_time_formatter_cache = {}
        end
+        self.context_router = primary.context_router
 
        (class << self; self; end).module_eval do
          define_method(:commit_write){ |chunk_id| @primary_instance.commit_write(chunk_id, delayed: delayed_commit, secondary: true) }
@@ -317,7 +321,6 @@ module Fluent
          @secondary = Plugin.new_output(secondary_type)
          @secondary.acts_as_secondary(self)
          @secondary.configure(secondary_conf)
-          @secondary.router = router if @secondary.has_router?
          if (self.class != @secondary.class) && (@custom_format || @secondary.implement?(:custom_format))
            log.warn "secondary type should be same with primary one", primary: self.class.to_s, secondary: @secondary.class.to_s
          end
@@ -399,10 +402,8 @@ module Fluent
          end
          @output_flush_thread_current_position = 0
 
-
-
-            @output_enqueue_thread = thread_create(:enqueue_thread, &method(:enqueue_thread_run))
-          end
+          if !@under_plugin_development && (@flush_mode == :interval || @chunk_key_time)
+            @output_enqueue_thread = thread_create(:enqueue_thread, &method(:enqueue_thread_run))
          end
        end
        @secondary.start if @secondary
@@ -899,9 +900,9 @@ module Fluent
        @retry_mutex.synchronize do
          if @retry # success to flush chunks in retries
            if secondary
-              log.warn "retry succeeded by secondary.",
+              log.warn "retry succeeded by secondary.", chunk_id: dump_unique_id_hex(chunk_id)
            else
-              log.warn "retry succeeded.",
+              log.warn "retry succeeded.", chunk_id: dump_unique_id_hex(chunk_id)
            end
            @retry = nil
          end
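Several of the reworked log calls above and below attach `chunk_id: dump_unique_id_hex(chunk_id)`. The helper itself is not part of this diff; as a rough stand-in, assuming chunk ids are raw byte strings, hex-encoding is what makes them printable in log lines:

```ruby
# Hypothetical stand-in for dump_unique_id_hex (the real helper lives elsewhere
# in fluentd): hex-encode a raw binary chunk id for readable log output.
def dump_unique_id_hex(unique_id)
  unique_id.unpack("H*").first
end

puts dump_unique_id_hex("\x01\xab\xff".b)  # => "01abff"
```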
@@ -919,6 +920,8 @@ module Fluent
        # in many cases, false can be just ignored
        if @buffer.takeback_chunk(chunk_id)
          @counters_monitor.synchronize{ @rollback_count += 1 }
+          primary = @as_secondary ? @primary_instance : self
+          primary.update_retry_state(chunk_id, @as_secondary)
          true
        else
          false
@@ -931,7 +934,9 @@ module Fluent
            info = @dequeued_chunks.shift
            if @buffer.takeback_chunk(info.chunk_id)
              @counters_monitor.synchronize{ @rollback_count += 1 }
-              log.warn "failed to flush the buffer chunk, timeout to commit.",
+              log.warn "failed to flush the buffer chunk, timeout to commit.", chunk_id: dump_unique_id_hex(info.chunk_id), flushed_at: info.time
+              primary = @as_secondary ? @primary_instance : self
+              primary.update_retry_state(info.chunk_id, @as_secondary)
            end
          end
        end
@@ -944,7 +949,9 @@ module Fluent
            info = @dequeued_chunks.shift
            if @buffer.takeback_chunk(info.chunk_id)
              @counters_monitor.synchronize{ @rollback_count += 1 }
-              log.info "delayed commit for buffer chunks was cancelled in shutdown",
+              log.info "delayed commit for buffer chunks was cancelled in shutdown", chunk_id: dump_unique_id_hex(info.chunk_id)
+              primary = @as_secondary ? @primary_instance : self
+              primary.update_retry_state(info.chunk_id, @as_secondary)
            end
          end
        end
@@ -981,11 +988,11 @@ module Fluent
          if output.delayed_commit
            log.trace "executing delayed write and commit", chunk: dump_unique_id_hex(chunk.unique_id)
            @counters_monitor.synchronize{ @write_count += 1 }
-            output.try_write(chunk)
            @dequeued_chunks_mutex.synchronize do
              # delayed_commit_timeout for secondary is configured in <buffer> of primary (<secondary> don't get <buffer>)
              @dequeued_chunks << DequeuedChunkInfo.new(chunk.unique_id, Time.now, self.delayed_commit_timeout)
            end
+            output.try_write(chunk)
          else # output plugin without delayed purge
            chunk_id = chunk.unique_id
            dump_chunk_id = dump_unique_id_hex(chunk_id)
@@ -994,44 +1001,64 @@ module Fluent
            log.trace "executing sync write", chunk: dump_chunk_id
            output.write(chunk)
            log.trace "write operation done, committing", chunk: dump_chunk_id
-            commit_write(chunk_id, secondary: using_secondary)
+            commit_write(chunk_id, delayed: false, secondary: using_secondary)
            log.trace "done to commit a chunk", chunk: dump_chunk_id
          end
        rescue => e
-          log.debug "taking back chunk for errors.",
+          log.debug "taking back chunk for errors.", chunk: dump_unique_id_hex(chunk.unique_id)
+          if output.delayed_commit
+            @dequeued_chunks_mutex.synchronize do
+              @dequeued_chunks.delete_if{|d| d.chunk_id == chunk.unique_id }
+            end
+          end
          @buffer.takeback_chunk(chunk.unique_id)
 
-
-
+          update_retry_state(chunk.unique_id, using_secondary, e)
+
+          raise if @under_plugin_development && !@retry_for_error_chunk
+        end
+      end
+
+      def update_retry_state(chunk_id, using_secondary, error = nil)
+        @retry_mutex.synchronize do
+          @counters_monitor.synchronize{ @num_errors += 1 }
+          chunk_id_hex = dump_unique_id_hex(chunk_id)
+
+          unless @retry
+            @retry = retry_state(@buffer_config.retry_randomize)
+            if error
+              log.warn "failed to flush the buffer.", retry_time: @retry.steps, next_retry_seconds: @retry.next_time, chunk: chunk_id_hex, error: error
+              log.warn_backtrace error.backtrace
+            end
+            return
          end
 
-          @
-
-
-
-
-
-
-
-
-
-
-
-          msg = if using_secondary
-                  "failed to flush the buffer with secondary output."
-                else
-                  "failed to flush the buffer."
-                end
-          log.warn msg, plugin_id: plugin_id, retry_time: @retry.steps, next_retry: @retry.next_time, chunk: dump_unique_id_hex(chunk.unique_id), error: e
-          log.warn_backtrace e.backtrace
-        end
+          # @retry exists
+
+          if error
+            if @retry.limit?
+              records = @buffer.queued_records
+              msg = "failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue."
+              log.error msg, retry_times: @retry.steps, records: records, error: error
+              log.error_backtrace error.backtrace
+            elsif using_secondary
+              msg = "failed to flush the buffer with secondary output."
+              log.warn msg, retry_time: @retry.steps, next_retry_seconds: @retry.next_time, chunk: chunk_id_hex, error: error
+              log.warn_backtrace error.backtrace
            else
-
-              @
-              log.
-              log.warn_backtrace e.backtrace
+              msg = "failed to flush the buffer."
+              log.warn msg, retry_time: @retry.steps, next_retry_seconds: @retry.next_time, chunk: chunk_id_hex, error: error
+              log.warn_backtrace error.backtrace
            end
          end
+
+          if @retry.limit?
+            @buffer.clear_queue!
+            log.debug "buffer queue cleared"
+            @retry = nil
+          else
+            @retry.step
+          end
        end
      end
 
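The update_retry_state method added above centralizes the failure path that used to be inlined in try_flush: count the error, create the retry state on first failure, then either step the backoff or, once the retry limit is hit, drop the queued chunks and clear the state. A simplified, hypothetical model of that control flow; FakeRetry is a stand-in, not the real retry_state object:

```ruby
# Simplified model of the retry flow: each failed flush either steps the backoff
# or, once the limit is hit, drops the queued chunks and clears the retry state.
class FakeRetry
  attr_reader :steps
  def initialize(limit)
    @steps = 0
    @limit = limit
  end
  def step
    @steps += 1
  end
  def limit?
    @steps >= @limit
  end
end

retry_state = FakeRetry.new(3)
loop do
  if retry_state.limit?
    puts "retry limit reached: clearing buffer queue and retry state"
    retry_state = nil
    break
  end
  retry_state.step
  puts "flush failed, scheduling retry step #{retry_state.steps}"
end
```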
@@ -1058,8 +1085,12 @@ module Fluent
        # Without locks: it is rough but enough to select "next" writer selection
        @output_flush_thread_current_position = (@output_flush_thread_current_position + 1) % @buffer_config.flush_thread_count
        state = @output_flush_threads[@output_flush_thread_current_position]
-        state.next_time = 0
-        state.thread.run
+        state.next_clock = 0
+        if state.thread && state.thread.status # "run"/"sleep"/"aborting" or false(successfully stop) or nil(killed by exception)
+          state.thread.run
+        else
+          log.warn "thread is already dead"
+        end
      end
 
      def force_flush
@@ -1096,7 +1127,7 @@ module Fluent
      # only for tests of output plugin
      def flush_thread_wakeup
        @output_flush_threads.each do |state|
-          state.next_time = 0
+          state.next_clock = 0
          state.thread.run
        end
      end
@@ -1150,7 +1181,7 @@ module Fluent
          end
        rescue => e
          raise if @under_plugin_development
-          log.error "unexpected error while checking flushed chunks. ignored.",
+          log.error "unexpected error while checking flushed chunks. ignored.", error: e
          log.error_backtrace
        ensure
          @output_enqueue_thread_waiting = false
@@ -1160,7 +1191,7 @@ module Fluent
        end
      rescue => e
        # normal errors are rescued by inner begin-rescue clause.
-        log.error "error on enqueue thread",
+        log.error "error on enqueue thread", error: e
        log.error_backtrace
        raise
      end
@@ -1169,9 +1200,7 @@ module Fluent
      def flush_thread_run(state)
        flush_thread_interval = @buffer_config.flush_thread_interval
 
-
-        clock_id = Process::CLOCK_MONOTONIC rescue Process::CLOCK_MONOTONIC_RAW
-        state.next_time = Process.clock_gettime(clock_id) + flush_thread_interval
+        state.next_clock = Process.clock_gettime(PROCESS_CLOCK_ID) + flush_thread_interval
 
        while !self.after_started? && !self.stopped?
          sleep 0.5
@@ -1181,16 +1210,18 @@ module Fluent
        begin
          # This thread don't use `thread_current_running?` because this thread should run in `before_shutdown` phase
          while @output_flush_threads_running
-
-            interval = state.
+            current_clock = Process.clock_gettime(PROCESS_CLOCK_ID)
+            interval = state.next_clock - current_clock
 
-            if state.next_time <=
+            if state.next_clock <= current_clock && (!@retry || @retry_mutex.synchronize{ @retry.next_time } <= Time.now)
              try_flush
-
+
+              # next_flush_time uses flush_thread_interval or flush_thread_burst_interval (or retrying)
              interval = next_flush_time.to_f - Time.now.to_f
-              # TODO: if secondary && delayed-commit, next_flush_time will be much longer than expected
-              #
-
+              # TODO: if secondary && delayed-commit, next_flush_time will be much longer than expected
+              # because @retry still exists (#commit_write is not called yet in #try_flush)
+              # @retry should be cleared if delayed commit is enabled? Or any other solution?
+              state.next_clock = Process.clock_gettime(PROCESS_CLOCK_ID) + interval
            end
 
            if @dequeued_chunks_mutex.synchronize{ !@dequeued_chunks.empty? && @dequeued_chunks.first.expired? }
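The rewritten flush loop replaces the wall-clock next_time with a monotonic next_clock deadline. Reduced to a standalone sketch (the interval value is made up; the real code derives it from flush_thread_interval and next_flush_time):

```ruby
# Sketch of the deadline loop: compare a monotonic deadline with "now", flush
# when due, then push the deadline forward by the interval. Bounded to a few
# iterations here so the example terminates.
PROCESS_CLOCK_ID = Process::CLOCK_MONOTONIC_RAW rescue Process::CLOCK_MONOTONIC
interval   = 0.2  # stands in for flush_thread_interval
next_clock = Process.clock_gettime(PROCESS_CLOCK_ID) + interval

3.times do
  now = Process.clock_gettime(PROCESS_CLOCK_ID)
  if next_clock <= now
    puts "try_flush would run here"
    next_clock = Process.clock_gettime(PROCESS_CLOCK_ID) + interval
  end
  sleep([next_clock - Process.clock_gettime(PROCESS_CLOCK_ID), 0].max)
end
```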
@@ -1204,7 +1235,7 @@ module Fluent
      rescue => e
        # normal errors are rescued by output plugins in #try_flush
        # so this rescue section is for critical & unrecoverable errors
-        log.error "error on output thread",
+        log.error "error on output thread", error: e
        log.error_backtrace
        raise
      end
data/lib/fluent/plugin/parser.rb
CHANGED
@@ -31,17 +31,43 @@ module Fluent
 
      configured_in :parse
 
-
-
+      ### types can be specified as string-based hash style
+      # field1:type, field2:type, field3:type:option, field4:type:option
+      ### or, JSON format
+      # {"field1":"type", "field2":"type", "field3":"type:option", "field4":"type:option"}
+      config_param :types, :hash, value_type: :string, default: nil
 
+      # available options are:
+      # array: (1st) delimiter
+      # time : type[, format, timezone] -> type should be a valid "time_type"(string/unixtime/float)
+      #      : format[, timezone]
+
+      config_param :time_key, :string, default: nil
+      config_param :null_value_pattern, :string, default: nil
+      config_param :null_empty_string, :bool, default: false
+      config_param :estimate_current_event, :bool, default: true
      config_param :keep_time_key, :bool, default: false
 
-
+      AVAILABLE_PARSER_VALUE_TYPES = ['string', 'integer', 'float', 'bool', 'time', 'array']
+
+      # for tests
+      attr_reader :type_converters
+
+      PARSER_TYPES = [:text_per_line, :text, :binary]
+      def parser_type
+        :text_per_line
+      end
+
+      def configure(conf)
        super
-
+
+        @time_parser = time_parser_create
+        @null_value_regexp = @null_value_pattern && Regexp.new(@null_value_pattern)
+        @type_converters = build_type_converters(@types)
+        @execute_convert_values = @type_converters || @null_value_regexp || @null_empty_string
      end
 
-      def parse(text)
+      def parse(text, &block)
        raise NotImplementedError, "Implement this method in child class"
      end
 
@@ -51,80 +77,114 @@ module Fluent
        parse(*a, &b)
      end
 
-
-
-
-
-
+      def implement?(feature)
+        methods_of_plugin = self.class.instance_methods(false)
+        case feature
+        when :parse_io then methods_of_plugin.include?(:parse_io)
+        when :parse_partial_data then methods_of_plugin.include?(:parse_partial_data)
+        else
+          raise ArgumentError, "Unknown feature for parser plugin: #{feature}"
+        end
+      end
 
-
-
-
-      config_param :null_empty_string, :bool, default: false
+      def parse_io(io, &block)
+        raise NotImplementedError, "Optional API #parse_io is not implemented"
+      end
 
-      def
-
+      def parse_partial_data(data, &block)
+        raise NotImplementedError, "Optional API #parse_partial_data is not implemented"
+      end
 
-
-
+      def parse_time(record)
+        if @time_key && record.respond_to?(:has_key?) && record.has_key?(@time_key)
+          src = if @keep_time_key
+                  record[@time_key]
+                else
+                  record.delete(@time_key)
+                end
+          @time_parser.parse(src)
+        elsif @estimate_current_event
+          Fluent::EventTime.now
+        else
+          nil
        end
+      rescue Fluent::TimeParser::TimeParseError => e
+        raise ParserError, e.message
+      end
 
-
-
-
+      # def parse(text, &block)
+      #   time, record = convert_values(time, record)
+      #   yield time, record
+      # end
+      def convert_values(time, record)
+        return time, record unless @execute_convert_values
 
-
+        record.each_key do |key|
+          value = record[key]
+          next unless value # nil/null value is always left as-is.
 
-
-
+          if value.is_a?(String) && string_like_null(value)
+            record[key] = nil
+            next
+          end
+
+          if @type_converters && @type_converters.has_key?(key)
+            record[key] = @type_converters[key].call(value)
+          end
        end
 
-
+        return time, record
      end
 
-      def
-
-
-        if @time_key
-          value = @keep_time_key ? record[@time_key] : record.delete(@time_key)
-          time = if value.nil?
-                   if @estimate_current_event
-                     Fluent::EventTime.now
-                   else
-                     nil
-                   end
-                 else
-                   @mutex.synchronize { @time_parser.parse(value) }
-                 end
-        elsif @estimate_current_event
-          time = Fluent::EventTime.now
-        else
-          time = nil
-        end
+      def string_like_null(value, null_empty_string = @null_empty_string, null_value_regexp = @null_value_regexp)
+        null_empty_string && value.empty? || null_value_regexp && string_safe_encoding(value){|s| null_value_regexp.match(s) }
+      end
 
-
+      TRUTHY_VALUES = ['true', 'yes', '1']
 
-
-
+      def build_type_converters(types)
+        return nil unless types
 
-
+        converters = {}
 
-
-
-
-
+        types.each_pair do |field_name, type_definition|
+          type, option = type_definition.split(":", 2)
+          unless AVAILABLE_PARSER_VALUE_TYPES.include?(type)
+            raise Fluent::ConfigError, "unknown value conversion for key:'#{field_name}', type:'#{type}'"
          end
-          }
-        end
 
-
-
-
-
-
-
+          conv = case type
+                 when 'string' then ->(v){ v.to_s }
+                 when 'integer' then ->(v){ v.to_i rescue v.to_s.to_i }
+                 when 'float' then ->(v){ v.to_f rescue v.to_s.to_f }
+                 when 'bool' then ->(v){ TRUTHY_VALUES.include?(v.to_s.downcase) }
+                 when 'time'
+                   # comma-separated: time:[timezone:]time_format
+                   # time_format is unixtime/float/string-time-format
+                   timep = if option
+                             time_type = 'string' # estimate
+                             timezone, time_format = option.split(':', 2)
+                             unless Fluent::Timezone.validate(timezone)
+                               timezone, time_format = nil, option
+                             end
+                             if Fluent::TimeMixin::TIME_TYPES.include?(time_format)
+                               time_type, time_format = time_format, nil # unixtime/float
+                             end
+                             time_parser_create(type: time_type.to_sym, format: time_format, timezone: timezone)
+                           else
+                             time_parser_create(type: :string, format: nil, timezone: nil)
+                           end
+                   ->(v){ timep.parse(v) rescue nil }
+                 when 'array'
+                   delimiter = option ? option.to_s : ','
+                   ->(v){ string_safe_encoding(v.to_s){|s| s.split(delimiter) } }
+                 else
+                   raise "BUG: unknown type even after check: #{type}"
+                 end
+          converters[field_name] = conv
        end
-
+
+        converters
      end
    end
  end
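Taken together, the block-style `parse(text, &block)` API plus the new parse_time and convert_values helpers let a concrete parser stay small. A hedged sketch of a trivial subclass wired to those hooks; the class name and the `message` field are illustrative, not from the release:

```ruby
require 'fluent/plugin/parser'

# Illustrative parser: treat the whole line as a 'message' field, then reuse the
# base-class helpers shown in the diff for time extraction and type conversion.
class TrivialParser < Fluent::Plugin::Parser
  def parse(text)
    record = { 'message' => text }
    time = parse_time(record)                   # honors time_key / estimate_current_event
    time, record = convert_values(time, record) # applies 'types' and null handling
    yield time, record
  end
end
```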