fluentd 1.13.2-x86-mingw32 → 1.14.2-x86-mingw32
Potentially problematic release.
This version of fluentd might be problematic.
- checksums.yaml +4 -4
- data/.drone.yml +6 -6
- data/.github/ISSUE_TEMPLATE/bug_report.yaml +1 -0
- data/.github/workflows/windows-test.yaml +3 -3
- data/CHANGELOG.md +120 -0
- data/example/v0_12_filter.conf +2 -2
- data/fluentd.gemspec +1 -1
- data/lib/fluent/command/cat.rb +13 -3
- data/lib/fluent/command/fluentd.rb +8 -0
- data/lib/fluent/compat/output.rb +9 -6
- data/lib/fluent/config/parser.rb +1 -1
- data/lib/fluent/config/v1_parser.rb +1 -1
- data/lib/fluent/event_router.rb +28 -1
- data/lib/fluent/plugin/bare_output.rb +49 -8
- data/lib/fluent/plugin/buf_file.rb +2 -2
- data/lib/fluent/plugin/buffer.rb +84 -22
- data/lib/fluent/plugin/file_wrapper.rb +22 -0
- data/lib/fluent/plugin/filter.rb +35 -1
- data/lib/fluent/plugin/in_http.rb +21 -2
- data/lib/fluent/plugin/in_monitor_agent.rb +4 -2
- data/lib/fluent/plugin/in_syslog.rb +13 -1
- data/lib/fluent/plugin/in_tail/position_file.rb +20 -18
- data/lib/fluent/plugin/in_tail.rb +46 -6
- data/lib/fluent/plugin/input.rb +39 -1
- data/lib/fluent/plugin/metrics.rb +119 -0
- data/lib/fluent/plugin/metrics_local.rb +96 -0
- data/lib/fluent/plugin/multi_output.rb +43 -6
- data/lib/fluent/plugin/out_copy.rb +1 -1
- data/lib/fluent/plugin/out_forward.rb +15 -7
- data/lib/fluent/plugin/output.rb +80 -38
- data/lib/fluent/plugin/parser_apache2.rb +1 -1
- data/lib/fluent/plugin/storage_local.rb +3 -5
- data/lib/fluent/plugin.rb +10 -1
- data/lib/fluent/plugin_helper/event_emitter.rb +8 -1
- data/lib/fluent/plugin_helper/metrics.rb +129 -0
- data/lib/fluent/plugin_helper/server.rb +4 -2
- data/lib/fluent/plugin_helper.rb +1 -0
- data/lib/fluent/plugin_id.rb +2 -1
- data/lib/fluent/root_agent.rb +6 -0
- data/lib/fluent/supervisor.rb +4 -2
- data/lib/fluent/system_config.rb +9 -1
- data/lib/fluent/time.rb +21 -20
- data/lib/fluent/version.rb +1 -1
- data/test/command/test_cat.rb +31 -2
- data/test/config/test_system_config.rb +6 -0
- data/test/plugin/in_tail/test_io_handler.rb +12 -4
- data/test/plugin/in_tail/test_position_file.rb +48 -8
- data/test/plugin/test_bare_output.rb +13 -0
- data/test/plugin/test_buffer.rb +8 -2
- data/test/plugin/test_file_wrapper.rb +11 -0
- data/test/plugin/test_filter.rb +11 -0
- data/test/plugin/test_in_http.rb +40 -0
- data/test/plugin/test_in_monitor_agent.rb +214 -8
- data/test/plugin/test_in_syslog.rb +35 -0
- data/test/plugin/test_in_tail.rb +72 -29
- data/test/plugin/test_input.rb +11 -0
- data/test/plugin/test_metrics.rb +294 -0
- data/test/plugin/test_metrics_local.rb +96 -0
- data/test/plugin/test_multi_output.rb +25 -1
- data/test/plugin/test_output.rb +16 -0
- data/test/plugin_helper/test_event_emitter.rb +29 -0
- data/test/plugin_helper/test_metrics.rb +137 -0
- data/test/test_plugin_classes.rb +102 -0
- data/test/test_root_agent.rb +30 -1
- data/test/test_time_parser.rb +22 -0
- metadata +13 -4
data/lib/fluent/plugin/buffer.rb
CHANGED
@@ -16,6 +16,8 @@

 require 'fluent/plugin/base'
 require 'fluent/plugin/owned_by_mixin'
+require 'fluent/plugin_id'
+require 'fluent/plugin_helper'
 require 'fluent/unique_id'
 require 'fluent/ext_monitor_require'

@@ -24,7 +26,9 @@ module Fluent
     class Buffer < Base
       include OwnedByMixin
       include UniqueId::Mixin
+      include PluginId
       include MonitorMixin
+      include PluginHelper::Mixin # for metrics

       class BufferError < StandardError; end
       class BufferOverflowError < BufferError; end
@@ -39,6 +43,8 @@ module Fluent

       configured_in :buffer

+      helpers_internal :metrics
+
       # TODO: system total buffer limit size in bytes by SystemConfig

       config_param :chunk_limit_size, :size, default: DEFAULT_CHUNK_LIMIT_SIZE
@@ -153,8 +159,11 @@ module Fluent
         end
       end

+      # for metrics
+      attr_reader :stage_size_metrics, :stage_length_metrics, :queue_size_metrics, :queue_length_metrics
+      attr_reader :available_buffer_space_ratios_metrics, :total_queued_size_metrics
+      attr_reader :newest_timekey_metrics, :oldest_timekey_metrics
       # for tests
-      attr_accessor :stage_size, :queue_size
       attr_reader :stage, :queue, :dequeued, :queued_num

       def initialize
@@ -171,12 +180,35 @@ module Fluent
         @queued_num = {} # metadata => int (number of queued chunks)
         @dequeued_num = {} # metadata => int (number of dequeued chunks)

-        @stage_size = @queue_size = 0
+        @stage_length_metrics = nil
+        @stage_size_metrics = nil
+        @queue_length_metrics = nil
+        @queue_size_metrics = nil
+        @available_buffer_space_ratios_metrics = nil
+        @total_queued_size_metrics = nil
+        @newest_timekey_metrics = nil
+        @oldest_timekey_metrics = nil
         @timekeys = Hash.new(0)
         @enable_update_timekeys = false
         @mutex = Mutex.new
       end

+      def stage_size
+        @stage_size_metrics.get
+      end
+
+      def stage_size=(value)
+        @stage_size_metrics.set(value)
+      end
+
+      def queue_size
+        @queue_size_metrics.get
+      end
+
+      def queue_size=(value)
+        @queue_size_metrics.set(value)
+      end
+
       def persistent?
         false
       end
@@ -187,6 +219,28 @@ module Fluent
         unless @queue_limit_length.nil?
           @total_limit_size = @chunk_limit_size * @queue_limit_length
         end
+        @stage_length_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "stage_length",
+                                               help_text: 'Length of stage buffers', prefer_gauge: true)
+        @stage_length_metrics.set(0)
+        @stage_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "stage_byte_size",
+                                             help_text: 'Total size of stage buffers', prefer_gauge: true)
+        @stage_size_metrics.set(0) # Ensure zero.
+        @queue_length_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "queue_length",
+                                               help_text: 'Length of queue buffers', prefer_gauge: true)
+        @queue_length_metrics.set(0)
+        @queue_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "queue_byte_size",
+                                             help_text: 'Total size of queue buffers', prefer_gauge: true)
+        @queue_size_metrics.set(0) # Ensure zero.
+        @available_buffer_space_ratios_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "available_buffer_space_ratios",
+                                                                help_text: 'Ratio of available space in buffer', prefer_gauge: true)
+        @available_buffer_space_ratios_metrics.set(100) # Default is 100%.
+        @total_queued_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "total_queued_size",
+                                                    help_text: 'Total size of stage and queue buffers', prefer_gauge: true)
+        @total_queued_size_metrics.set(0)
+        @newest_timekey_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "newest_timekey",
+                                                 help_text: 'Newest timekey in buffer', prefer_gauge: true)
+        @oldest_timekey_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "oldest_timekey",
+                                                 help_text: 'Oldest timekey in buffer', prefer_gauge: true)
       end

       def enable_update_timekeys
@@ -198,15 +252,15 @@ module Fluent

         @stage, @queue = resume
         @stage.each_pair do |metadata, chunk|
-          @stage_size += chunk.bytesize
+          @stage_size_metrics.add(chunk.bytesize)
         end
         @queue.each do |chunk|
           @queued_num[chunk.metadata] ||= 0
           @queued_num[chunk.metadata] += 1
-          @queue_size += chunk.bytesize
+          @queue_size_metrics.add(chunk.bytesize)
         end
         update_timekeys
-        log.debug "buffer started", instance: self.object_id, stage_size: @stage_size, queue_size: @queue_size
+        log.debug "buffer started", instance: self.object_id, stage_size: @stage_size_metrics.get, queue_size: @queue_size_metrics.get
       end

       def close
@@ -228,17 +282,19 @@ module Fluent
       def terminate
         super
         @dequeued = @stage = @queue = @queued_num = nil
-        @stage_size = @queue_size = 0
+        @stage_length_metrics = @stage_size_metrics = @queue_length_metrics = @queue_size_metrics = nil
+        @available_buffer_space_ratios_metrics = @total_queued_size_metrics = nil
+        @newest_timekey_metrics = @oldest_timekey_metrics = nil
         @timekeys.clear
       end

       def storable?
-        @total_limit_size > @stage_size + @queue_size
+        @total_limit_size > @stage_size_metrics.get + @queue_size_metrics.get
       end

       ## TODO: for back pressure feature
       # def used?(ratio)
-      #   @total_limit_size * ratio > @stage_size + @queue_size
+      #   @total_limit_size * ratio > @stage_size_metrics.get + @queue_size_metrics.get
       # end

       def resume
@@ -344,7 +400,7 @@ module Fluent
         #
         staged_bytesizes_by_chunk.each do |chunk, bytesize|
           chunk.synchronize do
-            synchronize { @stage_size += bytesize }
+            synchronize { @stage_size_metrics.add(bytesize) }
             log.on_trace { log.trace { "chunk #{chunk.path} size_added: #{bytesize} new_size: #{chunk.bytesize}" } }
           end
         end
@@ -361,7 +417,7 @@ module Fluent
             u.metadata.seq = 0
             synchronize {
               @stage[m] = u.staged!
-              @stage_size += u.bytesize
+              @stage_size_metrics.add(u.bytesize)
             }
           end
         end
@@ -428,8 +484,8 @@ module Fluent
             chunk.enqueued!
           end
           bytesize = chunk.bytesize
-          @stage_size -= bytesize
-          @queue_size += bytesize
+          @stage_size_metrics.sub(bytesize)
+          @queue_size_metrics.add(bytesize)
         end
       end
       nil
@@ -446,7 +502,7 @@ module Fluent
             @queued_num[metadata] = @queued_num.fetch(metadata, 0) + 1
             chunk.enqueued!
           end
-          @queue_size += chunk.bytesize
+          @queue_size_metrics.add(chunk.bytesize)
         end
       end

@@ -531,7 +587,7 @@ module Fluent
           begin
             bytesize = chunk.bytesize
             chunk.purge
-            @queue_size -= bytesize
+            @queue_size_metrics.sub(bytesize)
           rescue => e
             log.error "failed to purge buffer chunk", chunk_id: dump_unique_id_hex(chunk_id), error_class: e.class, error: e
             log.error_backtrace
@@ -562,7 +618,7 @@ module Fluent
             log.error_backtrace
           end
         end
-        @queue_size = 0
+        @queue_size_metrics.set(0)
       end
     end

@@ -765,23 +821,29 @@ module Fluent
       ]

       def statistics
-        stage_size, queue_size = @stage_size, @queue_size
+        stage_size, queue_size = @stage_size_metrics.get, @queue_size_metrics.get
         buffer_space = 1.0 - ((stage_size + queue_size * 1.0) / @total_limit_size)
+        @stage_length_metrics.set(@stage.size)
+        @queue_length_metrics.set(@queue.size)
+        @available_buffer_space_ratios_metrics.set(buffer_space * 100)
+        @total_queued_size_metrics.set(stage_size + queue_size)
         stats = {
-          'stage_length' => @stage.size,
+          'stage_length' => @stage_length_metrics.get,
           'stage_byte_size' => stage_size,
-          'queue_length' => @queue.size,
+          'queue_length' => @queue_length_metrics.get,
           'queue_byte_size' => queue_size,
-          'available_buffer_space_ratios' => buffer_space.round(1) * 100,
-          'total_queued_size' => stage_size + queue_size,
+          'available_buffer_space_ratios' => @available_buffer_space_ratios_metrics.get.round(1),
+          'total_queued_size' => @total_queued_size_metrics.get,
         }

         tkeys = timekeys
         if (m = tkeys.min)
-          stats['oldest_timekey'] = m
+          @oldest_timekey_metrics.set(m)
+          stats['oldest_timekey'] = @oldest_timekey_metrics.get
         end
         if (m = tkeys.max)
-          stats['newest_timekey'] = m
+          @newest_timekey_metrics.set(m)
+          stats['newest_timekey'] = @newest_timekey_metrics.get
         end

         { 'buffer' => stats }
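Taken together, these buffer hunks replace the plain @stage_size/@queue_size integers with counter objects created through the new metrics plugin helper; the diff relies on only four calls: get, set, add and sub. Below is a minimal stand-in written against that contract — LocalGauge is illustrative, not the real metrics plugin shipped with fluentd:

# Stand-in gauge with the get/set/add/sub contract used by the buffer hunks above.
# The real objects come from metrics_create and may be backed by other metrics
# plugins; this local sketch just guards a number with a mutex.
class LocalGauge
  def initialize
    @value = 0
    @mutex = Mutex.new
  end

  def get
    @mutex.synchronize { @value }
  end

  def set(value)
    @mutex.synchronize { @value = value }
  end

  def add(delta)
    @mutex.synchronize { @value += delta }
  end

  def sub(delta)
    @mutex.synchronize { @value -= delta }
  end
end

stage_size_metrics = LocalGauge.new
queue_size_metrics = LocalGauge.new

stage_size_metrics.add(2048)   # a 2 KiB chunk is staged
stage_size_metrics.sub(2048)   # ...then moved to the queue
queue_size_metrics.add(2048)

puts stage_size_metrics.get    # => 0
puts queue_size_metrics.get    # => 2048

The old attr_accessor is gone, but Buffer#stage_size and Buffer#queue_size remain as thin readers/writers over these objects, so existing callers such as tests and in_monitor_agent keep working.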
data/lib/fluent/plugin/file_wrapper.rb
CHANGED
@@ -155,7 +155,29 @@ module Fluent
       by_handle_file_information.unpack("I11Q1")[11] # fileindex
     end

+    # DeletePending is a Windows-specific file state that roughly means
+    # "this file is queued for deletion, so close any open handlers"
+    #
+    # This flag can be retrieved via GetFileInformationByHandleEx().
+    #
+    # https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-getfileinformationbyhandleex
+    #
+    def delete_pending
+      file_standard_info = 0x01
+      bufsize = 1024
+      buf = '\0' * bufsize
+
+      unless GetFileInformationByHandleEx.call(@file_handle, file_standard_info, buf, bufsize)
+        return false
+      end
+
+      return buf.unpack("QQICC")[3] != 0
+    end
+
+    private :delete_pending
+
     def stat
+      raise Errno::ENOENT if delete_pending
       s = File.stat(@path)
       s.instance_variable_set :@ino, self.ino
       def s.ino; @ino; end
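With this change, a stat on a Windows file that is already queued for deletion surfaces as Errno::ENOENT instead of succeeding against a half-gone handle. A hedged sketch of how a caller might react — the wrapper variable and the rescue policy here are illustrative, not code from the gem:

# `wrapper` stands for the Windows file object whose stat method is patched above.
def stat_or_nil(wrapper)
  wrapper.stat
rescue Errno::ENOENT
  # A delete-pending file behaves like one that is already gone, so a tailing
  # caller can unwatch it and release its handle instead of retrying the stat.
  nil
end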
data/lib/fluent/plugin/filter.rb
CHANGED
@@ -28,13 +28,47 @@ module Fluent
       include PluginLoggerMixin
       include PluginHelper::Mixin

-      helpers_internal :event_emitter
+      helpers_internal :event_emitter, :metrics

       attr_reader :has_filter_with_time

       def initialize
         super
         @has_filter_with_time = has_filter_with_time?
+        @emit_records_metrics = nil
+        @emit_size_metrics = nil
+        @counter_mutex = Mutex.new
+        @enable_size_metrics = false
+      end
+
+      def emit_records
+        @emit_records_metrics.get
+      end
+
+      def emit_size
+        @emit_size_metrics.get
+      end
+
+      def configure(conf)
+        super
+
+        @emit_records_metrics = metrics_create(namespace: "fluentd", subsystem: "filter", name: "emit_records", help_text: "Number of count emit records")
+        @emit_size_metrics = metrics_create(namespace: "fluentd", subsystem: "filter", name: "emit_size", help_text: "Total size of emit events")
+        @enable_size_metrics = !!system_config.enable_size_metrics
+      end
+
+      def statistics
+        stats = {
+          'emit_records' => @emit_records_metrics.get,
+          'emit_size' => @emit_size_metrics.get,
+        }
+
+        { 'filter' => stats }
+      end
+
+      def measure_metrics(es)
+        @emit_records_metrics.add(es.size)
+        @emit_size_metrics.add(es.to_msgpack_stream.bytesize) if @enable_size_metrics
       end

       def filter(tag, time, record)
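The accounting added in Filter#measure_metrics depends on just two methods of the event stream: size and to_msgpack_stream. A self-contained sketch of that arithmetic with stand-in objects — Counter and EventStreamStub are illustrative, not fluentd classes:

require 'msgpack'

# Minimal counter with the add/get calls used above.
Counter = Struct.new(:value) do
  def add(n); self.value += n; end
  def get; value; end
end

# Mimics the two event-stream calls measure_metrics relies on.
EventStreamStub = Struct.new(:records) do
  def size; records.size; end
  def to_msgpack_stream; records.map(&:to_msgpack).join; end
end

emit_records = Counter.new(0)
emit_size    = Counter.new(0)
es = EventStreamStub.new([{ "message" => "hello" }, { "message" => "world" }])

emit_records.add(es.size)                    # always counted
emit_size.add(es.to_msgpack_stream.bytesize) # only when enable_size_metrics is on

puts emit_records.get # => 2
puts emit_size.get    # => byte size of the msgpack-encoded stream

The { 'filter' => stats } hash returned by #statistics is what in_monitor_agent flattens into its per-plugin output (see the in_monitor_agent hunks further down).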
data/lib/fluent/plugin/in_http.rb
CHANGED
@@ -74,6 +74,8 @@ module Fluent::Plugin
     config_param :blocking_timeout, :time, default: 0.5
     desc 'Set a allow list of domains that can do CORS (Cross-Origin Resource Sharing)'
     config_param :cors_allow_origins, :array, default: nil
+    desc 'Tells browsers whether to expose the response to frontend when the credentials mode is "include".'
+    config_param :cors_allow_credentials, :bool, default: false
     desc 'Respond with empty gif image of 1x1 pixel.'
     config_param :respond_with_empty_img, :bool, default: false
     desc 'Respond status code with 204.'
@@ -112,6 +114,12 @@ module Fluent::Plugin

       super

+      if @cors_allow_credentials
+        if @cors_allow_origins.nil? || @cors_allow_origins.include?('*')
+          raise Fluent::ConfigError, "Cannot enable cors_allow_credentials without specific origins"
+        end
+      end
+
       m = if @parser_configs.first['@type'] == 'in_http'
             @parser_msgpack = parser_create(usage: 'parser_in_http_msgpack', type: 'msgpack')
             @parser_msgpack.time_key = nil
@@ -279,7 +287,10 @@ module Fluent::Plugin
     private

     def on_server_connect(conn)
-      handler = Handler.new(conn, @km, method(:on_request), @body_size_limit, @format_name, log, @cors_allow_origins, @add_query_params)
+      handler = Handler.new(conn, @km, method(:on_request),
+                            @body_size_limit, @format_name, log,
+                            @cors_allow_origins, @cors_allow_credentials,
+                            @add_query_params)

       conn.on(:data) do |data|
         handler.on_read(data)
@@ -356,7 +367,8 @@ module Fluent::Plugin
     class Handler
       attr_reader :content_type

-      def initialize(io, km, callback, body_size_limit, format_name, log, cors_allow_origins, add_query_params)
+      def initialize(io, km, callback, body_size_limit, format_name, log,
+                     cors_allow_origins, cors_allow_credentials, add_query_params)
         @io = io
         @km = km
         @callback = callback
@@ -365,6 +377,7 @@ module Fluent::Plugin
         @format_name = format_name
         @log = log
         @cors_allow_origins = cors_allow_origins
+        @cors_allow_credentials = cors_allow_credentials
         @idle = 0
         @add_query_params = add_query_params
         @km.add(self)
@@ -491,6 +504,9 @@ module Fluent::Plugin
           send_response_and_close(RES_200_STATUS, header, "")
         elsif include_cors_allow_origin
           header["Access-Control-Allow-Origin"] = @origin
+          if @cors_allow_credentials
+            header["Access-Control-Allow-Credentials"] = true
+          end
           send_response_and_close(RES_200_STATUS, header, "")
         else
           send_response_and_close(RES_403_STATUS, {}, "")
@@ -576,6 +592,9 @@ module Fluent::Plugin
             header['Access-Control-Allow-Origin'] = '*'
           elsif include_cors_allow_origin
             header['Access-Control-Allow-Origin'] = @origin
+            if @cors_allow_credentials
+              header["Access-Control-Allow-Credentials"] = true
+            end
           end
         end

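The new option only changes the response headers for allow-listed origins; a wildcard origin combined with credentials is rejected at configure time. A hedged end-to-end sketch, assuming an in_http source is already running on localhost:9880 with cors_allow_origins ["http://example.com"] and cors_allow_credentials true (the endpoint, tag, and payload are hypothetical):

require 'net/http'
require 'uri'

uri = URI.parse('http://localhost:9880/app.log')  # hypothetical tag endpoint
req = Net::HTTP::Post.new(uri)
req['Origin'] = 'http://example.com'              # must match the allow list
req.set_form_data('json' => '{"message":"hello"}')

res = Net::HTTP.start(uri.host, uri.port) { |http| http.request(req) }
puts res['Access-Control-Allow-Origin']      # echoes the matched origin
puts res['Access-Control-Allow-Credentials'] # "true" once this patch is applied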
data/lib/fluent/plugin/in_monitor_agent.rb
CHANGED
@@ -238,7 +238,7 @@ module Fluent::Plugin
       'buffer_queue_length' => ->(){ throw(:skip) unless instance_variable_defined?(:@buffer) && !@buffer.nil? && @buffer.is_a?(::Fluent::Plugin::Buffer); @buffer.queue.size },
       'buffer_timekeys' => ->(){ throw(:skip) unless instance_variable_defined?(:@buffer) && !@buffer.nil? && @buffer.is_a?(::Fluent::Plugin::Buffer); @buffer.timekeys },
       'buffer_total_queued_size' => ->(){ throw(:skip) unless instance_variable_defined?(:@buffer) && !@buffer.nil? && @buffer.is_a?(::Fluent::Plugin::Buffer); @buffer.stage_size + @buffer.queue_size },
-      'retry_count' => ->(){ instance_variable_defined?(:@num_errors) ? @num_errors : nil },
+      'retry_count' => ->(){ respond_to?(:num_errors) ? num_errors : nil },
     }

     def all_plugins
@@ -335,7 +335,9 @@ module Fluent::Plugin
         }

         if pe.respond_to?(:statistics)
-          obj.merge!(pe.statistics['output'] || {})
+          obj.merge!(pe.statistics.dig('output') || {})
+          obj.merge!(pe.statistics.dig('filter') || {})
+          obj.merge!(pe.statistics.dig('input') || {})
         end

         obj['retry'] = get_retry_info(pe.retry) if opts[:with_retry] && pe.instance_variable_defined?(:@retry)
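The merge above flattens whichever section a plugin reports ('output', 'filter', or 'input') into its monitor entry. A minimal sketch of that behaviour with plain hashes (the values are illustrative):

obj = { 'plugin_id' => 'object:abc', 'type' => 'grep' }
statistics = { 'filter' => { 'emit_records' => 42, 'emit_size' => 4096 } }

%w[output filter input].each do |section|
  # dig returns nil for absent sections, so `|| {}` keeps merge! a no-op there.
  obj.merge!(statistics.dig(section) || {})
end

p obj
# => {"plugin_id"=>"object:abc", "type"=>"grep", "emit_records"=>42, "emit_size"=>4096}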
data/lib/fluent/plugin/in_syslog.rb
CHANGED
@@ -89,6 +89,8 @@ module Fluent::Plugin
     config_param :source_hostname_key, :string, default: nil
     desc 'Try to resolve hostname from IP addresses or not.'
     config_param :resolve_hostname, :bool, default: nil
+    desc 'Check the remote connection is still available by sending a keepalive packet if this value is true.'
+    config_param :send_keepalive_packet, :bool, default: false
     desc 'The field name of source address of sender.'
     config_param :source_address_key, :string, default: nil
     desc 'The field name of the severity.'
@@ -143,6 +145,11 @@ module Fluent::Plugin
       end

       @_event_loop_run_timeout = @blocking_timeout
+
+      protocol = @protocol_type || @transport_config.protocol
+      if @send_keepalive_packet && protocol == :udp
+        raise Fluent::ConfigError, "send_keepalive_packet is available for tcp/tls"
+      end
     end

     def multi_workers_ready?
@@ -173,7 +180,12 @@ module Fluent::Plugin

       delimiter = octet_count_frame ? " " : @delimiter
       delimiter_size = delimiter.size
-      server_create_connection(tls ? :in_syslog_tls_server : :in_syslog_tcp_server, @port, bind: @bind, resolve_name: @resolve_hostname) do |conn|
+      server_create_connection(
+        tls ? :in_syslog_tls_server : :in_syslog_tcp_server, @port,
+        bind: @bind,
+        resolve_name: @resolve_hostname,
+        send_keepalive_packet: @send_keepalive_packet
+      ) do |conn|
         conn.data do |data|
           buffer = conn.buffer
           buffer << data
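send_keepalive_packet only makes sense for connection-oriented transports, hence the udp guard in configure. At the socket level the option boils down to enabling SO_KEEPALIVE on each accepted connection, roughly like the plain-socket sketch below (illustrative only, not the actual server plugin helper code):

require 'socket'

server = TCPServer.new('127.0.0.1', 0)              # any free port
client = TCPSocket.new('127.0.0.1', server.addr[1]) # dummy peer so accept returns
conn = server.accept

# Ask the OS to probe idle connections and tear down dead peers.
conn.setsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE, true)
p conn.getsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE).bool # => true

client.close
conn.close
server.close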
data/lib/fluent/plugin/in_tail/position_file.rb
CHANGED
@@ -22,19 +22,18 @@ module Fluent::Plugin
       UNWATCHED_POSITION = 0xffffffffffffffff
       POSITION_FILE_ENTRY_REGEX = /^([^\t]+)\t([0-9a-fA-F]+)\t([0-9a-fA-F]+)/.freeze

-      def self.load(file, follow_inodes, existing_paths, logger:)
-        pf = new(file, follow_inodes, existing_paths, logger: logger)
-        pf.load
+      def self.load(file, follow_inodes, existing_targets, logger:)
+        pf = new(file, follow_inodes, logger: logger)
+        pf.load(existing_targets)
         pf
       end

-      def initialize(file, follow_inodes, existing_paths, logger: nil)
+      def initialize(file, follow_inodes, logger: nil)
         @file = file
         @logger = logger
         @file_mutex = Mutex.new
         @map = {}
         @follow_inodes = follow_inodes
-        @existing_paths = existing_paths
       end

       def [](target_info)
@@ -60,8 +59,8 @@ module Fluent::Plugin
         end
       end

-      def load
-        compact
+      def load(existing_targets = nil)
+        compact(existing_targets)

         map = {}
         @file_mutex.synchronize do
@@ -118,9 +117,9 @@ module Fluent::Plugin

       private

-      def compact
+      def compact(existing_targets = nil)
         @file_mutex.synchronize do
-          entries = fetch_compacted_entries.values.map(&:to_entry_fmt)
+          entries = fetch_compacted_entries(existing_targets).values.map(&:to_entry_fmt)

           @file.pos = 0
           @file.truncate(0)
@@ -128,7 +127,7 @@ module Fluent::Plugin
         end
       end

-      def fetch_compacted_entries
+      def fetch_compacted_entries(existing_targets = nil)
         entries = {}

         @file.pos = 0
@@ -151,23 +150,26 @@ module Fluent::Plugin
           end

           if @follow_inodes
-            entries[ino] = Entry.new(path, pos, ino, file_pos + path.size + 1)
+            entries[ino] = Entry.new(path, pos, ino, file_pos + path.bytesize + 1)
           else
-            entries[path] = Entry.new(path, pos, ino, file_pos + path.size + 1)
+            entries[path] = Entry.new(path, pos, ino, file_pos + path.bytesize + 1)
           end
           file_pos += line.size
         end
       end

-        entries = remove_deleted_files_entries(entries, @existing_paths)
+        entries = remove_deleted_files_entries(entries, existing_targets)
        entries
      end

-      def remove_deleted_files_entries(existent_entries, existing_paths)
-        existent_entries.select { |path_or_ino|
-          existing_paths.key?(path_or_ino)
-        }
-        existent_entries
+      def remove_deleted_files_entries(existent_entries, existing_targets)
+        if existing_targets
+          existent_entries.select { |path_or_ino|
+            existing_targets.key?(path_or_ino)
+          }
+        else
+          existent_entries
+        end
       end
     end

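Compaction now receives the watcher's current targets as an argument instead of the position file keeping its own copy of existing paths, and the filtering itself is a plain Hash#select keyed by path (or by inode when follow_inodes is enabled). A small sketch with illustrative data — Entry here is a stand-in mirroring the fields used above, not the real class:

Entry = Struct.new(:path, :pos, :ino, :seek)

entries = {
  '/var/log/app.log'  => Entry.new('/var/log/app.log', 120, 0xf1, 0),
  '/var/log/gone.log' => Entry.new('/var/log/gone.log', 50, 0xf2, 30),
}

# Keyed the same way as `entries`; anything no longer watched is dropped.
existing_targets = { '/var/log/app.log' => true }

compacted = entries.select { |path_or_ino, _entry| existing_targets.key?(path_or_ino) }
p compacted.keys # => ["/var/log/app.log"]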