fluentd 1.13.3-x64-mingw32 → 1.14.0-x64-mingw32


Files changed (47)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/windows-test.yaml +3 -3
  3. data/CHANGELOG.md +44 -0
  4. data/lib/fluent/command/fluentd.rb +8 -0
  5. data/lib/fluent/compat/output.rb +9 -6
  6. data/lib/fluent/event_router.rb +28 -1
  7. data/lib/fluent/plugin/bare_output.rb +49 -8
  8. data/lib/fluent/plugin/buffer.rb +84 -22
  9. data/lib/fluent/plugin/filter.rb +35 -1
  10. data/lib/fluent/plugin/in_http.rb +21 -2
  11. data/lib/fluent/plugin/in_monitor_agent.rb +4 -2
  12. data/lib/fluent/plugin/in_syslog.rb +13 -1
  13. data/lib/fluent/plugin/in_tail/position_file.rb +1 -1
  14. data/lib/fluent/plugin/in_tail.rb +4 -1
  15. data/lib/fluent/plugin/input.rb +39 -1
  16. data/lib/fluent/plugin/metrics.rb +119 -0
  17. data/lib/fluent/plugin/metrics_local.rb +96 -0
  18. data/lib/fluent/plugin/multi_output.rb +43 -6
  19. data/lib/fluent/plugin/output.rb +74 -33
  20. data/lib/fluent/plugin.rb +10 -1
  21. data/lib/fluent/plugin_helper/event_emitter.rb +8 -1
  22. data/lib/fluent/plugin_helper/metrics.rb +129 -0
  23. data/lib/fluent/plugin_helper/server.rb +4 -2
  24. data/lib/fluent/plugin_helper.rb +1 -0
  25. data/lib/fluent/root_agent.rb +6 -0
  26. data/lib/fluent/supervisor.rb +2 -0
  27. data/lib/fluent/system_config.rb +9 -1
  28. data/lib/fluent/version.rb +1 -1
  29. data/test/config/test_system_config.rb +6 -0
  30. data/test/plugin/in_tail/test_position_file.rb +26 -4
  31. data/test/plugin/test_bare_output.rb +13 -0
  32. data/test/plugin/test_buffer.rb +8 -2
  33. data/test/plugin/test_filter.rb +11 -0
  34. data/test/plugin/test_in_http.rb +40 -0
  35. data/test/plugin/test_in_monitor_agent.rb +214 -8
  36. data/test/plugin/test_in_syslog.rb +35 -0
  37. data/test/plugin/test_in_tail.rb +9 -26
  38. data/test/plugin/test_input.rb +11 -0
  39. data/test/plugin/test_metrics.rb +294 -0
  40. data/test/plugin/test_metrics_local.rb +96 -0
  41. data/test/plugin/test_multi_output.rb +25 -1
  42. data/test/plugin/test_output.rb +16 -0
  43. data/test/plugin_helper/test_event_emitter.rb +29 -0
  44. data/test/plugin_helper/test_metrics.rb +137 -0
  45. data/test/test_plugin_classes.rb +102 -0
  46. data/test/test_root_agent.rb +30 -1
  47. metadata +11 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 504fb7ee1a236fd83f1562020ce5cdea6207c9b3af0ec2d7bfa1ea17f0214c03
-  data.tar.gz: 5868dca822982fe4501beba7c2637f647334a8ff187b097aca2805be1c6eb16d
+  metadata.gz: baf1f05f1840de4f82caba28f3e388b5db65d80eb9b6f598d07079c69da0a238
+  data.tar.gz: e250af49dd38843eaaeda0f83349017bfc8c3cdbe776a465661953498ce343ef
 SHA512:
-  metadata.gz: 36e1633f9e1c12fdd1c228ae11c747d335efe0c2ad4d12ab666f8674964b5d0bc2a28b233bd71f1101d2ff45e970a9893b33209ef22ebb82baf36eb0afb37621
-  data.tar.gz: 817231dbb280c9fd18fe09eb8fc2912d6de761be28cd5394b8b96b03435f5555903caea124d2d1edd98c0ba8a4c8460f85e9057127ef2fee1a8147d6e680b071
+  metadata.gz: e11881ad301dbee0bb94ac3e13f5e53bf2e85edba0fbe30ac89a18a924ed0a0ca88d15a24cba9362c28ade855eca91cefd3036e60d3f5971a22f3baf739a5276
+  data.tar.gz: 5e67af6f5c4ab235b576487474b9989b59c577303621a0622013145eb914e9dc6b0901f17e8877fd18294d0d99c8745e7430fd5acb9614e3ec6a469b6d9a717a
data/.github/workflows/windows-test.yaml CHANGED
@@ -18,7 +18,7 @@ jobs:
           - windows-latest
         experimental: [false]
         include:
-          - ruby-version: '3.0.1'
+          - ruby-version: '3.0.2'
            os: windows-latest
            experimental: true
            # On Ruby 3.0, we need to use fiddle 1.0.8 or later to retrieve correct
@@ -28,7 +28,7 @@ jobs:
            # * https://github.com/ruby/fiddle/issues/72
            # * https://bugs.ruby-lang.org/issues/17813
            # * https://github.com/oneclick/rubyinstaller2/blob/8225034c22152d8195bc0aabc42a956c79d6c712/lib/ruby_installer/build/dll_directory.rb
-            ruby-lib-opt: RUBYLIB=%RUNNER_TOOL_CACHE%/Ruby/3.0.1/x64/lib/ruby/gems/3.0.0/gems/fiddle-1.0.8/lib
+            ruby-lib-opt: RUBYLIB=%RUNNER_TOOL_CACHE%/Ruby/3.0.2/x64/lib/ruby/gems/3.0.0/gems/fiddle-1.0.8/lib
 
     name: Unit testing with Ruby ${{ matrix.ruby-version }} on ${{ matrix.os }}
     steps:
@@ -38,7 +38,7 @@ jobs:
         with:
           ruby-version: ${{ matrix.ruby-version }}
       - name: Add Fiddle 1.0.8
-        if: ${{ matrix.ruby-version == '3.0.1' }}
+        if: ${{ matrix.ruby-version == '3.0.2' }}
        run: gem install fiddle --version 1.0.8
      - name: Install dependencies
        run: ridk exec bundle install
data/CHANGELOG.md CHANGED
@@ -1,3 +1,47 @@
+# v1.14.0
+
+## Release v1.14.0 - 2021/08/30
+
+### Enhancement
+
+* Added `enable_input_metrics` and `enable_size_metrics` system
+  configuration parameters.
+  These metrics can add CPU overhead, so input event metrics
+  features are disabled by default. They can also be enabled with the
+  `--enable-input-metrics` and `--enable-size-metrics` command line
+  options.
+  https://github.com/fluent/fluentd/pull/3440
+* Added reserved word `@ROOT` for getting the root router.
+  This is an incompatible change. Do not use `@ROOT` as a label name.
+  https://github.com/fluent/fluentd/pull/3358
+* in_syslog: Added `send_keepalive_packet` option
+  https://github.com/fluent/fluentd/pull/3474
+* in_http: Added `cors_allow_credentials` option.
+  This option tells browsers whether to expose the response to the
+  frontend when the credentials mode is "include".
+  https://github.com/fluent/fluentd/pull/3481
+  https://github.com/fluent/fluentd/pull/3491
+
+### Bug fix
+
+* in_tail: Fixed a bug where deleted paths were not removed
+  from the pos file by file compaction at startup
+  https://github.com/fluent/fluentd/pull/3467
+* in_tail: Revived the warning message emitted when retrying an inaccessible file
+  https://github.com/fluent/fluentd/pull/3478
+* TLSServer: Fixed a crash when logging peer host name errors
+  https://github.com/fluent/fluentd/pull/3483
+
+### Misc
+
+* Added a metrics plugin mechanism.
+  The built-in counter implementation is changed to use metrics plugins;
+  in the future, 3rd party plugins will be able to handle these metrics.
+  https://github.com/fluent/fluentd/pull/3471
+  https://github.com/fluent/fluentd/pull/3473
+  https://github.com/fluent/fluentd/pull/3479
+  https://github.com/fluent/fluentd/pull/3484
+
 # v1.13.3
 
 ## Release v1.13.3 - 2021/07/27
data/lib/fluent/command/fluentd.rb CHANGED
@@ -134,6 +134,14 @@ op.on('--strict-config-value', "Parse config values strictly", TrueClass) {|b|
   opts[:strict_config_value] = b
 }
 
+op.on('--enable-input-metrics', "Enable input plugin metrics on fluentd", TrueClass) {|b|
+  opts[:enable_input_metrics] = b
+}
+
+op.on('--enable-size-metrics', "Enable plugin record size metrics on fluentd", TrueClass) {|b|
+  opts[:enable_size_metrics] = b
+}
+
 op.on('-v', '--verbose', "increase verbose level (-v: debug, -vv: trace)", TrueClass) {|b|
   if b
     opts[:log_level] = [opts[:log_level] - 1, Fluent::Log::LEVEL_TRACE].max
data/lib/fluent/compat/output.rb CHANGED
@@ -310,7 +310,7 @@ module Fluent
       # original implementation of v0.12 BufferedOutput
       def emit(tag, es, chain, key="")
         # this method will not be used except for the case that plugin calls super
-        @emit_count += 1
+        @emit_count_metrics.inc
         data = format_stream(tag, es)
         if @buffer.emit(key, data, chain)
           submit_flush
@@ -337,14 +337,14 @@ module Fluent
       # because v0.12 BufferedOutput may overrides #format_stream, but original #handle_stream_simple method doesn't consider about it
       def handle_stream_simple(tag, es, enqueue: false)
         if @overrides_emit
-          current_emit_count = @emit_count
+          current_emit_count = @emit_count_metrics.get
           size = es.size
           key = data = nil
           begin
             emit(tag, es, NULL_OUTPUT_CHAIN)
             key, data = self.last_emit_via_buffer
           ensure
-            @emit_count = current_emit_count
+            @emit_count_metrics.set(current_emit_count)
             self.last_emit_via_buffer = nil
           end
           # on-the-fly key assignment can be done, and it's not configurable if Plugin#emit does it dynamically
@@ -352,7 +352,8 @@ module Fluent
         write_guard do
           @buffer.write({meta => data}, format: ->(_data){ _data }, size: ->(){ size }, enqueue: enqueue)
         end
-        @counter_mutex.synchronize{ @emit_records += size }
+        @emit_records_metrics.add(es.size)
+        @emit_size_metrics.add(es.to_msgpack_stream.bytesize) if @enable_size_metrics
         return [meta]
       end
 
@@ -363,7 +364,8 @@ module Fluent
         write_guard do
           @buffer.write({meta => bulk}, format: ->(_data){ _data }, size: ->(){ size }, enqueue: enqueue)
         end
-        @counter_mutex.synchronize{ @emit_records += size }
+        @emit_records_metrics.add(es.size)
+        @emit_size_metrics.add(es.to_msgpack_stream.bytesize) if @enable_size_metrics
         return [meta]
       end
 
@@ -373,7 +375,8 @@ module Fluent
         write_guard do
           @buffer.write({meta => data}, enqueue: enqueue)
         end
-        @counter_mutex.synchronize{ @emit_records += size }
+        @emit_records_metrics.add(es.size)
+        @emit_size_metrics.add(es.to_msgpack_stream.bytesize) if @enable_size_metrics
         [meta]
       end
 
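The `@emit_size_metrics.add(es.to_msgpack_stream.bytesize)` calls above are why size metrics are opt-in (`enable_size_metrics`): measuring a stream's byte size means serializing the whole event stream again. A rough, self-contained sketch of that cost, using the msgpack gem (already a fluentd dependency); the sample events and the script itself are made up for illustration:

```ruby
# Serialize a batch of made-up events the way to_msgpack_stream would,
# just to learn its byte size. Doing this on every emit is the CPU cost
# the changelog warns about, hence the enable_size_metrics switch.
require 'msgpack'

events = 10_000.times.map { |i| [Time.now.to_i, { 'message' => "record #{i}" }] }

bytes = events.map { |time, record| [time, record].to_msgpack }.join.bytesize
puts "stream size: #{bytes} bytes for #{events.size} events"
```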
data/lib/fluent/event_router.rb CHANGED
@@ -47,6 +47,8 @@ module Fluent
       @match_cache = MatchCache.new
       @default_collector = default_collector
       @emit_error_handler = emit_error_handler
+      @metric_callbacks = {}
+      @caller_plugin_id = nil
     end
 
     attr_accessor :default_collector
@@ -83,6 +85,22 @@ module Fluent
       @match_rules << Rule.new(pattern, collector)
     end
 
+    def add_metric_callbacks(caller_plugin_id, callback)
+      @metric_callbacks[caller_plugin_id] = callback
+    end
+
+    def caller_plugin_id=(caller_plugin_id)
+      @caller_plugin_id = caller_plugin_id
+    end
+
+    def find_callback
+      if @caller_plugin_id
+        @metric_callbacks[@caller_plugin_id]
+      else
+        nil
+      end
+    end
+
     def emit(tag, time, record)
       unless record.nil?
         emit_stream(tag, OneEventStream.new(time, record))
@@ -95,6 +113,9 @@ module Fluent
 
     def emit_stream(tag, es)
       match(tag).emit_events(tag, es)
+      if callback = find_callback
+        callback.call(es)
+      end
     rescue => e
       @emit_error_handler.handle_emits_error(tag, es, e)
     end
@@ -175,7 +196,11 @@ module Fluent
       if optimizable?
         optimized_filter_stream(tag, es)
       else
-        @filters.reduce(es) { |acc, filter| filter.filter_stream(tag, acc) }
+        @filters.reduce(es) { |acc, filter|
+          filtered_es = filter.filter_stream(tag, acc)
+          filter.measure_metrics(filtered_es)
+          filtered_es
+        }
       end
     end
 
@@ -193,6 +218,7 @@ module Fluent
           begin
             filtered_time, filtered_record = filter.filter_with_time(tag, filtered_time, filtered_record)
             throw :break_loop unless filtered_record && filtered_time
+            filter.measure_metrics(OneEventStream.new(time, record))
           rescue => e
             filter.router.emit_error_event(tag, filtered_time, filtered_record, e)
           end
@@ -200,6 +226,7 @@ module Fluent
           begin
            filtered_record = filter.filter(tag, filtered_time, filtered_record)
            throw :break_loop unless filtered_record
+           filter.measure_metrics(OneEventStream.new(time, record))
          rescue => e
            filter.router.emit_error_event(tag, filtered_time, filtered_record, e)
          end
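For orientation, here is a self-contained toy model (not Fluentd's real classes) of the callback mechanism the hunks above add to EventRouter: a plugin registers a callback under its plugin id, the router is told which plugin is currently emitting, and `emit_stream` invokes the matching callback with the emitted event stream so record counts can be collected without touching the routing path. The `ToyRouter` class, the `in_dummy_0` id, and the sample event data are hypothetical.

```ruby
# Toy model of the per-plugin metric callback flow added to EventRouter.
class ToyRouter
  def initialize
    @metric_callbacks = {}   # caller_plugin_id => callable
    @caller_plugin_id = nil  # which plugin is currently emitting
  end

  attr_writer :caller_plugin_id

  def add_metric_callbacks(caller_plugin_id, callback)
    @metric_callbacks[caller_plugin_id] = callback
  end

  def emit_stream(tag, es)
    # ... deliver es to the outputs matching `tag` here ...
    callback = @caller_plugin_id && @metric_callbacks[@caller_plugin_id]
    callback.call(es) if callback
  end
end

emitted_records = 0
router = ToyRouter.new
router.add_metric_callbacks('in_dummy_0', ->(es) { emitted_records += es.size })
router.caller_plugin_id = 'in_dummy_0'

router.emit_stream('test.tag', [[Time.now.to_i, { 'message' => 'hello' }]])
puts emitted_records  # => 1
```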
data/lib/fluent/plugin/bare_output.rb CHANGED
@@ -23,37 +23,78 @@ require 'fluent/plugin_helper'
 module Fluent
   module Plugin
     class BareOutput < Base
+      include PluginHelper::Mixin # for metrics
+
       # DO NOT USE THIS plugin for normal output plugin. Use Output instead.
       # This output plugin base class is only for meta-output plugins
       # which cannot be implemented on MultiOutput.
       # E.g,: forest, config-expander
 
+      helpers_internal :metrics
+
       include PluginId
       include PluginLoggerMixin
       include PluginHelper::Mixin
 
-      attr_reader :num_errors, :emit_count, :emit_records
-
       def process(tag, es)
         raise NotImplementedError, "BUG: output plugins MUST implement this method"
       end
 
+      def num_errors
+        @num_errors_metrics.get
+      end
+
+      def emit_count
+        @emit_count_metrics.get
+      end
+
+      def emit_size
+        @emit_size_metrics.get
+      end
+
+      def emit_records
+        @emit_records_metrics.get
+      end
+
       def initialize
         super
         @counter_mutex = Mutex.new
         # TODO: well organized counters
-        @num_errors = 0
-        @emit_count = 0
-        @emit_records = 0
+        @num_errors_metrics = nil
+        @emit_count_metrics = nil
+        @emit_records_metrics = nil
+        @emit_size_metrics = nil
+      end
+
+      def configure(conf)
+        super
+
+        @num_errors_metrics = metrics_create(namespace: "fluentd", subsystem: "bare_output", name: "num_errors", help_text: "Number of count num errors")
+        @emit_count_metrics = metrics_create(namespace: "fluentd", subsystem: "bare_output", name: "emit_records", help_text: "Number of count emits")
+        @emit_records_metrics = metrics_create(namespace: "fluentd", subsystem: "bare_output", name: "emit_records", help_text: "Number of emit records")
+        @emit_size_metrics = metrics_create(namespace: "fluentd", subsystem: "bare_output", name: "emit_size", help_text: "Total size of emit events")
+        @enable_size_metrics = !!system_config.enable_size_metrics
+      end
+
+      def statistics
+        stats = {
+          'num_errors' => @num_errors_metrics.get,
+          'emit_records' => @emit_records_metrics.get,
+          'emit_count' => @emit_count_metrics.get,
+          'emit_size' => @emit_size_metrics.get,
+        }
+
+        { 'bare_output' => stats }
       end
 
       def emit_sync(tag, es)
-        @counter_mutex.synchronize{ @emit_count += 1 }
+        @emit_count_metrics.inc
         begin
           process(tag, es)
-          @counter_mutex.synchronize{ @emit_records += es.size }
+          @emit_records_metrics.add(es.size)
+          @emit_size_metrics.add(es.to_msgpack_stream.bytesize) if @enable_size_metrics
         rescue
-          @counter_mutex.synchronize{ @num_errors += 1 }
+          @num_errors_metrics.inc
          raise
        end
      end
data/lib/fluent/plugin/buffer.rb CHANGED
@@ -16,6 +16,8 @@
 
 require 'fluent/plugin/base'
 require 'fluent/plugin/owned_by_mixin'
+require 'fluent/plugin_id'
+require 'fluent/plugin_helper'
 require 'fluent/unique_id'
 require 'fluent/ext_monitor_require'
 
@@ -24,7 +26,9 @@ module Fluent
     class Buffer < Base
       include OwnedByMixin
       include UniqueId::Mixin
+      include PluginId
       include MonitorMixin
+      include PluginHelper::Mixin # for metrics
 
       class BufferError < StandardError; end
       class BufferOverflowError < BufferError; end
@@ -39,6 +43,8 @@ module Fluent
 
       configured_in :buffer
 
+      helpers_internal :metrics
+
       # TODO: system total buffer limit size in bytes by SystemConfig
 
       config_param :chunk_limit_size, :size, default: DEFAULT_CHUNK_LIMIT_SIZE
@@ -153,8 +159,11 @@ module Fluent
         end
       end
 
+      # for metrics
+      attr_reader :stage_size_metrics, :stage_length_metrics, :queue_size_metrics, :queue_length_metrics
+      attr_reader :available_buffer_space_ratios_metrics, :total_queued_size_metrics
+      attr_reader :newest_timekey_metrics, :oldest_timekey_metrics
       # for tests
-      attr_accessor :stage_size, :queue_size
       attr_reader :stage, :queue, :dequeued, :queued_num
 
      def initialize
@@ -171,12 +180,35 @@ module Fluent
         @queued_num = {} # metadata => int (number of queued chunks)
         @dequeued_num = {} # metadata => int (number of dequeued chunks)
 
-        @stage_size = @queue_size = 0
+        @stage_length_metrics = nil
+        @stage_size_metrics = nil
+        @queue_length_metrics = nil
+        @queue_size_metrics = nil
+        @available_buffer_space_ratios_metrics = nil
+        @total_queued_size_metrics = nil
+        @newest_timekey_metrics = nil
+        @oldest_timekey_metrics = nil
         @timekeys = Hash.new(0)
         @enable_update_timekeys = false
         @mutex = Mutex.new
       end
 
+      def stage_size
+        @stage_size_metrics.get
+      end
+
+      def stage_size=(value)
+        @stage_size_metrics.set(value)
+      end
+
+      def queue_size
+        @queue_size_metrics.get
+      end
+
+      def queue_size=(value)
+        @queue_size_metrics.set(value)
+      end
+
       def persistent?
         false
       end
@@ -187,6 +219,28 @@ module Fluent
         unless @queue_limit_length.nil?
           @total_limit_size = @chunk_limit_size * @queue_limit_length
         end
+        @stage_length_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "stage_length",
+                                               help_text: 'Length of stage buffers', prefer_gauge: true)
+        @stage_length_metrics.set(0)
+        @stage_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "stage_byte_size",
+                                             help_text: 'Total size of stage buffers', prefer_gauge: true)
+        @stage_size_metrics.set(0) # Ensure zero.
+        @queue_length_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "queue_length",
+                                               help_text: 'Length of queue buffers', prefer_gauge: true)
+        @queue_length_metrics.set(0)
+        @queue_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "queue_byte_size",
+                                             help_text: 'Total size of queue buffers', prefer_gauge: true)
+        @queue_size_metrics.set(0) # Ensure zero.
+        @available_buffer_space_ratios_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "available_buffer_space_ratios",
+                                                                help_text: 'Ratio of available space in buffer', prefer_gauge: true)
+        @available_buffer_space_ratios_metrics.set(100) # Default is 100%.
+        @total_queued_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "total_queued_size",
+                                                    help_text: 'Total size of stage and queue buffers', prefer_gauge: true)
+        @total_queued_size_metrics.set(0)
+        @newest_timekey_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "newest_timekey",
+                                                 help_text: 'Newest timekey in buffer', prefer_gauge: true)
+        @oldest_timekey_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "oldest_timekey",
+                                                 help_text: 'Oldest timekey in buffer', prefer_gauge: true)
       end
 
       def enable_update_timekeys
@@ -198,15 +252,15 @@ module Fluent
 
         @stage, @queue = resume
         @stage.each_pair do |metadata, chunk|
-          @stage_size += chunk.bytesize
+          @stage_size_metrics.add(chunk.bytesize)
         end
         @queue.each do |chunk|
           @queued_num[chunk.metadata] ||= 0
           @queued_num[chunk.metadata] += 1
-          @queue_size += chunk.bytesize
+          @queue_size_metrics.add(chunk.bytesize)
         end
         update_timekeys
-        log.debug "buffer started", instance: self.object_id, stage_size: @stage_size, queue_size: @queue_size
+        log.debug "buffer started", instance: self.object_id, stage_size: @stage_size_metrics.get, queue_size: @queue_size_metrics.get
       end
 
       def close
@@ -228,17 +282,19 @@ module Fluent
       def terminate
         super
         @dequeued = @stage = @queue = @queued_num = nil
-        @stage_size = @queue_size = 0
+        @stage_length_metrics = @stage_size_metrics = @queue_length_metrics = @queue_size_metrics = nil
+        @available_buffer_space_ratios_metrics = @total_queued_size_metrics = nil
+        @newest_timekey_metrics = @oldest_timekey_metrics = nil
         @timekeys.clear
       end
 
       def storable?
-        @total_limit_size > @stage_size + @queue_size
+        @total_limit_size > @stage_size_metrics.get + @queue_size_metrics.get
       end
 
       ## TODO: for back pressure feature
       # def used?(ratio)
-      #   @total_limit_size * ratio > @stage_size + @queue_size
+      #   @total_limit_size * ratio > @stage_size_metrics.get + @queue_size_metrics.get
      # end
 
      def resume
@@ -344,7 +400,7 @@ module Fluent
         #
         staged_bytesizes_by_chunk.each do |chunk, bytesize|
           chunk.synchronize do
-            synchronize { @stage_size += bytesize }
+            synchronize { @stage_size_metrics.add(bytesize) }
             log.on_trace { log.trace { "chunk #{chunk.path} size_added: #{bytesize} new_size: #{chunk.bytesize}" } }
           end
         end
@@ -361,7 +417,7 @@ module Fluent
           u.metadata.seq = 0
           synchronize {
             @stage[m] = u.staged!
-            @stage_size += u.bytesize
+            @stage_size_metrics.add(u.bytesize)
           }
         end
       end
@@ -428,8 +484,8 @@ module Fluent
             chunk.enqueued!
           end
           bytesize = chunk.bytesize
-          @stage_size -= bytesize
-          @queue_size += bytesize
+          @stage_size_metrics.sub(bytesize)
+          @queue_size_metrics.add(bytesize)
         end
       end
       nil
@@ -446,7 +502,7 @@ module Fluent
           @queued_num[metadata] = @queued_num.fetch(metadata, 0) + 1
           chunk.enqueued!
         end
-        @queue_size += chunk.bytesize
+        @queue_size_metrics.add(chunk.bytesize)
       end
     end
 
@@ -531,7 +587,7 @@ module Fluent
         begin
           bytesize = chunk.bytesize
           chunk.purge
-          @queue_size -= bytesize
+          @queue_size_metrics.sub(bytesize)
        rescue => e
          log.error "failed to purge buffer chunk", chunk_id: dump_unique_id_hex(chunk_id), error_class: e.class, error: e
          log.error_backtrace
@@ -562,7 +618,7 @@ module Fluent
            log.error_backtrace
          end
        end
-        @queue_size = 0
+        @queue_size_metrics.set(0)
      end
    end
 
@@ -765,23 +821,29 @@ module Fluent
       ]
 
       def statistics
-        stage_size, queue_size = @stage_size, @queue_size
+        stage_size, queue_size = @stage_size_metrics.get, @queue_size_metrics.get
         buffer_space = 1.0 - ((stage_size + queue_size * 1.0) / @total_limit_size)
+        @stage_length_metrics.set(@stage.size)
+        @queue_length_metrics.set(@queue.size)
+        @available_buffer_space_ratios_metrics.set(buffer_space * 100)
+        @total_queued_size_metrics.set(stage_size + queue_size)
         stats = {
-          'stage_length' => @stage.size,
+          'stage_length' => @stage_length_metrics.get,
           'stage_byte_size' => stage_size,
-          'queue_length' => @queue.size,
+          'queue_length' => @queue_length_metrics.get,
           'queue_byte_size' => queue_size,
-          'available_buffer_space_ratios' => (buffer_space * 100).round(1),
-          'total_queued_size' => stage_size + queue_size,
+          'available_buffer_space_ratios' => @available_buffer_space_ratios_metrics.get.round(1),
+          'total_queued_size' => @total_queued_size_metrics.get,
         }
 
         tkeys = timekeys
         if (m = tkeys.min)
-          stats['oldest_timekey'] = m
+          @oldest_timekey_metrics.set(m)
+          stats['oldest_timekey'] = @oldest_timekey_metrics.get
         end
         if (m = tkeys.max)
-          stats['newest_timekey'] = m
+          @newest_timekey_metrics.set(m)
+          stats['newest_timekey'] = @newest_timekey_metrics.get
        end
 
        { 'buffer' => stats }
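For reference, the hash built by `Buffer#statistics` above keeps the same keys as v1.13.3; the values are now read back from the gauge-style metrics objects instead of plain instance variables. An illustrative result with made-up numbers:

```ruby
# Illustrative only: the shape of the statistics hash, values invented.
buffer_stats = {
  'buffer' => {
    'stage_length' => 2,                      # @stage_length_metrics
    'stage_byte_size' => 12_345,              # @stage_size_metrics
    'queue_length' => 1,                      # @queue_length_metrics
    'queue_byte_size' => 6_789,               # @queue_size_metrics
    'available_buffer_space_ratios' => 99.8,  # rounded to one decimal place
    'total_queued_size' => 19_134,            # stage_byte_size + queue_byte_size
    'oldest_timekey' => 1_630_281_600,        # only present when timekeys exist
    'newest_timekey' => 1_630_285_200,
  }
}
```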