fluentd 1.13.1 → 1.14.0

Potentially problematic release: this version of fluentd might be problematic.

Files changed (72)
  1. checksums.yaml +4 -4
  2. data/.github/ISSUE_TEMPLATE/bug_report.yaml +69 -0
  3. data/.github/ISSUE_TEMPLATE/feature_request.yaml +38 -0
  4. data/.github/workflows/windows-test.yaml +3 -3
  5. data/CHANGELOG.md +105 -0
  6. data/README.md +2 -2
  7. data/example/v0_12_filter.conf +2 -2
  8. data/fluentd.gemspec +1 -1
  9. data/lib/fluent/command/fluentd.rb +8 -0
  10. data/lib/fluent/command/plugin_generator.rb +15 -5
  11. data/lib/fluent/compat/output.rb +9 -6
  12. data/lib/fluent/config/types.rb +15 -0
  13. data/lib/fluent/config/v1_parser.rb +3 -2
  14. data/lib/fluent/config.rb +1 -1
  15. data/lib/fluent/env.rb +2 -1
  16. data/lib/fluent/event_router.rb +28 -1
  17. data/lib/fluent/oj_options.rb +62 -0
  18. data/lib/fluent/plugin/bare_output.rb +49 -8
  19. data/lib/fluent/plugin/buffer.rb +84 -22
  20. data/lib/fluent/plugin/file_wrapper.rb +22 -0
  21. data/lib/fluent/plugin/filter.rb +35 -1
  22. data/lib/fluent/plugin/formatter.rb +1 -0
  23. data/lib/fluent/plugin/formatter_json.rb +9 -7
  24. data/lib/fluent/plugin/in_http.rb +21 -2
  25. data/lib/fluent/plugin/in_monitor_agent.rb +4 -2
  26. data/lib/fluent/plugin/in_syslog.rb +13 -1
  27. data/lib/fluent/plugin/in_tail/position_file.rb +20 -18
  28. data/lib/fluent/plugin/in_tail.rb +45 -3
  29. data/lib/fluent/plugin/input.rb +39 -1
  30. data/lib/fluent/plugin/metrics.rb +119 -0
  31. data/lib/fluent/plugin/metrics_local.rb +96 -0
  32. data/lib/fluent/plugin/multi_output.rb +43 -6
  33. data/lib/fluent/plugin/output.rb +74 -33
  34. data/lib/fluent/plugin/parser_json.rb +2 -3
  35. data/lib/fluent/plugin.rb +10 -1
  36. data/lib/fluent/plugin_helper/event_emitter.rb +8 -1
  37. data/lib/fluent/plugin_helper/metrics.rb +129 -0
  38. data/lib/fluent/plugin_helper/server.rb +4 -2
  39. data/lib/fluent/plugin_helper.rb +1 -0
  40. data/lib/fluent/root_agent.rb +6 -0
  41. data/lib/fluent/supervisor.rb +2 -0
  42. data/lib/fluent/system_config.rb +9 -1
  43. data/lib/fluent/test/driver/storage.rb +30 -0
  44. data/lib/fluent/version.rb +1 -1
  45. data/templates/new_gem/lib/fluent/plugin/storage.rb.erb +40 -0
  46. data/templates/new_gem/test/plugin/test_storage.rb.erb +18 -0
  47. data/test/command/test_plugin_generator.rb +2 -1
  48. data/test/config/test_system_config.rb +6 -0
  49. data/test/config/test_types.rb +7 -0
  50. data/test/plugin/in_tail/test_position_file.rb +48 -8
  51. data/test/plugin/test_bare_output.rb +13 -0
  52. data/test/plugin/test_buffer.rb +8 -2
  53. data/test/plugin/test_file_wrapper.rb +11 -0
  54. data/test/plugin/test_filter.rb +11 -0
  55. data/test/plugin/test_in_http.rb +40 -0
  56. data/test/plugin/test_in_monitor_agent.rb +214 -8
  57. data/test/plugin/test_in_syslog.rb +35 -0
  58. data/test/plugin/test_in_tail.rb +138 -26
  59. data/test/plugin/test_input.rb +11 -0
  60. data/test/plugin/test_metrics.rb +294 -0
  61. data/test/plugin/test_metrics_local.rb +96 -0
  62. data/test/plugin/test_multi_output.rb +25 -1
  63. data/test/plugin/test_output.rb +16 -0
  64. data/test/plugin_helper/test_event_emitter.rb +29 -0
  65. data/test/plugin_helper/test_metrics.rb +137 -0
  66. data/test/test_event_time.rb +2 -2
  67. data/test/test_oj_options.rb +55 -0
  68. data/test/test_plugin_classes.rb +102 -0
  69. data/test/test_root_agent.rb +30 -1
  70. metadata +21 -6
  71. data/.github/ISSUE_TEMPLATE/bug_report.md +0 -40
  72. data/.github/ISSUE_TEMPLATE/feature_request.md +0 -23
data/lib/fluent/config.rb CHANGED
@@ -62,7 +62,7 @@ module Fluent
  Parser.parse(str, fname, basepath)
  when :ruby
  require 'fluent/config/dsl'
- $log.warn("Ruby DSL configuration format is deprecated. Please use original configuration format. https://docs.fluentd.org/configuration/config-file")
+ $log.warn("Ruby DSL configuration format is deprecated. Please use original configuration format. https://docs.fluentd.org/configuration/config-file") if $log
  Config::DSL::Parser.parse(str, File.join(basepath, fname))
  else
  raise "[BUG] unknown configuration parser specification:'#{parser}'"
data/lib/fluent/env.rb CHANGED
@@ -15,13 +15,14 @@
  #

  require 'serverengine/utils'
+ require 'fluent/oj_options'

  module Fluent
  DEFAULT_CONFIG_PATH = ENV['FLUENT_CONF'] || '/etc/fluent/fluent.conf'
  DEFAULT_PLUGIN_DIR = ENV['FLUENT_PLUGIN'] || '/etc/fluent/plugin'
  DEFAULT_SOCKET_PATH = ENV['FLUENT_SOCKET'] || '/var/run/fluent/fluent.sock'
  DEFAULT_BACKUP_DIR = ENV['FLUENT_BACKUP_DIR'] || '/tmp/fluent'
- DEFAULT_OJ_OPTIONS = {bigdecimal_load: :float, mode: :compat, use_to_json: true}
+ DEFAULT_OJ_OPTIONS = Fluent::OjOptions.load_env
  DEFAULT_DIR_PERMISSION = 0755
  DEFAULT_FILE_PERMISSION = 0644

data/lib/fluent/event_router.rb CHANGED
@@ -47,6 +47,8 @@ module Fluent
  @match_cache = MatchCache.new
  @default_collector = default_collector
  @emit_error_handler = emit_error_handler
+ @metric_callbacks = {}
+ @caller_plugin_id = nil
  end

  attr_accessor :default_collector
@@ -83,6 +85,22 @@ module Fluent
  @match_rules << Rule.new(pattern, collector)
  end

+ def add_metric_callbacks(caller_plugin_id, callback)
+ @metric_callbacks[caller_plugin_id] = callback
+ end
+
+ def caller_plugin_id=(caller_plugin_id)
+ @caller_plugin_id = caller_plugin_id
+ end
+
+ def find_callback
+ if @caller_plugin_id
+ @metric_callbacks[@caller_plugin_id]
+ else
+ nil
+ end
+ end
+
  def emit(tag, time, record)
  unless record.nil?
  emit_stream(tag, OneEventStream.new(time, record))
@@ -95,6 +113,9 @@ module Fluent

  def emit_stream(tag, es)
  match(tag).emit_events(tag, es)
+ if callback = find_callback
+ callback.call(es)
+ end
  rescue => e
  @emit_error_handler.handle_emits_error(tag, es, e)
  end
@@ -175,7 +196,11 @@ module Fluent
  if optimizable?
  optimized_filter_stream(tag, es)
  else
- @filters.reduce(es) { |acc, filter| filter.filter_stream(tag, acc) }
+ @filters.reduce(es) { |acc, filter|
+ filtered_es = filter.filter_stream(tag, acc)
+ filter.measure_metrics(filtered_es)
+ filtered_es
+ }
  end
  end

@@ -193,6 +218,7 @@ module Fluent
  begin
  filtered_time, filtered_record = filter.filter_with_time(tag, filtered_time, filtered_record)
  throw :break_loop unless filtered_record && filtered_time
+ filter.measure_metrics(OneEventStream.new(time, record))
  rescue => e
  filter.router.emit_error_event(tag, filtered_time, filtered_record, e)
  end
@@ -200,6 +226,7 @@ module Fluent
  begin
  filtered_record = filter.filter(tag, filtered_time, filtered_record)
  throw :break_loop unless filtered_record
+ filter.measure_metrics(OneEventStream.new(time, record))
  rescue => e
  filter.router.emit_error_event(tag, filtered_time, filtered_record, e)
  end
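
The EventRouter change above adds a per-plugin metric hook: a callback is registered under a plugin id, the router records which plugin is currently emitting, and emit_stream invokes the matching callback with each routed event stream. A minimal sketch of that flow, assuming router is an existing Fluent::EventRouter; the id and the lambda are purely illustrative, and in fluentd itself the event_emitter/metrics plugin helpers perform these steps:

# Hypothetical wiring, for illustration only.
callback = ->(es) { puts "routed #{es.size} record(s)" }

router.add_metric_callbacks('my_plugin_id', callback)  # register under a plugin id
router.caller_plugin_id = 'my_plugin_id'               # mark which plugin is emitting

# emit_stream routes the events and then calls the registered callback:
router.emit('app.log', Fluent::EventTime.now, { 'message' => 'hello' })
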
data/lib/fluent/oj_options.rb ADDED
@@ -0,0 +1,62 @@
+ require 'fluent/config/types'
+
+ module Fluent
+ class OjOptions
+ OPTIONS = {
+ 'bigdecimal_load': :symbol,
+ 'max_nesting': :integer,
+ 'mode': :symbol,
+ 'use_to_json': :bool
+ }
+
+ ALLOWED_VALUES = {
+ 'bigdecimal_load': %i[bigdecimal float auto],
+ 'mode': %i[strict null compat json rails object custom]
+ }
+
+ DEFAULTS = {
+ 'bigdecimal_load': :float,
+ 'mode': :compat,
+ 'use_to_json': true
+ }
+
+ @@available = false
+
+ def self.available?
+ @@available
+ end
+
+ def self.load_env
+ options = self.get_options
+ begin
+ require 'oj'
+ Oj.default_options = options
+ @@available = true
+ rescue LoadError
+ @@available = false
+ end
+ options
+ end
+
+ private
+
+ def self.get_options
+ options = {}
+ DEFAULTS.each { |key, value| options[key] = value }
+
+ OPTIONS.each do |key, type|
+ env_value = ENV["FLUENT_OJ_OPTION_#{key.upcase}"]
+ next if env_value.nil?
+
+ cast_value = Fluent::Config.reformatted_value(OPTIONS[key], env_value, { strict: true })
+ next if cast_value.nil?
+
+ next if ALLOWED_VALUES[key] && !ALLOWED_VALUES[key].include?(cast_value)
+
+ options[key.to_sym] = cast_value
+ end
+
+ options
+ end
+ end
+ end
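
The new Fluent::OjOptions class centralizes Oj configuration: defaults are merged with any FLUENT_OJ_OPTION_* environment variables, values are cast according to OPTIONS, rejected if they fall outside ALLOWED_VALUES, and the result is applied to Oj when the gem can be loaded. A rough usage sketch; the concrete values below are examples, not defaults:

# Illustrative only: override Oj options via environment variables before fluentd boots.
ENV['FLUENT_OJ_OPTION_MODE'] = 'strict'      # cast to :strict, accepted by ALLOWED_VALUES
ENV['FLUENT_OJ_OPTION_MAX_NESTING'] = '10'   # cast to the integer 10

require 'fluent/oj_options'
options = Fluent::OjOptions.load_env
# => roughly { bigdecimal_load: :float, mode: :strict, use_to_json: true, max_nesting: 10 }
Fluent::OjOptions.available?                 # true only if the oj gem could be required
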
data/lib/fluent/plugin/bare_output.rb CHANGED
@@ -23,37 +23,78 @@ require 'fluent/plugin_helper'
  module Fluent
  module Plugin
  class BareOutput < Base
+ include PluginHelper::Mixin # for metrics
+
  # DO NOT USE THIS plugin for normal output plugin. Use Output instead.
  # This output plugin base class is only for meta-output plugins
  # which cannot be implemented on MultiOutput.
  # E.g,: forest, config-expander

+ helpers_internal :metrics
+
  include PluginId
  include PluginLoggerMixin
  include PluginHelper::Mixin

- attr_reader :num_errors, :emit_count, :emit_records
-
  def process(tag, es)
  raise NotImplementedError, "BUG: output plugins MUST implement this method"
  end

+ def num_errors
+ @num_errors_metrics.get
+ end
+
+ def emit_count
+ @emit_count_metrics.get
+ end
+
+ def emit_size
+ @emit_size_metrics.get
+ end
+
+ def emit_records
+ @emit_records_metrics.get
+ end
+
  def initialize
  super
  @counter_mutex = Mutex.new
  # TODO: well organized counters
- @num_errors = 0
- @emit_count = 0
- @emit_records = 0
+ @num_errors_metrics = nil
+ @emit_count_metrics = nil
+ @emit_records_metrics = nil
+ @emit_size_metrics = nil
+ end
+
+ def configure(conf)
+ super
+
+ @num_errors_metrics = metrics_create(namespace: "fluentd", subsystem: "bare_output", name: "num_errors", help_text: "Number of count num errors")
+ @emit_count_metrics = metrics_create(namespace: "fluentd", subsystem: "bare_output", name: "emit_records", help_text: "Number of count emits")
+ @emit_records_metrics = metrics_create(namespace: "fluentd", subsystem: "bare_output", name: "emit_records", help_text: "Number of emit records")
+ @emit_size_metrics = metrics_create(namespace: "fluentd", subsystem: "bare_output", name: "emit_size", help_text: "Total size of emit events")
+ @enable_size_metrics = !!system_config.enable_size_metrics
+ end
+
+ def statistics
+ stats = {
+ 'num_errors' => @num_errors_metrics.get,
+ 'emit_records' => @emit_records_metrics.get,
+ 'emit_count' => @emit_count_metrics.get,
+ 'emit_size' => @emit_size_metrics.get,
+ }
+
+ { 'bare_output' => stats }
  end

  def emit_sync(tag, es)
- @counter_mutex.synchronize{ @emit_count += 1 }
+ @emit_count_metrics.inc
  begin
  process(tag, es)
- @counter_mutex.synchronize{ @emit_records += es.size }
+ @emit_records_metrics.add(es.size)
+ @emit_size_metrics.add(es.to_msgpack_stream.bytesize) if @enable_size_metrics
  rescue
- @counter_mutex.synchronize{ @num_errors += 1 }
+ @num_errors_metrics.inc
  raise
  end
  end
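
With this change BareOutput's counters become metrics objects created in #configure instead of plain integers guarded by @counter_mutex; num_errors, emit_count, emit_records (plus the new emit_size) are now reader methods over those metrics, and #statistics exposes them under a 'bare_output' key. A sketch of what a hypothetical subclass observes; the class name below is illustrative:

require 'fluent/plugin/bare_output'

# Hypothetical meta-output subclass, only to illustrate the metrics-backed counters.
class MyBareOutput < Fluent::Plugin::BareOutput
  def process(tag, es)
    # hand the event stream off to another output...
  end
end

# After the plugin is configured (metrics_create runs in #configure) and events
# have passed through emit_sync:
#   out.emit_count    # reads @emit_count_metrics.get
#   out.emit_records  # reads @emit_records_metrics.get
#   out.statistics    # => { 'bare_output' => { 'num_errors' => ..., 'emit_records' => ..., 'emit_count' => ..., 'emit_size' => ... } }
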
data/lib/fluent/plugin/buffer.rb CHANGED
@@ -16,6 +16,8 @@

  require 'fluent/plugin/base'
  require 'fluent/plugin/owned_by_mixin'
+ require 'fluent/plugin_id'
+ require 'fluent/plugin_helper'
  require 'fluent/unique_id'
  require 'fluent/ext_monitor_require'

@@ -24,7 +26,9 @@ module Fluent
  class Buffer < Base
  include OwnedByMixin
  include UniqueId::Mixin
+ include PluginId
  include MonitorMixin
+ include PluginHelper::Mixin # for metrics

  class BufferError < StandardError; end
  class BufferOverflowError < BufferError; end
@@ -39,6 +43,8 @@ module Fluent

  configured_in :buffer

+ helpers_internal :metrics
+
  # TODO: system total buffer limit size in bytes by SystemConfig

  config_param :chunk_limit_size, :size, default: DEFAULT_CHUNK_LIMIT_SIZE
@@ -153,8 +159,11 @@ module Fluent
  end
  end

+ # for metrics
+ attr_reader :stage_size_metrics, :stage_length_metrics, :queue_size_metrics, :queue_length_metrics
+ attr_reader :available_buffer_space_ratios_metrics, :total_queued_size_metrics
+ attr_reader :newest_timekey_metrics, :oldest_timekey_metrics
  # for tests
- attr_accessor :stage_size, :queue_size
  attr_reader :stage, :queue, :dequeued, :queued_num

  def initialize
@@ -171,12 +180,35 @@ module Fluent
  @queued_num = {} # metadata => int (number of queued chunks)
  @dequeued_num = {} # metadata => int (number of dequeued chunks)

- @stage_size = @queue_size = 0
+ @stage_length_metrics = nil
+ @stage_size_metrics = nil
+ @queue_length_metrics = nil
+ @queue_size_metrics = nil
+ @available_buffer_space_ratios_metrics = nil
+ @total_queued_size_metrics = nil
+ @newest_timekey_metrics = nil
+ @oldest_timekey_metrics = nil
  @timekeys = Hash.new(0)
  @enable_update_timekeys = false
  @mutex = Mutex.new
  end

+ def stage_size
+ @stage_size_metrics.get
+ end
+
+ def stage_size=(value)
+ @stage_size_metrics.set(value)
+ end
+
+ def queue_size
+ @queue_size_metrics.get
+ end
+
+ def queue_size=(value)
+ @queue_size_metrics.set(value)
+ end
+
  def persistent?
  false
  end
@@ -187,6 +219,28 @@ module Fluent
  unless @queue_limit_length.nil?
  @total_limit_size = @chunk_limit_size * @queue_limit_length
  end
+ @stage_length_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "stage_length",
+ help_text: 'Length of stage buffers', prefer_gauge: true)
+ @stage_length_metrics.set(0)
+ @stage_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "stage_byte_size",
+ help_text: 'Total size of stage buffers', prefer_gauge: true)
+ @stage_size_metrics.set(0) # Ensure zero.
+ @queue_length_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "queue_length",
+ help_text: 'Length of queue buffers', prefer_gauge: true)
+ @queue_length_metrics.set(0)
+ @queue_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "queue_byte_size",
+ help_text: 'Total size of queue buffers', prefer_gauge: true)
+ @queue_size_metrics.set(0) # Ensure zero.
+ @available_buffer_space_ratios_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "available_buffer_space_ratios",
+ help_text: 'Ratio of available space in buffer', prefer_gauge: true)
+ @available_buffer_space_ratios_metrics.set(100) # Default is 100%.
+ @total_queued_size_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "total_queued_size",
+ help_text: 'Total size of stage and queue buffers', prefer_gauge: true)
+ @total_queued_size_metrics.set(0)
+ @newest_timekey_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "newest_timekey",
+ help_text: 'Newest timekey in buffer', prefer_gauge: true)
+ @oldest_timekey_metrics = metrics_create(namespace: "fluentd", subsystem: "buffer", name: "oldest_timekey",
+ help_text: 'Oldest timekey in buffer', prefer_gauge: true)
  end

  def enable_update_timekeys
@@ -198,15 +252,15 @@ module Fluent

  @stage, @queue = resume
  @stage.each_pair do |metadata, chunk|
- @stage_size += chunk.bytesize
+ @stage_size_metrics.add(chunk.bytesize)
  end
  @queue.each do |chunk|
  @queued_num[chunk.metadata] ||= 0
  @queued_num[chunk.metadata] += 1
- @queue_size += chunk.bytesize
+ @queue_size_metrics.add(chunk.bytesize)
  end
  update_timekeys
- log.debug "buffer started", instance: self.object_id, stage_size: @stage_size, queue_size: @queue_size
+ log.debug "buffer started", instance: self.object_id, stage_size: @stage_size_metrics.get, queue_size: @queue_size_metrics.get
  end

  def close
@@ -228,17 +282,19 @@ module Fluent
  def terminate
  super
  @dequeued = @stage = @queue = @queued_num = nil
- @stage_size = @queue_size = 0
+ @stage_length_metrics = @stage_size_metrics = @queue_length_metrics = @queue_size_metrics = nil
+ @available_buffer_space_ratios_metrics = @total_queued_size_metrics = nil
+ @newest_timekey_metrics = @oldest_timekey_metrics = nil
  @timekeys.clear
  end

  def storable?
- @total_limit_size > @stage_size + @queue_size
+ @total_limit_size > @stage_size_metrics.get + @queue_size_metrics.get
  end

  ## TODO: for back pressure feature
  # def used?(ratio)
- # @total_limit_size * ratio > @stage_size + @queue_size
+ # @total_limit_size * ratio > @stage_size_metrics.get + @queue_size_metrics.get
  # end

  def resume
@@ -344,7 +400,7 @@ module Fluent
  #
  staged_bytesizes_by_chunk.each do |chunk, bytesize|
  chunk.synchronize do
- synchronize { @stage_size += bytesize }
+ synchronize { @stage_size_metrics.add(bytesize) }
  log.on_trace { log.trace { "chunk #{chunk.path} size_added: #{bytesize} new_size: #{chunk.bytesize}" } }
  end
  end
@@ -361,7 +417,7 @@ module Fluent
  u.metadata.seq = 0
  synchronize {
  @stage[m] = u.staged!
- @stage_size += u.bytesize
+ @stage_size_metrics.add(u.bytesize)
  }
  end
  end
@@ -428,8 +484,8 @@ module Fluent
  chunk.enqueued!
  end
  bytesize = chunk.bytesize
- @stage_size -= bytesize
- @queue_size += bytesize
+ @stage_size_metrics.sub(bytesize)
+ @queue_size_metrics.add(bytesize)
  end
  end
  nil
@@ -446,7 +502,7 @@ module Fluent
  @queued_num[metadata] = @queued_num.fetch(metadata, 0) + 1
  chunk.enqueued!
  end
- @queue_size += chunk.bytesize
+ @queue_size_metrics.add(chunk.bytesize)
  end
  end

@@ -531,7 +587,7 @@ module Fluent
  begin
  bytesize = chunk.bytesize
  chunk.purge
- @queue_size -= bytesize
+ @queue_size_metrics.sub(bytesize)
  rescue => e
  log.error "failed to purge buffer chunk", chunk_id: dump_unique_id_hex(chunk_id), error_class: e.class, error: e
  log.error_backtrace
@@ -562,7 +618,7 @@ module Fluent
  log.error_backtrace
  end
  end
- @queue_size = 0
+ @queue_size_metrics.set(0)
  end
  end

@@ -765,23 +821,29 @@ module Fluent
  ]

  def statistics
- stage_size, queue_size = @stage_size, @queue_size
+ stage_size, queue_size = @stage_size_metrics.get, @queue_size_metrics.get
  buffer_space = 1.0 - ((stage_size + queue_size * 1.0) / @total_limit_size)
+ @stage_length_metrics.set(@stage.size)
+ @queue_length_metrics.set(@queue.size)
+ @available_buffer_space_ratios_metrics.set(buffer_space * 100)
+ @total_queued_size_metrics.set(stage_size + queue_size)
  stats = {
- 'stage_length' => @stage.size,
+ 'stage_length' => @stage_length_metrics.get,
  'stage_byte_size' => stage_size,
- 'queue_length' => @queue.size,
+ 'queue_length' => @queue_length_metrics.get,
  'queue_byte_size' => queue_size,
- 'available_buffer_space_ratios' => (buffer_space * 100).round(1),
- 'total_queued_size' => stage_size + queue_size,
+ 'available_buffer_space_ratios' => @available_buffer_space_ratios_metrics.get.round(1),
+ 'total_queued_size' => @total_queued_size_metrics.get,
  }

  tkeys = timekeys
  if (m = tkeys.min)
- stats['oldest_timekey'] = m
+ @oldest_timekey_metrics.set(m)
+ stats['oldest_timekey'] = @oldest_timekey_metrics.get
  end
  if (m = tkeys.max)
- stats['newest_timekey'] = m
+ @newest_timekey_metrics.set(m)
+ stats['newest_timekey'] = @newest_timekey_metrics.get
  end

  { 'buffer' => stats }
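
Buffer keeps its public stage_size/queue_size accessors, now delegating to the metrics objects, and #statistics returns the same keys as before while also refreshing the new gauges. A rough sketch of the resulting shape, assuming buffer is a started buffer instance; the numbers are placeholders, not real output:

# Illustrative only; values are placeholders.
buffer.stage_size        # delegates to @stage_size_metrics.get
buffer.stage_size = 0    # setter kept for tests; delegates to @stage_size_metrics.set(0)

buffer.statistics
# => { 'buffer' => {
#        'stage_length' => 1,
#        'stage_byte_size' => 2048,
#        'queue_length' => 0,
#        'queue_byte_size' => 0,
#        'available_buffer_space_ratios' => 100.0,
#        'total_queued_size' => 2048,
#        # plus 'oldest_timekey' / 'newest_timekey' when timekeys are present
#      } }
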