prometheus_exporter 2.1.1 → 2.2.0

Files changed (51)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci.yml +4 -1
  3. data/.streerc +2 -0
  4. data/CHANGELOG +5 -0
  5. data/README.md +9 -7
  6. data/bench/bench.rb +12 -11
  7. data/examples/custom_collector.rb +1 -3
  8. data/lib/prometheus_exporter/client.rb +16 -32
  9. data/lib/prometheus_exporter/instrumentation/active_record.rb +20 -8
  10. data/lib/prometheus_exporter/instrumentation/delayed_job.rb +20 -11
  11. data/lib/prometheus_exporter/instrumentation/good_job.rb +2 -4
  12. data/lib/prometheus_exporter/instrumentation/hutch.rb +1 -1
  13. data/lib/prometheus_exporter/instrumentation/method_profiler.rb +12 -12
  14. data/lib/prometheus_exporter/instrumentation/periodic_stats.rb +13 -21
  15. data/lib/prometheus_exporter/instrumentation/process.rb +12 -6
  16. data/lib/prometheus_exporter/instrumentation/puma.rb +1 -1
  17. data/lib/prometheus_exporter/instrumentation/resque.rb +1 -3
  18. data/lib/prometheus_exporter/instrumentation/shoryuken.rb +6 -7
  19. data/lib/prometheus_exporter/instrumentation/sidekiq.rb +4 -6
  20. data/lib/prometheus_exporter/instrumentation/sidekiq_process.rb +12 -19
  21. data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb +15 -18
  22. data/lib/prometheus_exporter/instrumentation/sidekiq_stats.rb +10 -15
  23. data/lib/prometheus_exporter/instrumentation/unicorn.rb +2 -2
  24. data/lib/prometheus_exporter/metric/base.rb +8 -7
  25. data/lib/prometheus_exporter/metric/counter.rb +1 -3
  26. data/lib/prometheus_exporter/metric/gauge.rb +2 -6
  27. data/lib/prometheus_exporter/metric/histogram.rb +0 -2
  28. data/lib/prometheus_exporter/metric/summary.rb +5 -14
  29. data/lib/prometheus_exporter/middleware.rb +40 -32
  30. data/lib/prometheus_exporter/server/active_record_collector.rb +11 -6
  31. data/lib/prometheus_exporter/server/collector.rb +12 -16
  32. data/lib/prometheus_exporter/server/collector_base.rb +0 -2
  33. data/lib/prometheus_exporter/server/delayed_job_collector.rb +65 -28
  34. data/lib/prometheus_exporter/server/good_job_collector.rb +1 -1
  35. data/lib/prometheus_exporter/server/hutch_collector.rb +19 -11
  36. data/lib/prometheus_exporter/server/metrics_container.rb +4 -4
  37. data/lib/prometheus_exporter/server/process_collector.rb +7 -5
  38. data/lib/prometheus_exporter/server/puma_collector.rb +4 -10
  39. data/lib/prometheus_exporter/server/resque_collector.rb +1 -1
  40. data/lib/prometheus_exporter/server/runner.rb +34 -13
  41. data/lib/prometheus_exporter/server/shoryuken_collector.rb +22 -17
  42. data/lib/prometheus_exporter/server/sidekiq_collector.rb +22 -14
  43. data/lib/prometheus_exporter/server/sidekiq_process_collector.rb +9 -5
  44. data/lib/prometheus_exporter/server/sidekiq_queue_collector.rb +7 -6
  45. data/lib/prometheus_exporter/server/sidekiq_stats_collector.rb +12 -11
  46. data/lib/prometheus_exporter/server/unicorn_collector.rb +4 -4
  47. data/lib/prometheus_exporter/server/web_collector.rb +39 -22
  48. data/lib/prometheus_exporter/server/web_server.rb +10 -20
  49. data/lib/prometheus_exporter/version.rb +1 -1
  50. data/prometheus_exporter.gemspec +16 -18
  51. metadata +31 -2

data/lib/prometheus_exporter/instrumentation/sidekiq_process.rb +12 -19

@@ -6,9 +6,7 @@ module PrometheusExporter::Instrumentation
       client ||= PrometheusExporter::Client.default
       sidekiq_process_collector = new
 
-      worker_loop do
-        client.send_json(sidekiq_process_collector.collect)
-      end
+      worker_loop { client.send_json(sidekiq_process_collector.collect) }
 
       super
     end
@@ -19,10 +17,7 @@ module PrometheusExporter::Instrumentation
     end
 
     def collect
-      {
-        type: 'sidekiq_process',
-        process: collect_stats
-      }
+      { type: "sidekiq_process", process: collect_stats }
     end
 
     def collect_stats
@@ -30,23 +25,21 @@ module PrometheusExporter::Instrumentation
       return {} unless process
 
       {
-        busy: process['busy'],
-        concurrency: process['concurrency'],
+        busy: process["busy"],
+        concurrency: process["concurrency"],
         labels: {
-          labels: process['labels'].sort.join(','),
-          queues: process['queues'].sort.join(','),
-          quiet: process['quiet'],
-          tag: process['tag'],
-          hostname: process['hostname'],
-          identity: process['identity'],
-        }
+          labels: process["labels"].sort.join(","),
+          queues: process["queues"].sort.join(","),
+          quiet: process["quiet"],
+          tag: process["tag"],
+          hostname: process["hostname"],
+          identity: process["identity"],
+        },
       }
     end
 
     def current_process
-      ::Sidekiq::ProcessSet.new.find do |sp|
-        sp['hostname'] == @hostname && sp['pid'] == @pid
-      end
+      ::Sidekiq::ProcessSet.new.find { |sp| sp["hostname"] == @hostname && sp["pid"] == @pid }
     end
   end
 end

data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb +15 -18

@@ -6,9 +6,7 @@ module PrometheusExporter::Instrumentation
       client ||= PrometheusExporter::Client.default
       sidekiq_queue_collector = new(all_queues: all_queues)
 
-      worker_loop do
-        client.send_json(sidekiq_queue_collector.collect)
-      end
+      worker_loop { client.send_json(sidekiq_queue_collector.collect) }
 
       super
     end
@@ -20,10 +18,7 @@ module PrometheusExporter::Instrumentation
     end
 
     def collect
-      {
-        type: 'sidekiq_queue',
-        queues: collect_queue_stats
-      }
+      { type: "sidekiq_queue", queues: collect_queue_stats }
     end
 
     def collect_queue_stats
@@ -34,13 +29,17 @@ module PrometheusExporter::Instrumentation
         sidekiq_queues.select! { |sidekiq_queue| queues.include?(sidekiq_queue.name) }
       end
 
-      sidekiq_queues.map do |queue|
-        {
-          backlog: queue.size,
-          latency_seconds: queue.latency.to_i,
-          labels: { queue: queue.name }
-        }
-      end.compact
+      sidekiq_queues
+        .map do |queue|
+          {
+            backlog: queue.size,
+            latency_seconds: queue.latency.to_i,
+            labels: {
+              queue: queue.name,
+            },
+          }
+        end
+        .compact
     end
 
     private
@@ -48,11 +47,9 @@ module PrometheusExporter::Instrumentation
     def collect_current_process_queues
       ps = ::Sidekiq::ProcessSet.new
 
-      process = ps.find do |sp|
-        sp['hostname'] == @hostname && sp['pid'] == @pid
-      end
+      process = ps.find { |sp| sp["hostname"] == @hostname && sp["pid"] == @pid }
 
-      process.nil? ? [] : process['queues']
+      process.nil? ? [] : process["queues"]
     end
   end
 end

data/lib/prometheus_exporter/instrumentation/sidekiq_stats.rb +10 -15

@@ -6,31 +6,26 @@ module PrometheusExporter::Instrumentation
       client ||= PrometheusExporter::Client.default
       sidekiq_stats_collector = new
 
-      worker_loop do
-        client.send_json(sidekiq_stats_collector.collect)
-      end
+      worker_loop { client.send_json(sidekiq_stats_collector.collect) }
 
       super
     end
 
     def collect
-      {
-        type: 'sidekiq_stats',
-        stats: collect_stats
-      }
+      { type: "sidekiq_stats", stats: collect_stats }
     end
 
     def collect_stats
       stats = ::Sidekiq::Stats.new
       {
-        'dead_size' => stats.dead_size,
-        'enqueued' => stats.enqueued,
-        'failed' => stats.failed,
-        'processed' => stats.processed,
-        'processes_size' => stats.processes_size,
-        'retry_size' => stats.retry_size,
-        'scheduled_size' => stats.scheduled_size,
-        'workers_size' => stats.workers_size,
+        "dead_size" => stats.dead_size,
+        "enqueued" => stats.enqueued,
+        "failed" => stats.failed,
+        "processed" => stats.processed,
+        "processes_size" => stats.processes_size,
+        "retry_size" => stats.retry_size,
+        "scheduled_size" => stats.scheduled_size,
+        "workers_size" => stats.workers_size,
       }
     end
   end
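
The three Sidekiq instrumentation files above change only in formatting (single-line worker_loop blocks, double-quoted strings); their collect payloads and self.start entry points behave the same. For context, a sketch of how these collectors are conventionally started inside a Sidekiq server process, following the pattern from the project README (the initializer path is illustrative):

# config/initializers/sidekiq.rb (conventional setup, not part of this diff)
Sidekiq.configure_server do |config|
  config.on :startup do
    require "prometheus_exporter/instrumentation"
    # each .start call runs the worker_loop shown above on a timer,
    # sending the collected JSON to the exporter process
    PrometheusExporter::Instrumentation::SidekiqProcess.start
    PrometheusExporter::Instrumentation::SidekiqQueue.start
    PrometheusExporter::Instrumentation::SidekiqStats.start
  end
end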

data/lib/prometheus_exporter/instrumentation/unicorn.rb +2 -2

@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 begin
-  require 'raindrops'
+  require "raindrops"
 rescue LoadError
   # No raindrops available, dont do anything
 end
@@ -29,7 +29,7 @@ module PrometheusExporter::Instrumentation
 
     def collect
       metric = {}
-      metric[:type] = 'unicorn'
+      metric[:type] = "unicorn"
       collect_unicorn_stats(metric)
       metric
     end

data/lib/prometheus_exporter/metric/base.rb +8 -7

@@ -2,7 +2,6 @@
 
 module PrometheusExporter::Metric
   class Base
-
     @default_prefix = nil if !defined?(@default_prefix)
     @default_labels = nil if !defined?(@default_labels)
     @default_aggregation = nil if !defined?(@default_aggregation)
@@ -77,11 +76,14 @@ module PrometheusExporter::Metric
     def labels_text(labels)
       labels = Base.default_labels.merge(labels || {})
       if labels && labels.length > 0
-        s = labels.map do |key, value|
-          value = value.to_s
-          value = escape_value(value) if needs_escape?(value)
-          "#{key}=\"#{value}\""
-        end.join(",")
+        s =
+          labels
+            .map do |key, value|
+              value = value.to_s
+              value = escape_value(value) if needs_escape?(value)
+              "#{key}=\"#{value}\""
+            end
+            .join(",")
         "{#{s}}"
       end
     end
@@ -109,6 +111,5 @@ module PrometheusExporter::Metric
     def needs_escape?(str)
       str.match?(/[\n"\\]/m)
     end
-
   end
 end

data/lib/prometheus_exporter/metric/counter.rb +1 -3

@@ -18,9 +18,7 @@ module PrometheusExporter::Metric
     end
 
     def metric_text
-      @data.map do |labels, value|
-        "#{prefix(@name)}#{labels_text(labels)} #{value}"
-      end.join("\n")
+      @data.map { |labels, value| "#{prefix(@name)}#{labels_text(labels)} #{value}" }.join("\n")
     end
 
     def to_h

data/lib/prometheus_exporter/metric/gauge.rb +2 -6

@@ -18,9 +18,7 @@ module PrometheusExporter::Metric
     end
 
     def metric_text
-      @data.map do |labels, value|
-        "#{prefix(@name)}#{labels_text(labels)} #{value}"
-      end.join("\n")
+      @data.map { |labels, value| "#{prefix(@name)}#{labels_text(labels)} #{value}" }.join("\n")
     end
 
     def reset!
@@ -39,9 +37,7 @@ module PrometheusExporter::Metric
       if value.nil?
         data.delete(labels)
       else
-        if !(Numeric === value)
-          raise ArgumentError, 'value must be a number'
-        end
+        raise ArgumentError, "value must be a number" if !(Numeric === value)
         @data[labels] = value
       end
     end
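
The counter and gauge hunks above are also formatting-only: metric_text still emits one name{labels} value line per label set, and Gauge#observe still deletes a series when given nil and rejects non-numeric values. A small illustrative sketch of that public API (the metric name and labels are made up for the example):

require "prometheus_exporter/metric"

gauge = PrometheusExporter::Metric::Gauge.new("pool_size", "example gauge")
gauge.observe(10, db: "primary")   # records pool_size{db="primary"} 10
gauge.observe(nil, db: "primary")  # nil removes that series again
gauge.observe(12, db: "replica")

puts gauge.to_prometheus_text      # HELP/TYPE comments plus one line per remaining label set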

data/lib/prometheus_exporter/metric/histogram.rb +0 -2

@@ -2,7 +2,6 @@
 
 module PrometheusExporter::Metric
   class Histogram < Base
-
     DEFAULT_BUCKETS = [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5, 5.0, 10.0].freeze
 
     @default_buckets = nil if !defined?(@default_buckets)
@@ -100,6 +99,5 @@ module PrometheusExporter::Metric
     def with_bucket(labels, bucket)
       labels.merge("le" => bucket)
     end
-
   end
 end

data/lib/prometheus_exporter/metric/summary.rb +5 -14

@@ -2,7 +2,6 @@
 
 module PrometheusExporter::Metric
   class Summary < Base
-
     DEFAULT_QUANTILES = [0.99, 0.9, 0.5, 0.1, 0.01]
     ROTATE_AGE = 120
 
@@ -49,9 +48,7 @@ module PrometheusExporter::Metric
       result = {}
 
       if length > 0
-        @quantiles.each do |quantile|
-          result[quantile] = sorted[(length * quantile).ceil - 1]
-        end
+        @quantiles.each { |quantile| result[quantile] = sorted[(length * quantile).ceil - 1] }
       end
 
       result
@@ -61,12 +58,9 @@ module PrometheusExporter::Metric
       buffer = @buffers[@current_buffer]
 
       result = {}
-      buffer.each do |labels, raw_data|
-        result[labels] = calculate_quantiles(raw_data)
-      end
+      buffer.each { |labels, raw_data| result[labels] = calculate_quantiles(raw_data) }
 
       result
-
     end
 
     def metric_text
@@ -87,8 +81,8 @@ module PrometheusExporter::Metric
 
     # makes sure we have storage
     def ensure_summary(labels)
-      @buffers[0][labels] ||= []
-      @buffers[1][labels] ||= []
+      @buffers[0][labels] ||= []
+      @buffers[1][labels] ||= []
       @sums[labels] ||= 0.0
       @counts[labels] ||= 0
       nil
@@ -97,9 +91,7 @@ module PrometheusExporter::Metric
     def rotate_if_needed
       if (now = Process.clock_gettime(Process::CLOCK_MONOTONIC)) > (@last_rotated + ROTATE_AGE)
        @last_rotated = now
-        @buffers[@current_buffer].each do |labels, raw|
-          raw.clear
-        end
+        @buffers[@current_buffer].each { |labels, raw| raw.clear }
         @current_buffer = @current_buffer == 0 ? 1 : 0
       end
       nil
@@ -116,6 +108,5 @@ module PrometheusExporter::Metric
       @sums[labels] += value
       @counts[labels] += 1
     end
-
   end
 end

data/lib/prometheus_exporter/middleware.rb +40 -32

@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
-require 'prometheus_exporter/instrumentation/method_profiler'
-require 'prometheus_exporter/client'
+require "prometheus_exporter/instrumentation/method_profiler"
+require "prometheus_exporter/client"
 
 class PrometheusExporter::Middleware
   MethodProfiler = PrometheusExporter::Instrumentation::MethodProfiler
@@ -11,26 +11,42 @@ class PrometheusExporter::Middleware
     @client = config[:client] || PrometheusExporter::Client.default
 
     if config[:instrument]
-      if defined?(RedisClient)
-        apply_redis_client_middleware!
-      end
-      if defined?(Redis::VERSION) && (Gem::Version.new(Redis::VERSION) >= Gem::Version.new('5.0.0'))
+      apply_redis_client_middleware! if defined?(RedisClient)
+
+      if defined?(Redis::VERSION) && (Gem::Version.new(Redis::VERSION) >= Gem::Version.new("5.0.0"))
        # redis 5 support handled via RedisClient
-      elsif defined? Redis::Client
-        MethodProfiler.patch(Redis::Client, [
-          :call, :call_pipeline
-        ], :redis, instrument: config[:instrument])
+      elsif defined?(Redis::Client)
+        MethodProfiler.patch(
+          Redis::Client,
+          %i[call call_pipeline],
+          :redis,
+          instrument: config[:instrument],
+        )
       end
-      if defined? PG::Connection
-        MethodProfiler.patch(PG::Connection, [
-          :exec, :async_exec, :exec_prepared, :exec_params, :send_query_prepared, :query
-        ], :sql, instrument: config[:instrument])
+
+      if defined?(PG::Connection)
+        MethodProfiler.patch(
+          PG::Connection,
+          %i[exec async_exec exec_prepared exec_params send_query_prepared query],
+          :sql,
+          instrument: config[:instrument],
+        )
      end
-      if defined? Mysql2::Client
+
+      if defined?(Mysql2::Client)
         MethodProfiler.patch(Mysql2::Client, [:query], :sql, instrument: config[:instrument])
         MethodProfiler.patch(Mysql2::Statement, [:execute], :sql, instrument: config[:instrument])
         MethodProfiler.patch(Mysql2::Result, [:each], :sql, instrument: config[:instrument])
       end
+
+      if defined?(Dalli::Client)
+        MethodProfiler.patch(
+          Dalli::Client,
+          %i[delete fetch get add set],
+          :memcache,
+          instrument: config[:instrument],
+        )
+      end
     end
   end
 
@@ -49,12 +65,10 @@ class PrometheusExporter::Middleware
       timings: info,
       queue_time: queue_time,
       status: status,
-      default_labels: default_labels(env, result)
+      default_labels: default_labels(env, result),
     }
    labels = custom_labels(env)
-    if labels
-      obj = obj.merge(custom_labels: labels)
-    end
+    obj = obj.merge(custom_labels: labels) if labels
 
    @client.send_json(obj)
   end
@@ -72,10 +86,7 @@ class PrometheusExporter::Middleware
       controller = "preflight"
     end
 
-    {
-      action: action || "other",
-      controller: controller || "other"
-    }
+    { action: action || "other", controller: controller || "other" }
   end
 
   # allows subclasses to add custom labels based on env
@@ -103,32 +114,29 @@ class PrometheusExporter::Middleware
 
   # determine queue start from well-known trace headers
   def queue_start(env)
-
     # get the content of the x-queue-start or x-request-start header
-    value = env['HTTP_X_REQUEST_START'] || env['HTTP_X_QUEUE_START']
-    unless value.nil? || value == ''
+    value = env["HTTP_X_REQUEST_START"] || env["HTTP_X_QUEUE_START"]
+    unless value.nil? || value == ""
       # nginx returns time as milliseconds with 3 decimal places
       # apache returns time as microseconds without decimal places
       # this method takes care to convert both into a proper second + fractions timestamp
-      value = value.to_s.gsub(/t=|\./, '')
+      value = value.to_s.gsub(/t=|\./, "")
       return "#{value[0, 10]}.#{value[10, 13]}".to_f
     end
 
     # get the content of the x-amzn-trace-id header
     # see also: https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-request-tracing.html
-    value = env['HTTP_X_AMZN_TRACE_ID']
-    value&.split('Root=')&.last&.split('-')&.fetch(1)&.to_i(16)
-
+    value = env["HTTP_X_AMZN_TRACE_ID"]
+    value&.split("Root=")&.last&.split("-")&.fetch(1)&.to_i(16)
   end
 
   private
 
   module RedisInstrumenter
-    MethodProfiler.define_methods_on_module(self, ["call", "call_pipelined"], "redis")
+    MethodProfiler.define_methods_on_module(self, %w[call call_pipelined], "redis")
   end
 
   def apply_redis_client_middleware!
     RedisClient.register(RedisInstrumenter)
   end
-
 end
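
Beyond reformatting, the middleware hunks above add Dalli::Client patching (delete, fetch, get, add and set timed under the memcache key) next to the existing Redis, PG and Mysql2 patches. The patches are applied when the middleware is constructed with config[:instrument] enabled, which in a Rails app conventionally looks like the sketch below, per the project README (the initializer path is illustrative):

# config/initializers/prometheus.rb (conventional setup, not part of this diff)
unless Rails.env.test?
  require "prometheus_exporter/middleware"

  # instrumentation, including the new Dalli patch, is applied in
  # PrometheusExporter::Middleware#initialize when config[:instrument] is enabled
  Rails.application.middleware.unshift PrometheusExporter::Middleware
end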

data/lib/prometheus_exporter/server/active_record_collector.rb +11 -6

@@ -10,15 +10,15 @@ module PrometheusExporter::Server
       dead: "Dead connections in pool",
       idle: "Idle connections in pool",
       waiting: "Connection requests waiting",
-      size: "Maximum allowed connection pool size"
+      size: "Maximum allowed connection pool size",
     }
 
     def initialize
       @active_record_metrics = MetricsContainer.new(ttl: MAX_METRIC_AGE)
-      @active_record_metrics.filter = -> (new_metric, old_metric) do
+      @active_record_metrics.filter = ->(new_metric, old_metric) do
         new_metric["pid"] == old_metric["pid"] &&
-        new_metric["hostname"] == old_metric["hostname"] &&
-        new_metric["metric_labels"]["pool_name"] == old_metric["metric_labels"]["pool_name"]
+          new_metric["hostname"] == old_metric["hostname"] &&
+          new_metric["metric_labels"]["pool_name"] == old_metric["metric_labels"]["pool_name"]
       end
     end
 
@@ -32,13 +32,18 @@ module PrometheusExporter::Server
       metrics = {}
 
       @active_record_metrics.map do |m|
-        metric_key = (m["metric_labels"] || {}).merge("pid" => m["pid"], "hostname" => m["hostname"])
+        metric_key =
+          (m["metric_labels"] || {}).merge("pid" => m["pid"], "hostname" => m["hostname"])
         metric_key.merge!(m["custom_labels"]) if m["custom_labels"]
 
         ACTIVE_RECORD_GAUGES.map do |k, help|
           k = k.to_s
           if v = m[k]
-            g = metrics[k] ||= PrometheusExporter::Metric::Gauge.new("active_record_connection_pool_#{k}", help)
+            g =
+              metrics[k] ||= PrometheusExporter::Metric::Gauge.new(
+                "active_record_connection_pool_#{k}",
+                help,
+              )
             g.observe(v, metric_key)
           end
         end

data/lib/prometheus_exporter/server/collector.rb +12 -16

@@ -1,9 +1,7 @@
 # frozen_string_literal: true
 
 module PrometheusExporter::Server
-
   class Collector < CollectorBase
-
     def initialize(json_serializer: nil)
       @process_metrics = []
       @metrics = {}
@@ -40,19 +38,15 @@ module PrometheusExporter::Server
           collector.collect(obj)
         else
           metric = @metrics[obj["name"]]
-          if !metric
-            metric = register_metric_unsafe(obj)
-          end
+          metric = register_metric_unsafe(obj) if !metric
 
           keys = obj["keys"] || {}
-          if obj["custom_labels"]
-            keys = obj["custom_labels"].merge(keys)
-          end
+          keys = obj["custom_labels"].merge(keys) if obj["custom_labels"]
 
           case obj["prometheus_exporter_action"]
-          when 'increment'
+          when "increment"
             metric.increment(keys, obj["value"])
-          when 'decrement'
+          when "decrement"
             metric.decrement(keys, obj["value"])
           else
             metric.observe(obj["value"], keys)
@@ -63,15 +57,14 @@ module PrometheusExporter::Server
 
     def prometheus_metrics_text
       @mutex.synchronize do
-        (@metrics.values + @collectors.values.map(&:metrics).flatten)
-          .map(&:to_prometheus_text).join("\n")
+        (@metrics.values + @collectors.values.map(&:metrics).flatten).map(
+          &:to_prometheus_text
+        ).join("\n")
       end
     end
 
     def register_metric(metric)
-      @mutex.synchronize do
-        @metrics[metric.name] = metric
-      end
+      @mutex.synchronize { @metrics[metric.name] = metric }
     end
 
     protected
@@ -101,7 +94,10 @@ module PrometheusExporter::Server
     end
 
     def symbolize_keys(hash)
-      hash.inject({}) { |memo, k| memo[k.first.to_sym] = k.last; memo }
+      hash.inject({}) do |memo, k|
+        memo[k.first.to_sym] = k.last
+        memo
+      end
     end
   end
 end

data/lib/prometheus_exporter/server/collector_base.rb +0 -2

@@ -1,10 +1,8 @@
 # frozen_string_literal: true
 
 module PrometheusExporter::Server
-
   # minimal interface to implement a customer collector
   class CollectorBase
-
     # called each time a string is delivered from the web
     def process(str)
     end
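
The CollectorBase hunk only removes blank lines, but it is the interface the reworked Collector above builds on: process(str) receives each JSON payload posted by a client, and the server asks the collector for Prometheus text when scraped. A minimal custom collector might look like the sketch below; the class name, metric name and JSON field are hypothetical, and the gem's own examples/custom_collector.rb (also touched in this release) is the canonical reference:

require "prometheus_exporter/metric"
require "prometheus_exporter/server"
require "json"

# hypothetical collector that counts delivered payloads by their "type" field
class MyEventCollector < PrometheusExporter::Server::CollectorBase
  def initialize
    @events = PrometheusExporter::Metric::Counter.new("my_events_total", "events seen by type")
  end

  # called each time a string is delivered from the web
  def process(str)
    obj = JSON.parse(str)
    @events.observe(1, type: obj["type"] || "unknown")
  end

  # needs to return a string with all metrics in prometheus format
  def prometheus_metrics_text
    @events.to_prometheus_text
  end
end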