prometheus_exporter 2.1.0 → 2.2.0
- checksums.yaml +4 -4
- data/.github/workflows/ci.yml +10 -6
- data/.rubocop +1 -0
- data/.rubocop.yml +12 -1
- data/.streerc +2 -0
- data/CHANGELOG +12 -1
- data/README.md +22 -9
- data/bench/bench.rb +12 -11
- data/bin/prometheus_exporter +2 -2
- data/examples/custom_collector.rb +1 -3
- data/gemfiles/ar_70.gemfile +2 -0
- data/gemfiles/ar_71.gemfile +7 -0
- data/lib/prometheus_exporter/client.rb +16 -32
- data/lib/prometheus_exporter/instrumentation/active_record.rb +20 -8
- data/lib/prometheus_exporter/instrumentation/delayed_job.rb +23 -13
- data/lib/prometheus_exporter/instrumentation/good_job.rb +2 -4
- data/lib/prometheus_exporter/instrumentation/hutch.rb +1 -1
- data/lib/prometheus_exporter/instrumentation/method_profiler.rb +16 -16
- data/lib/prometheus_exporter/instrumentation/periodic_stats.rb +13 -21
- data/lib/prometheus_exporter/instrumentation/process.rb +14 -6
- data/lib/prometheus_exporter/instrumentation/puma.rb +1 -1
- data/lib/prometheus_exporter/instrumentation/resque.rb +1 -3
- data/lib/prometheus_exporter/instrumentation/shoryuken.rb +6 -7
- data/lib/prometheus_exporter/instrumentation/sidekiq.rb +4 -6
- data/lib/prometheus_exporter/instrumentation/sidekiq_process.rb +12 -19
- data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb +15 -18
- data/lib/prometheus_exporter/instrumentation/sidekiq_stats.rb +10 -15
- data/lib/prometheus_exporter/instrumentation/unicorn.rb +2 -2
- data/lib/prometheus_exporter/metric/base.rb +8 -7
- data/lib/prometheus_exporter/metric/counter.rb +1 -3
- data/lib/prometheus_exporter/metric/gauge.rb +2 -6
- data/lib/prometheus_exporter/metric/histogram.rb +0 -2
- data/lib/prometheus_exporter/metric/summary.rb +5 -14
- data/lib/prometheus_exporter/middleware.rb +40 -32
- data/lib/prometheus_exporter/server/active_record_collector.rb +11 -6
- data/lib/prometheus_exporter/server/collector.rb +12 -16
- data/lib/prometheus_exporter/server/collector_base.rb +0 -2
- data/lib/prometheus_exporter/server/delayed_job_collector.rb +65 -28
- data/lib/prometheus_exporter/server/good_job_collector.rb +1 -1
- data/lib/prometheus_exporter/server/hutch_collector.rb +19 -11
- data/lib/prometheus_exporter/server/metrics_container.rb +4 -4
- data/lib/prometheus_exporter/server/process_collector.rb +7 -3
- data/lib/prometheus_exporter/server/puma_collector.rb +4 -10
- data/lib/prometheus_exporter/server/resque_collector.rb +1 -1
- data/lib/prometheus_exporter/server/runner.rb +34 -13
- data/lib/prometheus_exporter/server/shoryuken_collector.rb +22 -17
- data/lib/prometheus_exporter/server/sidekiq_collector.rb +22 -14
- data/lib/prometheus_exporter/server/sidekiq_process_collector.rb +9 -5
- data/lib/prometheus_exporter/server/sidekiq_queue_collector.rb +7 -6
- data/lib/prometheus_exporter/server/sidekiq_stats_collector.rb +12 -11
- data/lib/prometheus_exporter/server/unicorn_collector.rb +4 -4
- data/lib/prometheus_exporter/server/web_collector.rb +39 -22
- data/lib/prometheus_exporter/server/web_server.rb +10 -20
- data/lib/prometheus_exporter/version.rb +1 -1
- data/prometheus_exporter.gemspec +20 -22
- metadata +44 -13
data/lib/prometheus_exporter/instrumentation/periodic_stats.rb
@@ -2,21 +2,14 @@

 module PrometheusExporter::Instrumentation
   class PeriodicStats
-
     def self.start(*args, frequency:, client: nil, **kwargs)
       client ||= PrometheusExporter::Client.default

-      if !(Numeric === frequency)
-        raise ArgumentError.new("Expected frequency to be a number")
-      end
+      raise ArgumentError.new("Expected frequency to be a number") if !(Numeric === frequency)

-      if frequency < 0
-        raise ArgumentError.new("Expected frequency to be a positive number")
-      end
+      raise ArgumentError.new("Expected frequency to be a positive number") if frequency < 0

-      if !@worker_loop
-        raise ArgumentError.new("Worker loop was not set")
-      end
+      raise ArgumentError.new("Worker loop was not set") if !@worker_loop

       klass = self

@@ -24,18 +17,18 @@ module PrometheusExporter::Instrumentation

       @stop_thread = false

-      @thread =
-        …
+      @thread =
+        Thread.new do
+          while !@stop_thread
+            begin
+              @worker_loop.call
+            rescue => e
+              client.logger.error("#{klass} Prometheus Exporter Failed To Collect Stats #{e}")
+            ensure
+              sleep frequency
+            end
           end
         end
-      end
-
     end

     def self.started?
@@ -57,6 +50,5 @@ module PrometheusExporter::Instrumentation
       end
       @thread = nil
     end
-
   end
 end
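For context: PeriodicStats is the base class behind the periodic collectors (Process, Sidekiq*, Puma, and so on), and start now rejects a non-numeric or negative frequency up front. A minimal usage sketch, based on the Process.start signature visible in the next diff (not part of the gem's changes):

  # in an initializer of the instrumented app
  require "prometheus_exporter/instrumentation"

  # collect RSS/GC stats every 30 seconds and ship them to the local exporter
  PrometheusExporter::Instrumentation::Process.start(type: "web", frequency: 30)

  # frequency must be a positive Numeric; a string or a negative value now raises ArgumentError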
data/lib/prometheus_exporter/instrumentation/process.rb
@@ -3,9 +3,7 @@
 # collects stats from currently running process
 module PrometheusExporter::Instrumentation
   class Process < PeriodicStats
-
     def self.start(client: nil, type: "ruby", frequency: 30, labels: nil)
-
       metric_labels =
         if labels && type
           labels.merge(type: type)
@@ -46,14 +44,22 @@ module PrometheusExporter::Instrumentation
     end

     def rss
-      @pagesize ||= …
-        …
+      @pagesize ||=
+        begin
+          `getconf PAGESIZE`.to_i
+        rescue StandardError
+          4096
+        end
+      begin
+        File.read("/proc/#{pid}/statm").split(" ")[1].to_i * @pagesize
+      rescue StandardError
+        0
+      end
     end

     def collect_process_stats(metric)
       metric[:pid] = pid
       metric[:rss] = rss
-
     end

     def collect_gc_stats(metric)
@@ -63,10 +69,12 @@ module PrometheusExporter::Instrumentation
       metric[:major_gc_ops_total] = stat[:major_gc_count]
       metric[:minor_gc_ops_total] = stat[:minor_gc_count]
       metric[:allocated_objects_total] = stat[:total_allocated_objects]
+      metric[:malloc_increase_bytes_limit] = stat[:malloc_increase_bytes_limit]
+      metric[:oldmalloc_increase_bytes_limit] = stat[:oldmalloc_increase_bytes_limit]
     end

     def collect_v8_stats(metric)
-      return if !defined? …
+      return if !defined?(MiniRacer)

       metric[:v8_heap_count] = metric[:v8_heap_size] = 0
       metric[:v8_heap_size] = metric[:v8_physical_size] = 0
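The two new process metrics come straight from GC.stat on CRuby. A quick sketch of the values the collector now reads (key names as exposed by MRI; availability may vary by Ruby version):

  stat = GC.stat
  stat[:malloc_increase_bytes_limit]     # malloc growth threshold that triggers the next GC
  stat[:oldmalloc_increase_bytes_limit]  # the same threshold for the old generation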
data/lib/prometheus_exporter/instrumentation/resque.rb
@@ -7,9 +7,7 @@ module PrometheusExporter::Instrumentation
     resque_collector = new
     client ||= PrometheusExporter::Client.default

-    worker_loop …
-      client.send_json(resque_collector.collect)
-    end
+    worker_loop { client.send_json(resque_collector.collect) }

     super
   end
data/lib/prometheus_exporter/instrumentation/shoryuken.rb
@@ -2,7 +2,6 @@

 module PrometheusExporter::Instrumentation
   class Shoryuken
-
     def initialize(client: nil)
       @client = client || PrometheusExporter::Client.default
     end
@@ -19,12 +18,12 @@ module PrometheusExporter::Instrumentation
     ensure
       duration = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC) - start
       @client.send_json(
-        …
+        type: "shoryuken",
+        queue: queue,
+        name: worker.class.name,
+        success: success,
+        shutdown: shutdown,
+        duration: duration,
       )
     end
   end
data/lib/prometheus_exporter/instrumentation/sidekiq.rb
@@ -3,8 +3,7 @@
 require "yaml"

 module PrometheusExporter::Instrumentation
-  JOB_WRAPPER_CLASS_NAME =
-    "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+  JOB_WRAPPER_CLASS_NAME = "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
   DELAYED_CLASS_NAMES = %w[
     Sidekiq::Extensions::DelayedClass
     Sidekiq::Extensions::DelayedModel
@@ -24,7 +23,7 @@ module PrometheusExporter::Instrumentation
           type: "sidekiq",
           name: get_name(job["class"], job),
           dead: true,
-          custom_labels: worker_custom_labels
+          custom_labels: worker_custom_labels,
         )
       end
     end
@@ -44,8 +43,7 @@ module PrometheusExporter::Instrumentation
     end

     def initialize(options = { client: nil })
-      @client =
-        options.fetch(:client, nil) || PrometheusExporter::Client.default
+      @client = options.fetch(:client, nil) || PrometheusExporter::Client.default
     end

     def call(worker, msg, queue)
@@ -67,7 +65,7 @@ module PrometheusExporter::Instrumentation
         success: success,
         shutdown: shutdown,
         duration: duration,
-        custom_labels: self.class.get_worker_custom_labels(worker.class, msg)
+        custom_labels: self.class.get_worker_custom_labels(worker.class, msg),
       )
     end

data/lib/prometheus_exporter/instrumentation/sidekiq_process.rb
@@ -6,9 +6,7 @@ module PrometheusExporter::Instrumentation
       client ||= PrometheusExporter::Client.default
       sidekiq_process_collector = new

-      worker_loop …
-        client.send_json(sidekiq_process_collector.collect)
-      end
+      worker_loop { client.send_json(sidekiq_process_collector.collect) }

       super
     end
@@ -19,10 +17,7 @@ module PrometheusExporter::Instrumentation
     end

     def collect
-      {
-        type: 'sidekiq_process',
-        process: collect_stats
-      }
+      { type: "sidekiq_process", process: collect_stats }
     end

     def collect_stats
@@ -30,23 +25,21 @@ module PrometheusExporter::Instrumentation
       return {} unless process

       {
-        busy: process[ …
-        concurrency: process[ …
+        busy: process["busy"],
+        concurrency: process["concurrency"],
         labels: {
-          labels: process[ …
-          queues: process[ …
-          quiet: process[ …
-          tag: process[ …
-          hostname: process[ …
-          identity: process[ …
-        }
+          labels: process["labels"].sort.join(","),
+          queues: process["queues"].sort.join(","),
+          quiet: process["quiet"],
+          tag: process["tag"],
+          hostname: process["hostname"],
+          identity: process["identity"],
+        },
       }
     end

     def current_process
-      ::Sidekiq::ProcessSet.new.find …
-        sp['hostname'] == @hostname && sp['pid'] == @pid
-      end
+      ::Sidekiq::ProcessSet.new.find { |sp| sp["hostname"] == @hostname && sp["pid"] == @pid }
     end
   end
 end
data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb
@@ -6,9 +6,7 @@ module PrometheusExporter::Instrumentation
       client ||= PrometheusExporter::Client.default
       sidekiq_queue_collector = new(all_queues: all_queues)

-      worker_loop …
-        client.send_json(sidekiq_queue_collector.collect)
-      end
+      worker_loop { client.send_json(sidekiq_queue_collector.collect) }

       super
     end
@@ -20,10 +18,7 @@ module PrometheusExporter::Instrumentation
     end

     def collect
-      {
-        type: 'sidekiq_queue',
-        queues: collect_queue_stats
-      }
+      { type: "sidekiq_queue", queues: collect_queue_stats }
     end

     def collect_queue_stats
@@ -34,13 +29,17 @@ module PrometheusExporter::Instrumentation
         sidekiq_queues.select! { |sidekiq_queue| queues.include?(sidekiq_queue.name) }
       end

-      sidekiq_queues …
-        …
+      sidekiq_queues
+        .map do |queue|
+          {
+            backlog: queue.size,
+            latency_seconds: queue.latency.to_i,
+            labels: {
+              queue: queue.name,
+            },
+          }
+        end
+        .compact
     end

     private
@@ -48,11 +47,9 @@ module PrometheusExporter::Instrumentation
     def collect_current_process_queues
       ps = ::Sidekiq::ProcessSet.new

-      process = ps.find …
-        sp['hostname'] == @hostname && sp['pid'] == @pid
-      end
+      process = ps.find { |sp| sp["hostname"] == @hostname && sp["pid"] == @pid }

-      process.nil? ? [] : process[ …
+      process.nil? ? [] : process["queues"]
     end
   end
 end
data/lib/prometheus_exporter/instrumentation/sidekiq_stats.rb
@@ -6,31 +6,26 @@ module PrometheusExporter::Instrumentation
       client ||= PrometheusExporter::Client.default
       sidekiq_stats_collector = new

-      worker_loop …
-        client.send_json(sidekiq_stats_collector.collect)
-      end
+      worker_loop { client.send_json(sidekiq_stats_collector.collect) }

       super
     end

     def collect
-      {
-        type: 'sidekiq_stats',
-        stats: collect_stats
-      }
+      { type: "sidekiq_stats", stats: collect_stats }
     end

     def collect_stats
       stats = ::Sidekiq::Stats.new
       {
-        …
+        "dead_size" => stats.dead_size,
+        "enqueued" => stats.enqueued,
+        "failed" => stats.failed,
+        "processed" => stats.processed,
+        "processes_size" => stats.processes_size,
+        "retry_size" => stats.retry_size,
+        "scheduled_size" => stats.scheduled_size,
+        "workers_size" => stats.workers_size,
       }
     end
   end
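The three Sidekiq-side collectors above (process, queue, stats) all push their payloads through the same periodic worker loop. A wiring sketch following the pattern in the gem README, run inside a Sidekiq server process:

  # config/initializers/prometheus_exporter.rb
  Sidekiq.configure_server do |config|
    config.on :startup do
      require "prometheus_exporter/instrumentation"
      PrometheusExporter::Instrumentation::SidekiqProcess.start
      PrometheusExporter::Instrumentation::SidekiqQueue.start
      PrometheusExporter::Instrumentation::SidekiqStats.start
    end
  end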
data/lib/prometheus_exporter/instrumentation/unicorn.rb
@@ -1,7 +1,7 @@
 # frozen_string_literal: true

 begin
-  require …
+  require "raindrops"
 rescue LoadError
   # No raindrops available, dont do anything
 end
@@ -29,7 +29,7 @@ module PrometheusExporter::Instrumentation

     def collect
       metric = {}
-      metric[:type] = …
+      metric[:type] = "unicorn"
       collect_unicorn_stats(metric)
       metric
     end
data/lib/prometheus_exporter/metric/base.rb
@@ -2,7 +2,6 @@

 module PrometheusExporter::Metric
   class Base
-
     @default_prefix = nil if !defined?(@default_prefix)
     @default_labels = nil if !defined?(@default_labels)
     @default_aggregation = nil if !defined?(@default_aggregation)
@@ -77,11 +76,14 @@ module PrometheusExporter::Metric
     def labels_text(labels)
       labels = Base.default_labels.merge(labels || {})
       if labels && labels.length > 0
-        s = …
-          …
+        s =
+          labels
+            .map do |key, value|
+              value = value.to_s
+              value = escape_value(value) if needs_escape?(value)
+              "#{key}=\"#{value}\""
+            end
+            .join(",")
         "{#{s}}"
       end
     end
@@ -109,6 +111,5 @@ module PrometheusExporter::Metric
     def needs_escape?(str)
       str.match?(/[\n"\\]/m)
     end
-
   end
 end
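The reworked labels_text keeps escaping label values that contain a quote, backslash, or newline. A small sketch of the rendered exposition text, assuming the custom-metric API from the gem's docs (Gauge#observe and Base#to_prometheus_text):

  require "prometheus_exporter/metric"

  gauge = PrometheusExporter::Metric::Gauge.new("app_queue_depth", "pending jobs")
  gauge.observe(3, queue: "low \"priority\"")   # a value containing a quote gets escaped
  puts gauge.to_prometheus_text
  # sample line: app_queue_depth{queue="low \"priority\""} 3  (preceded by HELP/TYPE lines)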
data/lib/prometheus_exporter/metric/counter.rb
@@ -18,9 +18,7 @@ module PrometheusExporter::Metric
     end

     def metric_text
-      @data.map …
-        "#{prefix(@name)}#{labels_text(labels)} #{value}"
-      end.join("\n")
+      @data.map { |labels, value| "#{prefix(@name)}#{labels_text(labels)} #{value}" }.join("\n")
     end

     def to_h
data/lib/prometheus_exporter/metric/gauge.rb
@@ -18,9 +18,7 @@ module PrometheusExporter::Metric
     end

     def metric_text
-      @data.map …
-        "#{prefix(@name)}#{labels_text(labels)} #{value}"
-      end.join("\n")
+      @data.map { |labels, value| "#{prefix(@name)}#{labels_text(labels)} #{value}" }.join("\n")
     end

     def reset!
@@ -39,9 +37,7 @@ module PrometheusExporter::Metric
       if value.nil?
         data.delete(labels)
       else
-        if !(Numeric === value)
-          raise ArgumentError, 'value must be a number'
-        end
+        raise ArgumentError, "value must be a number" if !(Numeric === value)
         @data[labels] = value
       end
     end
data/lib/prometheus_exporter/metric/histogram.rb
@@ -2,7 +2,6 @@

 module PrometheusExporter::Metric
   class Histogram < Base
-
     DEFAULT_BUCKETS = [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5, 5.0, 10.0].freeze

     @default_buckets = nil if !defined?(@default_buckets)
@@ -100,6 +99,5 @@ module PrometheusExporter::Metric
     def with_bucket(labels, bucket)
       labels.merge("le" => bucket)
     end
-
   end
 end
data/lib/prometheus_exporter/metric/summary.rb
@@ -2,7 +2,6 @@

 module PrometheusExporter::Metric
   class Summary < Base
-
     DEFAULT_QUANTILES = [0.99, 0.9, 0.5, 0.1, 0.01]
     ROTATE_AGE = 120

@@ -49,9 +48,7 @@ module PrometheusExporter::Metric
       result = {}

       if length > 0
-        @quantiles.each …
-          result[quantile] = sorted[(length * quantile).ceil - 1]
-        end
+        @quantiles.each { |quantile| result[quantile] = sorted[(length * quantile).ceil - 1] }
       end

       result
@@ -61,12 +58,9 @@ module PrometheusExporter::Metric
       buffer = @buffers[@current_buffer]

       result = {}
-      buffer.each …
-        result[labels] = calculate_quantiles(raw_data)
-      end
+      buffer.each { |labels, raw_data| result[labels] = calculate_quantiles(raw_data) }

       result
-
     end

     def metric_text
@@ -87,8 +81,8 @@ module PrometheusExporter::Metric

     # makes sure we have storage
     def ensure_summary(labels)
-      @buffers[0][labels] ||= …
-      @buffers[1][labels] ||= …
+      @buffers[0][labels] ||= []
+      @buffers[1][labels] ||= []
       @sums[labels] ||= 0.0
       @counts[labels] ||= 0
       nil
@@ -97,9 +91,7 @@ module PrometheusExporter::Metric
     def rotate_if_needed
       if (now = Process.clock_gettime(Process::CLOCK_MONOTONIC)) > (@last_rotated + ROTATE_AGE)
         @last_rotated = now
-        @buffers[@current_buffer].each …
-          raw.clear
-        end
+        @buffers[@current_buffer].each { |labels, raw| raw.clear }
         @current_buffer = @current_buffer == 0 ? 1 : 0
       end
       nil
@@ -116,6 +108,5 @@ module PrometheusExporter::Metric
       @sums[labels] += value
       @counts[labels] += 1
     end
-
   end
 end
data/lib/prometheus_exporter/middleware.rb
@@ -1,7 +1,7 @@
 # frozen_string_literal: true

-require …
-require …
+require "prometheus_exporter/instrumentation/method_profiler"
+require "prometheus_exporter/client"

 class PrometheusExporter::Middleware
   MethodProfiler = PrometheusExporter::Instrumentation::MethodProfiler
@@ -11,26 +11,42 @@ class PrometheusExporter::Middleware
     @client = config[:client] || PrometheusExporter::Client.default

     if config[:instrument]
-      if defined?(RedisClient)
-        …
-      if defined?(Redis::VERSION) && (Gem::Version.new(Redis::VERSION) >= Gem::Version.new('5.0.0'))
+      apply_redis_client_middleware! if defined?(RedisClient)
+
+      if defined?(Redis::VERSION) && (Gem::Version.new(Redis::VERSION) >= Gem::Version.new("5.0.0"))
         # redis 5 support handled via RedisClient
-      elsif defined? …
-        MethodProfiler.patch( …
-          …
+      elsif defined?(Redis::Client)
+        MethodProfiler.patch(
+          Redis::Client,
+          %i[call call_pipeline],
+          :redis,
+          instrument: config[:instrument],
+        )
       end
-      …
+
+      if defined?(PG::Connection)
+        MethodProfiler.patch(
+          PG::Connection,
+          %i[exec async_exec exec_prepared exec_params send_query_prepared query],
+          :sql,
+          instrument: config[:instrument],
+        )
       end
-      …
+
+      if defined?(Mysql2::Client)
         MethodProfiler.patch(Mysql2::Client, [:query], :sql, instrument: config[:instrument])
         MethodProfiler.patch(Mysql2::Statement, [:execute], :sql, instrument: config[:instrument])
         MethodProfiler.patch(Mysql2::Result, [:each], :sql, instrument: config[:instrument])
       end
+
+      if defined?(Dalli::Client)
+        MethodProfiler.patch(
+          Dalli::Client,
+          %i[delete fetch get add set],
+          :memcache,
+          instrument: config[:instrument],
+        )
+      end
     end
   end

@@ -49,12 +65,10 @@ class PrometheusExporter::Middleware
       timings: info,
       queue_time: queue_time,
       status: status,
-      default_labels: default_labels(env, result)
+      default_labels: default_labels(env, result),
     }
     labels = custom_labels(env)
-    if labels
-      obj = obj.merge(custom_labels: labels)
-    end
+    obj = obj.merge(custom_labels: labels) if labels

     @client.send_json(obj)
   end
@@ -72,10 +86,7 @@ class PrometheusExporter::Middleware
       controller = "preflight"
     end

-    {
-      action: action || "other",
-      controller: controller || "other"
-    }
+    { action: action || "other", controller: controller || "other" }
   end

   # allows subclasses to add custom labels based on env
@@ -103,32 +114,29 @@ class PrometheusExporter::Middleware

   # determine queue start from well-known trace headers
   def queue_start(env)
-
     # get the content of the x-queue-start or x-request-start header
-    value = env[ …
-    unless value.nil? || value == …
+    value = env["HTTP_X_REQUEST_START"] || env["HTTP_X_QUEUE_START"]
+    unless value.nil? || value == ""
       # nginx returns time as milliseconds with 3 decimal places
       # apache returns time as microseconds without decimal places
       # this method takes care to convert both into a proper second + fractions timestamp
-      value = value.to_s.gsub(/t=|\./, …
+      value = value.to_s.gsub(/t=|\./, "")
       return "#{value[0, 10]}.#{value[10, 13]}".to_f
     end

     # get the content of the x-amzn-trace-id header
     # see also: https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-request-tracing.html
-    value = env[ …
-    value&.split( …
-
+    value = env["HTTP_X_AMZN_TRACE_ID"]
+    value&.split("Root=")&.last&.split("-")&.fetch(1)&.to_i(16)
   end

   private

   module RedisInstrumenter
-    MethodProfiler.define_methods_on_module(self, [ …
+    MethodProfiler.define_methods_on_module(self, %w[call call_pipelined], "redis")
   end

   def apply_redis_client_middleware!
     RedisClient.register(RedisInstrumenter)
   end
-
 end
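With this change the middleware constructor conditionally profiles redis-client, Redis 4, PG, Mysql2, and Dalli whenever those libraries are loaded and config[:instrument] is set. A minimal Rails wiring sketch following the pattern in the gem README; no extra configuration is needed for the per-library timings beyond having the client gems loaded:

  # config/initializers/prometheus_exporter.rb
  unless Rails.env.test?
    require "prometheus_exporter/middleware"

    # reports request timings; SQL/Redis/memcache timings are added automatically
    # when the corresponding client libraries are present and instrumentation is enabled
    Rails.application.middleware.unshift PrometheusExporter::Middleware
  end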
|