prometheus_exporter 0.7.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG +298 -35
- data/README.md +276 -53
- data/{bin → exe}/prometheus_exporter +20 -7
- data/lib/prometheus_exporter/client.rb +41 -32
- data/lib/prometheus_exporter/instrumentation/active_record.rb +29 -35
- data/lib/prometheus_exporter/instrumentation/delayed_job.rb +28 -13
- data/lib/prometheus_exporter/instrumentation/good_job.rb +28 -0
- data/lib/prometheus_exporter/instrumentation/hutch.rb +1 -1
- data/lib/prometheus_exporter/instrumentation/method_profiler.rb +67 -27
- data/lib/prometheus_exporter/instrumentation/periodic_stats.rb +54 -0
- data/lib/prometheus_exporter/instrumentation/process.rb +25 -27
- data/lib/prometheus_exporter/instrumentation/puma.rb +36 -27
- data/lib/prometheus_exporter/instrumentation/resque.rb +33 -0
- data/lib/prometheus_exporter/instrumentation/shoryuken.rb +6 -7
- data/lib/prometheus_exporter/instrumentation/sidekiq.rb +51 -23
- data/lib/prometheus_exporter/instrumentation/sidekiq_process.rb +45 -0
- data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb +38 -33
- data/lib/prometheus_exporter/instrumentation/sidekiq_stats.rb +32 -0
- data/lib/prometheus_exporter/instrumentation/unicorn.rb +12 -17
- data/lib/prometheus_exporter/instrumentation.rb +5 -0
- data/lib/prometheus_exporter/metric/base.rb +20 -17
- data/lib/prometheus_exporter/metric/counter.rb +1 -3
- data/lib/prometheus_exporter/metric/gauge.rb +6 -6
- data/lib/prometheus_exporter/metric/histogram.rb +15 -5
- data/lib/prometheus_exporter/metric/summary.rb +5 -14
- data/lib/prometheus_exporter/middleware.rb +72 -38
- data/lib/prometheus_exporter/server/active_record_collector.rb +16 -14
- data/lib/prometheus_exporter/server/collector.rb +29 -17
- data/lib/prometheus_exporter/server/collector_base.rb +0 -2
- data/lib/prometheus_exporter/server/delayed_job_collector.rb +76 -33
- data/lib/prometheus_exporter/server/good_job_collector.rb +52 -0
- data/lib/prometheus_exporter/server/hutch_collector.rb +19 -11
- data/lib/prometheus_exporter/server/metrics_container.rb +66 -0
- data/lib/prometheus_exporter/server/process_collector.rb +15 -14
- data/lib/prometheus_exporter/server/puma_collector.rb +21 -18
- data/lib/prometheus_exporter/server/resque_collector.rb +50 -0
- data/lib/prometheus_exporter/server/runner.rb +49 -13
- data/lib/prometheus_exporter/server/shoryuken_collector.rb +22 -17
- data/lib/prometheus_exporter/server/sidekiq_collector.rb +22 -14
- data/lib/prometheus_exporter/server/sidekiq_process_collector.rb +47 -0
- data/lib/prometheus_exporter/server/sidekiq_queue_collector.rb +12 -12
- data/lib/prometheus_exporter/server/sidekiq_stats_collector.rb +49 -0
- data/lib/prometheus_exporter/server/type_collector.rb +2 -0
- data/lib/prometheus_exporter/server/unicorn_collector.rb +32 -33
- data/lib/prometheus_exporter/server/web_collector.rb +48 -31
- data/lib/prometheus_exporter/server/web_server.rb +70 -48
- data/lib/prometheus_exporter/server.rb +4 -0
- data/lib/prometheus_exporter/version.rb +1 -1
- data/lib/prometheus_exporter.rb +12 -13
- metadata +19 -206
- data/.github/workflows/ci.yml +0 -42
- data/.gitignore +0 -13
- data/.rubocop.yml +0 -7
- data/Appraisals +0 -10
- data/CODE_OF_CONDUCT.md +0 -74
- data/Gemfile +0 -8
- data/Guardfile +0 -8
- data/Rakefile +0 -12
- data/bench/bench.rb +0 -45
- data/examples/custom_collector.rb +0 -27
- data/gemfiles/.bundle/config +0 -2
- data/gemfiles/ar_60.gemfile +0 -5
- data/gemfiles/ar_61.gemfile +0 -7
- data/prometheus_exporter.gemspec +0 -46

--- a/data/lib/prometheus_exporter/instrumentation/puma.rb
+++ b/data/lib/prometheus_exporter/instrumentation/puma.rb
@@ -4,39 +4,46 @@ require "json"

 # collects stats from puma
 module PrometheusExporter::Instrumentation
-  class Puma
-    def self.start(client: nil, frequency: 30)
-      puma_collector = new
+  class Puma < PeriodicStats
+    def self.start(client: nil, frequency: 30, labels: {})
+      puma_collector = new(labels)
       client ||= PrometheusExporter::Client.default
-
-
-
-
-            client.send_json metric
-          rescue => e
-            STDERR.puts("Prometheus Exporter Failed To Collect Puma Stats #{e}")
-          ensure
-            sleep frequency
-          end
-        end
+
+      worker_loop do
+        metric = puma_collector.collect
+        client.send_json metric
       end
+
+      super
+    end
+
+    def initialize(metric_labels = {})
+      @metric_labels = metric_labels
     end

     def collect
-      metric = {
-
+      metric = {
+        pid: pid,
+        type: "puma",
+        hostname: ::PrometheusExporter.hostname,
+        metric_labels: @metric_labels,
+      }
       collect_puma_stats(metric)
       metric
     end

+    def pid
+      @pid = ::Process.pid
+    end
+
     def collect_puma_stats(metric)
       stats = JSON.parse(::Puma.stats)

       if stats.key?("workers")
         metric[:phase] = stats["phase"]
-        metric[:
-        metric[:
-        metric[:
+        metric[:workers] = stats["workers"]
+        metric[:booted_workers] = stats["booted_workers"]
+        metric[:old_workers] = stats["old_workers"]

         stats["worker_status"].each do |worker|
           next if worker["last_status"].empty?
@@ -50,15 +57,17 @@ module PrometheusExporter::Instrumentation
     private

     def collect_worker_status(metric, status)
-      metric[:
-      metric[:
-      metric[:
-      metric[:
+      metric[:request_backlog] ||= 0
+      metric[:running_threads] ||= 0
+      metric[:thread_pool_capacity] ||= 0
+      metric[:max_threads] ||= 0
+      metric[:busy_threads] ||= 0

-      metric[:
-      metric[:
-      metric[:
-      metric[:
+      metric[:request_backlog] += status["backlog"]
+      metric[:running_threads] += status["running"]
+      metric[:thread_pool_capacity] += status["pool_capacity"]
+      metric[:max_threads] += status["max_threads"]
+      metric[:busy_threads] += status["busy_threads"]
     end
   end
 end
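
The Puma collector now runs on `PeriodicStats#worker_loop` and accepts a `labels:` option that is merged into each reported metric. A minimal sketch of using it from a Puma config file; the `after_worker_boot` placement and the label values are illustrative, not taken from this diff:

```ruby
# config/puma.rb — illustrative only; label names/values are made up.
after_worker_boot do
  require "prometheus_exporter/instrumentation"
  PrometheusExporter::Instrumentation::Puma.start(
    labels: { type: "puma_worker_metrics" }, # merged into every reported metric
    frequency: 30                            # seconds between reports
  )
end
```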

--- /dev/null
+++ b/data/lib/prometheus_exporter/instrumentation/resque.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+# collects stats from resque
+module PrometheusExporter::Instrumentation
+  class Resque < PeriodicStats
+    def self.start(client: nil, frequency: 30)
+      resque_collector = new
+      client ||= PrometheusExporter::Client.default
+
+      worker_loop { client.send_json(resque_collector.collect) }
+
+      super
+    end
+
+    def collect
+      metric = {}
+      metric[:type] = "resque"
+      collect_resque_stats(metric)
+      metric
+    end
+
+    def collect_resque_stats(metric)
+      info = ::Resque.info
+
+      metric[:processed_jobs] = info[:processed]
+      metric[:failed_jobs] = info[:failed]
+      metric[:pending_jobs] = info[:pending]
+      metric[:queues] = info[:queues]
+      metric[:worker] = info[:workers]
+      metric[:working] = info[:working]
+    end
+  end
+end
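
The new Resque collector follows the same `PeriodicStats` pattern. Starting it from an initializer in the Resque worker process might look like this (a sketch; the initializer location is an assumption):

```ruby
# e.g. config/initializers/prometheus.rb in the Resque worker process (sketch)
require "prometheus_exporter/instrumentation"

PrometheusExporter::Instrumentation::Resque.start(frequency: 30)
```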

--- a/data/lib/prometheus_exporter/instrumentation/shoryuken.rb
+++ b/data/lib/prometheus_exporter/instrumentation/shoryuken.rb
@@ -2,7 +2,6 @@

 module PrometheusExporter::Instrumentation
   class Shoryuken
-
     def initialize(client: nil)
       @client = client || PrometheusExporter::Client.default
     end
@@ -19,12 +18,12 @@ module PrometheusExporter::Instrumentation
     ensure
       duration = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC) - start
       @client.send_json(
-
-
-
-
-
-
+        type: "shoryuken",
+        queue: queue,
+        name: worker.class.name,
+        success: success,
+        shutdown: shutdown,
+        duration: duration,
       )
     end
   end

--- a/data/lib/prometheus_exporter/instrumentation/sidekiq.rb
+++ b/data/lib/prometheus_exporter/instrumentation/sidekiq.rb
@@ -1,32 +1,49 @@
 # frozen_string_literal: true

-require
+require "yaml"

 module PrometheusExporter::Instrumentation
-  JOB_WRAPPER_CLASS_NAME =
-  DELAYED_CLASS_NAMES = [
-
-
-
+  JOB_WRAPPER_CLASS_NAME = "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
+  DELAYED_CLASS_NAMES = %w[
+    Sidekiq::Extensions::DelayedClass
+    Sidekiq::Extensions::DelayedModel
+    Sidekiq::Extensions::DelayedMailer
   ]

   class Sidekiq
     def self.death_handler
-      ->
+      ->(job, ex) do
         job_is_fire_and_forget = job["retry"] == false

+        worker_class = Object.const_get(job["class"])
+        worker_custom_labels = self.get_worker_custom_labels(worker_class, job)
+
         unless job_is_fire_and_forget
           PrometheusExporter::Client.default.send_json(
             type: "sidekiq",
-            name: job["class"],
+            name: get_name(job["class"], job),
             dead: true,
+            custom_labels: worker_custom_labels,
           )
         end
       end
     end

-    def
-
+    def self.get_worker_custom_labels(worker_class, msg)
+      return {} unless worker_class.respond_to?(:custom_labels)
+
+      # TODO remove when version 3.0.0 is released
+      method_arity = worker_class.method(:custom_labels).arity
+
+      if method_arity > 0
+        worker_class.custom_labels(msg)
+      else
+        worker_class.custom_labels
+      end
+    end
+
+    def initialize(options = { client: nil })
+      @client = options.fetch(:client, nil) || PrometheusExporter::Client.default
     end

     def call(worker, msg, queue)
@@ -43,18 +60,18 @@ module PrometheusExporter::Instrumentation
       duration = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC) - start
       @client.send_json(
         type: "sidekiq",
-        name: get_name(worker, msg),
+        name: self.class.get_name(worker.class.to_s, msg),
         queue: queue,
         success: success,
         shutdown: shutdown,
-        duration: duration
+        duration: duration,
+        custom_labels: self.class.get_worker_custom_labels(worker.class, msg),
       )
     end

     private

-    def get_name(
-      class_name = worker.class.to_s
+    def self.get_name(class_name, msg)
       if class_name == JOB_WRAPPER_CLASS_NAME
         get_job_wrapper_name(msg)
       elsif DELAYED_CLASS_NAMES.include?(class_name)
@@ -64,24 +81,35 @@ module PrometheusExporter::Instrumentation
       end
     end

-    def get_job_wrapper_name(msg)
-      msg[
+    def self.get_job_wrapper_name(msg)
+      msg["wrapped"]
     end

-    def get_delayed_name(msg, class_name)
-      # fallback to class_name since we're relying on the internal implementation
-      # of the delayed extensions
-      # https://github.com/mperham/sidekiq/blob/master/lib/sidekiq/extensions/class_methods.rb
+    def self.get_delayed_name(msg, class_name)
       begin
-
+        # fallback to class_name since we're relying on the internal implementation
+        # of the delayed extensions
+        # https://github.com/mperham/sidekiq/blob/master/lib/sidekiq/extensions/class_methods.rb
+        target, method_name, _args = YAML.load(msg["args"].first)
         if target.class == Class
           "#{target.name}##{method_name}"
         else
           "#{target.class.name}##{method_name}"
         end
-      rescue
-
+      rescue Psych::DisallowedClass, ArgumentError
+        parsed = Psych.parse(msg["args"].first)
+        children = parsed.root.children
+        target = (children[0].value || children[0].tag).sub("!", "")
+        method_name = (children[1].value || children[1].tag).sub(":", "")
+
+        if target && method_name
+          "#{target}##{method_name}"
+        else
+          class_name
+        end
       end
+    rescue StandardError
+      class_name
     end
   end
 end
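
The job middleware and the death handler now forward per-worker `custom_labels`, and `get_worker_custom_labels` checks the method's arity so the labels can optionally depend on the job payload. A hypothetical worker using the new message-aware form (the class and label names are invented for illustration):

```ruby
class PaymentsWorker
  include Sidekiq::Worker

  # Arity > 0, so the exporter passes the raw job hash (msg) in.
  # A zero-arity custom_labels method keeps working as before.
  def self.custom_labels(msg)
    { priority: msg["queue"] == "critical" ? "high" : "normal" }
  end

  def perform(order_id)
    # ...
  end
end
```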

--- /dev/null
+++ b/data/lib/prometheus_exporter/instrumentation/sidekiq_process.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module PrometheusExporter::Instrumentation
+  class SidekiqProcess < PeriodicStats
+    def self.start(client: nil, frequency: 30)
+      client ||= PrometheusExporter::Client.default
+      sidekiq_process_collector = new
+
+      worker_loop { client.send_json(sidekiq_process_collector.collect) }
+
+      super
+    end
+
+    def initialize
+      @pid = ::Process.pid
+      @hostname = Socket.gethostname
+    end
+
+    def collect
+      { type: "sidekiq_process", process: collect_stats }
+    end
+
+    def collect_stats
+      process = current_process
+      return {} unless process
+
+      {
+        busy: process["busy"],
+        concurrency: process["concurrency"],
+        labels: {
+          labels: process["labels"].sort.join(","),
+          queues: process["queues"].sort.join(","),
+          quiet: process["quiet"],
+          tag: process["tag"],
+          hostname: process["hostname"],
+          identity: process["identity"],
+        },
+      }
+    end
+
+    def current_process
+      ::Sidekiq::ProcessSet.new.find { |sp| sp["hostname"] == @hostname && sp["pid"] == @pid }
+    end
+  end
+end
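
The new process collector reports per-process busy and concurrency figures, labelled with the process's own labels, queues, and identity taken from `Sidekiq::ProcessSet`. A sketch of starting it; placing it in the `:startup` lifecycle hook is an assumption, not something this diff prescribes:

```ruby
# Sketch — placement inside the :startup lifecycle hook is an assumption.
Sidekiq.configure_server do |config|
  config.on(:startup) do
    require "prometheus_exporter/instrumentation"
    PrometheusExporter::Instrumentation::SidekiqProcess.start
  end
end
```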

--- a/data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb
+++ b/data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb
@@ -1,50 +1,55 @@
 # frozen_string_literal: true

 module PrometheusExporter::Instrumentation
-  class SidekiqQueue
-    def self.start(client: nil, frequency: 30)
+  class SidekiqQueue < PeriodicStats
+    def self.start(client: nil, frequency: 30, all_queues: false)
       client ||= PrometheusExporter::Client.default
-      sidekiq_queue_collector = new
-
-
-
-
-
-
-
-
-
-
-        end
-      end
+      sidekiq_queue_collector = new(all_queues: all_queues)
+
+      worker_loop { client.send_json(sidekiq_queue_collector.collect) }
+
+      super
+    end
+
+    def initialize(all_queues: false)
+      @all_queues = all_queues
+      @pid = ::Process.pid
+      @hostname = Socket.gethostname
     end

     def collect
-      {
-        type: 'sidekiq_queue',
-        queues: collect_queue_stats
-      }
+      { type: "sidekiq_queue", queues: collect_queue_stats }
     end

     def collect_queue_stats
-
-      pid = ::Process.pid
-      ps = ::Sidekiq::ProcessSet.new
+      sidekiq_queues = ::Sidekiq::Queue.all

-
-
+      unless @all_queues
+        queues = collect_current_process_queues
+        sidekiq_queues.select! { |sidekiq_queue| queues.include?(sidekiq_queue.name) }
       end

-
+      sidekiq_queues
+        .map do |queue|
+          {
+            backlog: queue.size,
+            latency_seconds: queue.latency.to_i,
+            labels: {
+              queue: queue.name,
+            },
+          }
+        end
+        .compact
+    end
+
+    private
+
+    def collect_current_process_queues
+      ps = ::Sidekiq::ProcessSet.new
+
+      process = ps.find { |sp| sp["hostname"] == @hostname && sp["pid"] == @pid }

-
-      next unless queues.include? queue.name
-        {
-          backlog_total: queue.size,
-          latency_seconds: queue.latency.to_i,
-          labels: { queue: queue.name }
-        }
-      end.compact
+      process.nil? ? [] : process["queues"]
     end
   end
 end
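
Queue collection can now cover every Sidekiq queue in Redis rather than only the queues the current process subscribes to. A sketch of the new option:

```ruby
# all_queues: true reports size/latency for every queue in Redis,
# not just the ones this process subscribes to (sketch).
PrometheusExporter::Instrumentation::SidekiqQueue.start(all_queues: true, frequency: 30)
```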

--- /dev/null
+++ b/data/lib/prometheus_exporter/instrumentation/sidekiq_stats.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module PrometheusExporter::Instrumentation
+  class SidekiqStats < PeriodicStats
+    def self.start(client: nil, frequency: 30)
+      client ||= PrometheusExporter::Client.default
+      sidekiq_stats_collector = new
+
+      worker_loop { client.send_json(sidekiq_stats_collector.collect) }
+
+      super
+    end
+
+    def collect
+      { type: "sidekiq_stats", stats: collect_stats }
+    end
+
+    def collect_stats
+      stats = ::Sidekiq::Stats.new
+      {
+        "dead_size" => stats.dead_size,
+        "enqueued" => stats.enqueued,
+        "failed" => stats.failed,
+        "processed" => stats.processed,
+        "processes_size" => stats.processes_size,
+        "retry_size" => stats.retry_size,
+        "scheduled_size" => stats.scheduled_size,
+        "workers_size" => stats.workers_size,
+      }
+    end
+  end
+end
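
The stats collector exports the cluster-wide `Sidekiq::Stats` counters (enqueued, failed, retries, and so on). Because these numbers are global rather than per-process, a single reporting process is usually enough; a sketch:

```ruby
# Sketch — starting this from one Sidekiq process is usually sufficient,
# since the numbers are cluster-wide, not per-process.
require "prometheus_exporter/instrumentation"
PrometheusExporter::Instrumentation::SidekiqStats.start(frequency: 30)
```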

--- a/data/lib/prometheus_exporter/instrumentation/unicorn.rb
+++ b/data/lib/prometheus_exporter/instrumentation/unicorn.rb
@@ -1,29 +1,24 @@
 # frozen_string_literal: true

 begin
-  require
+  require "raindrops"
 rescue LoadError
   # No raindrops available, dont do anything
 end

 module PrometheusExporter::Instrumentation
   # collects stats from unicorn
-  class Unicorn
+  class Unicorn < PeriodicStats
     def self.start(pid_file:, listener_address:, client: nil, frequency: 30)
       unicorn_collector = new(pid_file: pid_file, listener_address: listener_address)
       client ||= PrometheusExporter::Client.default
-
-
-
-
-            client.send_json metric
-          rescue StandardError => e
-            STDERR.puts("Prometheus Exporter Failed To Collect Unicorn Stats #{e}")
-          ensure
-            sleep frequency
-          end
-        end
+
+      worker_loop do
+        metric = unicorn_collector.collect
+        client.send_json metric
       end
+
+      super
     end

     def initialize(pid_file:, listener_address:)
@@ -34,7 +29,7 @@ module PrometheusExporter::Instrumentation

     def collect
       metric = {}
-      metric[:type] =
+      metric[:type] = "unicorn"
       collect_unicorn_stats(metric)
       metric
     end
@@ -42,9 +37,9 @@ module PrometheusExporter::Instrumentation
     def collect_unicorn_stats(metric)
       stats = listener_address_stats

-      metric[:
-      metric[:
-      metric[:
+      metric[:active_workers] = stats.active
+      metric[:request_backlog] = stats.queued
+      metric[:workers] = worker_process_count
     end

     private
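
Unicorn instrumentation keeps its required arguments but now also runs on `worker_loop`. A sketch of starting it; the pid file path and listener address below are placeholders for a real deployment:

```ruby
# Paths/address below are placeholders for your deployment.
PrometheusExporter::Instrumentation::Unicorn.start(
  pid_file: "/var/run/unicorn.pid",
  listener_address: "127.0.0.1:3000"
)
```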

--- a/data/lib/prometheus_exporter/instrumentation.rb
+++ b/data/lib/prometheus_exporter/instrumentation.rb
@@ -1,13 +1,18 @@
 # frozen_string_literal: true

 require_relative "client"
+require_relative "instrumentation/periodic_stats"
 require_relative "instrumentation/process"
 require_relative "instrumentation/method_profiler"
 require_relative "instrumentation/sidekiq"
 require_relative "instrumentation/sidekiq_queue"
+require_relative "instrumentation/sidekiq_process"
+require_relative "instrumentation/sidekiq_stats"
 require_relative "instrumentation/delayed_job"
 require_relative "instrumentation/puma"
 require_relative "instrumentation/hutch"
 require_relative "instrumentation/unicorn"
 require_relative "instrumentation/active_record"
 require_relative "instrumentation/shoryuken"
+require_relative "instrumentation/resque"
+require_relative "instrumentation/good_job"

--- a/data/lib/prometheus_exporter/metric/base.rb
+++ b/data/lib/prometheus_exporter/metric/base.rb
@@ -2,9 +2,9 @@

 module PrometheusExporter::Metric
   class Base
-
     @default_prefix = nil if !defined?(@default_prefix)
     @default_labels = nil if !defined?(@default_labels)
+    @default_aggregation = nil if !defined?(@default_aggregation)

     # prefix applied to all metrics
     def self.default_prefix=(name)
@@ -23,6 +23,14 @@ module PrometheusExporter::Metric
       @default_labels || {}
     end

+    def self.default_aggregation=(aggregation)
+      @default_aggregation = aggregation
+    end
+
+    def self.default_aggregation
+      @default_aggregation ||= Summary
+    end
+
     attr_accessor :help, :name, :data

     def initialize(name, help)
@@ -66,13 +74,16 @@ module PrometheusExporter::Metric
     end

     def labels_text(labels)
-      labels = (labels || {})
+      labels = Base.default_labels.merge(labels || {})
       if labels && labels.length > 0
-        s =
-
-
-
-
+        s =
+          labels
+            .map do |key, value|
+              value = value.to_s
+              value = escape_value(value) if needs_escape?(value)
+              "#{key}=\"#{value}\""
+            end
+            .join(",")
         "{#{s}}"
       end
     end
@@ -97,16 +108,8 @@ module PrometheusExporter::Metric
       end
     end

-
-
-      def needs_escape?(str)
-        str.match?(/[\n"\\]/m)
-      end
-    else
-      def needs_escape?(str)
-        !!str.match(/[\n"\\]/m)
-      end
+    def needs_escape?(str)
+      str.match?(/[\n"\\]/m)
     end
-

   end
 end
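
`Base.default_aggregation` lets timing-style collectors build a different metric type than the default `Summary`. A sketch of switching the default to histograms; it assumes the exporter's metric classes are already loaded:

```ruby
# Assumes the exporter's metric classes are already loaded.
# Report timings as histograms instead of the default Summary aggregation.
PrometheusExporter::Metric::Base.default_aggregation = PrometheusExporter::Metric::Histogram
```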

--- a/data/lib/prometheus_exporter/metric/counter.rb
+++ b/data/lib/prometheus_exporter/metric/counter.rb
@@ -18,9 +18,7 @@ module PrometheusExporter::Metric
     end

     def metric_text
-      @data.map
-        "#{prefix(@name)}#{labels_text(labels)} #{value}"
-      end.join("\n")
+      @data.map { |labels, value| "#{prefix(@name)}#{labels_text(labels)} #{value}" }.join("\n")
     end

     def to_h

--- a/data/lib/prometheus_exporter/metric/gauge.rb
+++ b/data/lib/prometheus_exporter/metric/gauge.rb
@@ -5,6 +5,10 @@ module PrometheusExporter::Metric
     attr_reader :data

     def initialize(name, help)
+      if name.end_with?("_total")
+        raise ArgumentError, "The metric name of gauge must not have _total suffix. Given: #{name}"
+      end
+
       super
       reset!
     end
@@ -14,9 +18,7 @@ module PrometheusExporter::Metric
     end

     def metric_text
-      @data.map
-        "#{prefix(@name)}#{labels_text(labels)} #{value}"
-      end.join("\n")
+      @data.map { |labels, value| "#{prefix(@name)}#{labels_text(labels)} #{value}" }.join("\n")
     end

     def reset!
@@ -35,9 +37,7 @@ module PrometheusExporter::Metric
       if value.nil?
         data.delete(labels)
       else
-        if !(Numeric === value)
-          raise ArgumentError, 'value must be a number'
-        end
+        raise ArgumentError, "value must be a number" if !(Numeric === value)
         @data[labels] = value
       end
     end