prometheus_exporter 0.5.3 → 0.8.1
- checksums.yaml +4 -4
- data/.github/workflows/ci.yml +42 -0
- data/.gitignore +2 -0
- data/.rubocop.yml +5 -0
- data/Appraisals +10 -0
- data/CHANGELOG +28 -0
- data/README.md +138 -16
- data/bin/prometheus_exporter +29 -2
- data/gemfiles/.bundle/config +2 -0
- data/gemfiles/ar_60.gemfile +5 -0
- data/gemfiles/ar_61.gemfile +7 -0
- data/lib/prometheus_exporter.rb +1 -0
- data/lib/prometheus_exporter/client.rb +39 -8
- data/lib/prometheus_exporter/instrumentation.rb +1 -0
- data/lib/prometheus_exporter/instrumentation/active_record.rb +19 -12
- data/lib/prometheus_exporter/instrumentation/delayed_job.rb +3 -2
- data/lib/prometheus_exporter/instrumentation/method_profiler.rb +2 -1
- data/lib/prometheus_exporter/instrumentation/process.rb +1 -1
- data/lib/prometheus_exporter/instrumentation/puma.rb +17 -5
- data/lib/prometheus_exporter/instrumentation/resque.rb +40 -0
- data/lib/prometheus_exporter/instrumentation/sidekiq.rb +44 -3
- data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb +13 -2
- data/lib/prometheus_exporter/instrumentation/unicorn.rb +1 -1
- data/lib/prometheus_exporter/middleware.rb +40 -17
- data/lib/prometheus_exporter/server.rb +1 -0
- data/lib/prometheus_exporter/server/active_record_collector.rb +2 -1
- data/lib/prometheus_exporter/server/collector.rb +1 -0
- data/lib/prometheus_exporter/server/delayed_job_collector.rb +9 -8
- data/lib/prometheus_exporter/server/puma_collector.rb +9 -1
- data/lib/prometheus_exporter/server/resque_collector.rb +54 -0
- data/lib/prometheus_exporter/server/runner.rb +13 -3
- data/lib/prometheus_exporter/server/sidekiq_collector.rb +2 -2
- data/lib/prometheus_exporter/server/web_collector.rb +2 -5
- data/lib/prometheus_exporter/server/web_server.rb +33 -23
- data/lib/prometheus_exporter/version.rb +1 -1
- data/prometheus_exporter.gemspec +7 -3
- metadata +59 -11
- data/.travis.yml +0 -12

data/lib/prometheus_exporter/instrumentation/unicorn.rb CHANGED
@@ -18,7 +18,7 @@ module PrometheusExporter::Instrumentation
           metric = unicorn_collector.collect
           client.send_json metric
         rescue StandardError => e
-          STDERR.puts("Prometheus Exporter Failed To Collect Unicorn Stats #{e}")
+          client.logger.error("Prometheus Exporter Failed To Collect Unicorn Stats #{e}")
         ensure
           sleep frequency
         end
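
For context, a minimal sketch of starting this collector — the pid file, listener address, and frequency below are illustrative; the point of the change above is that collection errors now flow through the reporting client's logger instead of STDERR:

    # e.g. in an initializer or unicorn config (illustrative values)
    require 'prometheus_exporter/instrumentation'

    PrometheusExporter::Instrumentation::Unicorn.start(
      pid_file: "/var/run/unicorn.pid",        # assumption: your unicorn pid file
      listener_address: "127.0.0.1:3000",      # assumption: your unicorn listener
      frequency: 30                            # seconds between collections
    )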

data/lib/prometheus_exporter/middleware.rb CHANGED
@@ -36,22 +36,40 @@ class PrometheusExporter::Middleware
 
     result
   ensure
+
+    obj = {
+      type: "web",
+      timings: info,
+      queue_time: queue_time,
+      default_labels: default_labels(env, result)
+    }
+    labels = custom_labels(env)
+    if labels
+      obj = obj.merge(custom_labels: labels)
+    end
+
+    @client.send_json(obj)
+  end
+
+  def default_labels(env, result)
     status = (result && result[0]) || -1
     params = env["action_dispatch.request.parameters"]
-    action, controller = nil
+    action = controller = nil
     if params
       action = params["action"]
       controller = params["controller"]
     end
 
-    @client.send_json(
-      type: "web",
-      timings: info,
-      queue_time: queue_time,
-      action: action,
-      controller: controller,
+    {
+      action: action || "other",
+      controller: controller || "other",
       status: status
-    )
+    }
+  end
+
+  # allows subclasses to add custom labels based on env
+  def custom_labels(env)
+    nil
   end
 
   private

@@ -72,19 +90,24 @@ class PrometheusExporter::Middleware
     Process.clock_gettime(Process::CLOCK_REALTIME)
   end
 
-  # get the content of the x-queue-start or x-request-start header
+  # determine queue start from well-known trace headers
   def queue_start(env)
+
+    # get the content of the x-queue-start or x-request-start header
     value = env['HTTP_X_REQUEST_START'] || env['HTTP_X_QUEUE_START']
     unless value.nil? || value == ''
-      convert_header_to_ms(value.to_s)
+      # nginx returns time as milliseconds with 3 decimal places
+      # apache returns time as microseconds without decimal places
+      # this method takes care to convert both into a proper second + fractions timestamp
+      value = value.to_s.gsub(/t=|\./, '')
+      return "#{value[0, 10]}.#{value[10, 13]}".to_f
     end
-  end
 
-  # nginx returns time as milliseconds with 3 decimal places
-  # apache returns time as microseconds without decimal places
-  # this method takes care to convert both into a proper second + fractions timestamp
-  def convert_header_to_ms(str)
-    str = str.gsub(/t=|\./, '')
-    "#{str[0, 10]}.#{str[10, 13]}".to_f
+    # get the content of the x-amzn-trace-id header
+    # see also: https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-request-tracing.html
+    value = env['HTTP_X_AMZN_TRACE_ID']
+    value&.split('Root=')&.last&.split('-')&.fetch(1)&.to_i(16)
+
   end
+
 end
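
A short sketch of how the new custom_labels hook is meant to be used — the subclass and header name here are illustrative, not part of the gem:

    # config/initializers/prometheus.rb (illustrative)
    require 'prometheus_exporter/middleware'

    class AppPrometheusMiddleware < PrometheusExporter::Middleware
      # anything returned here is merged into the web metric labels
      def custom_labels(env)
        { tenant: env["HTTP_X_TENANT"] || "unknown" }
      end
    end

    # then mount the subclass instead of the stock middleware, e.g.
    # Rails.application.middleware.unshift AppPrometheusMiddleware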

data/lib/prometheus_exporter/server/active_record_collector.rb CHANGED
@@ -47,7 +47,8 @@ module PrometheusExporter::Server
       obj["created_at"] = now
 
       @active_record_metrics.delete_if do |current|
-        (obj["pid"] == current["pid"] && obj["hostname"] == current["hostname"]) ||
+        (obj["pid"] == current["pid"] && obj["hostname"] == current["hostname"] &&
+          obj["metric_labels"]["pool_name"] == current["metric_labels"]["pool_name"]) ||
           (current["created_at"] + MAX_ACTIVERECORD_METRIC_AGE < now)
       end
 
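
For orientation, a sketch of the fields the new dedup key relies on; real payloads come from PrometheusExporter::Instrumentation::ActiveRecord and also carry the connection-pool statistics (values and pool name below are illustrative):

    require 'socket'
    require 'prometheus_exporter/client'

    PrometheusExporter::Client.default.send_json(
      type: "active_record",
      pid: Process.pid.to_s,
      hostname: Socket.gethostname,
      metric_labels: { pool_name: "primary" }   # one series per connection pool
      # ... plus the pool stats reported by the instrumentation
    )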

data/lib/prometheus_exporter/server/collector.rb CHANGED
@@ -20,6 +20,7 @@ module PrometheusExporter::Server
       register_collector(UnicornCollector.new)
       register_collector(ActiveRecordCollector.new)
       register_collector(ShoryukenCollector.new)
+      register_collector(ResqueCollector.new)
     end
 
     def register_collector(collector)
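
The same register_collector call is available when embedding the collector instead of running the CLI; a rough sketch (MyAppCollector stands in for your own TypeCollector subclass and is not part of the gem):

    require 'prometheus_exporter/server'

    collector = PrometheusExporter::Server::Collector.new
    collector.register_collector(MyAppCollector.new)   # hypothetical custom collector

    server = PrometheusExporter::Server::WebServer.new(collector: collector)
    server.start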

data/lib/prometheus_exporter/server/delayed_job_collector.rb CHANGED
@@ -19,21 +19,22 @@ module PrometheusExporter::Server
     end
 
     def collect(obj)
-      default_labels = { job_name: obj['name'] }
+      default_labels = { job_name: obj['name'], queue_name: obj['queue_name'] }
       custom_labels = obj['custom_labels']
+
       labels = custom_labels.nil? ? default_labels : default_labels.merge(custom_labels)
 
       ensure_delayed_job_metrics
       @delayed_job_duration_seconds.observe(obj["duration"], labels)
       @delayed_jobs_total.observe(1, labels)
       @delayed_failed_jobs_total.observe(1, labels) if !obj["success"]
-      @delayed_jobs_max_attempts_reached_total.observe(1) if obj["attempts"] >= obj["max_attempts"]
-      @delayed_job_duration_seconds_summary.observe(obj["duration"])
-      @delayed_job_duration_seconds_summary.observe(obj["duration"], status: "success") if obj["success"]
-      @delayed_job_duration_seconds_summary.observe(obj["duration"], status: "failed") if !obj["success"]
-      @delayed_job_attempts_summary.observe(obj["attempts"]) if obj["success"]
-      @delayed_jobs_enqueued.observe(obj["enqueued"])
-      @delayed_jobs_pending.observe(obj["pending"])
+      @delayed_jobs_max_attempts_reached_total.observe(1, labels) if obj["attempts"] >= obj["max_attempts"]
+      @delayed_job_duration_seconds_summary.observe(obj["duration"], labels)
+      @delayed_job_duration_seconds_summary.observe(obj["duration"], labels.merge(status: "success")) if obj["success"]
+      @delayed_job_duration_seconds_summary.observe(obj["duration"], labels.merge(status: "failed")) if !obj["success"]
+      @delayed_job_attempts_summary.observe(obj["attempts"], labels) if obj["success"]
+      @delayed_jobs_enqueued.observe(obj["enqueued"], labels)
+      @delayed_jobs_pending.observe(obj["pending"], labels)
     end
 
     def metrics
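
For reference, the shape of the message this collector consumes, now labelled by queue_name as well as job_name — field names are the ones read above, values are illustrative, and in practice the payload comes from the DelayedJob instrumentation plugin:

    require 'prometheus_exporter/client'

    PrometheusExporter::Client.default.send_json(
      type: "delayed_job",
      name: "NotificationJob",
      queue_name: "default",
      duration: 0.42,
      success: true,
      attempts: 1,
      max_attempts: 25,
      enqueued: 10,
      pending: 3
    )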

data/lib/prometheus_exporter/server/puma_collector.rb CHANGED
@@ -34,6 +34,9 @@ module PrometheusExporter::Server
         if m["custom_labels"]
           labels.merge!(m["custom_labels"])
         end
+        if m["metric_labels"]
+          labels.merge!(m["metric_labels"])
+        end
 
         PUMA_GAUGES.map do |k, help|
           k = k.to_s

@@ -51,7 +54,12 @@ module PrometheusExporter::Server
       now = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
 
       obj["created_at"] = now
-
+
+      @puma_metrics.delete_if do |current|
+        (obj["pid"] == current["pid"] && obj["hostname"] == current["hostname"]) ||
+          (current["created_at"] + MAX_PUMA_METRIC_AGE < now)
+      end
+
       @puma_metrics << obj
     end
   end

data/lib/prometheus_exporter/server/resque_collector.rb ADDED
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module PrometheusExporter::Server
+  class ResqueCollector < TypeCollector
+    MAX_RESQUE_METRIC_AGE = 30
+    RESQUE_GAUGES = {
+      processed_jobs_total: "Total number of processed Resque jobs.",
+      failed_jobs_total: "Total number of failed Resque jobs.",
+      pending_jobs_total: "Total number of pending Resque jobs.",
+      queues_total: "Total number of Resque queues.",
+      workers_total: "Total number of Resque workers running.",
+      working_total: "Total number of Resque workers working."
+    }
+
+    def initialize
+      @resque_metrics = []
+      @gauges = {}
+    end
+
+    def type
+      "resque"
+    end
+
+    def metrics
+      return [] if resque_metrics.length == 0
+
+      resque_metrics.map do |metric|
+        labels = metric.fetch("custom_labels", {})
+
+        RESQUE_GAUGES.map do |name, help|
+          name = name.to_s
+          if value = metric[name]
+            gauge = gauges[name] ||= PrometheusExporter::Metric::Gauge.new("resque_#{name}", help)
+            gauge.observe(value, labels)
+          end
+        end
+      end
+
+      gauges.values
+    end
+
+    def collect(object)
+      now = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+
+      object["created_at"] = now
+      resque_metrics.delete_if { |metric| metric["created_at"] + MAX_RESQUE_METRIC_AGE < now }
+      resque_metrics << object
+    end
+
+    private
+
+    attr_reader :resque_metrics, :gauges
+  end
+end
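
A sketch of the message the new collector consumes — the gauge names are the RESQUE_GAUGES keys above; in practice the payload is produced by the new Resque instrumentation rather than sent by hand, and the values are illustrative:

    require 'prometheus_exporter/client'

    PrometheusExporter::Client.default.send_json(
      type: "resque",
      processed_jobs_total: 10_000,
      failed_jobs_total: 7,
      pending_jobs_total: 42,
      queues_total: 3,
      workers_total: 4,
      working_total: 2,
      custom_labels: { environment: "production" }
    )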

data/lib/prometheus_exporter/server/runner.rb CHANGED
@@ -1,6 +1,6 @@
 # frozen_string_literal: true
 
-
+require_relative '../client'
 require_relative '../instrumentation/unicorn'
 
 module PrometheusExporter::Server

@@ -15,6 +15,8 @@ module PrometheusExporter::Server
       @collector_class = nil
       @type_collectors = nil
       @prefix = nil
+      @auth = nil
+      @realm = nil
 
       options.each do |k, v|
         send("#{k}=", v) if self.class.method_defined?("#{k}=")

@@ -40,12 +42,20 @@ module PrometheusExporter::Server
         )
       end
 
-      server = server_class.new
+      server = server_class.new(port: port, bind: bind, collector: collector, timeout: timeout, verbose: verbose, auth: auth, realm: realm)
       server.start
     end
 
     attr_accessor :unicorn_listen_address, :unicorn_pid_file
-    attr_writer :prefix, :port, :bind, :collector_class, :type_collectors, :timeout, :verbose, :server_class, :label
+    attr_writer :prefix, :port, :bind, :collector_class, :type_collectors, :timeout, :verbose, :server_class, :label, :auth, :realm
+
+    def auth
+      @auth || nil
+    end
+
+    def realm
+      @realm || PrometheusExporter::DEFAULT_REALM
+    end
 
     def prefix
       @prefix || PrometheusExporter::DEFAULT_PREFIX
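
Putting the new options together, a rough sketch of starting the standalone collector from Ruby with basic auth enabled — the htpasswd path and realm are illustrative:

    require 'prometheus_exporter/server'

    runner = PrometheusExporter::Server::Runner.new(
      port: 9394,                                   # default port
      auth: "/etc/prometheus_exporter/htpasswd",    # htpasswd file checked when /metrics is requested
      realm: "metrics"                              # optional; falls back to PrometheusExporter::DEFAULT_REALM
    )
    runner.start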

data/lib/prometheus_exporter/server/sidekiq_collector.rb CHANGED
@@ -17,7 +17,7 @@ module PrometheusExporter::Server
     end
 
     def collect(obj)
-      default_labels = { job_name: obj['name'] }
+      default_labels = { job_name: obj['name'], queue: obj['queue'] }
       custom_labels = obj['custom_labels']
       labels = custom_labels.nil? ? default_labels : default_labels.merge(custom_labels)
 

@@ -52,7 +52,7 @@ module PrometheusExporter::Server
       if !@sidekiq_jobs_total
 
         @sidekiq_job_duration_seconds =
-          PrometheusExporter::Metric::
+          PrometheusExporter::Metric::Summary.new(
             "sidekiq_job_duration_seconds", "Total time spent in sidekiq jobs.")
 
         @sidekiq_jobs_total =

data/lib/prometheus_exporter/server/web_collector.rb CHANGED
@@ -56,14 +56,11 @@ module PrometheusExporter::Server
     end
 
     def observe(obj)
-      default_labels = {
-        controller: obj['controller'] || 'other',
-        action: obj['action'] || 'other'
-      }
+      default_labels = obj['default_labels']
       custom_labels = obj['custom_labels']
       labels = custom_labels.nil? ? default_labels : default_labels.merge(custom_labels)
 
-      @http_requests_total.observe(1, labels
+      @http_requests_total.observe(1, labels)
 
       if timings = obj["timings"]
         @http_duration_seconds.observe(timings["total_duration"], labels)

data/lib/prometheus_exporter/server/web_server.rb CHANGED
@@ -9,9 +9,14 @@ module PrometheusExporter::Server
   class WebServer
     attr_reader :collector
 
-    def initialize(
-
-      @
+    def initialize(opts)
+      @port = opts[:port] || PrometheusExporter::DEFAULT_PORT
+      @bind = opts[:bind] || PrometheusExporter::DEFAULT_BIND_ADDRESS
+      @collector = opts[:collector] || Collector.new
+      @timeout = opts[:timeout] || PrometheusExporter::DEFAULT_TIMEOUT
+      @verbose = opts[:verbose] || false
+      @auth = opts[:auth]
+      @realm = opts[:realm] || PrometheusExporter::DEFAULT_REALM
 
       @metrics_total = PrometheusExporter::Metric::Counter.new("collector_metrics_total", "Total metrics processed by exporter web.")
 
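
WebServer now takes a single options hash rather than keyword arguments; a minimal sketch of constructing it directly (values are illustrative, omitted keys fall back to the DEFAULT_* constants referenced above):

    require 'prometheus_exporter/server'

    server = PrometheusExporter::Server::WebServer.new(
      port: 9394,
      bind: "127.0.0.1",
      verbose: true,
      auth: "/etc/prometheus_exporter/htpasswd"   # optional basic-auth htpasswd file
    )
    server.start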

@@ -23,33 +28,34 @@ module PrometheusExporter::Server
       @sessions_total.observe(0)
       @bad_metrics_total.observe(0)
 
-      access_log, logger = nil
+      @access_log, @logger = nil
+      log_target = opts[:log_target]
 
-      if verbose
-        access_log = [
+      if @verbose
+        @access_log = [
           [$stderr, WEBrick::AccessLog::COMMON_LOG_FORMAT],
           [$stderr, WEBrick::AccessLog::REFERER_LOG_FORMAT],
         ]
-        logger = WEBrick::Log.new($stderr)
+        @logger = WEBrick::Log.new(log_target || $stderr)
       else
-        access_log = []
-        logger = WEBrick::Log.new("/dev/null")
+        @access_log = []
+        @logger = WEBrick::Log.new(log_target || "/dev/null")
       end
 
+      @logger.info "Using Basic Authentication via #{@auth}" if @verbose && @auth
+
       @server = WEBrick::HTTPServer.new(
-        Port: port,
-        BindAddress: bind,
-        Logger: logger,
-        AccessLog: access_log,
+        Port: @port,
+        BindAddress: @bind,
+        Logger: @logger,
+        AccessLog: @access_log,
       )
 
-      @collector = collector || Collector.new
-      @port = port
-      @timeout = timeout
-
       @server.mount_proc '/' do |req, res|
         res['Content-Type'] = 'text/plain; charset=utf-8'
         if req.path == '/metrics'
+          authenticate(req, res) if @auth
+
           res.status = 200
           if req.header["accept-encoding"].to_s.include?("gzip")
             sio = StringIO.new

@@ -82,10 +88,7 @@ module PrometheusExporter::Server
             @collector.process(block)
           rescue => e
             if @verbose
-              STDERR.puts
-              STDERR.puts e.inspect
-              STDERR.puts e.backtrace
-              STDERR.puts
+              logger.error "\n\n#{e.inspect}\n#{e.backtrace}\n\n"
             end
             @bad_metrics_total.observe
             res.body = "Bad Metrics #{e}"

@@ -103,7 +106,7 @@ module PrometheusExporter::Server
         begin
           @server.start
         rescue => e
-          STDERR.puts "Failed to start prometheus collector web on port #{@port}: #{e}"
+          logger.error "Failed to start prometheus collector web on port #{@port}: #{e}"
         end
       end
     end

@@ -120,7 +123,7 @@ module PrometheusExporter::Server
         end
       rescue Timeout::Error
         # we timed out ... bummer
-        STDERR.puts "Generating Prometheus metrics text timed out"
+        logger.error "Generating Prometheus metrics text timed out"
       end
 
       metrics = []

@@ -159,5 +162,12 @@ module PrometheusExporter::Server
       gauge
     end
 
+    def authenticate(req, res)
+      htpasswd = WEBrick::HTTPAuth::Htpasswd.new(@auth)
+      basic_auth = WEBrick::HTTPAuth::BasicAuth.new({ Realm: @realm, UserDB: htpasswd, Logger: @logger })
+
+      basic_auth.authenticate(req, res)
+    end
+
   end
 end

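
The auth option points at a standard htpasswd file; one way to create it is with WEBrick's own helper — path, realm, and credentials below are illustrative:

    require 'webrick'

    htpasswd = WEBrick::HTTPAuth::Htpasswd.new("/etc/prometheus_exporter/htpasswd")
    htpasswd.set_passwd("Prometheus Exporter", "metrics", "super-secret")   # realm, user, password
    htpasswd.flush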
data/prometheus_exporter.gemspec
CHANGED
@@ -24,19 +24,23 @@ Gem::Specification.new do |spec|
   spec.executables = ["prometheus_exporter"]
   spec.require_paths = ["lib"]
 
+  spec.add_dependency "webrick"
+
   spec.add_development_dependency "rubocop", ">= 0.69"
-  spec.add_development_dependency "bundler", "
+  spec.add_development_dependency "bundler", ">= 2.1.4"
   spec.add_development_dependency "rake", "~> 13.0"
   spec.add_development_dependency "minitest", "~> 5.0"
   spec.add_development_dependency "guard", "~> 2.0"
-  spec.add_development_dependency "mini_racer", "~> 0.1"
+  spec.add_development_dependency "mini_racer", "~> 0.3.1"
   spec.add_development_dependency "guard-minitest", "~> 2.0"
   spec.add_development_dependency "oj", "~> 3.0"
   spec.add_development_dependency "rack-test", "~> 0.8.3"
   spec.add_development_dependency "minitest-stub-const", "~> 0.6"
   spec.add_development_dependency "rubocop-discourse", ">2"
+  spec.add_development_dependency "appraisal", "~> 2.3"
+  spec.add_development_dependency "activerecord", "~> 6.0.0"
   if !RUBY_ENGINE == 'jruby'
     spec.add_development_dependency "raindrops", "~> 0.19"
   end
-  spec.required_ruby_version = '>= 2.
+  spec.required_ruby_version = '>= 2.6.0'
 end
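
For consumers the upgrade itself is a one-line Gemfile change; the new runtime dependency on webrick (no longer a default gem as of Ruby 3.0) is pulled in automatically:

    # Gemfile
    gem 'prometheus_exporter', '~> 0.8.1'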