prometheus_exporter 0.5.1 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci.yml +42 -0
  3. data/.gitignore +2 -0
  4. data/.rubocop.yml +7 -1
  5. data/Appraisals +10 -0
  6. data/CHANGELOG +36 -3
  7. data/README.md +278 -5
  8. data/bin/prometheus_exporter +21 -0
  9. data/gemfiles/.bundle/config +2 -0
  10. data/gemfiles/ar_60.gemfile +5 -0
  11. data/gemfiles/ar_61.gemfile +7 -0
  12. data/lib/prometheus_exporter.rb +2 -0
  13. data/lib/prometheus_exporter/client.rb +27 -3
  14. data/lib/prometheus_exporter/instrumentation.rb +2 -0
  15. data/lib/prometheus_exporter/instrumentation/active_record.rb +14 -7
  16. data/lib/prometheus_exporter/instrumentation/delayed_job.rb +3 -2
  17. data/lib/prometheus_exporter/instrumentation/method_profiler.rb +2 -1
  18. data/lib/prometheus_exporter/instrumentation/process.rb +2 -0
  19. data/lib/prometheus_exporter/instrumentation/puma.rb +16 -4
  20. data/lib/prometheus_exporter/instrumentation/resque.rb +40 -0
  21. data/lib/prometheus_exporter/instrumentation/sidekiq.rb +44 -3
  22. data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb +50 -0
  23. data/lib/prometheus_exporter/metric/base.rb +4 -0
  24. data/lib/prometheus_exporter/metric/counter.rb +4 -0
  25. data/lib/prometheus_exporter/metric/gauge.rb +4 -0
  26. data/lib/prometheus_exporter/metric/histogram.rb +6 -0
  27. data/lib/prometheus_exporter/metric/summary.rb +7 -0
  28. data/lib/prometheus_exporter/middleware.rb +40 -17
  29. data/lib/prometheus_exporter/server.rb +2 -0
  30. data/lib/prometheus_exporter/server/active_record_collector.rb +3 -1
  31. data/lib/prometheus_exporter/server/collector.rb +2 -0
  32. data/lib/prometheus_exporter/server/delayed_job_collector.rb +20 -8
  33. data/lib/prometheus_exporter/server/hutch_collector.rb +6 -0
  34. data/lib/prometheus_exporter/server/puma_collector.rb +9 -1
  35. data/lib/prometheus_exporter/server/resque_collector.rb +54 -0
  36. data/lib/prometheus_exporter/server/runner.rb +24 -2
  37. data/lib/prometheus_exporter/server/shoryuken_collector.rb +8 -0
  38. data/lib/prometheus_exporter/server/sidekiq_collector.rb +11 -2
  39. data/lib/prometheus_exporter/server/sidekiq_queue_collector.rb +46 -0
  40. data/lib/prometheus_exporter/server/web_collector.rb +7 -5
  41. data/lib/prometheus_exporter/server/web_server.rb +29 -17
  42. data/lib/prometheus_exporter/version.rb +1 -1
  43. data/prometheus_exporter.gemspec +9 -5
  44. metadata +67 -17
  45. data/.travis.yml +0 -12

data/bin/prometheus_exporter
@@ -2,6 +2,7 @@
 # frozen_string_literal: true

 require 'optparse'
+require 'json'

 require_relative "./../lib/prometheus_exporter"
 require_relative "./../lib/prometheus_exporter/server"
@@ -34,6 +35,9 @@ def run
     opt.on('--prefix METRIC_PREFIX', "Prefix to apply to all metrics (default: #{PrometheusExporter::DEFAULT_PREFIX})") do |o|
       options[:prefix] = o.to_s
     end
+    opt.on('--label METRIC_LABEL', "Label to apply to all metrics (default: #{PrometheusExporter::DEFAULT_LABEL})") do |o|
+      options[:label] = JSON.parse(o.to_s)
+    end
     opt.on('-c', '--collector FILE', String, "(optional) Custom collector to run") do |o|
       custom_collector_filename = o.to_s
     end
@@ -43,6 +47,12 @@ def run
     opt.on('-v', '--verbose') do |o|
       options[:verbose] = true
     end
+    opt.on('--auth FILE', String, "(optional) enable basic authentication using a htpasswd FILE") do |o|
+      options[:auth] = o
+    end
+    opt.on('--realm REALM', String, "(optional) Use REALM for basic authentication (default: \"#{PrometheusExporter::DEFAULT_REALM}\")") do |o|
+      options[:realm] = o
+    end

     opt.on('--unicorn-listen-address ADDRESS', String, '(optional) Address where unicorn listens on (unix or TCP address)') do |o|
       options[:unicorn_listen_address] = o
@@ -53,6 +63,17 @@ def run
     end
   end.parse!

+  if options.has_key?(:realm) && !options.has_key?(:auth)
+    STDERR.puts "[Warn] Providing REALM without AUTH has no effect"
+  end
+
+  if options.has_key?(:auth)
+    unless File.exist?(options[:auth]) && File.readable?(options[:auth])
+      STDERR.puts "[Error] The AUTH file either doesn't exist or we don't have access to it"
+      exit 1
+    end
+  end
+
   if custom_collector_filename
     eval File.read(custom_collector_filename), nil, File.expand_path(custom_collector_filename)
     found = false
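
Note on the new --auth/--realm flags: the file handed to --auth is a standard Apache htpasswd file, and --realm only overrides the advertised realm (default PrometheusExporter::DEFAULT_REALM). A minimal sketch of producing a compatible file from Ruby with WEBrick's stdlib htpasswd helper; the file name, realm, user and password below are illustrative, and this assumes WEBrick is available:

  require 'webrick'

  # Creates (or updates) an htpasswd file that can be passed to --auth.
  htpasswd = WEBrick::HTTPAuth::Htpasswd.new('prometheus.htpasswd')
  htpasswd.set_passwd('Prometheus Exporter', 'metrics', 's3cr3t')
  htpasswd.flush

  # afterwards: prometheus_exporter --auth prometheus.htpasswd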

data/gemfiles/.bundle/config
@@ -0,0 +1,2 @@
+---
+BUNDLE_RETRY: "1"

data/gemfiles/ar_60.gemfile
@@ -0,0 +1,5 @@
+# This file was generated by Appraisal
+
+source "https://rubygems.org"
+
+gemspec path: "../"

data/gemfiles/ar_61.gemfile
@@ -0,0 +1,7 @@
+# This file was generated by Appraisal
+
+source "https://rubygems.org"
+
+gem "activerecord", "~> 6.1.0"
+
+gemspec path: "../"

data/lib/prometheus_exporter.rb
@@ -9,7 +9,9 @@ module PrometheusExporter
   DEFAULT_PORT = 9394
   DEFAULT_BIND_ADDRESS = 'localhost'
   DEFAULT_PREFIX = 'ruby_'
+  DEFAULT_LABEL = {}
   DEFAULT_TIMEOUT = 2
+  DEFAULT_REALM = 'Prometheus Exporter'

   class OjCompat
     def self.parse(obj)

data/lib/prometheus_exporter/client.rb
@@ -64,8 +64,10 @@ module PrometheusExporter
       @metrics = []

       @queue = Queue.new
+
       @socket = nil
       @socket_started = nil
+      @socket_pid = nil

       max_queue_size ||= MAX_QUEUE_SIZE
       max_queue_size = max_queue_size.to_i
@@ -107,7 +109,16 @@ module PrometheusExporter
     end

     def send_json(obj)
-      payload = @custom_labels.nil? ? obj : obj.merge(custom_labels: @custom_labels)
+      payload =
+        if @custom_labels
+          if obj[:custom_labels]
+            obj.merge(custom_labels: @custom_labels.merge(obj[:custom_labels]))
+          else
+            obj.merge(custom_labels: @custom_labels)
+          end
+        else
+          obj
+        end
       send(@json_serializer.dump(payload))
     end

@@ -173,11 +184,14 @@ module PrometheusExporter
           end
         end
       end
+    rescue ThreadError => e
+      raise unless e.message =~ /can't alloc thread/
+      STDERR.puts "Prometheus Exporter, failed to send message ThreadError #{e}"
     end

     def close_socket!
       begin
-        if @socket
+        if @socket && !@socket.closed?
           @socket.write("0\r\n")
           @socket.write("\r\n")
           @socket.flush
@@ -191,12 +205,20 @@ module PrometheusExporter
     end

     def close_socket_if_old!
-      if @socket && ((@socket_started + MAX_SOCKET_AGE) < Time.now.to_f)
+      if @socket_pid == Process.pid && @socket && @socket_started && ((@socket_started + MAX_SOCKET_AGE) < Time.now.to_f)
         close_socket!
       end
     end

     def ensure_socket!
+      # if process was forked socket may be owned by parent
+      # leave it alone and reset
+      if @socket_pid != Process.pid
+        @socket = nil
+        @socket_started = nil
+        @socket_pid = nil
+      end
+
       close_socket_if_old!
       if !@socket
         @socket = TCPSocket.new @host, @port
@@ -207,12 +229,14 @@
         @socket.write("Content-Type: application/octet-stream\r\n")
         @socket.write("\r\n")
         @socket_started = Time.now.to_f
+        @socket_pid = Process.pid
       end

       nil
     rescue
       @socket = nil
       @socket_started = nil
+      @socket_pid = nil
       raise
     end

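
Two client-side behaviour changes are visible above: per-payload custom_labels are now merged with the client-level labels (the payload's keys win on collision), and a forked child detects that @socket_pid no longer matches Process.pid and opens its own socket instead of writing to the parent's. A rough sketch of the label merging, with illustrative label values:

  require 'prometheus_exporter/client'

  # Labels configured on the client are attached to every payload it sends.
  client = PrometheusExporter::Client.new(custom_labels: { service: "web" })

  # Labels supplied on an individual payload are merged in on top, so this
  # payload ends up labelled with service=web and queue=critical.
  client.send_json(
    type: "sidekiq", name: "HardJob", success: true, duration: 0.1,
    custom_labels: { queue: "critical" }
  )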

data/lib/prometheus_exporter/instrumentation.rb
@@ -4,9 +4,11 @@ require_relative "client"
 require_relative "instrumentation/process"
 require_relative "instrumentation/method_profiler"
 require_relative "instrumentation/sidekiq"
+require_relative "instrumentation/sidekiq_queue"
 require_relative "instrumentation/delayed_job"
 require_relative "instrumentation/puma"
 require_relative "instrumentation/hutch"
 require_relative "instrumentation/unicorn"
 require_relative "instrumentation/active_record"
 require_relative "instrumentation/shoryuken"
+require_relative "instrumentation/resque"

data/lib/prometheus_exporter/instrumentation/active_record.rb
@@ -67,21 +67,28 @@ module PrometheusExporter::Instrumentation
       ObjectSpace.each_object(::ActiveRecord::ConnectionAdapters::ConnectionPool) do |pool|
         next if pool.connections.nil?

-        labels_from_config = pool.spec.config
-          .select { |k, v| @config_labels.include? k }
-          .map { |k, v| [k.to_s.prepend("dbconfig_"), v] }
-
-        labels = @metric_labels.merge(pool_name: pool.spec.name).merge(Hash[labels_from_config])
-
         metric = {
           pid: pid,
           type: "active_record",
           hostname: ::PrometheusExporter.hostname,
-          metric_labels: labels
+          metric_labels: labels(pool)
         }
         metric.merge!(pool.stat)
         metrics << metric
       end
     end
+
+    private
+
+    def labels(pool)
+      if ::ActiveRecord.version < Gem::Version.new("6.1.0.rc1")
+        @metric_labels.merge(pool_name: pool.spec.name).merge(pool.spec.config
+          .select { |k, v| @config_labels.include? k }
+          .map { |k, v| [k.to_s.dup.prepend("dbconfig_"), v] }.to_h)
+      else
+        @metric_labels.merge(pool_name: pool.db_config.name).merge(
+          @config_labels.each_with_object({}) { |l, acc| acc["dbconfig_#{l}"] = pool.db_config.public_send(l) })
+      end
+    end
   end
 end
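
The pool labels are now built by the private labels(pool) helper, which reads pool.spec.config on ActiveRecord < 6.1 and pool.db_config on 6.1+, so config_labels keep working across the Rails 6.1 connection-pool API change. A hedged start-up sketch; the label keys below are illustrative and must correspond to entries in the database config:

  # e.g. in a Rails initializer
  require 'prometheus_exporter/instrumentation'

  PrometheusExporter::Instrumentation::ActiveRecord.start(
    custom_labels: { type: "web" },      # merged into every connection-pool metric
    config_labels: [:database, :host]    # reported as dbconfig_database / dbconfig_host
  )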

data/lib/prometheus_exporter/instrumentation/delayed_job.rb
@@ -13,8 +13,8 @@ module PrometheusExporter::Instrumentation
       callbacks do |lifecycle|
         lifecycle.around(:invoke_job) do |job, *args, &block|
           max_attempts = Delayed::Worker.max_attempts
-          enqueued_count = Delayed::Job.count
-          pending_count = Delayed::Job.where(attempts: 0, locked_at: nil).count
+          enqueued_count = Delayed::Job.where(queue: job.queue).count
+          pending_count = Delayed::Job.where(attempts: 0, locked_at: nil, queue: job.queue).count
           instrumenter.call(job, max_attempts, enqueued_count, pending_count, *args, &block)
         end
       end
@@ -41,6 +41,7 @@ module PrometheusExporter::Instrumentation
       @client.send_json(
         type: "delayed_job",
         name: job.handler.to_s.match(JOB_CLASS_REGEXP).to_a[1].to_s,
+        queue_name: job.queue,
         success: success,
         duration: duration,
         attempts: attempts,
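
Enqueued and pending counts are now scoped to the job's queue, and a queue_name field is sent alongside each job, letting the collector label Delayed Job metrics per queue. Registration is unchanged; a sketch, assuming the plugin-registration API this gem has exposed in prior versions (register_plugin):

  # e.g. in an initializer of the worker process
  require 'prometheus_exporter/instrumentation'
  PrometheusExporter::Instrumentation::DelayedJob.register_plugin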

data/lib/prometheus_exporter/instrumentation/method_profiler.rb
@@ -5,6 +5,7 @@ module PrometheusExporter::Instrumentation; end

 class PrometheusExporter::Instrumentation::MethodProfiler
   def self.patch(klass, methods, name)
+    patch_source_line = __LINE__ + 3
     patches = methods.map do |method_name|
       <<~RUBY
       unless defined?(#{method_name}__mp_unpatched)
@@ -26,7 +27,7 @@ class PrometheusExporter::Instrumentation::MethodProfiler
       RUBY
     end.join("\n")

-    klass.class_eval patches
+    klass.class_eval patches, __FILE__, patch_source_line
   end

   def self.transfer
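
Passing __FILE__ and a computed line offset to class_eval means backtraces from patched methods point at a real source location instead of "(eval)". Usage is unchanged; an illustrative patch (the target class and method are examples, not part of this diff):

  require 'prometheus_exporter/instrumentation'

  # Records timings of Redis::Client#call under the "redis" name.
  PrometheusExporter::Instrumentation::MethodProfiler.patch(Redis::Client, [:call], "redis")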

data/lib/prometheus_exporter/instrumentation/process.rb
@@ -3,6 +3,8 @@
 # collects stats from currently running process
 module PrometheusExporter::Instrumentation
   class Process
+    @thread = nil if !defined?(@thread)
+
     def self.start(client: nil, type: "ruby", frequency: 30, labels: nil)

       metric_labels =

data/lib/prometheus_exporter/instrumentation/puma.rb
@@ -5,8 +5,8 @@ require "json"
 # collects stats from puma
 module PrometheusExporter::Instrumentation
   class Puma
-    def self.start(client: nil, frequency: 30)
-      puma_collector = new
+    def self.start(client: nil, frequency: 30, labels: {})
+      puma_collector = new(labels)
       client ||= PrometheusExporter::Client.default
       Thread.new do
         while true
@@ -22,13 +22,25 @@ module PrometheusExporter::Instrumentation
       end
     end

+    def initialize(metric_labels = {})
+      @metric_labels = metric_labels
+    end
+
     def collect
-      metric = {}
-      metric[:type] = "puma"
+      metric = {
+        pid: pid,
+        type: "puma",
+        hostname: ::PrometheusExporter.hostname,
+        metric_labels: @metric_labels
+      }
       collect_puma_stats(metric)
       metric
     end

+    def pid
+      @pid = ::Process.pid
+    end
+
     def collect_puma_stats(metric)
       stats = JSON.parse(::Puma.stats)

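
The Puma collector now reports pid and hostname and accepts a labels hash, so metrics from several Puma clusters can be told apart. A start-up sketch for the Puma config file; the label value is illustrative:

  # config/puma.rb
  after_worker_boot do
    require 'prometheus_exporter/instrumentation'
    PrometheusExporter::Instrumentation::Puma.start(labels: { cluster: "web-1" })
  end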

data/lib/prometheus_exporter/instrumentation/resque.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+# collects stats from resque
+module PrometheusExporter::Instrumentation
+  class Resque
+    def self.start(client: nil, frequency: 30)
+      resque_collector = new
+      client ||= PrometheusExporter::Client.default
+      Thread.new do
+        while true
+          begin
+            client.send_json(resque_collector.collect)
+          rescue => e
+            STDERR.puts("Prometheus Exporter Failed To Collect Resque Stats #{e}")
+          ensure
+            sleep frequency
+          end
+        end
+      end
+    end
+
+    def collect
+      metric = {}
+      metric[:type] = "resque"
+      collect_resque_stats(metric)
+      metric
+    end
+
+    def collect_resque_stats(metric)
+      info = ::Resque.info
+
+      metric[:processed_jobs_total] = info[:processed]
+      metric[:failed_jobs_total] = info[:failed]
+      metric[:pending_jobs_total] = info[:pending]
+      metric[:queues_total] = info[:queues]
+      metric[:worker_total] = info[:workers]
+      metric[:working_total] = info[:working]
+    end
+  end
+end
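
The new Resque collector polls ::Resque.info on a background thread and ships the totals as a single "resque" payload every frequency seconds. A start-up sketch (the initializer location is illustrative):

  # e.g. in config/initializers/prometheus.rb
  require 'prometheus_exporter/instrumentation'
  PrometheusExporter::Instrumentation::Resque.start   # defaults to a 30s interval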

data/lib/prometheus_exporter/instrumentation/sidekiq.rb
@@ -1,6 +1,15 @@
 # frozen_string_literal: true

+require 'yaml'
+
 module PrometheusExporter::Instrumentation
+  JOB_WRAPPER_CLASS_NAME = 'ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper'
+  DELAYED_CLASS_NAMES = [
+    'Sidekiq::Extensions::DelayedClass',
+    'Sidekiq::Extensions::DelayedModel',
+    'Sidekiq::Extensions::DelayedMailer',
+  ]
+
   class Sidekiq
     def self.death_handler
       -> (job, ex) do
@@ -32,15 +41,47 @@ module PrometheusExporter::Instrumentation
       raise e
     ensure
       duration = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC) - start
-      class_name = worker.class.to_s == 'ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper' ?
-        msg['wrapped'] : worker.class.to_s
       @client.send_json(
         type: "sidekiq",
-        name: class_name,
+        name: get_name(worker, msg),
+        queue: queue,
         success: success,
         shutdown: shutdown,
         duration: duration
       )
     end
+
+    private
+
+    def get_name(worker, msg)
+      class_name = worker.class.to_s
+      if class_name == JOB_WRAPPER_CLASS_NAME
+        get_job_wrapper_name(msg)
+      elsif DELAYED_CLASS_NAMES.include?(class_name)
+        get_delayed_name(msg, class_name)
+      else
+        class_name
+      end
+    end
+
+    def get_job_wrapper_name(msg)
+      msg['wrapped']
+    end
+
+    def get_delayed_name(msg, class_name)
+      # fallback to class_name since we're relying on the internal implementation
+      # of the delayed extensions
+      # https://github.com/mperham/sidekiq/blob/master/lib/sidekiq/extensions/class_methods.rb
+      begin
+        (target, method_name, _args) = YAML.load(msg['args'].first)
+        if target.class == Class
+          "#{target.name}##{method_name}"
+        else
+          "#{target.class.name}##{method_name}"
+        end
+      rescue
+        class_name
+      end
+    end
   end
 end
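
Job names are now resolved through get_name, so ActiveJob wrappers report the wrapped class and Sidekiq's delayed extensions report Class#method (for example, a job enqueued via UserMailer.delay.welcome_email would be named "UserMailer#welcome_email", falling back to the extension class name if the YAML payload cannot be parsed); the queue name is sent as well. Middleware registration itself is unchanged; a sketch of the usual server-middleware wiring:

  Sidekiq.configure_server do |config|
    config.server_middleware do |chain|
      require 'prometheus_exporter/instrumentation'
      chain.add PrometheusExporter::Instrumentation::Sidekiq
    end
  end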

data/lib/prometheus_exporter/instrumentation/sidekiq_queue.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module PrometheusExporter::Instrumentation
+  class SidekiqQueue
+    def self.start(client: nil, frequency: 30)
+      client ||= PrometheusExporter::Client.default
+      sidekiq_queue_collector = new
+
+      Thread.new do
+        loop do
+          begin
+            client.send_json(sidekiq_queue_collector.collect)
+          rescue StandardError => e
+            STDERR.puts("Prometheus Exporter Failed To Collect Sidekiq Queue metrics #{e}")
+          ensure
+            sleep frequency
+          end
+        end
+      end
+    end
+
+    def collect
+      {
+        type: 'sidekiq_queue',
+        queues: collect_queue_stats
+      }
+    end
+
+    def collect_queue_stats
+      hostname = Socket.gethostname
+      pid = ::Process.pid
+      ps = ::Sidekiq::ProcessSet.new
+
+      process = ps.find do |sp|
+        sp['hostname'] == hostname && sp['pid'] == pid
+      end
+
+      queues = process.nil? ? [] : process['queues']
+
+      ::Sidekiq::Queue.all.map do |queue|
+        next unless queues.include? queue.name
+        {
+          backlog_total: queue.size,
+          latency_seconds: queue.latency.to_i,
+          labels: { queue: queue.name }
+        }
+      end.compact
+    end
+  end
+end
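
The SidekiqQueue collector reports backlog size and latency only for the queues the current process is configured to work, discovered via Sidekiq::ProcessSet. A start-up sketch, run inside the Sidekiq server process:

  Sidekiq.configure_server do |config|
    config.on :startup do
      require 'prometheus_exporter/instrumentation'
      PrometheusExporter::Instrumentation::SidekiqQueue.start
    end
  end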