prometheus_exporter 0.4.9 → 0.5.0

Files changed (45)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +6 -1
  3. data/CHANGELOG +43 -0
  4. data/Gemfile +2 -0
  5. data/Guardfile +2 -0
  6. data/README.md +84 -3
  7. data/Rakefile +2 -0
  8. data/bench/bench.rb +2 -0
  9. data/bin/prometheus_exporter +8 -1
  10. data/examples/custom_collector.rb +2 -0
  11. data/lib/prometheus_exporter/client.rb +19 -7
  12. data/lib/prometheus_exporter/instrumentation/active_record.rb +87 -0
  13. data/lib/prometheus_exporter/instrumentation/delayed_job.rb +9 -3
  14. data/lib/prometheus_exporter/instrumentation/method_profiler.rb +2 -0
  15. data/lib/prometheus_exporter/instrumentation/process.rb +1 -12
  16. data/lib/prometheus_exporter/instrumentation/puma.rb +4 -2
  17. data/lib/prometheus_exporter/instrumentation/shoryuken.rb +31 -0
  18. data/lib/prometheus_exporter/instrumentation/sidekiq.rb +9 -5
  19. data/lib/prometheus_exporter/instrumentation/unicorn.rb +8 -6
  20. data/lib/prometheus_exporter/instrumentation.rb +5 -0
  21. data/lib/prometheus_exporter/metric/base.rb +8 -0
  22. data/lib/prometheus_exporter/metric/counter.rb +9 -1
  23. data/lib/prometheus_exporter/metric/gauge.rb +9 -1
  24. data/lib/prometheus_exporter/metric/histogram.rb +14 -0
  25. data/lib/prometheus_exporter/metric/summary.rb +15 -1
  26. data/lib/prometheus_exporter/metric.rb +2 -0
  27. data/lib/prometheus_exporter/server/active_record_collector.rb +56 -0
  28. data/lib/prometheus_exporter/server/collector.rb +9 -2
  29. data/lib/prometheus_exporter/server/collector_base.rb +2 -0
  30. data/lib/prometheus_exporter/server/delayed_job_collector.rb +14 -1
  31. data/lib/prometheus_exporter/server/hutch_collector.rb +2 -0
  32. data/lib/prometheus_exporter/server/process_collector.rb +1 -0
  33. data/lib/prometheus_exporter/server/puma_collector.rb +11 -1
  34. data/lib/prometheus_exporter/server/runner.rb +8 -30
  35. data/lib/prometheus_exporter/server/shoryuken_collector.rb +59 -0
  36. data/lib/prometheus_exporter/server/sidekiq_collector.rb +2 -0
  37. data/lib/prometheus_exporter/server/type_collector.rb +2 -0
  38. data/lib/prometheus_exporter/server/unicorn_collector.rb +8 -1
  39. data/lib/prometheus_exporter/server/web_server.rb +2 -1
  40. data/lib/prometheus_exporter/server.rb +4 -0
  41. data/lib/prometheus_exporter/version.rb +3 -1
  42. data/lib/prometheus_exporter.rb +14 -0
  43. data/prometheus_exporter.gemspec +17 -13
  44. metadata +38 -6
  45. data/.travis +0 -9
@@ -6,7 +6,7 @@ module PrometheusExporter::Metric
 
     def initialize(name, help)
       super
-      @data = {}
+      reset!
     end
 
     def type
@@ -19,6 +19,14 @@ module PrometheusExporter::Metric
       end.join("\n")
     end
 
+    def reset!
+      @data = {}
+    end
+
+    def to_h
+      @data.dup
+    end
+
     def observe(value, labels = {})
       if value.nil?
         data.delete(labels)
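The reset!/to_h pair added above gives collectors a uniform way to snapshot and clear metric state. For illustration only (not part of this changeset; names and values are invented), the gauge API behaves roughly like this:

  require 'prometheus_exporter'
  require 'prometheus_exporter/metric'

  # Record a couple of labelled observations on a gauge.
  gauge = PrometheusExporter::Metric::Gauge.new("queue_depth", "Number of jobs waiting")
  gauge.observe(3, queue: "default")
  gauge.observe(7, queue: "mailers")

  gauge.to_h   # => { { queue: "default" } => 3, { queue: "mailers" } => 7 } (a copy of the internal data)
  gauge.reset! # discards all recorded observations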
@@ -8,11 +8,25 @@ module PrometheusExporter::Metric
     def initialize(name, help, opts = {})
       super(name, help)
       @buckets = (opts[:buckets] || DEFAULT_BUCKETS).sort.reverse
+      reset!
+    end
+
+    def reset!
       @sums = {}
       @counts = {}
       @observations = {}
     end
 
+    def to_h
+      data = {}
+      @observations.each do |labels, buckets|
+        count = @counts[labels]
+        sum = @sums[labels]
+        data[labels] = { "count" => count, "sum" => sum }
+      end
+      data
+    end
+
     def type
       "histogram"
     end
@@ -10,12 +10,26 @@ module PrometheusExporter::Metric
 
     def initialize(name, help, opts = {})
       super(name, help)
+      reset!
+      @quantiles = opts[:quantiles] || DEFAULT_QUANTILES
+    end
+
+    def reset!
       @buffers = [{}, {}]
       @last_rotated = Process.clock_gettime(Process::CLOCK_MONOTONIC)
       @current_buffer = 0
       @counts = {}
       @sums = {}
-      @quantiles = opts[:quantiles] || DEFAULT_QUANTILES
+    end
+
+    def to_h
+      data = {}
+      calculate_all_quantiles.each do |labels, quantiles|
+        count = @counts[labels]
+        sum = @sums[labels]
+        data[labels] = { "count" => count, "sum" => sum }
+      end
+      data
     end
 
     def type
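Together with the opts plumbing in the collector further below, these constructors now honour custom buckets and quantiles. A rough sketch with invented values (not from this changeset):

  require 'prometheus_exporter'
  require 'prometheus_exporter/metric'

  # Histogram with custom buckets (opts[:buckets]) and Summary with custom quantiles (opts[:quantiles]).
  histogram = PrometheusExporter::Metric::Histogram.new(
    "request_duration_seconds", "Time spent in requests", buckets: [0.1, 0.5, 1, 5])
  summary = PrometheusExporter::Metric::Summary.new(
    "job_duration_seconds", "Time spent in jobs", quantiles: [0.99, 0.5])

  histogram.observe(0.3, route: "home")
  summary.observe(1.2)

  # to_h reports per-label "count"/"sum" pairs, e.g.
  # histogram.to_h => { { route: "home" } => { "count" => 1, "sum" => 0.3 } }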
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require_relative "metric/base"
 require_relative "metric/counter"
 require_relative "metric/gauge"
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+module PrometheusExporter::Server
+  class ActiveRecordCollector < TypeCollector
+    MAX_ACTIVERECORD_METRIC_AGE = 60
+    ACTIVE_RECORD_GAUGES = {
+      connections: "Total connections in pool",
+      busy: "Connections in use in pool",
+      dead: "Dead connections in pool",
+      idle: "Idle connections in pool",
+      waiting: "Connection requests waiting",
+      size: "Maximum allowed connection pool size"
+    }
+
+    def initialize
+      @active_record_metrics = []
+    end
+
+    def type
+      "active_record"
+    end
+
+    def metrics
+      return [] if @active_record_metrics.length == 0
+
+      metrics = {}
+
+      @active_record_metrics.map do |m|
+        metric_key = (m["metric_labels"] || {}).merge("pid" => m["pid"])
+
+        ACTIVE_RECORD_GAUGES.map do |k, help|
+          k = k.to_s
+          if v = m[k]
+            g = metrics[k] ||= PrometheusExporter::Metric::Gauge.new("active_record_connection_pool_#{k}", help)
+            g.observe(v, metric_key)
+          end
+        end
+      end
+
+      metrics.values
+    end
+
+    def collect(obj)
+      now = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+
+      obj["created_at"] = now
+
+      @active_record_metrics.delete_if do |current|
+        (obj["pid"] == current["pid"] && obj["hostname"] == current["hostname"]) ||
+          (current["created_at"] + MAX_ACTIVERECORD_METRIC_AGE < now)
+      end
+
+      @active_record_metrics << obj
+    end
+  end
+end
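The payloads this collector consumes come from the new data/lib/prometheus_exporter/instrumentation/active_record.rb listed above (not shown here); a hand-built example with invented values:

  require 'prometheus_exporter/server'

  payload = {
    "type" => "active_record",
    "pid" => "1234",
    "hostname" => "app-01",
    "metric_labels" => { "pool_name" => "primary" },
    "connections" => 10, "busy" => 2, "dead" => 0,
    "idle" => 8, "waiting" => 0, "size" => 10
  }

  collector = PrometheusExporter::Server::ActiveRecordCollector.new
  collector.collect(payload)
  collector.metrics # => one active_record_connection_pool_* gauge per key above, labelled with pool_name and pid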
@@ -17,6 +17,8 @@ module PrometheusExporter::Server
       register_collector(PumaCollector.new)
       register_collector(HutchCollector.new)
       register_collector(UnicornCollector.new)
+      register_collector(ActiveRecordCollector.new)
+      register_collector(ShoryukenCollector.new)
     end
 
     def register_collector(collector)
@@ -72,6 +74,7 @@ module PrometheusExporter::Server
     def register_metric_unsafe(obj)
       name = obj["name"]
       help = obj["help"]
+      opts = symbolize_keys(obj["opts"] || {})
 
       metric =
         case obj["type"]
@@ -80,9 +83,9 @@ module PrometheusExporter::Server
         when "counter"
           PrometheusExporter::Metric::Counter.new(name, help)
         when "summary"
-          PrometheusExporter::Metric::Summary.new(name, help)
+          PrometheusExporter::Metric::Summary.new(name, help, opts)
         when "histogram"
-          PrometheusExporter::Metric::Histogram.new(name, help)
+          PrometheusExporter::Metric::Histogram.new(name, help, opts)
         end
 
       if metric
@@ -91,5 +94,9 @@ module PrometheusExporter::Server
         STDERR.puts "failed to register metric #{obj}"
       end
     end
+
+    def symbolize_keys(hash)
+      hash.inject({}) { |memo, k| memo[k.first.to_sym] = k.last; memo }
+    end
   end
 end
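register_metric_unsafe is driven by remote "register" messages; a sketch of the JSON it now understands, with opts carried through to the Summary/Histogram constructors (field values invented):

  obj = {
    "type" => "histogram",
    "name" => "api_duration_seconds",
    "help" => "Time spent in API calls",
    "opts" => { "buckets" => [0.005, 0.05, 0.5, 5] }
  }

  # symbolize_keys(obj["opts"]) # => { buckets: [0.005, 0.05, 0.5, 5] }, which
  # Histogram.new / Summary.new accept as their opts argument.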
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module PrometheusExporter::Server
 
   # minimal interface to implement a customer collector
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module PrometheusExporter::Server
   class DelayedJobCollector < TypeCollector
 
@@ -19,12 +21,15 @@ module PrometheusExporter::Server
       @delayed_job_duration_seconds_summary.observe(obj["duration"], status: "success") if obj["success"]
       @delayed_job_duration_seconds_summary.observe(obj["duration"], status: "failed") if !obj["success"]
       @delayed_job_attempts_summary.observe(obj["attempts"]) if obj["success"]
+      @delayed_jobs_enqueued.observe(obj["enqueued"])
+      @delayed_jobs_pending.observe(obj["pending"])
     end
 
     def metrics
       if @delayed_jobs_total
         [@delayed_job_duration_seconds, @delayed_jobs_total, @delayed_failed_jobs_total,
-         @delayed_jobs_max_attempts_reached_total, @delayed_job_duration_seconds_summary, @delayed_job_attempts_summary]
+         @delayed_jobs_max_attempts_reached_total, @delayed_job_duration_seconds_summary, @delayed_job_attempts_summary,
+         @delayed_jobs_enqueued, @delayed_jobs_pending]
       else
         []
       end
@@ -43,6 +48,14 @@ module PrometheusExporter::Server
         PrometheusExporter::Metric::Counter.new(
           "delayed_jobs_total", "Total number of delayed jobs executed.")
 
+      @delayed_jobs_enqueued =
+        PrometheusExporter::Metric::Gauge.new(
+          "delayed_jobs_enqueued", "Number of enqueued delayed jobs.")
+
+      @delayed_jobs_pending =
+        PrometheusExporter::Metric::Gauge.new(
+          "delayed_jobs_pending", "Number of pending delayed jobs.")
+
      @delayed_failed_jobs_total =
        PrometheusExporter::Metric::Counter.new(
          "delayed_failed_jobs_total", "Total number failed delayed jobs executed.")
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module PrometheusExporter::Server
   class HutchCollector < TypeCollector
 
@@ -35,6 +35,7 @@ module PrometheusExporter::Server
 
       @process_metrics.map do |m|
        metric_key = m["metric_labels"].merge("pid" => m["pid"])
+       metric_key.merge!(m["custom_labels"] || {})
 
        PROCESS_GAUGES.map do |k, help|
          k = k.to_s
@@ -1,5 +1,8 @@
+# frozen_string_literal: true
+
 module PrometheusExporter::Server
   class PumaCollector < TypeCollector
+    MAX_PUMA_METRIC_AGE = 30
     PUMA_GAUGES = {
       workers_total: "Number of puma workers.",
       booted_workers_total: "Number of puma workers booted.",
@@ -26,7 +29,10 @@ module PrometheusExporter::Server
       @puma_metrics.map do |m|
         labels = {}
         if m["phase"]
-          labels.merge(phase: m["phase"])
+          labels.merge!(phase: m["phase"])
+        end
+        if m["custom_labels"]
+          labels.merge!(m["custom_labels"])
         end
 
         PUMA_GAUGES.map do |k, help|
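The custom_labels merged above originate on the reporting side. A sketch assuming the 0.5.0 client accepts a custom_labels option (that change lives in data/lib/prometheus_exporter/client.rb in the file list, not shown here):

  require 'prometheus_exporter/client'
  require 'prometheus_exporter/instrumentation'

  # Tag every metric sent from this process; the puma/process/unicorn collectors
  # merge these hashes into the gauge labels as shown above.
  PrometheusExporter::Client.default = PrometheusExporter::Client.new(
    custom_labels: { environment: "production" }
  )

  PrometheusExporter::Instrumentation::Puma.start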
@@ -42,6 +48,10 @@ module PrometheusExporter::Server
     end
 
     def collect(obj)
+      now = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+
+      obj["created_at"] = now
+      @puma_metrics.delete_if { |m| m["created_at"] + MAX_PUMA_METRIC_AGE < now }
       @puma_metrics << obj
     end
   end
@@ -1,10 +1,11 @@
 # frozen_string_literal: true
+
 require 'prometheus_exporter/client'
 require_relative '../instrumentation/unicorn'
 
 module PrometheusExporter::Server
-  class RunnerException < StandardError; end;
-  class WrongInheritance < RunnerException; end;
+  class RunnerException < StandardError; end
+  class WrongInheritance < RunnerException; end
 
   class Runner
     def initialize(options = {})
@@ -31,69 +32,46 @@ module PrometheusExporter::Server
         )
       end
 
-      server = server_class.new port: port, collector: collector, timeout: timeout, verbose: verbose
+      server = server_class.new port: port, bind: bind, collector: collector, timeout: timeout, verbose: verbose
       server.start
     end
 
-    def prefix=(prefix)
-      @prefix = prefix
-    end
+    attr_accessor :unicorn_listen_address, :unicorn_pid_file
+    attr_writer :prefix, :port, :bind, :collector_class, :type_collectors, :timeout, :verbose, :server_class
 
     def prefix
       @prefix || PrometheusExporter::DEFAULT_PREFIX
     end
 
-    def port=(port)
-      @port = port
-    end
-
     def port
       @port || PrometheusExporter::DEFAULT_PORT
    end
 
-    def collector_class=(collector_class)
-      @collector_class = collector_class
+    def bind
+      @bind || PrometheusExporter::DEFAULT_BIND_ADDRESS
     end
 
     def collector_class
       @collector_class || PrometheusExporter::Server::Collector
     end
 
-    def type_collectors=(type_collectors)
-      @type_collectors = type_collectors
-    end
-
     def type_collectors
      @type_collectors || []
    end
 
-    def timeout=(timeout)
-      @timeout = timeout
-    end
-
    def timeout
      @timeout || PrometheusExporter::DEFAULT_TIMEOUT
    end
 
-    def verbose=(verbose)
-      @verbose = verbose
-    end
-
    def verbose
      return @verbose if defined? @verbose
      false
    end
 
-    def server_class=(server_class)
-      @server_class = server_class
-    end
-
    def server_class
      @server_class || PrometheusExporter::Server::WebServer
    end
 
-    attr_accessor :unicorn_listen_address, :unicorn_pid_file
-
    def collector
      @_collector ||= collector_class.new
    end
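A sketch of driving the server programmatically with the new bind option, assuming Runner#initialize assigns the passed options through the writers declared above:

  require 'prometheus_exporter/server'

  # Listen on all interfaces instead of the new localhost default.
  runner = PrometheusExporter::Server::Runner.new(bind: "0.0.0.0", port: 9394)
  runner.start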
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module PrometheusExporter::Server
+  class ShoryukenCollector < TypeCollector
+
+    def type
+      "shoryuken"
+    end
+
+    def collect(obj)
+      default_labels = { job_name: obj['name'] , queue_name: obj['queue'] }
+      custom_labels = obj['custom_labels']
+      labels = custom_labels.nil? ? default_labels : default_labels.merge(custom_labels)
+
+      ensure_shoryuken_metrics
+      @shoryuken_job_duration_seconds.observe(obj["duration"], labels)
+      @shoryuken_jobs_total.observe(1, labels)
+      @shoryuken_restarted_jobs_total.observe(1, labels) if obj["shutdown"]
+      @shoryuken_failed_jobs_total.observe(1, labels) if !obj["success"] && !obj["shutdown"]
+    end
+
+    def metrics
+      if @shoryuken_jobs_total
+        [
+          @shoryuken_job_duration_seconds,
+          @shoryuken_jobs_total,
+          @shoryuken_restarted_jobs_total,
+          @shoryuken_failed_jobs_total,
+        ]
+      else
+        []
+      end
+    end
+
+    protected
+
+    def ensure_shoryuken_metrics
+      if !@shoryuken_jobs_total
+
+        @shoryuken_job_duration_seconds =
+          PrometheusExporter::Metric::Counter.new(
+            "shoryuken_job_duration_seconds", "Total time spent in shoryuken jobs.")
+
+        @shoryuken_jobs_total =
+          PrometheusExporter::Metric::Counter.new(
+            "shoryuken_jobs_total", "Total number of shoryuken jobs executed.")
+
+        @shoryuken_restarted_jobs_total =
+          PrometheusExporter::Metric::Counter.new(
+            "shoryuken_restarted_jobs_total", "Total number of shoryuken jobs that we restarted because of a shoryuken shutdown.")
+
+        @shoryuken_failed_jobs_total =
+          PrometheusExporter::Metric::Counter.new(
+            "shoryuken_failed_jobs_total", "Total number of failed shoryuken jobs.")
+
+      end
+    end
+  end
+end
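The matching middleware lives in the new data/lib/prometheus_exporter/instrumentation/shoryuken.rb listed above (not shown here); a hand-built payload with invented values shows what collect expects:

  require 'prometheus_exporter/server'

  payload = {
    "type" => "shoryuken",
    "queue" => "default",
    "name" => "HardJob",
    "success" => true,
    "shutdown" => false,
    "duration" => 0.31,
    "custom_labels" => { "environment" => "production" }
  }

  collector = PrometheusExporter::Server::ShoryukenCollector.new
  collector.collect(payload)
  collector.metrics # => the four shoryuken_* counters defined above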
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module PrometheusExporter::Server
   class SidekiqCollector < TypeCollector
 
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module PrometheusExporter::Server
   class TypeCollector
     def type
@@ -3,6 +3,8 @@
 # custom type collector for prometheus_exporter for handling the metrics sent from
 # PrometheusExporter::Instrumentation::Unicorn
 class PrometheusExporter::Server::UnicornCollector < PrometheusExporter::Server::TypeCollector
+  MAX_UNICORN_METRIC_AGE = 60
+
   UNICORN_GAUGES = {
     workers_total: 'Number of unicorn workers.',
     active_workers_total: 'Number of active unicorn workers',
@@ -23,11 +25,13 @@ class PrometheusExporter::Server::UnicornCollector < PrometheusExporter::Server:
     metrics = {}
 
     @unicorn_metrics.map do |m|
+      labels = m["custom_labels"] || {}
+
       UNICORN_GAUGES.map do |k, help|
         k = k.to_s
         if (v = m[k])
           g = metrics[k] ||= PrometheusExporter::Metric::Gauge.new("unicorn_#{k}", help)
-          g.observe(v)
+          g.observe(v, labels)
         end
       end
     end
@@ -36,6 +40,9 @@ class PrometheusExporter::Server::UnicornCollector < PrometheusExporter::Server:
   end
 
   def collect(obj)
+    now = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+    obj["created_at"] = now
+    @unicorn_metrics.delete_if { |m| m['created_at'] + MAX_UNICORN_METRIC_AGE < now }
     @unicorn_metrics << obj
   end
 end
@@ -9,7 +9,7 @@ module PrometheusExporter::Server
   class WebServer
     attr_reader :collector
 
-    def initialize(port: , collector: nil, timeout: PrometheusExporter::DEFAULT_TIMEOUT, verbose: false)
+    def initialize(port: , bind: nil, collector: nil, timeout: PrometheusExporter::DEFAULT_TIMEOUT, verbose: false)
 
       @verbose = verbose
 
@@ -38,6 +38,7 @@ module PrometheusExporter::Server
 
       @server = WEBrick::HTTPServer.new(
         Port: port,
+        BindAddress: bind,
         Logger: logger,
         AccessLog: access_log,
       )
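The bind keyword flows from the runner straight into WEBrick. A standalone sketch using the signature shown above (values illustrative):

  require 'prometheus_exporter/server'

  # The runner passes DEFAULT_BIND_ADDRESS ('localhost') unless overridden;
  # here the server is bound explicitly to all interfaces.
  server = PrometheusExporter::Server::WebServer.new(
    port: 9394,
    bind: "0.0.0.0",
    collector: PrometheusExporter::Server::Collector.new
  )
  server.start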
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require_relative "metric"
 require_relative "server/type_collector"
 require_relative "server/web_collector"
@@ -11,3 +13,5 @@ require_relative "server/runner"
 require_relative "server/puma_collector"
 require_relative "server/hutch_collector"
 require_relative "server/unicorn_collector"
+require_relative "server/active_record_collector"
+require_relative "server/shoryuken_collector"
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module PrometheusExporter
-  VERSION = "0.4.9"
+  VERSION = '0.5.0'
 end
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require_relative "prometheus_exporter/version"
 require "json"
 require "thread"
@@ -5,6 +7,7 @@ require "thread"
 module PrometheusExporter
   # per: https://github.com/prometheus/prometheus/wiki/Default-port-allocations
   DEFAULT_PORT = 9394
+  DEFAULT_BIND_ADDRESS = 'localhost'
   DEFAULT_PREFIX = 'ruby_'
   DEFAULT_TIMEOUT = 2
 
@@ -17,6 +20,17 @@ module PrometheusExporter
     end
   end
 
+  def self.hostname
+    @hostname ||=
+      begin
+        require 'socket'
+        Socket.gethostname
+      rescue => e
+        STDERR.puts "Unable to lookup hostname #{e}"
+        "unknown-host"
+      end
+  end
+
   def self.detect_json_serializer(preferred)
     if preferred.nil?
       preferred = :oj if has_oj?
@@ -5,24 +5,27 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 require "prometheus_exporter/version"
 
 Gem::Specification.new do |spec|
-  spec.name          = "prometheus_exporter"
-  spec.version       = PrometheusExporter::VERSION
-  spec.authors       = ["Sam Saffron"]
-  spec.email         = ["sam.saffron@gmail.com"]
+  spec.name = "prometheus_exporter"
+  spec.version = PrometheusExporter::VERSION
+  spec.authors = ["Sam Saffron"]
+  spec.email = ["sam.saffron@gmail.com"]
 
-  spec.summary       = %q{Prometheus Exporter}
-  spec.description   = %q{Prometheus metric collector and exporter for Ruby}
-  spec.homepage      = "https://github.com/discourse/prometheus_exporter"
-  spec.license       = "MIT"
+  spec.summary = %q{Prometheus Exporter}
+  spec.description = %q{Prometheus metric collector and exporter for Ruby}
+  spec.homepage = "https://github.com/discourse/prometheus_exporter"
+  spec.license = "MIT"
 
-  spec.files         = `git ls-files -z`.split("\x0").reject do |f|
+  spec.post_install_message = "prometheus_exporter will only bind to localhost by default as of v0.5"
+
+  spec.files = `git ls-files -z`.split("\x0").reject do |f|
     f.match(%r{^(test|spec|features|bin)/})
   end
-  spec.bindir        = "bin"
-  spec.executables   = ["prometheus_exporter"]
-  spec.require_paths = ["lib"]
+  spec.bindir = "bin"
+  spec.executables = ["prometheus_exporter"]
+  spec.require_paths = ["lib"]
 
-  spec.add_development_dependency "bundler", "~> 1.16"
+  spec.add_development_dependency "rubocop", ">= 0.69"
+  spec.add_development_dependency "bundler", "> 1.16"
   spec.add_development_dependency "rake", "~> 10.0"
   spec.add_development_dependency "minitest", "~> 5.0"
   spec.add_development_dependency "guard", "~> 2.0"
@@ -31,6 +34,7 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency "oj", "~> 3.0"
   spec.add_development_dependency "rack-test", "~> 0.8.3"
   spec.add_development_dependency "minitest-stub-const", "~> 0.6"
+  spec.add_development_dependency 'rubocop-discourse', '~> 1.0'
   if !RUBY_ENGINE == 'jruby'
     spec.add_development_dependency "raindrops", "~> 0.19"
   end