sidekiq_queue_metrics 2.1.1 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 8baa544934c8ad85fe77e15b8058d6b0d0e47f8e789ddf5077b24c2d0d59a197
-   data.tar.gz: 907c164a937d60b602c0700daa49113b7dcf3d710ac6473fc1a83ae3ef961638
+   metadata.gz: 9513e994c0d230463b79ed7a2d43e47b6159ea9d5ac36b3288d3da3b2f07f091
+   data.tar.gz: c6c488224f22b3b74038312c6ba273af65489aa04ccc1e68057389088aa682dc
  SHA512:
-   metadata.gz: 5fde602302fd8474d3ea9e3702a1266a37bd20bccf89d9f4be3b557255f8e9975d831b31dd5cc9f3a7ba65e9dbc04febc360b5a9129516b914ce07f21c063d63
-   data.tar.gz: ea67ac50f705404a421455f2cfda336f5ab29f756244d1a5a0d566b206ed10a55b7bf9c23939a7f2e557f102d5445226aed259a5d84d328b5b7a31c7f85c0d16
+   metadata.gz: df033456251c68174d066d57bdbdf6acd122554d03e601735b79c98524d573083e5b92880c18abe3516ede22f7d56eec682995e41f2b97c76f9bbe3b95b5b414
+   data.tar.gz: 855585c740c62a970edef6222d7d13cad47a02528e5856441b66f452de9137461c88c6d01c2c271ccb189338f65d63dbae985ea8a7ded80f3ea87084e2a8f78b
data/.gitignore CHANGED
@@ -1,2 +1,3 @@
  .idea
  *.gem
+ Gemfile.lock
@@ -9,3 +9,4 @@ before_install:
 
  script:
  - "bundle exec rspec"
+ - "SIDEKIQ_VERSION=4.2.10 bundle update && bundle exec rspec"
data/Gemfile CHANGED
@@ -1,5 +1,9 @@
  source :rubygems
 
- gem 'sidekiq'
+ sidekiq_version = ENV.fetch('SIDEKIQ_VERSION', '> 0')
+
+ gemspec
+
+ gem 'sidekiq', sidekiq_version
  gem 'eldritch'
- gem 'rspec'
+ gem 'rspec'
@@ -1,10 +1,22 @@
  module Sidekiq::QueueMetrics
+   def self.support_death_handlers?
+     Sidekiq::VERSION >= '5.1'
+   end
+
    def self.init(config)
      config.server_middleware do |chain|
        chain.add Sidekiq::QueueMetrics::JobSuccessMonitor
      end
 
-     config.death_handlers << Sidekiq::QueueMetrics::JobDeathMonitor.proc
+     config.on(:startup) { UpgradeManager.upgrade_if_needed }
+
+     if support_death_handlers?
+       config.death_handlers << Sidekiq::QueueMetrics::JobDeathMonitor.proc
+     else
+       config.server_middleware do |chain|
+         chain.add Sidekiq::QueueMetrics::JobDeathMiddleware
+       end
+     end
    end
 
    def self.storage_location=(key)
@@ -22,4 +34,4 @@ module Sidekiq::QueueMetrics
    def self.storage_location
      @storage_location
    end
- end
+ end
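For context, this is the entry point a host application calls. A minimal sketch of wiring it up, assuming the usual `Sidekiq.configure_server` initializer (file name and location are the host application's choice, not part of this diff):

# config/initializers/sidekiq.rb (path is illustrative)
require 'sidekiq'
require 'sidekiq_queue_metrics'

Sidekiq.configure_server do |config|
  # init adds JobSuccessMonitor, schedules the v2-to-v3 upgrade check on :startup,
  # and registers either a death handler (Sidekiq >= 5.1) or JobDeathMiddleware.
  Sidekiq::QueueMetrics.init(config)
end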
@@ -0,0 +1,43 @@
+ module Sidekiq::QueueMetrics
+   module Helpers
+     FAILED_JOBS_KEY = 'failed_jobs'.freeze
+
+     def self.build_queue_stats_key(queue)
+       "#{stats_key}:#{queue}"
+     end
+
+     def self.build_failed_jobs_key(queue)
+       "#{FAILED_JOBS_KEY}:#{queue}"
+     end
+
+     def self.stats_key
+       Sidekiq::QueueMetrics.storage_location || 'queue_stats'
+     end
+
+     def self.convert_hash_values(original_hash, &block)
+       original_hash.reduce({}) do |result, (k,v)|
+         result[k] = case v
+                     when Array then v
+                     when Hash then convert_hash_values(v, &block)
+                     else block.(v)
+                     end
+
+         result
+       end
+     end
+
+     def self.build_metrics_for_view(last_metrics, current_metrics)
+       current_metrics.each_with_object({}) do |(queue, metric), new_queue_metrics|
+         new_queue_metrics[queue] = metric.each_with_object({}) do |(name, count), updated_metrics|
+           previous_metric_value = last_metrics[queue] ? last_metrics[queue][name] : nil
+           animate = !previous_metric_value.nil? && previous_metric_value != count
+
+           updated_metrics[name] = {
+             'count' => count,
+             'animate' => animate
+           }
+         end
+       end
+     end
+   end
+ end
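A short usage sketch of the new helpers, assuming the default storage location and an illustrative 'mailer_queue' name (expected return values follow from the spec file added further down):

Sidekiq::QueueMetrics::Helpers.build_queue_stats_key('mailer_queue')   # => "queue_stats:mailer_queue"
Sidekiq::QueueMetrics::Helpers.build_failed_jobs_key('mailer_queue')   # => "failed_jobs:mailer_queue"

# Redis HGETALL returns string values; Storage#get_stats uses this helper to coerce them back.
Sidekiq::QueueMetrics::Helpers.convert_hash_values({ 'processed' => '3' }) { |v| v.to_i }
# => { 'processed' => 3 }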
@@ -0,0 +1,17 @@
+ module Sidekiq::QueueMetrics
+   class JobDeathMiddleware
+     def call(worker, msg, queue)
+       call_dead_monitor(msg) if is_dead_job?(msg)
+
+       yield if block_given?
+     end
+
+     def is_dead_job?(msg)
+       msg.key?('retry_count') && msg['retry_count'] == 0
+     end
+
+     def call_dead_monitor(msg)
+       Sidekiq::QueueMetrics::JobDeathMonitor.proc.call(msg, msg['error_class'])
+     end
+   end
+ end
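This middleware is the fallback for Sidekiq versions without death handlers; `init` registers it automatically, but a manual registration (a sketch mirroring what `init` does on pre-5.1 Sidekiq) would look like:

Sidekiq.configure_server do |config|
  config.server_middleware do |chain|
    # Inspects msg['retry_count'] to approximate a job-death event on old Sidekiq.
    chain.add Sidekiq::QueueMetrics::JobDeathMiddleware
  end
end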
@@ -17,4 +17,4 @@ module Sidekiq::QueueMetrics
        'failed'
      end
    end
- end
+ end
@@ -11,4 +11,4 @@ module Sidekiq::QueueMetrics
        'processed'
      end
    end
- end
+ end
@@ -4,25 +4,11 @@ require 'sidekiq_queue_metrics/storage'
  module Sidekiq::QueueMetrics
    class Monitor
      def monitor(queue)
-       stats = existing_stats
-       stats[queue] ||= {}
-
-       if stats[queue][status_counter].nil?
-         stats[queue][status_counter] = 1
-       else
-         stats[queue][status_counter] += 1
-       end
-
-       Storage.set_stats(stats.to_json)
-     end
-
-     protected
-     def status_counter
+       Storage.increment_stat(queue, status_counter)
      end
 
-     private
-     def existing_stats
-       JSON.load(Storage.get_stats || '{}')
+     protected def status_counter
+       fail 'This method should be implemented by child monitors'
      end
    end
- end
+ end
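The base class now only asks subclasses for their counter name and delegates the increment to `Storage.increment_stat`; the shipped `JobSuccessMonitor` and `JobDeathMonitor` return 'processed' and 'failed' respectively (see the two hunks above). A hypothetical subclass, for illustration only:

module Sidekiq::QueueMetrics
  # Hypothetical example, not part of the gem.
  class ExampleMonitor < Monitor
    protected def status_counter
      'processed'
    end
  end
end

Sidekiq::QueueMetrics::ExampleMonitor.new.monitor('mailer_queue')
# increments the 'processed' field of the queue_stats:mailer_queue hash by 1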
@@ -6,34 +6,34 @@ module Sidekiq::QueueMetrics
    class << self
      def fetch
        queues = []
-       success_and_failed_stats = enqueued_jobs = scheduled_jobs = retry_stats = {}
+       enqueued_jobs = scheduled_jobs = retry_stats = {}
+
        together do
          async do
            queues = Sidekiq::Queue.all.map(&:name).map(&:to_s)
            queues.each {|queue| enqueued_jobs[queue] = fetch_enqueued_jobs(queue)}
          end
 
-         async {success_and_failed_stats = fetch_success_and_failed_stats}
          async {retry_stats = fetch_retry_stats}
          async {scheduled_jobs = fetch_scheduled_stats}
        end
 
-       queues.map do |queue|
-         stats = {'processed' => 0, 'failed' => 0}
-         if success_and_failed_stats.has_key?(queue)
-           stats['processed'] = val_or_default(success_and_failed_stats[queue]['processed'])
-           stats['failed'] = val_or_default(success_and_failed_stats[queue]['failed'])
-         end
+       queues.reduce({}) do |stats, queue|
+         stats[queue] = {
+           'enqueued' => val_or_default(enqueued_jobs[queue]),
+           'in_retry' => val_or_default(retry_stats[queue]),
+           'scheduled' => val_or_default(scheduled_jobs[queue])
+         }.merge(fetch_success_and_failed_stats(queue))
 
-         stats['enqueued'] = val_or_default(enqueued_jobs[queue])
-         stats['in_retry'] = val_or_default(retry_stats[queue])
-         stats['scheduled'] = val_or_default(scheduled_jobs[queue])
-         {queue => stats}
-       end.reduce({}, :merge)
+         stats
+       end
      end
 
-     def fetch_success_and_failed_stats
-       JSON.load(Storage.get_stats || '{}')
+     def fetch_success_and_failed_stats(queue)
+       default_metric_values = { 'processed' => 0, 'failed' => 0 }
+       default_metric_values.merge(
+         Sidekiq::QueueMetrics::Storage.get_stats(queue)
+       )
      end
 
      def fetch_enqueued_jobs(queue)
@@ -52,8 +52,7 @@ module Sidekiq::QueueMetrics
        Storage.failed_jobs(queue).reverse
      end
 
-     private
-     def val_or_default(val, default = 0)
+     private def val_or_default(val, default = 0)
        val || default
      end
    end
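After this change `fetch` builds one hash per queue, merging live Sidekiq counts with the stored processed/failed counters. A sketch of the resulting shape (queue name and numbers are illustrative; the keys match the spec expectations below):

Sidekiq::QueueMetrics.fetch
# => {
#      'mailer_queue' => {
#        'enqueued'  => 1,
#        'in_retry'  => 0,
#        'scheduled' => 0,
#        'processed' => 2,
#        'failed'    => 1
#      }
#    }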
@@ -1,42 +1,38 @@
  module Sidekiq::QueueMetrics
    class Storage
-     FAILED_JOBS_KEY = 'failed_jobs'.freeze
-
      class << self
-       def set_stats(key = stats_key, value)
+       def increment_stat(queue, stat, value = 1)
          Sidekiq.redis_pool.with do |conn|
-           conn.set(key, value)
+           conn.hincrby(Helpers.build_queue_stats_key(queue), stat, value)
          end
        end
 
-       def get_stats(key = stats_key)
-         Sidekiq.redis_pool.with do |conn|
-           conn.get(key)
+       def get_stats(queue)
+         stats = Sidekiq.redis_pool.with do |conn|
+           conn.hgetall(Helpers.build_queue_stats_key(queue))
          end
+
+         Helpers.convert_hash_values(stats) { |value| value.to_i }
        end
 
        def add_failed_job(job, max_count = Sidekiq::QueueMetrics.max_recently_failed_jobs)
-         Sidekiq.redis_pool.with do |conn|
-           queue = job['queue']
-           failed_jobs = JSON.parse(conn.get("#{FAILED_JOBS_KEY}:#{queue}") || '[]')
+         queue = job['queue']
 
-           if failed_jobs.size >= max_count
-             (failed_jobs.size - max_count + 1).times {failed_jobs.shift}
-           end
+         Sidekiq.redis_pool.with do |conn|
+           failed_job_key_for_queue = Helpers.build_failed_jobs_key(queue)
 
-           conn.set("#{FAILED_JOBS_KEY}:#{queue}", (failed_jobs << job).to_json)
+           conn.lpush(failed_job_key_for_queue, Sidekiq.dump_json(job))
+           conn.rpop(failed_job_key_for_queue) if conn.llen(failed_job_key_for_queue) >= max_count
          end
        end
 
        def failed_jobs(queue)
-         Sidekiq.redis_pool.with do |conn|
-           JSON.parse(conn.get("#{FAILED_JOBS_KEY}:#{queue}") || '[]')
+         result = Sidekiq.redis_pool.with do |conn|
+           conn.lrange(Helpers.build_failed_jobs_key(queue), 0, -1)
          end
-       end
 
-       def stats_key
-         Sidekiq::QueueMetrics.storage_location || 'queue_stats'
+         result.map(&Sidekiq.method(:load_json))
        end
      end
    end
- end
+ end
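Storage now keeps per-queue counters in a Redis hash and recent failed jobs in a capped Redis list, instead of JSON blobs under a single key. A sketch of the resulting layout, using illustrative data:

Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'processed')
Sidekiq::QueueMetrics::Storage.add_failed_job('queue' => 'mailer_queue', 'args' => [1])

Sidekiq.redis_pool.with do |conn|
  conn.hgetall('queue_stats:mailer_queue')        # => { "processed" => "1" }
  conn.lrange('failed_jobs:mailer_queue', 0, -1)  # => ["{\"queue\":\"mailer_queue\",\"args\":[1]}"]
end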
@@ -0,0 +1,76 @@
+ require 'redlock'
+
+ module Sidekiq
+   module QueueMetrics
+     class UpgradeManager
+       def self.logger
+         @@logger ||= Logger.new(STDOUT)
+       end
+
+       # Check if an upgrade is needed and it's not already in progress.
+       # If it's in progress, it will block during that time waiting for the upgrade to complete.
+       #
+       # In case the lock is not released because the upgrade is taking too long
+       # it will raise an exception
+       #
+       # @raises [Redlock::LockError]
+       #
+       def self.upgrade_if_needed
+         acquire_lock do
+           return unless upgrade_needed?
+
+           v2_to_v3_upgrade
+         end
+       rescue Redlock::LockError
+         fail 'A long running upgrade is in progress. Try restarting the application once finished'
+       end
+
+       def self.v2_to_v3_upgrade
+         logger.info('Starting sidekiq_queue_metrics v3 upgrade')
+
+         Sidekiq.redis_pool.with do |conn|
+           old_collected_metrics = JSON.load(conn.get(Helpers.stats_key))
+           old_collected_metrics.each do |(queue, stats)|
+             logger.info("Upgrading #{queue} statistics")
+
+             stats.each { |(stat, value)| Sidekiq::QueueMetrics::Storage.increment_stat(queue, stat, value) }
+
+             failed_jobs_key = Helpers.build_failed_jobs_key(queue)
+
+             if conn.exists(failed_jobs_key) && conn.type(failed_jobs_key) == 'string'
+               temporal_failed_key = "_#{failed_jobs_key}"
+
+               failed_jobs = JSON.parse(conn.get(Helpers.build_failed_jobs_key(queue)) || '[]')
+
+               conn.rename(failed_jobs_key, temporal_failed_key)
+
+               failed_jobs.each { |job| Sidekiq::QueueMetrics::Storage::add_failed_job(job) }
+
+               conn.del(temporal_failed_key)
+             end
+           end
+
+           conn.del(Helpers.stats_key)
+         end
+
+         logger.info("Sucessfully upgraded")
+       end
+
+       def self.upgrade_needed?
+         Sidekiq.redis_pool.with { |conn| conn.exists(Helpers.stats_key) }
+       end
+
+       def self.acquire_lock(&block)
+         Sidekiq.redis_pool.with do |conn|
+           lock_manager = Redlock::Client.new([conn], {
+             retry_count: 5,
+             retry_delay: 500,
+             retry_jitter: 150, # milliseconds
+             redis_timeout: 0.1 # seconds
+           })
+           lock_manager.lock!('sidekiq_queue_metrics:upgrade_lock', 10000, &block)
+         end
+       end
+     end
+   end
+ end
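The upgrade normally runs by itself because `init` hooks it into `config.on(:startup)`: under a Redlock lock it folds the old single `queue_stats` JSON blob into the new per-queue hashes and converts the old JSON arrays of failed jobs into Redis lists. A manual invocation is just:

# Runs only while the legacy 'queue_stats' key is still present; if the lock cannot
# be acquired it fails with a message asking to restart once the in-progress
# upgrade finishes.
Sidekiq::QueueMetrics::UpgradeManager.upgrade_if_needed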
@@ -1,5 +1,5 @@
  module Sidekiq
    module QueueMetrics
-     VERSION = '2.1.1'
+     VERSION = '3.0.0'
    end
  end
@@ -60,11 +60,11 @@
      <td><%= job['class'] %></td>
      <td><%= job['args'].join(', ') %></td>
      <td>
-       <a href="<%= "/sidekiq/queue_metrics/queues/#{@queue}/jobs/#{job['jid']}" %>">
+       <a href="<%= "#{root_path}queue_metrics/queues/#{@queue}/jobs/#{job['jid']}" %>">
          <%= relative_time(Time.at(job['enqueued_at'])) %>
        </a>
      </td>
      <td><%= job['error_message'] %></td>
    </tr>
  <% end %>
- </table>
+ </table>
@@ -59,7 +59,7 @@
  <thead>
  <tr>
    <th>
-     <a href="<%= "/sidekiq/queue_metrics/queues/#{queue}/summary" %>">
+     <a href="<%= "#{root_path}queue_metrics/queues/#{queue}/summary" %>">
        <span class="heading"><%= queue %></span>
      </a>
    </th>
@@ -73,4 +73,4 @@
    </tr>
  <% end %>
  </table>
- <% end %>
+ <% end %>
@@ -5,17 +5,11 @@ module Sidekiq::QueueMetrics
 
      app.get "/queue_metrics" do
        queue_metrics = Sidekiq::QueueMetrics.fetch
-       @@last_metrics ||= queue_metrics
-
-       @queue_metrics = queue_metrics.each_with_object({}) do |queue_metric, new_queue_metrics|
-         queue, metric = queue_metric
-         new_queue_metrics[queue] = metric.each_with_object({}) do |current_metric, updated_metrics|
-           name, count = current_metric
-           updated_metrics[name] = {'count' => count, 'animate' => @@last_metrics[queue][name] != count}
-         end
-       end
 
+       @@last_metrics ||= queue_metrics
+       @queue_metrics = Helpers.build_metrics_for_view(@@last_metrics, queue_metrics)
        @@last_metrics = queue_metrics
+
        render(:erb, File.read(File.join(view_path, "queues_stats.erb")))
      end
 
@@ -35,4 +29,4 @@ module Sidekiq::QueueMetrics
        end
      end
    end
- end
+ end
@@ -17,8 +17,11 @@ Gem::Specification.new do |s|
    s.test_files = s.files.grep(%r{^(test|spec|features)/})
    s.require_paths = ["lib"]
 
-   s.add_dependency 'sidekiq', '>= 5.1'
+   s.add_dependency 'sidekiq'
    s.add_dependency 'eldritch'
-   s.add_development_dependency "bundler", "~> 1.5"
+   s.add_dependency 'redlock'
+
+   s.add_development_dependency 'bundler', '~> 1.5'
    s.add_development_dependency 'rspec'
+   s.add_development_dependency 'fakeredis'
  end
@@ -0,0 +1,107 @@
+ describe Sidekiq::QueueMetrics::Helpers do
+   describe '.build_queue_stats_key' do
+     context 'with default storage location' do
+       it 'should return the name of the key where metrics are stored for a given queue' do
+         expect(subject.build_queue_stats_key('test')).to eql('queue_stats:test')
+       end
+     end
+
+     context 'with a different storage location' do
+       before { Sidekiq::QueueMetrics.storage_location = 'different_storage_location' }
+       after { Sidekiq::QueueMetrics.storage_location = nil}
+
+       it 'should return the name of the key where metrics are stored for a given queue' do
+         expect(subject.build_queue_stats_key('test')).to eql('different_storage_location:test')
+       end
+     end
+   end
+
+   describe '.build_failed_jobs_key' do
+     it 'should return the name of the key where failed jobs are stored for a given queue' do
+       expect(subject.build_failed_jobs_key('test')).to eql('failed_jobs:test')
+     end
+   end
+
+   describe '.build_metrics_for_view' do
+     context 'when there is no changes' do
+       let(:last_metrics) {{
+         'mailer_queue' => {
+           'processed' => 3
+         }
+       }}
+       let(:current_metrics) {{
+         'mailer_queue' => {
+           'processed' => 3
+         }
+       }}
+
+       it 'should add an animate key and a count with the same metric value' do
+         expected_result = {
+           'mailer_queue' => {
+             'processed' => { 'count' => 3, 'animate' => false }
+           }
+         }
+
+         expect(
+           subject.build_metrics_for_view(last_metrics, current_metrics)
+         ).to eql(expected_result)
+       end
+     end
+
+     context 'when only values are changed' do
+       let(:last_metrics) {{
+         'mailer_queue' => {
+           'processed' => 3
+         }
+       }}
+       let(:current_metrics) {{
+         'mailer_queue' => {
+           'processed' => 4
+         }
+       }}
+
+       it 'should add an animate key and a count with the last metric value' do
+         expected_result = {
+           'mailer_queue' => {
+             'processed' => { 'count' => 4, 'animate' => true }
+           }
+         }
+
+         expect(
+           subject.build_metrics_for_view(last_metrics, current_metrics)
+         ).to eql(expected_result)
+       end
+     end
+
+     context' when a new queue is added' do
+       let(:last_metrics) {{
+         'mailer_queue' => {
+           'processed' => 3
+         }
+       }}
+       let(:current_metrics) {{
+         'mailer_queue' => {
+           'processed' => 4
+         },
+         'new_queue' => {
+           'failed' => 1
+         }
+       }}
+
+       it 'should add the new queue' do
+         expected_result = {
+           'mailer_queue' => {
+             'processed' => { 'count' => 4, 'animate' => true }
+           },
+           'new_queue' => {
+             'failed' => { 'count' => 1, 'animate' => false }
+           }
+         }
+
+         expect(
+           subject.build_metrics_for_view(last_metrics, current_metrics)
+         ).to eql(expected_result)
+       end
+     end
+   end
+ end
@@ -0,0 +1,39 @@
+ describe Sidekiq::QueueMetrics::JobDeathMiddleware do
+   let(:redis_connection) { Redis.new }
+
+   before(:all) do
+     Sidekiq.redis = ConnectionPool.new { redis_connection }
+   end
+
+   before { redis_connection.flushall }
+
+   context 'when retry_count key is not present' do
+     it 'should call the job dead monitor' do
+       expect_any_instance_of(Sidekiq::QueueMetrics::JobDeathMonitor).not_to receive(:monitor)
+
+       subject.call(Class.new, {}, 'test_queue')
+     end
+   end
+
+   context 'when retry_count key is greater than 0' do
+     it 'should call the job dead monitor' do
+       expect_any_instance_of(Sidekiq::QueueMetrics::JobDeathMonitor).not_to receive(:monitor)
+
+       subject.call(Class.new, { 'retry_count' => 1 }, 'test_queue')
+     end
+   end
+
+   context 'when retry_count key is 0' do
+     it 'should call the job dead monitor' do
+       expect_any_instance_of(Sidekiq::QueueMetrics::JobDeathMonitor).to receive(:monitor).with({
+         'retry_count' => 0,
+         'error_class' => 'StandardError'
+       })
+
+       subject.call(Class.new, {
+         'retry_count' => 0,
+         'error_class' => 'StandardError'
+       }, 'test_queue')
+     end
+   end
+ end
@@ -1,53 +1,70 @@
  describe Sidekiq::QueueMetrics::JobDeathMonitor do
+   let(:redis_connection) { Redis.new }
+
+   before(:all) do
+     Sidekiq.redis = ConnectionPool.new { redis_connection }
+   end
+
+   before { redis_connection.flushall }
+
    describe '#call' do
      let(:job) {{'queue' => 'mailer_queue'}}
-     let(:exception) {double('exception')}
-     let(:monitor) {Sidekiq::QueueMetrics::JobDeathMonitor.proc}
+     let(:monitor) { Sidekiq::QueueMetrics::JobDeathMonitor.proc }
 
      context 'when stats does not exist' do
        it 'should create stats key and add stats of queue' do
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(nil)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:set_stats).with({mailer_queue: {failed: 1}}.to_json)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:add_failed_job).with(job)
+         monitor.call(job)
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         ).to eq({ 'failed' => 1 })
+       end
 
+       it 'should add the job to the failed jobs list' do
          monitor.call(job)
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.failed_jobs('mailer_queue')
+         ).to eql([job])
        end
      end
 
      context 'when stats exists' do
        it 'should create a new queue when it does not exist' do
-         job_queue = {'queue' => 'job_queue'}
-         existing_stats = {mailer_queue: {failed: 1}}.to_json
-         expected_stats = {mailer_queue: {failed: 1}, job_queue: {failed: 1}}.to_json
+         Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'failed')
 
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(existing_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:set_stats).with(expected_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:add_failed_job).with(job_queue)
+         job_queue = {'queue' => 'job_queue'}
 
          monitor.call(job_queue)
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         ).to eq({ 'failed' => 1 })
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('job_queue')
+         ).to eq({ 'failed' => 1 })
       end
 
       it 'should update existing queue' do
-         existing_stats = {mailer_queue: {failed: 1}}.to_json
-         expected_stats = {mailer_queue: {failed: 2}}.to_json
-
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(existing_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:set_stats).with(expected_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:add_failed_job).with(job)
+         Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'failed')
 
          monitor.call(job)
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         ).to eq({ 'failed' => 2 })
      end
 
      it 'should create failed counter when other counters exists' do
-         existing_stats = {mailer_queue: {processed: 1}}.to_json
-         expected_stats = {mailer_queue: {processed: 1, failed: 1}}.to_json
-
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(existing_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:set_stats).with(expected_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:add_failed_job).with(job)
+         Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'processed')
 
          monitor.call(job)
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         ).to eq({ 'processed' => 1, 'failed' => 1 })
      end
    end
  end
- end
+ end
@@ -1,48 +1,62 @@
  describe Sidekiq::QueueMetrics::JobSuccessMonitor do
+   let(:redis_connection) { Redis.new }
+
+   let(:worker) { double(:worker) }
+   let(:job) { double(:job) }
+
+   before(:all) do
+     Sidekiq.redis = ConnectionPool.new { redis_connection }
+   end
+
+   before { redis_connection.flushall }
+
    describe '#call' do
-     let(:job) {double('job')}
-     let(:worker) {double('worker')}
-     let(:monitor) {Sidekiq::QueueMetrics::JobSuccessMonitor.new}
+     let(:monitor) { Sidekiq::QueueMetrics::JobSuccessMonitor.new }
 
      context 'when stats does not exist' do
        it 'should create stats key and add stats of queue' do
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(nil)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:set_stats).with({mailer_queue: {processed: 1}}.to_json)
-
          monitor.call(worker, job, 'mailer_queue')
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         ).to eq({ 'processed' => 1 })
        end
      end
 
      context 'when stats exists' do
        it 'should create a new queue when it does not exist' do
-         existing_stats = {mailer_queue: {processed: 1}}.to_json
-         expected_stats = {mailer_queue: {processed: 1}, job_queue: {processed: 1}}.to_json
-
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(existing_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:set_stats).with(expected_stats)
+         Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'processed')
 
          monitor.call(worker, job, 'job_queue')
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         ).to eq({ 'processed' => 1 })
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('job_queue')
+         ).to eq({ 'processed' => 1 })
       end
 
      it 'should update existing queue' do
-         existing_stats = {mailer_queue: {processed: 1}}.to_json
-         expected_stats = {mailer_queue: {processed: 2}}.to_json
-
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(existing_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:set_stats).with(expected_stats)
+         Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'processed')
 
          monitor.call(worker, job, 'mailer_queue')
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         ).to eq({ 'processed' => 2 })
      end
 
      it 'should create failed counter when other counters exists' do
-         existing_stats = {mailer_queue: {failed: 1}}.to_json
-         expected_stats = {mailer_queue: {failed: 1, processed: 1}}.to_json
-
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(existing_stats)
-         expect(Sidekiq::QueueMetrics::Storage).to receive(:set_stats).with(expected_stats)
+         Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'failed')
 
          monitor.call(worker, job, 'mailer_queue')
+
+         expect(
+           Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         ).to eq({ 'processed' => 1, 'failed' => 1 })
      end
    end
  end
- end
+ end
@@ -1,4 +1,38 @@
  describe Sidekiq::QueueMetrics do
+   let(:redis_connection) { Redis.new }
+
+   before(:all) do
+     Sidekiq.redis = ConnectionPool.new { redis_connection }
+   end
+
+   before { redis_connection.flushall }
+
+   describe '#init' do
+     if Sidekiq::QueueMetrics.support_death_handlers?
+       it 'attach the expected listeners for failed job' do
+         Sidekiq::QueueMetrics.init(Sidekiq)
+
+         expect(Sidekiq.death_handlers).to_not be_empty
+       end
+
+       it 'doesn\'t attach the JobDeathMiddleware to the server middleware chain' do
+         Sidekiq::QueueMetrics.init(Sidekiq)
+
+         expect(
+           Sidekiq.server_middleware.entries.select { |x| x.klass == Sidekiq::QueueMetrics::JobDeathMiddleware }
+         ).to be_empty
+       end
+     else
+       it 'attach the JobDeathMiddleware to the server middleware chain' do
+         Sidekiq::QueueMetrics.init(Sidekiq)
+
+         expect(
+           Sidekiq.server_middleware.entries.select { |x| x.klass == Sidekiq::QueueMetrics::JobDeathMiddleware }
+         ).not_to be_empty
+       end
+     end
+   end
+
    describe '#fetch' do
      before(:each) do
        queues = [OpenStruct.new(name: :mailer_queue), OpenStruct.new(name: :heavy_jobs_queue)]
@@ -6,13 +40,22 @@ describe Sidekiq::QueueMetrics do
      end
 
      it 'should fetch current queue stats' do
-       stats = {mailer_queue: {processed: 2, failed: 1}, heavy_jobs_queue: {processed: 1, failed: 0}}
-       jobs_in_retry_queue = [OpenStruct.new(queue: 'mailer_queue'), OpenStruct.new(queue: 'heavy_jobs_queue')]
-       scheduled_jobs = [OpenStruct.new(queue: 'mailer_queue'), OpenStruct.new(queue: 'heavy_jobs_queue')]
+       Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'processed', 2)
+       Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'failed')
+       Sidekiq::QueueMetrics::Storage.increment_stat('heavy_jobs_queue', 'processed')
+
+       jobs_in_retry_queue = [
+         OpenStruct.new(queue: 'mailer_queue'),
+         OpenStruct.new(queue: 'heavy_jobs_queue')
+       ]
+       scheduled_jobs = [
+         OpenStruct.new(queue: 'mailer_queue'),
+         OpenStruct.new(queue: 'heavy_jobs_queue')
+       ]
 
-       expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(stats.to_json)
       expect(Sidekiq::RetrySet).to receive(:new).and_return(jobs_in_retry_queue)
       expect(Sidekiq::ScheduledSet).to receive(:new).and_return(scheduled_jobs)
+
       expect(Sidekiq::Queue).to receive(:new).with('mailer_queue').and_return(OpenStruct.new(size: 1))
       expect(Sidekiq::Queue).to receive(:new).with('heavy_jobs_queue').and_return(OpenStruct.new(size: 1))
 
@@ -32,12 +75,14 @@ describe Sidekiq::QueueMetrics do
      end
 
      it 'should have default value as zero' do
-       stats = {mailer_queue: {processed: 2}, heavy_jobs_queue: {failed: 1}}
+       Sidekiq::QueueMetrics::Storage.increment_stat('mailer_queue', 'processed', 2)
+       Sidekiq::QueueMetrics::Storage.increment_stat('heavy_jobs_queue', 'failed')
+
       scheduled_jobs = jobs_in_retry_queue = []
 
-       expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(stats.to_json)
       expect(Sidekiq::RetrySet).to receive(:new).and_return(jobs_in_retry_queue)
       expect(Sidekiq::ScheduledSet).to receive(:new).and_return(scheduled_jobs)
+
       expect(Sidekiq::Queue).to receive(:new).with('mailer_queue').and_return(OpenStruct.new(size: 0))
       expect(Sidekiq::Queue).to receive(:new).with('heavy_jobs_queue').and_return(OpenStruct.new(size: 0))
 
@@ -53,7 +98,6 @@ describe Sidekiq::QueueMetrics do
      it 'should return Sidekiq::QueueMetrics for all sidekiq queues' do
        jobs_in_retry_queue = scheduled_jobs = []
 
-       expect(Sidekiq::QueueMetrics::Storage).to receive(:get_stats).and_return(nil)
       expect(Sidekiq::RetrySet).to receive(:new).and_return(jobs_in_retry_queue)
       expect(Sidekiq::ScheduledSet).to receive(:new).and_return(scheduled_jobs)
       expect(Sidekiq::Queue).to receive(:new).with('mailer_queue').and_return(OpenStruct.new(size: 0))
@@ -78,11 +122,14 @@ describe Sidekiq::QueueMetrics do
    describe '#failed_jobs' do
      it 'should return failed jobs for a queue' do
        queue = 'default_queue'
-       job_1 = double(:job)
-       job_2 = double(:job)
-       expect(Sidekiq::QueueMetrics::Storage).to receive(:failed_jobs).and_return([job_1, job_2])
 
-       expect(Sidekiq::QueueMetrics.failed_jobs(queue)).to eq([job_2, job_1])
+       job_1 = {'queue' => queue, 'args' => [1]}
+       job_2 = {'queue' => queue, 'args' => [2]}
+
+       Sidekiq::QueueMetrics::Storage.add_failed_job(job_1)
+       Sidekiq::QueueMetrics::Storage.add_failed_job(job_2)
+
+       expect(Sidekiq::QueueMetrics.failed_jobs(queue)).to eq([job_1, job_2])
      end
    end
- end
+ end
@@ -1,82 +1,52 @@
  describe Sidekiq::QueueMetrics::Storage do
-   class MockRedisPool
-     attr_reader :conn
+   let(:redis_connection) { Redis.new }
+   let(:queue) { 'mailer_queue' }
+   let(:job) { {'queue' => queue, 'args' => [1]} }
 
-     def initialize(conn)
-       @conn = conn
-     end
-
-     def with
-       yield conn
-     end
+   before(:all) do
+     Sidekiq.redis = ConnectionPool.new { redis_connection }
   end
 
-   let(:mock_redis_conn) {double(:connection)}
-   let(:mock_redis_pool) {MockRedisPool.new(mock_redis_conn)}
+   before { redis_connection.flushall }
 
    describe '#add_failed_job' do
      it 'should add first failed job' do
-       job = {'queue' => 'mailer_queue'}
-       expect(Sidekiq).to receive(:redis_pool).and_return(mock_redis_pool)
-       expect(mock_redis_conn).to receive(:get).with("failed_jobs:mailer_queue").and_return(nil)
-
-       expect(mock_redis_conn).to receive(:set).with("failed_jobs:mailer_queue", [job].to_json)
-
-       Sidekiq::QueueMetrics::Storage.add_failed_job(job)
+       expect do
+         Sidekiq::QueueMetrics::Storage.add_failed_job(job)
+       end.to change{ Sidekiq::QueueMetrics::Storage.failed_jobs(queue).length }.from(0).to(1)
     end
 
     it 'should add failed job to existing jobs' do
-       key = "failed_jobs:mailer_queue"
-       new_job = {'queue' => 'mailer_queue', 'args' => [1]}
-       existing_jobs = [{'queue' => 'mailer_queue', 'args' => [2]}]
-
-       expect(Sidekiq).to receive(:redis_pool).and_return(mock_redis_pool)
-       expect(mock_redis_conn).to receive(:get).with(key).and_return(existing_jobs.to_json)
-
-       expect(mock_redis_conn).to receive(:set).with(key, [existing_jobs.first, new_job].to_json)
+       Sidekiq::QueueMetrics::Storage.add_failed_job(job)
 
-       Sidekiq::QueueMetrics::Storage.add_failed_job(new_job)
+       expect do
+         Sidekiq::QueueMetrics::Storage.add_failed_job({'queue' => queue, 'args' => [2]})
+       end.to change{ Sidekiq::QueueMetrics::Storage.failed_jobs(queue).length }.from(1).to(2)
     end
 
     it 'should delete old job when failed jobs limit has reached' do
-       key = "failed_jobs:mailer_queue"
-       new_job = {'queue' => 'mailer_queue', 'args' => [1]}
-       oldest_job = {'queue' => 'mailer_queue', 'args' => [2]}
-       older_job = {'queue' => 'mailer_queue', 'args' => [3]}
-
-       existing_jobs = [oldest_job, older_job]
-
-       expect(Sidekiq).to receive(:redis_pool).and_return(mock_redis_pool)
-       expect(mock_redis_conn).to receive(:get).with(key).and_return(existing_jobs.to_json)
-
-       expect(mock_redis_conn).to receive(:set).with(key, [older_job, new_job].to_json)
+       Sidekiq::QueueMetrics::Storage.add_failed_job(job)
+       Sidekiq::QueueMetrics::Storage.add_failed_job({'queue' => queue, 'args' => [2]})
 
-       Sidekiq::QueueMetrics::Storage.add_failed_job(new_job, 2)
+       expect do
+         Sidekiq::QueueMetrics::Storage.add_failed_job({'queue' => queue, 'args' => [3]}, 2)
+       end.to_not change { Sidekiq::QueueMetrics::Storage.failed_jobs(queue).length }
     end
   end
 
  describe '#failed_jobs' do
    context 'when failed jobs are not present' do
      it 'should return failed jobs for a given queue' do
-       queue = 'mailer_queue'
-       expect(Sidekiq).to receive(:redis_pool).and_return(mock_redis_pool)
-
-       expect(mock_redis_conn).to receive(:get).with("failed_jobs:#{queue}").and_return(nil)
-
       expect(Sidekiq::QueueMetrics::Storage.failed_jobs(queue)).to be_empty
     end
   end
 
   context 'when failed jobs are present' do
     it 'should return failed jobs for a given queue' do
-       queue = 'mailer_queue'
-       jobs = [{'queue' => 'mailer_queue', 'args' => [1]}]
-       expect(Sidekiq).to receive(:redis_pool).and_return(mock_redis_pool)
-
-       expect(mock_redis_conn).to receive(:get).with("failed_jobs:#{queue}").and_return(jobs.to_json)
+       Sidekiq::QueueMetrics::Storage.add_failed_job(job)
 
-       expect(Sidekiq::QueueMetrics::Storage.failed_jobs(queue)).to eq(jobs)
+       expect(Sidekiq::QueueMetrics::Storage.failed_jobs(queue)).to eq([job])
     end
   end
  end
- end
+ end
@@ -0,0 +1,87 @@
+ describe Sidekiq::QueueMetrics::UpgradeManager do
+   let(:redis_connection) { Redis.new }
+
+   before(:all) do
+     Sidekiq.redis = ConnectionPool.new { redis_connection }
+   end
+
+   before { redis_connection.flushall }
+
+   describe 'upgrading to v3' do
+     let(:old_queue_stats) {{
+       'mailer_queue' => {
+         'failed' => 1,
+         'processed' => 3
+       },
+       'other_queue' => {
+         'failed' => 1,
+         'processed' => 143
+       }
+     }}
+
+     let(:failed_jobs_mailer_queue) {
+       [{ 'queue' => 'mailer_queue', 'args' => [1]}]
+     }
+
+     let(:failed_jobs_other_queue) {
+       [{ 'queue' => 'other_queue', 'args' => [2]}]
+     }
+
+     describe '.v2_to_v3_upgrade' do
+       before do
+         redis_connection.set(Sidekiq::QueueMetrics::Helpers.stats_key, JSON.generate(old_queue_stats))
+         redis_connection.set(Sidekiq::QueueMetrics::Helpers.build_failed_jobs_key('mailer_queue'), JSON.generate(failed_jobs_mailer_queue))
+         redis_connection.set(Sidekiq::QueueMetrics::Helpers.build_failed_jobs_key('other_queue'), JSON.generate(failed_jobs_other_queue))
+       end
+
+       it 'should delete the old stats key' do
+         Sidekiq::QueueMetrics::UpgradeManager.v2_to_v3_upgrade
+
+         expect(redis_connection.exists(Sidekiq::QueueMetrics::Helpers.stats_key)).to be_falsey
+       end
+
+       it 'should set the previous values into the new stats format' do
+         Sidekiq::QueueMetrics::UpgradeManager.v2_to_v3_upgrade
+
+         mailer_queue_stats = Sidekiq::QueueMetrics::Storage.get_stats('mailer_queue')
+         other_queue_stats = Sidekiq::QueueMetrics::Storage.get_stats('other_queue')
+
+         expect(mailer_queue_stats['processed']).to be(3)
+         expect(mailer_queue_stats['failed']).to be(1)
+
+         expect(other_queue_stats['processed']).to be(143)
+         expect(other_queue_stats['failed']).to be(1)
+       end
+
+       it 'should add the failed jobs into the same key with new format' do
+         Sidekiq::QueueMetrics::UpgradeManager.v2_to_v3_upgrade
+
+         expect(Sidekiq::QueueMetrics::Storage.failed_jobs('mailer_queue')).to eql(failed_jobs_mailer_queue)
+         expect(Sidekiq::QueueMetrics::Storage.failed_jobs('other_queue')).to eql(failed_jobs_other_queue)
+       end
+
+       it 'should delete temporal failed jobs keys' do
+         mailer_temporal_key = "_#{Sidekiq::QueueMetrics::Helpers.build_failed_jobs_key('mailer_queue')}"
+         other_temporal_key = "_#{Sidekiq::QueueMetrics::Helpers.build_failed_jobs_key('other_queue')}"
+
+         Sidekiq::QueueMetrics::UpgradeManager.v2_to_v3_upgrade
+
+         expect(redis_connection.exists(mailer_temporal_key)).to be_falsey
+         expect(redis_connection.exists(other_temporal_key)).to be_falsey
+       end
+     end
+
+     describe '.upgrade_needed?' do
+       it 'should be true if the old queue stats key exists' do
+         redis_connection.set(Sidekiq::QueueMetrics::Helpers.stats_key, JSON.generate(old_queue_stats))
+
+         expect(Sidekiq::QueueMetrics::UpgradeManager.upgrade_needed?).to be_truthy
+       end
+
+       it 'should be false if the old queue stats key is not set' do
+         expect(Sidekiq::QueueMetrics::UpgradeManager.upgrade_needed?).to be_falsey
+       end
+     end
+   end
+ end
+
@@ -15,6 +15,8 @@
  # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
  #
 
+ require 'fakeredis/rspec'
+
  require './lib/sidekiq_queue_metrics'
 
  RSpec.configure do |config|
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: sidekiq_queue_metrics
  version: !ruby/object:Gem::Version
-   version: 2.1.1
+   version: 3.0.0
  platform: ruby
  authors:
  - Ajit Singh
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-06-11 00:00:00.000000000 Z
+ date: 2019-11-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: sidekiq
@@ -16,14 +16,14 @@ dependencies:
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: '5.1'
+         version: '0'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: '5.1'
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: eldritch
    requirement: !ruby/object:Gem::Requirement
@@ -38,6 +38,20 @@ dependencies:
      - - ">="
        - !ruby/object:Gem::Version
          version: '0'
+ - !ruby/object:Gem::Dependency
+   name: redlock
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: bundler
    requirement: !ruby/object:Gem::Requirement
@@ -66,6 +80,20 @@ dependencies:
      - - ">="
        - !ruby/object:Gem::Version
          version: '0'
+ - !ruby/object:Gem::Dependency
+   name: fakeredis
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  description:
  email: jeetsingh.ajit@gamil.com
  executables: []
@@ -76,26 +104,31 @@ files:
  - ".rspec"
  - ".travis.yml"
  - Gemfile
- - Gemfile.lock
  - LICENSE
  - README.md
  - lib/sidekiq_queue_metrics.rb
  - lib/sidekiq_queue_metrics/configuration.rb
+ - lib/sidekiq_queue_metrics/helpers.rb
+ - lib/sidekiq_queue_metrics/job_death_middleware.rb
  - lib/sidekiq_queue_metrics/monitor/job_death_monitor.rb
  - lib/sidekiq_queue_metrics/monitor/job_success_monitor.rb
  - lib/sidekiq_queue_metrics/monitor/monitor.rb
  - lib/sidekiq_queue_metrics/queue_metrics.rb
  - lib/sidekiq_queue_metrics/storage.rb
+ - lib/sidekiq_queue_metrics/upgrade_manager.rb
  - lib/sidekiq_queue_metrics/version.rb
  - lib/sidekiq_queue_metrics/views/failed_job.erb
  - lib/sidekiq_queue_metrics/views/queue_summary.erb
  - lib/sidekiq_queue_metrics/views/queues_stats.erb
  - lib/sidekiq_queue_metrics/web_extension.rb
  - sidekiq_queue_metrics.gemspec
+ - spec/lib/sidekiq_queue_metrics/helpers_spec.rb
+ - spec/lib/sidekiq_queue_metrics/job_death_middleware_spec.rb
  - spec/lib/sidekiq_queue_metrics/monitor/job_death_monitor_spec.rb
  - spec/lib/sidekiq_queue_metrics/monitor/job_success_monitor_spec.rb
  - spec/lib/sidekiq_queue_metrics/queue_metrics_spec.rb
  - spec/lib/sidekiq_queue_metrics/storage_spec.rb
+ - spec/lib/sidekiq_queue_metrics/upgrade_manager_spec.rb
  - spec/spec_helper.rb
  homepage: https://github.com/ajitsing/sidekiq_queue_metrics
  licenses:
@@ -116,14 +149,16 @@ required_rubygems_version: !ruby/object:Gem::Requirement
    - !ruby/object:Gem::Version
      version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.7.6
+ rubygems_version: 3.0.4
  signing_key:
  specification_version: 4
  summary: Records stats of each sidekiq queue and exposes APIs to retrieve them
  test_files:
+ - spec/lib/sidekiq_queue_metrics/helpers_spec.rb
+ - spec/lib/sidekiq_queue_metrics/job_death_middleware_spec.rb
  - spec/lib/sidekiq_queue_metrics/monitor/job_death_monitor_spec.rb
  - spec/lib/sidekiq_queue_metrics/monitor/job_success_monitor_spec.rb
  - spec/lib/sidekiq_queue_metrics/queue_metrics_spec.rb
  - spec/lib/sidekiq_queue_metrics/storage_spec.rb
+ - spec/lib/sidekiq_queue_metrics/upgrade_manager_spec.rb
  - spec/spec_helper.rb
@@ -1,42 +0,0 @@
- GEM
-   remote: http://rubygems.org/
-   specs:
-     concurrent-ruby (1.0.5)
-     connection_pool (2.2.2)
-     diff-lcs (1.3)
-     eldritch (1.1.3)
-       reentrant_mutex (~> 1.1.0)
-     rack (2.0.6)
-     rack-protection (2.0.3)
-       rack
-     redis (4.0.1)
-     reentrant_mutex (1.1.1)
-     rspec (3.7.0)
-       rspec-core (~> 3.7.0)
-       rspec-expectations (~> 3.7.0)
-       rspec-mocks (~> 3.7.0)
-     rspec-core (3.7.1)
-       rspec-support (~> 3.7.0)
-     rspec-expectations (3.7.0)
-       diff-lcs (>= 1.2.0, < 2.0)
-       rspec-support (~> 3.7.0)
-     rspec-mocks (3.7.0)
-       diff-lcs (>= 1.2.0, < 2.0)
-       rspec-support (~> 3.7.0)
-     rspec-support (3.7.1)
-     sidekiq (5.1.3)
-       concurrent-ruby (~> 1.0)
-       connection_pool (~> 2.2, >= 2.2.0)
-       rack-protection (>= 1.5.0)
-       redis (>= 3.3.5, < 5)
-
- PLATFORMS
-   ruby
-
- DEPENDENCIES
-   eldritch
-   rspec
-   sidekiq
-
- BUNDLED WITH
-   1.16.1