sidekiq 6.3.1 → 7.0.7

Potentially problematic release: this version of sidekiq might be problematic.

Files changed (118)
  1. checksums.yaml +4 -4
  2. data/Changes.md +205 -11
  3. data/LICENSE.txt +9 -0
  4. data/README.md +45 -32
  5. data/bin/sidekiq +4 -9
  6. data/bin/sidekiqload +189 -117
  7. data/bin/sidekiqmon +4 -1
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/{worker.rb.erb → job.rb.erb} +2 -2
  10. data/lib/generators/sidekiq/templates/{worker_spec.rb.erb → job_spec.rb.erb} +1 -1
  11. data/lib/generators/sidekiq/templates/{worker_test.rb.erb → job_test.rb.erb} +1 -1
  12. data/lib/sidekiq/api.rb +308 -188
  13. data/lib/sidekiq/capsule.rb +127 -0
  14. data/lib/sidekiq/cli.rb +85 -80
  15. data/lib/sidekiq/client.rb +74 -81
  16. data/lib/sidekiq/{util.rb → component.rb} +13 -40
  17. data/lib/sidekiq/config.rb +270 -0
  18. data/lib/sidekiq/deploy.rb +62 -0
  19. data/lib/sidekiq/embedded.rb +61 -0
  20. data/lib/sidekiq/fetch.rb +23 -24
  21. data/lib/sidekiq/job.rb +375 -10
  22. data/lib/sidekiq/job_logger.rb +16 -28
  23. data/lib/sidekiq/job_retry.rb +81 -57
  24. data/lib/sidekiq/job_util.rb +105 -0
  25. data/lib/sidekiq/launcher.rb +103 -95
  26. data/lib/sidekiq/logger.rb +9 -44
  27. data/lib/sidekiq/manager.rb +40 -41
  28. data/lib/sidekiq/metrics/query.rb +153 -0
  29. data/lib/sidekiq/metrics/shared.rb +95 -0
  30. data/lib/sidekiq/metrics/tracking.rb +136 -0
  31. data/lib/sidekiq/middleware/chain.rb +96 -51
  32. data/lib/sidekiq/middleware/current_attributes.rb +17 -13
  33. data/lib/sidekiq/middleware/i18n.rb +6 -4
  34. data/lib/sidekiq/middleware/modules.rb +21 -0
  35. data/lib/sidekiq/monitor.rb +17 -4
  36. data/lib/sidekiq/paginator.rb +17 -9
  37. data/lib/sidekiq/processor.rb +60 -60
  38. data/lib/sidekiq/rails.rb +12 -10
  39. data/lib/sidekiq/redis_client_adapter.rb +115 -0
  40. data/lib/sidekiq/redis_connection.rb +13 -82
  41. data/lib/sidekiq/ring_buffer.rb +29 -0
  42. data/lib/sidekiq/scheduled.rb +75 -37
  43. data/lib/sidekiq/testing/inline.rb +4 -4
  44. data/lib/sidekiq/testing.rb +41 -68
  45. data/lib/sidekiq/transaction_aware_client.rb +44 -0
  46. data/lib/sidekiq/version.rb +2 -1
  47. data/lib/sidekiq/web/action.rb +3 -3
  48. data/lib/sidekiq/web/application.rb +45 -11
  49. data/lib/sidekiq/web/csrf_protection.rb +3 -3
  50. data/lib/sidekiq/web/helpers.rb +35 -21
  51. data/lib/sidekiq/web.rb +10 -17
  52. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  53. data/lib/sidekiq.rb +85 -202
  54. data/sidekiq.gemspec +20 -10
  55. data/web/assets/javascripts/application.js +76 -26
  56. data/web/assets/javascripts/base-charts.js +106 -0
  57. data/web/assets/javascripts/chart.min.js +13 -0
  58. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  59. data/web/assets/javascripts/dashboard-charts.js +166 -0
  60. data/web/assets/javascripts/dashboard.js +3 -240
  61. data/web/assets/javascripts/metrics.js +264 -0
  62. data/web/assets/stylesheets/application-dark.css +17 -17
  63. data/web/assets/stylesheets/application-rtl.css +2 -91
  64. data/web/assets/stylesheets/application.css +69 -302
  65. data/web/locales/ar.yml +70 -70
  66. data/web/locales/cs.yml +62 -62
  67. data/web/locales/da.yml +60 -53
  68. data/web/locales/de.yml +65 -65
  69. data/web/locales/el.yml +43 -24
  70. data/web/locales/en.yml +82 -69
  71. data/web/locales/es.yml +68 -68
  72. data/web/locales/fa.yml +65 -65
  73. data/web/locales/fr.yml +67 -67
  74. data/web/locales/he.yml +65 -64
  75. data/web/locales/hi.yml +59 -59
  76. data/web/locales/it.yml +53 -53
  77. data/web/locales/ja.yml +73 -68
  78. data/web/locales/ko.yml +52 -52
  79. data/web/locales/lt.yml +66 -66
  80. data/web/locales/nb.yml +61 -61
  81. data/web/locales/nl.yml +52 -52
  82. data/web/locales/pl.yml +45 -45
  83. data/web/locales/pt-br.yml +63 -55
  84. data/web/locales/pt.yml +51 -51
  85. data/web/locales/ru.yml +67 -66
  86. data/web/locales/sv.yml +53 -53
  87. data/web/locales/ta.yml +60 -60
  88. data/web/locales/uk.yml +62 -61
  89. data/web/locales/ur.yml +64 -64
  90. data/web/locales/vi.yml +67 -67
  91. data/web/locales/zh-cn.yml +43 -16
  92. data/web/locales/zh-tw.yml +42 -8
  93. data/web/views/_footer.erb +5 -2
  94. data/web/views/_job_info.erb +18 -2
  95. data/web/views/_metrics_period_select.erb +12 -0
  96. data/web/views/_nav.erb +1 -1
  97. data/web/views/_paging.erb +2 -0
  98. data/web/views/_poll_link.erb +1 -1
  99. data/web/views/_summary.erb +1 -1
  100. data/web/views/busy.erb +42 -26
  101. data/web/views/dashboard.erb +36 -4
  102. data/web/views/metrics.erb +82 -0
  103. data/web/views/metrics_for_job.erb +71 -0
  104. data/web/views/morgue.erb +5 -9
  105. data/web/views/queue.erb +15 -15
  106. data/web/views/queues.erb +3 -1
  107. data/web/views/retries.erb +5 -9
  108. data/web/views/scheduled.erb +12 -13
  109. metadata +68 -32
  110. data/LICENSE +0 -9
  111. data/lib/generators/sidekiq/worker_generator.rb +0 -57
  112. data/lib/sidekiq/delay.rb +0 -41
  113. data/lib/sidekiq/exception_handler.rb +0 -27
  114. data/lib/sidekiq/extensions/action_mailer.rb +0 -48
  115. data/lib/sidekiq/extensions/active_record.rb +0 -43
  116. data/lib/sidekiq/extensions/class_methods.rb +0 -43
  117. data/lib/sidekiq/extensions/generic_proxy.rb +0 -33
  118. data/lib/sidekiq/worker.rb +0 -311

data/lib/sidekiq/scheduled.rb

@@ -1,14 +1,15 @@
  # frozen_string_literal: true

  require "sidekiq"
- require "sidekiq/util"
- require "sidekiq/api"
+ require "sidekiq/component"

  module Sidekiq
  module Scheduled
  SETS = %w[retry schedule]

  class Enq
+ include Sidekiq::Component
+
  LUA_ZPOPBYSCORE = <<~LUA
  local key, now = KEYS[1], ARGV[1]
  local jobs = redis.call("zrangebyscore", key, "-inf", now, "limit", 0, 1)
@@ -18,34 +19,43 @@ module Sidekiq
  end
  LUA

- def initialize
+ def initialize(container)
+ @config = container
+ @client = Sidekiq::Client.new(config: container)
+ @done = false
  @lua_zpopbyscore_sha = nil
  end

- def enqueue_jobs(now = Time.now.to_f.to_s, sorted_sets = SETS)
+ def enqueue_jobs(sorted_sets = SETS)
  # A job's "score" in Redis is the time at which it should be processed.
  # Just check Redis for the set of jobs with a timestamp before now.
- Sidekiq.redis do |conn|
+ redis do |conn|
  sorted_sets.each do |sorted_set|
  # Get next item in the queue with score (time to execute) <= now.
  # We need to go through the list one at a time to reduce the risk of something
  # going wrong between the time jobs are popped from the scheduled queue and when
  # they are pushed onto a work queue and losing the jobs.
- while (job = zpopbyscore(conn, keys: [sorted_set], argv: [now]))
- Sidekiq::Client.push(Sidekiq.load_json(job))
- Sidekiq.logger.debug { "enqueued #{sorted_set}: #{job}" }
+ while !@done && (job = zpopbyscore(conn, keys: [sorted_set], argv: [Time.now.to_f.to_s]))
+ @client.push(Sidekiq.load_json(job))
+ logger.debug { "enqueued #{sorted_set}: #{job}" }
  end
  end
  end
  end

+ def terminate
+ @done = true
+ end
+
  private

  def zpopbyscore(conn, keys: nil, argv: nil)
- @lua_zpopbyscore_sha = conn.script(:load, LUA_ZPOPBYSCORE) if @lua_zpopbyscore_sha.nil?
+ if @lua_zpopbyscore_sha.nil?
+ @lua_zpopbyscore_sha = conn.script(:load, LUA_ZPOPBYSCORE)
+ end

- conn.evalsha(@lua_zpopbyscore_sha, keys: keys, argv: argv)
- rescue Redis::CommandError => e
+ conn.call("EVALSHA", @lua_zpopbyscore_sha, keys.size, *keys, *argv)
+ rescue RedisClient::CommandError => e
  raise unless e.message.start_with?("NOSCRIPT")

  @lua_zpopbyscore_sha = nil
@@ -59,12 +69,13 @@ module Sidekiq
  # just pops the job back onto its original queue so the
  # workers can pick it up like any other job.
  class Poller
- include Util
+ include Sidekiq::Component

  INITIAL_WAIT = 10

- def initialize
- @enq = (Sidekiq.options[:scheduled_enq] || Sidekiq::Scheduled::Enq).new
+ def initialize(config)
+ @config = config
+ @enq = (config[:scheduled_enq] || Sidekiq::Scheduled::Enq).new(config)
  @sleeper = ConnectionPool::TimedStack.new
  @done = false
  @thread = nil
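
Both constructors above now take an explicit configuration object instead of reaching into the global `Sidekiq.options` hash. A minimal sketch of wiring the reworked scheduler up by hand, assuming `Sidekiq.default_configuration` is used as the container (inside a running Sidekiq process the launcher supplies its own config, so this is illustrative only):

require "sidekiq"
require "sidekiq/scheduled"

config = Sidekiq.default_configuration           # 7.x config object; replaces Sidekiq.options

poller = Sidekiq::Scheduled::Poller.new(config)  # builds Enq.new(config) internally
poller.start      # spawns the polling thread
# ... later ...
poller.terminate  # see the next hunk: also terminates the Enq so an in-flight
                  # enqueue_jobs loop exits promptly
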
@@ -74,12 +85,10 @@ module Sidekiq
  # Shut down this instance, will pause until the thread is dead.
  def terminate
  @done = true
- if @thread
- t = @thread
- @thread = nil
- @sleeper << 0
- t.value
- end
+ @enq.terminate
+
+ @sleeper << 0
+ @thread&.value
  end

  def start
@@ -90,7 +99,7 @@ module Sidekiq
  enqueue
  wait
  end
- Sidekiq.logger.info("Scheduler exiting...")
+ logger.info("Scheduler exiting...")
  }
  end

@@ -137,13 +146,16 @@ module Sidekiq
  # As we run more processes, the scheduling interval average will approach an even spread
  # between 0 and poll interval so we don't need this artifical boost.
  #
- if process_count < 10
+ count = process_count
+ interval = poll_interval_average(count)
+
+ if count < 10
  # For small clusters, calculate a random interval that is ±50% the desired average.
- poll_interval_average * rand + poll_interval_average.to_f / 2
+ interval * rand + interval.to_f / 2
  else
  # With 10+ processes, we should have enough randomness to get decent polling
  # across the entire timespan
- poll_interval_average * rand
+ interval * rand
  end
  end

@@ -160,38 +172,64 @@ module Sidekiq
  # the same time: the thundering herd problem.
  #
  # We only do this if poll_interval_average is unset (the default).
- def poll_interval_average
- Sidekiq.options[:poll_interval_average] ||= scaled_poll_interval
+ def poll_interval_average(count)
+ @config[:poll_interval_average] || scaled_poll_interval(count)
  end

  # Calculates an average poll interval based on the number of known Sidekiq processes.
  # This minimizes a single point of failure by dispersing check-ins but without taxing
  # Redis if you run many Sidekiq processes.
- def scaled_poll_interval
- process_count * Sidekiq.options[:average_scheduled_poll_interval]
+ def scaled_poll_interval(process_count)
+ process_count * @config[:average_scheduled_poll_interval]
  end

  def process_count
- # The work buried within Sidekiq::ProcessSet#cleanup can be
- # expensive at scale. Cut it down by 90% with this counter.
- # NB: This method is only called by the scheduler thread so we
- # don't need to worry about the thread safety of +=.
- pcount = Sidekiq::ProcessSet.new(@count_calls % 10 == 0).size
+ pcount = Sidekiq.redis { |conn| conn.scard("processes") }
  pcount = 1 if pcount == 0
- @count_calls += 1
  pcount
  end

+ # A copy of Sidekiq::ProcessSet#cleanup because server
+ # should never depend on sidekiq/api.
+ def cleanup
+ # dont run cleanup more than once per minute
+ return 0 unless redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
+
+ count = 0
+ redis do |conn|
+ procs = conn.sscan("processes").to_a
+ heartbeats = conn.pipelined { |pipeline|
+ procs.each do |key|
+ pipeline.hget(key, "info")
+ end
+ }
+
+ # the hash named key has an expiry of 60 seconds.
+ # if it's not found, that means the process has not reported
+ # in to Redis and probably died.
+ to_prune = procs.select.with_index { |proc, i|
+ heartbeats[i].nil?
+ }
+ count = conn.srem("processes", to_prune) unless to_prune.empty?
+ end
+ count
+ end
+
  def initial_wait
- # Have all processes sleep between 5-15 seconds. 10 seconds
- # to give time for the heartbeat to register (if the poll interval is going to be calculated by the number
+ # Have all processes sleep between 5-15 seconds. 10 seconds to give time for
+ # the heartbeat to register (if the poll interval is going to be calculated by the number
  # of workers), and 5 random seconds to ensure they don't all hit Redis at the same time.
  total = 0
- total += INITIAL_WAIT unless Sidekiq.options[:poll_interval_average]
+ total += INITIAL_WAIT unless @config[:poll_interval_average]
  total += (5 * rand)

  @sleeper.pop(total)
  rescue Timeout::Error
+ ensure
+ # periodically clean out the `processes` set in Redis which can collect
+ # references to dead processes over time. The process count affects how
+ # often we scan for scheduled jobs.
+ cleanup
  end
  end
  end
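
The interval math itself is unchanged, only parameterized. As a worked example, assuming `average_scheduled_poll_interval` is 5 seconds and no explicit `poll_interval_average` is set: with 3 processes the scaled average is 15 seconds, so each process sleeps a random 7.5–22.5 seconds between polls (±50% of the average); with 20 processes the scaled average is 100 seconds and each process sleeps a uniform 0–100 seconds, relying on the larger cluster for spread. A standalone sketch of that calculation:

# Illustrative restatement of random_poll_interval; AVERAGE is an assumed
# value for config[:average_scheduled_poll_interval].
AVERAGE = 5.0

def scaled_poll_interval(process_count)
  process_count * AVERAGE
end

def random_poll_interval(process_count)
  interval = scaled_poll_interval(process_count)
  if process_count < 10
    interval * rand + interval / 2  # uniform in [interval/2, 1.5 * interval]
  else
    interval * rand                 # uniform in [0, interval]
  end
end

random_poll_interval(3)   # => between 7.5 and 22.5 seconds
random_poll_interval(20)  # => between 0 and 100 seconds
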
data/lib/sidekiq/testing/inline.rb

@@ -4,7 +4,7 @@ require "sidekiq/testing"

  ##
  # The Sidekiq inline infrastructure overrides perform_async so that it
- # actually calls perform instead. This allows workers to be run inline in a
+ # actually calls perform instead. This allows jobs to be run inline in a
  # testing environment.
  #
  # This is similar to `Resque.inline = true` functionality.
@@ -15,8 +15,8 @@ require "sidekiq/testing"
  #
  # $external_variable = 0
  #
- # class ExternalWorker
- # include Sidekiq::Worker
+ # class ExternalJob
+ # include Sidekiq::Job
  #
  # def perform
  # $external_variable = 1
@@ -24,7 +24,7 @@ require "sidekiq/testing"
  # end
  #
  # assert_equal 0, $external_variable
- # ExternalWorker.perform_async
+ # ExternalJob.perform_async
  # assert_equal 1, $external_variable
  #
  Sidekiq::Testing.inline!
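
`Sidekiq::Testing.inline!` here switches the mode globally when the file is required. If only a few tests need inline behavior, the block form of the same method (unchanged in 7.x) scopes it, sketched below with the `ExternalJob` class from the comment above:

require "sidekiq/testing"   # defaults to fake! mode

Sidekiq::Testing.inline! do
  # inside the block, perform_async runs the job immediately
  ExternalJob.perform_async
end
# outside the block the previous mode (fake) is restored
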
data/lib/sidekiq/testing.rb

@@ -51,19 +51,10 @@ module Sidekiq
  end

  def server_middleware
- @server_chain ||= Middleware::Chain.new
+ @server_chain ||= Middleware::Chain.new(Sidekiq.default_configuration)
  yield @server_chain if block_given?
  @server_chain
  end
-
- def constantize(str)
- names = str.split("::")
- names.shift if names.empty? || names.first.empty?
-
- names.inject(Object) do |constant, name|
- constant.const_defined?(name) ? constant.const_get(name) : constant.const_missing(name)
- end
- end
  end
  end

@@ -83,7 +74,7 @@ module Sidekiq
  true
  elsif Sidekiq::Testing.inline?
  payloads.each do |job|
- klass = Sidekiq::Testing.constantize(job["class"])
+ klass = Object.const_get(job["class"])
  job["id"] ||= SecureRandom.hex(12)
  job_hash = Sidekiq.load_json(Sidekiq.dump_json(job))
  klass.process_job(job_hash)
@@ -101,20 +92,20 @@ module Sidekiq
  ##
  # The Queues class is only for testing the fake queue implementation.
  # There are 2 data structures involved in tandem. This is due to the
- # Rspec syntax of change(QueueWorker.jobs, :size). It keeps a reference
+ # Rspec syntax of change(HardJob.jobs, :size). It keeps a reference
  # to the array. Because the array was dervied from a filter of the total
  # jobs enqueued, it appeared as though the array didn't change.
  #
  # To solve this, we'll keep 2 hashes containing the jobs. One with keys based
- # on the queue, and another with keys of the worker names, so the array for
- # QueueWorker.jobs is a straight reference to a real array.
+ # on the queue, and another with keys of the job type, so the array for
+ # HardJob.jobs is a straight reference to a real array.
  #
  # Queue-based hash:
  #
  # {
  # "default"=>[
  # {
- # "class"=>"TestTesting::QueueWorker",
+ # "class"=>"TestTesting::HardJob",
  # "args"=>[1, 2],
  # "retry"=>true,
  # "queue"=>"default",
@@ -124,12 +115,12 @@ module Sidekiq
  # ]
  # }
  #
- # Worker-based hash:
+ # Job-based hash:
  #
  # {
- # "TestTesting::QueueWorker"=>[
+ # "TestTesting::HardJob"=>[
  # {
- # "class"=>"TestTesting::QueueWorker",
+ # "class"=>"TestTesting::HardJob",
  # "args"=>[1, 2],
  # "retry"=>true,
  # "queue"=>"default",
@@ -144,14 +135,14 @@ module Sidekiq
  # require 'sidekiq/testing'
  #
  # assert_equal 0, Sidekiq::Queues["default"].size
- # HardWorker.perform_async(:something)
+ # HardJob.perform_async(:something)
  # assert_equal 1, Sidekiq::Queues["default"].size
  # assert_equal :something, Sidekiq::Queues["default"].first['args'][0]
  #
- # You can also clear all workers' jobs:
+ # You can also clear all jobs:
  #
  # assert_equal 0, Sidekiq::Queues["default"].size
- # HardWorker.perform_async(:something)
+ # HardJob.perform_async(:something)
  # Sidekiq::Queues.clear_all
  # assert_equal 0, Sidekiq::Queues["default"].size
  #
@@ -170,35 +161,36 @@ module Sidekiq

  def push(queue, klass, job)
  jobs_by_queue[queue] << job
- jobs_by_worker[klass] << job
+ jobs_by_class[klass] << job
  end

  def jobs_by_queue
  @jobs_by_queue ||= Hash.new { |hash, key| hash[key] = [] }
  end

- def jobs_by_worker
- @jobs_by_worker ||= Hash.new { |hash, key| hash[key] = [] }
+ def jobs_by_class
+ @jobs_by_class ||= Hash.new { |hash, key| hash[key] = [] }
  end
+ alias_method :jobs_by_worker, :jobs_by_class

  def delete_for(jid, queue, klass)
  jobs_by_queue[queue.to_s].delete_if { |job| job["jid"] == jid }
- jobs_by_worker[klass].delete_if { |job| job["jid"] == jid }
+ jobs_by_class[klass].delete_if { |job| job["jid"] == jid }
  end

  def clear_for(queue, klass)
- jobs_by_queue[queue].clear
- jobs_by_worker[klass].clear
+ jobs_by_queue[queue.to_s].clear
+ jobs_by_class[klass].clear
  end

  def clear_all
  jobs_by_queue.clear
- jobs_by_worker.clear
+ jobs_by_class.clear
  end
  end
  end

- module Worker
+ module Job
  ##
  # The Sidekiq testing infrastructure overrides perform_async
  # so that it does not actually touch the network. Instead it
@@ -212,43 +204,27 @@ module Sidekiq
  #
  # require 'sidekiq/testing'
  #
- # assert_equal 0, HardWorker.jobs.size
- # HardWorker.perform_async(:something)
- # assert_equal 1, HardWorker.jobs.size
- # assert_equal :something, HardWorker.jobs[0]['args'][0]
- #
- # assert_equal 0, Sidekiq::Extensions::DelayedMailer.jobs.size
- # MyMailer.delay.send_welcome_email('foo@example.com')
- # assert_equal 1, Sidekiq::Extensions::DelayedMailer.jobs.size
+ # assert_equal 0, HardJob.jobs.size
+ # HardJob.perform_async(:something)
+ # assert_equal 1, HardJob.jobs.size
+ # assert_equal :something, HardJob.jobs[0]['args'][0]
  #
- # You can also clear and drain all workers' jobs:
+ # You can also clear and drain all job types:
  #
- # assert_equal 0, Sidekiq::Extensions::DelayedMailer.jobs.size
- # assert_equal 0, Sidekiq::Extensions::DelayedModel.jobs.size
- #
- # MyMailer.delay.send_welcome_email('foo@example.com')
- # MyModel.delay.do_something_hard
- #
- # assert_equal 1, Sidekiq::Extensions::DelayedMailer.jobs.size
- # assert_equal 1, Sidekiq::Extensions::DelayedModel.jobs.size
- #
- # Sidekiq::Worker.clear_all # or .drain_all
- #
- # assert_equal 0, Sidekiq::Extensions::DelayedMailer.jobs.size
- # assert_equal 0, Sidekiq::Extensions::DelayedModel.jobs.size
+ # Sidekiq::Job.clear_all # or .drain_all
  #
  # This can be useful to make sure jobs don't linger between tests:
  #
  # RSpec.configure do |config|
  # config.before(:each) do
- # Sidekiq::Worker.clear_all
+ # Sidekiq::Job.clear_all
  # end
  # end
  #
  # or for acceptance testing, i.e. with cucumber:
  #
  # AfterStep do
- # Sidekiq::Worker.drain_all
+ # Sidekiq::Job.drain_all
  # end
  #
  # When I sign up as "foo@example.com"
@@ -262,7 +238,7 @@ module Sidekiq

  # Jobs queued for this worker
  def jobs
- Queues.jobs_by_worker[to_s]
+ Queues.jobs_by_class[to_s]
  end

  # Clear all jobs for this worker
@@ -288,11 +264,11 @@ module Sidekiq
  end

  def process_job(job)
- worker = new
- worker.jid = job["jid"]
- worker.bid = job["bid"] if worker.respond_to?(:bid=)
- Sidekiq::Testing.server_middleware.invoke(worker, job, job["queue"]) do
- execute_job(worker, job["args"])
+ inst = new
+ inst.jid = job["jid"]
+ inst.bid = job["bid"] if inst.respond_to?(:bid=)
+ Sidekiq::Testing.server_middleware.invoke(inst, job, job["queue"]) do
+ execute_job(inst, job["args"])
  end
  end

@@ -306,18 +282,18 @@ module Sidekiq
  Queues.jobs_by_queue.values.flatten
  end

- # Clear all queued jobs across all workers
+ # Clear all queued jobs
  def clear_all
  Queues.clear_all
  end

- # Drain all queued jobs across all workers
+ # Drain (execute) all queued jobs
  def drain_all
  while jobs.any?
- worker_classes = jobs.map { |job| job["class"] }.uniq
+ job_classes = jobs.map { |job| job["class"] }.uniq

- worker_classes.each do |worker_class|
- Sidekiq::Testing.constantize(worker_class).drain
+ job_classes.each do |job_class|
+ Object.const_get(job_class).drain
  end
  end
  end
@@ -328,13 +304,10 @@ module Sidekiq
  def jobs_for(klass)
  jobs.select do |job|
  marshalled = job["args"][0]
- marshalled.index(klass.to_s) && YAML.load(marshalled)[0] == klass
+ marshalled.index(klass.to_s) && YAML.safe_load(marshalled)[0] == klass
  end
  end
  end
-
- Sidekiq::Extensions::DelayedMailer.extend(TestingExtensions) if defined?(Sidekiq::Extensions::DelayedMailer)
- Sidekiq::Extensions::DelayedModel.extend(TestingExtensions) if defined?(Sidekiq::Extensions::DelayedModel)
  end

  if defined?(::Rails) && Rails.respond_to?(:env) && !Rails.env.test? && !$TESTING
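
Summing up the testing changes: the fake-queue internals are re-keyed from worker names to job class names (`jobs_by_class`, with `jobs_by_worker` kept as an alias), `constantize` is replaced by `Object.const_get`, and the delay-extension hooks are gone. A hedged sketch of the 7.x-flavored fake testing flow, using a `HardJob` placeholder like the one in the comments:

require "sidekiq/testing"   # fake! mode by default

class HardJob
  include Sidekiq::Job

  def perform(name)
    puts "working on #{name}"
  end
end

HardJob.perform_async("resize")
HardJob.jobs.size               # => 1 (backed by Queues.jobs_by_class["HardJob"])
Sidekiq::Queues["default"].size # => 1 (same job, keyed by queue)

HardJob.drain                   # executes the queued job inline
Sidekiq::Job.clear_all          # or wipe everything between tests
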
data/lib/sidekiq/transaction_aware_client.rb (new file)

@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ require "securerandom"
+ require "sidekiq/client"
+
+ module Sidekiq
+ class TransactionAwareClient
+ def initialize(pool: nil, config: nil)
+ @redis_client = Client.new(pool: pool, config: config)
+ end
+
+ def push(item)
+ # pre-allocate the JID so we can return it immediately and
+ # save it to the database as part of the transaction.
+ item["jid"] ||= SecureRandom.hex(12)
+ AfterCommitEverywhere.after_commit { @redis_client.push(item) }
+ item["jid"]
+ end
+
+ ##
+ # We don't provide transactionality for push_bulk because we don't want
+ # to hold potentially hundreds of thousands of job records in memory due to
+ # a long running enqueue process.
+ def push_bulk(items)
+ @redis_client.push_bulk(items)
+ end
+ end
+ end
+
+ ##
+ # Use `Sidekiq.transactional_push!` in your sidekiq.rb initializer
+ module Sidekiq
+ def self.transactional_push!
+ begin
+ require "after_commit_everywhere"
+ rescue LoadError
+ raise %q(You need to add `gem "after_commit_everywhere"` to your Gemfile to use Sidekiq's transactional client)
+ end
+
+ Sidekiq.default_job_options["client_class"] = Sidekiq::TransactionAwareClient
+ Sidekiq::JobUtil::TRANSIENT_ATTRIBUTES << "client_class"
+ true
+ end
+ end
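
A hedged usage sketch for this new transactional client in a Rails app; `Order` and `OrderJob` are placeholder names, and the `after_commit_everywhere` gem must be added to the Gemfile:

# config/initializers/sidekiq.rb
Sidekiq.transactional_push!   # raises unless after_commit_everywhere is bundled

# somewhere in app code
ActiveRecord::Base.transaction do
  order = Order.create!(total: 100)
  # returns a pre-allocated JID immediately; the actual Redis push is
  # deferred until this transaction commits (and skipped on rollback)
  OrderJob.perform_async(order.id)
end
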
data/lib/sidekiq/version.rb

@@ -1,5 +1,6 @@
  # frozen_string_literal: true

  module Sidekiq
- VERSION = "6.3.1"
+ VERSION = "7.0.7"
+ MAJOR = 7
  end
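
Alongside the version bump, 7.0 adds a `MAJOR` constant. One way code that has to straddle both generations might branch on it (a sketch, not an official idiom):

if defined?(Sidekiq::MAJOR) && Sidekiq::MAJOR >= 7
  Sidekiq.default_configuration[:average_scheduled_poll_interval]
else
  Sidekiq.options[:average_scheduled_poll_interval]  # 6.x global options hash
end
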
data/lib/sidekiq/web/action.rb

@@ -15,11 +15,11 @@ module Sidekiq
  end

  def halt(res)
- throw :halt, [res, {"Content-Type" => "text/plain"}, [res.to_s]]
+ throw :halt, [res, {"content-type" => "text/plain"}, [res.to_s]]
  end

  def redirect(location)
- throw :halt, [302, {"Location" => "#{request.base_url}#{location}"}, []]
+ throw :halt, [302, {"location" => "#{request.base_url}#{location}"}, []]
  end

  def params
@@ -68,7 +68,7 @@ module Sidekiq
  end

  def json(payload)
- [200, {"Content-Type" => "application/json", "Cache-Control" => "private, no-store"}, [Sidekiq.dump_json(payload)]]
+ [200, {"content-type" => "application/json", "cache-control" => "private, no-store"}, [Sidekiq.dump_json(payload)]]
  end

  def initialize(env, block)
data/lib/sidekiq/web/application.rb

@@ -20,6 +20,12 @@ module Sidekiq
  "worker-src 'self'",
  "base-uri 'self'"
  ].join("; ").freeze
+ METRICS_PERIODS = {
+ "1h" => 60,
+ "2h" => 120,
+ "4h" => 240,
+ "8h" => 480
+ }

  def initialize(klass)
  @klass = klass
@@ -50,24 +56,52 @@ module Sidekiq

  get "/" do
  @redis_info = redis_info.select { |k, v| REDIS_KEYS.include? k }
- stats_history = Sidekiq::Stats::History.new((params["days"] || 30).to_i)
+ days = (params["days"] || 30).to_i
+ return halt(401) if days < 1 || days > 180
+
+ stats_history = Sidekiq::Stats::History.new(days)
  @processed_history = stats_history.processed
  @failed_history = stats_history.failed

  erb(:dashboard)
  end

+ get "/metrics" do
+ q = Sidekiq::Metrics::Query.new
+ @period = params[:period]
+ @periods = METRICS_PERIODS
+ minutes = @periods.fetch(@period, @periods.values.first)
+ @query_result = q.top_jobs(minutes: minutes)
+ erb(:metrics)
+ end
+
+ get "/metrics/:name" do
+ @name = route_params[:name]
+ @period = params[:period]
+ q = Sidekiq::Metrics::Query.new
+ @periods = METRICS_PERIODS
+ minutes = @periods.fetch(@period, @periods.values.first)
+ @query_result = q.for_job(@name, minutes: minutes)
+ erb(:metrics_for_job)
+ end
+
  get "/busy" do
+ @count = (params["count"] || 100).to_i
+ (@current_page, @total_size, @workset) = page_items(workset, params["page"], @count)
+
  erb(:busy)
  end

  post "/busy" do
  if params["identity"]
- p = Sidekiq::Process.new("identity" => params["identity"])
- p.quiet! if params["quiet"]
- p.stop! if params["stop"]
+ pro = Sidekiq::ProcessSet[params["identity"]]
+
+ pro.quiet! if params["quiet"]
+ pro.stop! if params["stop"]
  else
  processes.each do |pro|
+ next if pro.embedded?
+
  pro.quiet! if params["quiet"]
  pro.stop! if params["stop"]
  end
@@ -291,15 +325,15 @@ module Sidekiq
  end

  get "/stats/queues" do
- json Sidekiq::Stats::Queues.new.lengths
+ json Sidekiq::Stats.new.queues
  end

  def call(env)
  action = self.class.match(env)
- return [404, {"Content-Type" => "text/plain", "X-Cascade" => "pass"}, ["Not Found"]] unless action
+ return [404, {"content-type" => "text/plain", "x-cascade" => "pass"}, ["Not Found"]] unless action

  app = @klass
- resp = catch(:halt) do # rubocop:disable Standard/SemanticBlocks
+ resp = catch(:halt) do
  self.class.run_befores(app, action)
  action.instance_exec env, &action.block
  ensure
@@ -313,10 +347,10 @@ module Sidekiq
  else
  # rendered content goes here
  headers = {
- "Content-Type" => "text/html",
- "Cache-Control" => "private, no-store",
- "Content-Language" => action.locale,
- "Content-Security-Policy" => CSP_HEADER
+ "content-type" => "text/html",
+ "cache-control" => "private, no-store",
+ "content-language" => action.locale,
+ "content-security-policy" => CSP_HEADER
  }
  # we'll let Rack calculate Content-Length for us.
  [200, headers, [resp]]
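
For reference, these routes are served by the same Rack app most users mount in Rails; the new metrics pages hang off the mount point and honor the period keys defined in METRICS_PERIODS. A hedged reminder of the wiring:

# config/routes.rb
require "sidekiq/web"

Rails.application.routes.draw do
  mount Sidekiq::Web => "/sidekiq"
  # GET /sidekiq/metrics           -> top jobs over the selected window
  # GET /sidekiq/metrics/SomeJob   -> per-job metrics
  # both accept ?period=1h|2h|4h|8h; the dashboard's ?days= parameter is now
  # rejected outside 1..180
end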