sidekiq 0.10.0 → 7.1.6

Files changed (234)
  1. checksums.yaml +7 -0
  2. data/Changes.md +2047 -0
  3. data/LICENSE.txt +9 -0
  4. data/README.md +73 -27
  5. data/bin/sidekiq +25 -9
  6. data/bin/sidekiqload +247 -0
  7. data/bin/sidekiqmon +11 -0
  8. data/lib/generators/sidekiq/job_generator.rb +57 -0
  9. data/lib/generators/sidekiq/templates/job.rb.erb +9 -0
  10. data/lib/generators/sidekiq/templates/job_spec.rb.erb +6 -0
  11. data/lib/generators/sidekiq/templates/job_test.rb.erb +8 -0
  12. data/lib/sidekiq/api.rb +1145 -0
  13. data/lib/sidekiq/capsule.rb +127 -0
  14. data/lib/sidekiq/cli.rb +348 -109
  15. data/lib/sidekiq/client.rb +241 -41
  16. data/lib/sidekiq/component.rb +68 -0
  17. data/lib/sidekiq/config.rb +287 -0
  18. data/lib/sidekiq/deploy.rb +62 -0
  19. data/lib/sidekiq/embedded.rb +61 -0
  20. data/lib/sidekiq/fetch.rb +88 -0
  21. data/lib/sidekiq/job.rb +374 -0
  22. data/lib/sidekiq/job_logger.rb +51 -0
  23. data/lib/sidekiq/job_retry.rb +300 -0
  24. data/lib/sidekiq/job_util.rb +107 -0
  25. data/lib/sidekiq/launcher.rb +271 -0
  26. data/lib/sidekiq/logger.rb +131 -0
  27. data/lib/sidekiq/manager.rb +96 -103
  28. data/lib/sidekiq/metrics/query.rb +153 -0
  29. data/lib/sidekiq/metrics/shared.rb +95 -0
  30. data/lib/sidekiq/metrics/tracking.rb +136 -0
  31. data/lib/sidekiq/middleware/chain.rb +149 -38
  32. data/lib/sidekiq/middleware/current_attributes.rb +95 -0
  33. data/lib/sidekiq/middleware/i18n.rb +42 -0
  34. data/lib/sidekiq/middleware/modules.rb +21 -0
  35. data/lib/sidekiq/monitor.rb +146 -0
  36. data/lib/sidekiq/paginator.rb +55 -0
  37. data/lib/sidekiq/processor.rb +246 -61
  38. data/lib/sidekiq/rails.rb +60 -13
  39. data/lib/sidekiq/redis_client_adapter.rb +95 -0
  40. data/lib/sidekiq/redis_connection.rb +68 -15
  41. data/lib/sidekiq/ring_buffer.rb +29 -0
  42. data/lib/sidekiq/scheduled.rb +236 -0
  43. data/lib/sidekiq/sd_notify.rb +149 -0
  44. data/lib/sidekiq/systemd.rb +24 -0
  45. data/lib/sidekiq/testing/inline.rb +30 -0
  46. data/lib/sidekiq/testing.rb +304 -10
  47. data/lib/sidekiq/transaction_aware_client.rb +44 -0
  48. data/lib/sidekiq/version.rb +4 -1
  49. data/lib/sidekiq/web/action.rb +93 -0
  50. data/lib/sidekiq/web/application.rb +447 -0
  51. data/lib/sidekiq/web/csrf_protection.rb +180 -0
  52. data/lib/sidekiq/web/helpers.rb +370 -0
  53. data/lib/sidekiq/web/router.rb +104 -0
  54. data/lib/sidekiq/web.rb +143 -74
  55. data/lib/sidekiq/worker_compatibility_alias.rb +13 -0
  56. data/lib/sidekiq.rb +120 -73
  57. data/sidekiq.gemspec +26 -23
  58. data/web/assets/images/apple-touch-icon.png +0 -0
  59. data/web/assets/images/favicon.ico +0 -0
  60. data/web/assets/images/logo.png +0 -0
  61. data/web/assets/images/status.png +0 -0
  62. data/web/assets/javascripts/application.js +162 -3
  63. data/web/assets/javascripts/base-charts.js +106 -0
  64. data/web/assets/javascripts/chart.min.js +13 -0
  65. data/web/assets/javascripts/chartjs-plugin-annotation.min.js +7 -0
  66. data/web/assets/javascripts/dashboard-charts.js +168 -0
  67. data/web/assets/javascripts/dashboard.js +59 -0
  68. data/web/assets/javascripts/metrics.js +264 -0
  69. data/web/assets/stylesheets/application-dark.css +147 -0
  70. data/web/assets/stylesheets/application-rtl.css +153 -0
  71. data/web/assets/stylesheets/application.css +720 -7
  72. data/web/assets/stylesheets/bootstrap-rtl.min.css +9 -0
  73. data/web/assets/stylesheets/bootstrap.css +5 -0
  74. data/web/locales/ar.yml +87 -0
  75. data/web/locales/cs.yml +78 -0
  76. data/web/locales/da.yml +75 -0
  77. data/web/locales/de.yml +81 -0
  78. data/web/locales/el.yml +87 -0
  79. data/web/locales/en.yml +101 -0
  80. data/web/locales/es.yml +86 -0
  81. data/web/locales/fa.yml +80 -0
  82. data/web/locales/fr.yml +99 -0
  83. data/web/locales/gd.yml +99 -0
  84. data/web/locales/he.yml +80 -0
  85. data/web/locales/hi.yml +75 -0
  86. data/web/locales/it.yml +69 -0
  87. data/web/locales/ja.yml +91 -0
  88. data/web/locales/ko.yml +68 -0
  89. data/web/locales/lt.yml +83 -0
  90. data/web/locales/nb.yml +77 -0
  91. data/web/locales/nl.yml +68 -0
  92. data/web/locales/pl.yml +59 -0
  93. data/web/locales/pt-br.yml +96 -0
  94. data/web/locales/pt.yml +67 -0
  95. data/web/locales/ru.yml +83 -0
  96. data/web/locales/sv.yml +68 -0
  97. data/web/locales/ta.yml +75 -0
  98. data/web/locales/uk.yml +77 -0
  99. data/web/locales/ur.yml +80 -0
  100. data/web/locales/vi.yml +83 -0
  101. data/web/locales/zh-cn.yml +95 -0
  102. data/web/locales/zh-tw.yml +102 -0
  103. data/web/views/_footer.erb +23 -0
  104. data/web/views/_job_info.erb +105 -0
  105. data/web/views/_metrics_period_select.erb +12 -0
  106. data/web/views/_nav.erb +52 -0
  107. data/web/views/_paging.erb +25 -0
  108. data/web/views/_poll_link.erb +4 -0
  109. data/web/views/_status.erb +4 -0
  110. data/web/views/_summary.erb +40 -0
  111. data/web/views/busy.erb +148 -0
  112. data/web/views/dashboard.erb +115 -0
  113. data/web/views/dead.erb +34 -0
  114. data/web/views/filtering.erb +7 -0
  115. data/web/views/layout.erb +42 -0
  116. data/web/views/metrics.erb +82 -0
  117. data/web/views/metrics_for_job.erb +68 -0
  118. data/web/views/morgue.erb +74 -0
  119. data/web/views/queue.erb +55 -0
  120. data/web/views/queues.erb +40 -0
  121. data/web/views/retries.erb +79 -0
  122. data/web/views/retry.erb +34 -0
  123. data/web/views/scheduled.erb +56 -0
  124. data/web/views/scheduled_job_info.erb +8 -0
  125. metadata +159 -237
  126. data/.gitignore +0 -6
  127. data/.rvmrc +0 -4
  128. data/COMM-LICENSE +0 -75
  129. data/Gemfile +0 -10
  130. data/LICENSE +0 -22
  131. data/Rakefile +0 -9
  132. data/TODO.md +0 -1
  133. data/bin/client +0 -7
  134. data/bin/sidekiqctl +0 -43
  135. data/config.ru +0 -8
  136. data/examples/chef/cookbooks/sidekiq/README.rdoc +0 -11
  137. data/examples/chef/cookbooks/sidekiq/recipes/default.rb +0 -55
  138. data/examples/chef/cookbooks/sidekiq/templates/default/monitrc.conf.erb +0 -8
  139. data/examples/chef/cookbooks/sidekiq/templates/default/sidekiq.erb +0 -219
  140. data/examples/chef/cookbooks/sidekiq/templates/default/sidekiq.yml.erb +0 -22
  141. data/examples/config.yml +0 -9
  142. data/examples/monitrc.conf +0 -6
  143. data/examples/por.rb +0 -27
  144. data/examples/scheduling.rb +0 -37
  145. data/examples/sinkiq.rb +0 -57
  146. data/examples/web-ui.png +0 -0
  147. data/lib/sidekiq/capistrano.rb +0 -32
  148. data/lib/sidekiq/extensions/action_mailer.rb +0 -26
  149. data/lib/sidekiq/extensions/active_record.rb +0 -27
  150. data/lib/sidekiq/extensions/generic_proxy.rb +0 -21
  151. data/lib/sidekiq/middleware/client/unique_jobs.rb +0 -32
  152. data/lib/sidekiq/middleware/server/active_record.rb +0 -13
  153. data/lib/sidekiq/middleware/server/exception_handler.rb +0 -38
  154. data/lib/sidekiq/middleware/server/failure_jobs.rb +0 -24
  155. data/lib/sidekiq/middleware/server/logging.rb +0 -27
  156. data/lib/sidekiq/middleware/server/retry_jobs.rb +0 -59
  157. data/lib/sidekiq/middleware/server/unique_jobs.rb +0 -15
  158. data/lib/sidekiq/retry.rb +0 -57
  159. data/lib/sidekiq/util.rb +0 -61
  160. data/lib/sidekiq/worker.rb +0 -37
  161. data/myapp/.gitignore +0 -15
  162. data/myapp/Capfile +0 -5
  163. data/myapp/Gemfile +0 -19
  164. data/myapp/Gemfile.lock +0 -143
  165. data/myapp/Rakefile +0 -7
  166. data/myapp/app/controllers/application_controller.rb +0 -3
  167. data/myapp/app/controllers/work_controller.rb +0 -38
  168. data/myapp/app/helpers/application_helper.rb +0 -2
  169. data/myapp/app/mailers/.gitkeep +0 -0
  170. data/myapp/app/mailers/user_mailer.rb +0 -9
  171. data/myapp/app/models/.gitkeep +0 -0
  172. data/myapp/app/models/post.rb +0 -5
  173. data/myapp/app/views/layouts/application.html.erb +0 -14
  174. data/myapp/app/views/user_mailer/greetings.html.erb +0 -3
  175. data/myapp/app/views/work/index.html.erb +0 -1
  176. data/myapp/app/workers/hard_worker.rb +0 -9
  177. data/myapp/config/application.rb +0 -59
  178. data/myapp/config/boot.rb +0 -6
  179. data/myapp/config/database.yml +0 -25
  180. data/myapp/config/deploy.rb +0 -15
  181. data/myapp/config/environment.rb +0 -5
  182. data/myapp/config/environments/development.rb +0 -38
  183. data/myapp/config/environments/production.rb +0 -67
  184. data/myapp/config/environments/test.rb +0 -37
  185. data/myapp/config/initializers/backtrace_silencers.rb +0 -7
  186. data/myapp/config/initializers/inflections.rb +0 -15
  187. data/myapp/config/initializers/mime_types.rb +0 -5
  188. data/myapp/config/initializers/secret_token.rb +0 -7
  189. data/myapp/config/initializers/session_store.rb +0 -8
  190. data/myapp/config/initializers/sidekiq.rb +0 -6
  191. data/myapp/config/initializers/wrap_parameters.rb +0 -14
  192. data/myapp/config/locales/en.yml +0 -5
  193. data/myapp/config/routes.rb +0 -10
  194. data/myapp/config.ru +0 -4
  195. data/myapp/db/migrate/20120123214055_create_posts.rb +0 -10
  196. data/myapp/db/seeds.rb +0 -7
  197. data/myapp/lib/assets/.gitkeep +0 -0
  198. data/myapp/lib/tasks/.gitkeep +0 -0
  199. data/myapp/log/.gitkeep +0 -0
  200. data/myapp/script/rails +0 -6
  201. data/test/config.yml +0 -9
  202. data/test/fake_env.rb +0 -0
  203. data/test/helper.rb +0 -15
  204. data/test/test_cli.rb +0 -168
  205. data/test/test_client.rb +0 -105
  206. data/test/test_extensions.rb +0 -68
  207. data/test/test_manager.rb +0 -43
  208. data/test/test_middleware.rb +0 -92
  209. data/test/test_processor.rb +0 -32
  210. data/test/test_retry.rb +0 -83
  211. data/test/test_stats.rb +0 -78
  212. data/test/test_testing.rb +0 -65
  213. data/test/test_web.rb +0 -61
  214. data/web/assets/images/bootstrap/glyphicons-halflings-white.png +0 -0
  215. data/web/assets/images/bootstrap/glyphicons-halflings.png +0 -0
  216. data/web/assets/javascripts/vendor/bootstrap/bootstrap-alert.js +0 -91
  217. data/web/assets/javascripts/vendor/bootstrap/bootstrap-button.js +0 -98
  218. data/web/assets/javascripts/vendor/bootstrap/bootstrap-carousel.js +0 -154
  219. data/web/assets/javascripts/vendor/bootstrap/bootstrap-collapse.js +0 -136
  220. data/web/assets/javascripts/vendor/bootstrap/bootstrap-dropdown.js +0 -92
  221. data/web/assets/javascripts/vendor/bootstrap/bootstrap-modal.js +0 -210
  222. data/web/assets/javascripts/vendor/bootstrap/bootstrap-popover.js +0 -95
  223. data/web/assets/javascripts/vendor/bootstrap/bootstrap-scrollspy.js +0 -125
  224. data/web/assets/javascripts/vendor/bootstrap/bootstrap-tab.js +0 -130
  225. data/web/assets/javascripts/vendor/bootstrap/bootstrap-tooltip.js +0 -270
  226. data/web/assets/javascripts/vendor/bootstrap/bootstrap-transition.js +0 -51
  227. data/web/assets/javascripts/vendor/bootstrap/bootstrap-typeahead.js +0 -271
  228. data/web/assets/javascripts/vendor/bootstrap.js +0 -12
  229. data/web/assets/javascripts/vendor/jquery.js +0 -9266
  230. data/web/assets/stylesheets/vendor/bootstrap-responsive.css +0 -567
  231. data/web/assets/stylesheets/vendor/bootstrap.css +0 -3365
  232. data/web/views/index.slim +0 -62
  233. data/web/views/layout.slim +0 -24
  234. data/web/views/queue.slim +0 -11
data/lib/sidekiq/api.rb
@@ -0,0 +1,1145 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "sidekiq"
4
+
5
+ require "zlib"
6
+ require "set"
7
+ require "base64"
8
+
9
+ require "sidekiq/metrics/query"
10
+
11
+ #
12
+ # Sidekiq's Data API provides a Ruby object model on top
13
+ # of Sidekiq's runtime data in Redis. This API should never
14
+ # be used within application code for business logic.
15
+ #
16
+ # The Sidekiq server process never uses this API: all data
17
+ # manipulation is done directly for performance reasons to
18
+ # ensure we are using Redis as efficiently as possible at
19
+ # every callsite.
20
+ #
21
+
22
+ module Sidekiq
23
+ # Retrieve runtime statistics from Redis regarding
24
+ # this Sidekiq cluster.
25
+ #
26
+ # stat = Sidekiq::Stats.new
27
+ # stat.processed
28
+ class Stats
29
+ def initialize
30
+ fetch_stats_fast!
31
+ end
32
+
33
+ def processed
34
+ stat :processed
35
+ end
36
+
37
+ def failed
38
+ stat :failed
39
+ end
40
+
41
+ def scheduled_size
42
+ stat :scheduled_size
43
+ end
44
+
45
+ def retry_size
46
+ stat :retry_size
47
+ end
48
+
49
+ def dead_size
50
+ stat :dead_size
51
+ end
52
+
53
+ def enqueued
54
+ stat :enqueued
55
+ end
56
+
57
+ def processes_size
58
+ stat :processes_size
59
+ end
60
+
61
+ def workers_size
62
+ stat :workers_size
63
+ end
64
+
65
+ def default_queue_latency
66
+ stat :default_queue_latency
67
+ end
68
+
69
+ def queues
70
+ Sidekiq.redis do |conn|
71
+ queues = conn.sscan("queues").to_a
72
+
73
+ lengths = conn.pipelined { |pipeline|
74
+ queues.each do |queue|
75
+ pipeline.llen("queue:#{queue}")
76
+ end
77
+ }
78
+
79
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
80
+ array_of_arrays.to_h
81
+ end
82
+ end
83
+
84
+ # O(1) redis calls
85
+ # @api private
86
+ def fetch_stats_fast!
87
+ pipe1_res = Sidekiq.redis { |conn|
88
+ conn.pipelined do |pipeline|
89
+ pipeline.get("stat:processed")
90
+ pipeline.get("stat:failed")
91
+ pipeline.zcard("schedule")
92
+ pipeline.zcard("retry")
93
+ pipeline.zcard("dead")
94
+ pipeline.scard("processes")
95
+ pipeline.lindex("queue:default", -1)
96
+ end
97
+ }
98
+
99
+ default_queue_latency = if (entry = pipe1_res[6])
100
+ job = begin
101
+ Sidekiq.load_json(entry)
102
+ rescue
103
+ {}
104
+ end
105
+ now = Time.now.to_f
106
+ thence = job["enqueued_at"] || now
107
+ now - thence
108
+ else
109
+ 0
110
+ end
111
+
112
+ @stats = {
113
+ processed: pipe1_res[0].to_i,
114
+ failed: pipe1_res[1].to_i,
115
+ scheduled_size: pipe1_res[2],
116
+ retry_size: pipe1_res[3],
117
+ dead_size: pipe1_res[4],
118
+ processes_size: pipe1_res[5],
119
+
120
+ default_queue_latency: default_queue_latency
121
+ }
122
+ end
123
+
124
+ # O(number of processes + number of queues) redis calls
125
+ # @api private
126
+ def fetch_stats_slow!
127
+ processes = Sidekiq.redis { |conn|
128
+ conn.sscan("processes").to_a
129
+ }
130
+
131
+ queues = Sidekiq.redis { |conn|
132
+ conn.sscan("queues").to_a
133
+ }
134
+
135
+ pipe2_res = Sidekiq.redis { |conn|
136
+ conn.pipelined do |pipeline|
137
+ processes.each { |key| pipeline.hget(key, "busy") }
138
+ queues.each { |queue| pipeline.llen("queue:#{queue}") }
139
+ end
140
+ }
141
+
142
+ s = processes.size
143
+ workers_size = pipe2_res[0...s].sum(&:to_i)
144
+ enqueued = pipe2_res[s..].sum(&:to_i)
145
+
146
+ @stats[:workers_size] = workers_size
147
+ @stats[:enqueued] = enqueued
148
+ @stats
149
+ end
150
+
151
+ # @api private
152
+ def fetch_stats!
153
+ fetch_stats_fast!
154
+ fetch_stats_slow!
155
+ end
156
+
157
+ # @api private
158
+ def reset(*stats)
159
+ all = %w[failed processed]
160
+ stats = stats.empty? ? all : all & stats.flatten.compact.map(&:to_s)
161
+
162
+ mset_args = []
163
+ stats.each do |stat|
164
+ mset_args << "stat:#{stat}"
165
+ mset_args << 0
166
+ end
167
+ Sidekiq.redis do |conn|
168
+ conn.mset(*mset_args)
169
+ end
170
+ end
171
+
172
+ private
173
+
174
+ def stat(s)
175
+ fetch_stats_slow! if @stats[s].nil?
176
+ @stats[s] || raise(ArgumentError, "Unknown stat #{s}")
177
+ end
178
+
179
+ class History
180
+ def initialize(days_previous, start_date = nil, pool: nil)
181
+ # we only store five years of data in Redis
182
+ raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
183
+ @days_previous = days_previous
184
+ @start_date = start_date || Time.now.utc.to_date
185
+ end
186
+
187
+ def processed
188
+ @processed ||= date_stat_hash("processed")
189
+ end
190
+
191
+ def failed
192
+ @failed ||= date_stat_hash("failed")
193
+ end
194
+
195
+ private
196
+
197
+ def date_stat_hash(stat)
198
+ stat_hash = {}
199
+ dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
200
+ date.strftime("%Y-%m-%d")
201
+ }
202
+
203
+ keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
204
+
205
+ Sidekiq.redis do |conn|
206
+ conn.mget(keys).each_with_index do |value, idx|
207
+ stat_hash[dates[idx]] = value ? value.to_i : 0
208
+ end
209
+ end
210
+
211
+ stat_hash
212
+ end
213
+ end
214
+ end
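
As a quick orientation to the Stats class above, here is a minimal usage sketch. It assumes `sidekiq/api` is required in a process that can reach the same Redis; the queue names and counts in the comments are purely illustrative.

    require "sidekiq/api"

    stats = Sidekiq::Stats.new
    stats.processed    # lifetime processed counter
    stats.failed       # lifetime failure counter
    stats.enqueued     # triggers the slower per-queue scan on first access
    stats.queues       # e.g. {"default" => 12, "mailers" => 3}

    # Daily counts for the last 7 days, keyed by "YYYY-MM-DD":
    Sidekiq::Stats::History.new(7).processed
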
215
+
216
+ ##
217
+ # Represents a queue within Sidekiq.
218
+ # Allows enumeration of all jobs within the queue
219
+ # and deletion of jobs. NB: this queue data is real-time
220
+ # and is changing within Redis moment by moment.
221
+ #
222
+ # queue = Sidekiq::Queue.new("mailer")
223
+ # queue.each do |job|
224
+ # job.klass # => 'MyWorker'
225
+ # job.args # => [1, 2, 3]
226
+ # job.delete if job.jid == 'abcdef1234567890'
227
+ # end
228
+ class Queue
229
+ include Enumerable
230
+
231
+ ##
232
+ # Fetch all known queues within Redis.
233
+ #
234
+ # @return [Array<Sidekiq::Queue>]
235
+ def self.all
236
+ Sidekiq.redis { |c| c.sscan("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
237
+ end
238
+
239
+ attr_reader :name
240
+
241
+ # @param name [String] the name of the queue
242
+ def initialize(name = "default")
243
+ @name = name.to_s
244
+ @rname = "queue:#{name}"
245
+ end
246
+
247
+ # The current size of the queue within Redis.
248
+ # This value is real-time and can change between calls.
249
+ #
250
+ # @return [Integer] the size
251
+ def size
252
+ Sidekiq.redis { |con| con.llen(@rname) }
253
+ end
254
+
255
+ # @return [Boolean] if the queue is currently paused
256
+ def paused?
257
+ false
258
+ end
259
+
260
+ ##
261
+ # Calculates this queue's latency, the difference in seconds since the oldest
262
+ # job in the queue was enqueued.
263
+ #
264
+ # @return [Float] in seconds
265
+ def latency
266
+ entry = Sidekiq.redis { |conn|
267
+ conn.lindex(@rname, -1)
268
+ }
269
+ return 0 unless entry
270
+ job = Sidekiq.load_json(entry)
271
+ now = Time.now.to_f
272
+ thence = job["enqueued_at"] || now
273
+ now - thence
274
+ end
275
+
276
+ def each
277
+ initial_size = size
278
+ deleted_size = 0
279
+ page = 0
280
+ page_size = 50
281
+
282
+ loop do
283
+ range_start = page * page_size - deleted_size
284
+ range_end = range_start + page_size - 1
285
+ entries = Sidekiq.redis { |conn|
286
+ conn.lrange @rname, range_start, range_end
287
+ }
288
+ break if entries.empty?
289
+ page += 1
290
+ entries.each do |entry|
291
+ yield JobRecord.new(entry, @name)
292
+ end
293
+ deleted_size = initial_size - size
294
+ end
295
+ end
296
+
297
+ ##
298
+ # Find the job with the given JID within this queue.
299
+ #
300
+ # This is a *slow, inefficient* operation. Do not use under
301
+ # normal conditions.
302
+ #
303
+ # @param jid [String] the job_id to look for
304
+ # @return [Sidekiq::JobRecord]
305
+ # @return [nil] if not found
306
+ def find_job(jid)
307
+ detect { |j| j.jid == jid }
308
+ end
309
+
310
+ # delete all jobs within this queue
311
+ # @return [Boolean] true
312
+ def clear
313
+ Sidekiq.redis do |conn|
314
+ conn.multi do |transaction|
315
+ transaction.unlink(@rname)
316
+ transaction.srem("queues", [name])
317
+ end
318
+ end
319
+ true
320
+ end
321
+ alias_method :💣, :clear
322
+
323
+ # :nodoc:
324
+ # @api private
325
+ def as_json(options = nil)
326
+ {name: name} # 5336
327
+ end
328
+ end
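
A small sketch of working with Sidekiq::Queue as defined above; the queue name and JID are placeholders.

    require "sidekiq/api"

    queue = Sidekiq::Queue.new("default")
    queue.size      # live count; can change between calls
    queue.latency   # seconds since the oldest entry was enqueued

    # Delete one job by JID; #each pages through the list and
    # compensates for deletions as it goes:
    queue.each do |job|
      job.delete if job.jid == "abcdef1234567890"
    end

    Sidekiq::Queue.all.map(&:name)   # names of every known queue
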
329
+
330
+ ##
331
+ # Represents a pending job within a Sidekiq queue.
332
+ #
333
+ # The job should be considered immutable but may be
334
+ # removed from the queue via JobRecord#delete.
335
+ class JobRecord
336
+ # the parsed Hash of job data
337
+ # @!attribute [r] Item
338
+ attr_reader :item
339
+ # the underlying String in Redis
340
+ # @!attribute [r] Value
341
+ attr_reader :value
342
+ # the queue associated with this job
343
+ # @!attribute [r] Queue
344
+ attr_reader :queue
345
+
346
+ # :nodoc:
347
+ # @api private
348
+ def initialize(item, queue_name = nil)
349
+ @args = nil
350
+ @value = item
351
+ @item = item.is_a?(Hash) ? item : parse(item)
352
+ @queue = queue_name || @item["queue"]
353
+ end
354
+
355
+ # :nodoc:
356
+ # @api private
357
+ def parse(item)
358
+ Sidekiq.load_json(item)
359
+ rescue JSON::ParserError
360
+ # If the job payload in Redis is invalid JSON, we'll load
361
+ # the item as an empty hash and store the invalid JSON as
362
+ # the job 'args' for display in the Web UI.
363
+ @invalid = true
364
+ @args = [item]
365
+ {}
366
+ end
367
+
368
+ # This is the job class which Sidekiq will execute. If using ActiveJob,
369
+ # this class will be the ActiveJob adapter class rather than a specific job.
370
+ def klass
371
+ self["class"]
372
+ end
373
+
374
+ def display_class
375
+ # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
376
+ @klass ||= self["display_class"] || begin
377
+ if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
378
+ job_class = @item["wrapped"] || args[0]
379
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
380
+ # MailerClass#mailer_method
381
+ args[0]["arguments"][0..1].join("#")
382
+ else
383
+ job_class
384
+ end
385
+ else
386
+ klass
387
+ end
388
+ end
389
+ end
390
+
391
+ def display_args
392
+ # Unwrap known wrappers so they show up in a human-friendly manner in the Web UI
393
+ @display_args ||= if klass == "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
394
+ job_args = self["wrapped"] ? deserialize_argument(args[0]["arguments"]) : []
395
+ if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
396
+ # remove MailerClass, mailer_method and 'deliver_now'
397
+ job_args.drop(3)
398
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
399
+ # remove MailerClass, mailer_method and 'deliver_now'
400
+ job_args.drop(3).first.values_at("params", "args")
401
+ else
402
+ job_args
403
+ end
404
+ else
405
+ if self["encrypt"]
406
+ # no point in showing 150+ bytes of random garbage
407
+ args[-1] = "[encrypted data]"
408
+ end
409
+ args
410
+ end
411
+ end
412
+
413
+ def args
414
+ @args || @item["args"]
415
+ end
416
+
417
+ def jid
418
+ self["jid"]
419
+ end
420
+
421
+ def bid
422
+ self["bid"]
423
+ end
424
+
425
+ def enqueued_at
426
+ self["enqueued_at"] ? Time.at(self["enqueued_at"]).utc : nil
427
+ end
428
+
429
+ def created_at
430
+ Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
431
+ end
432
+
433
+ def tags
434
+ self["tags"] || []
435
+ end
436
+
437
+ def error_backtrace
438
+ # Cache nil values
439
+ if defined?(@error_backtrace)
440
+ @error_backtrace
441
+ else
442
+ value = self["error_backtrace"]
443
+ @error_backtrace = value && uncompress_backtrace(value)
444
+ end
445
+ end
446
+
447
+ def latency
448
+ now = Time.now.to_f
449
+ now - (@item["enqueued_at"] || @item["created_at"] || now)
450
+ end
451
+
452
+ # Remove this job from the queue
453
+ def delete
454
+ count = Sidekiq.redis { |conn|
455
+ conn.lrem("queue:#{@queue}", 1, @value)
456
+ }
457
+ count != 0
458
+ end
459
+
460
+ # Access arbitrary attributes within the job hash
461
+ def [](name)
462
+ # nil will happen if the JSON fails to parse.
463
+ # We don't guarantee Sidekiq will work with bad job JSON but we should
464
+ # make a best effort to minimize the damage.
465
+ @item ? @item[name] : nil
466
+ end
467
+
468
+ private
469
+
470
+ ACTIVE_JOB_PREFIX = "_aj_"
471
+ GLOBALID_KEY = "_aj_globalid"
472
+
473
+ def deserialize_argument(argument)
474
+ case argument
475
+ when Array
476
+ argument.map { |arg| deserialize_argument(arg) }
477
+ when Hash
478
+ if serialized_global_id?(argument)
479
+ argument[GLOBALID_KEY]
480
+ else
481
+ argument.transform_values { |v| deserialize_argument(v) }
482
+ .reject { |k, _| k.start_with?(ACTIVE_JOB_PREFIX) }
483
+ end
484
+ else
485
+ argument
486
+ end
487
+ end
488
+
489
+ def serialized_global_id?(hash)
490
+ hash.size == 1 && hash.include?(GLOBALID_KEY)
491
+ end
492
+
493
+ def uncompress_backtrace(backtrace)
494
+ decoded = Base64.decode64(backtrace)
495
+ uncompressed = Zlib::Inflate.inflate(decoded)
496
+ Sidekiq.load_json(uncompressed)
497
+ end
498
+ end
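
JobRecord is what Queue#each yields, so its display helpers are easiest to see in that context. A hedged sketch; the queue name and the "custom_key" attribute are placeholders.

    require "sidekiq/api"

    Sidekiq::Queue.new("default").each do |job|
      job.jid
      job.klass          # class Sidekiq executes; the ActiveJob wrapper for ActiveJob jobs
      job.display_class  # unwrapped class name, as the Web UI shows it
      job.display_args   # arguments with ActiveJob/GlobalID bookkeeping stripped
      job.enqueued_at    # Time (UTC) or nil
      job["custom_key"]  # any other attribute of the job hash (placeholder key)
    end
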
499
+
500
+ # Represents a job within a Redis sorted set where the score
501
+ # represents a timestamp associated with the job. This timestamp
502
+ # could be the scheduled time for it to run (e.g. scheduled set),
503
+ # or the expiration date after which the entry should be deleted (e.g. dead set).
504
+ class SortedEntry < JobRecord
505
+ attr_reader :score
506
+ attr_reader :parent
507
+
508
+ # :nodoc:
509
+ # @api private
510
+ def initialize(parent, score, item)
511
+ super(item)
512
+ @score = Float(score)
513
+ @parent = parent
514
+ end
515
+
516
+ # The timestamp associated with this entry
517
+ def at
518
+ Time.at(score).utc
519
+ end
520
+
521
+ # remove this entry from the sorted set
522
+ def delete
523
+ if @value
524
+ @parent.delete_by_value(@parent.name, @value)
525
+ else
526
+ @parent.delete_by_jid(score, jid)
527
+ end
528
+ end
529
+
530
+ # Change the scheduled time for this job.
531
+ #
532
+ # @param at [Time] the new timestamp for this job
533
+ def reschedule(at)
534
+ Sidekiq.redis do |conn|
535
+ conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
536
+ end
537
+ end
538
+
539
+ # Enqueue this job from the scheduled or dead set so it will
540
+ # be executed at some point in the near future.
541
+ def add_to_queue
542
+ remove_job do |message|
543
+ msg = Sidekiq.load_json(message)
544
+ Sidekiq::Client.push(msg)
545
+ end
546
+ end
547
+
548
+ # enqueue this job from the retry set so it will be executed
549
+ # at some point in the near future.
550
+ def retry
551
+ remove_job do |message|
552
+ msg = Sidekiq.load_json(message)
553
+ msg["retry_count"] -= 1 if msg["retry_count"]
554
+ Sidekiq::Client.push(msg)
555
+ end
556
+ end
557
+
558
+ # Move this job from its current set into the Dead set.
559
+ def kill
560
+ remove_job do |message|
561
+ DeadSet.new.kill(message)
562
+ end
563
+ end
564
+
565
+ def error?
566
+ !!item["error_class"]
567
+ end
568
+
569
+ private
570
+
571
+ def remove_job
572
+ Sidekiq.redis do |conn|
573
+ results = conn.multi { |transaction|
574
+ transaction.zrange(parent.name, score, score, "BYSCORE")
575
+ transaction.zremrangebyscore(parent.name, score, score)
576
+ }.first
577
+
578
+ if results.size == 1
579
+ yield results.first
580
+ else
581
+ # multiple jobs with the same score
582
+ # find the one with the right JID and push it
583
+ matched, nonmatched = results.partition { |message|
584
+ if message.index(jid)
585
+ msg = Sidekiq.load_json(message)
586
+ msg["jid"] == jid
587
+ else
588
+ false
589
+ end
590
+ }
591
+
592
+ msg = matched.first
593
+ yield msg if msg
594
+
595
+ # push the rest back onto the sorted set
596
+ conn.multi do |transaction|
597
+ nonmatched.each do |message|
598
+ transaction.zadd(parent.name, score.to_f.to_s, message)
599
+ end
600
+ end
601
+ end
602
+ end
603
+ end
604
+ end
605
+
606
+ # Base class for all sorted sets within Sidekiq.
607
+ class SortedSet
608
+ include Enumerable
609
+
610
+ # Redis key of the set
611
+ # @!attribute [r] Name
612
+ attr_reader :name
613
+
614
+ # :nodoc:
615
+ # @api private
616
+ def initialize(name)
617
+ @name = name
618
+ @_size = size
619
+ end
620
+
621
+ # real-time size of the set, will change
622
+ def size
623
+ Sidekiq.redis { |c| c.zcard(name) }
624
+ end
625
+
626
+ # Scan through each element of the sorted set, yielding each to the supplied block.
627
+ # Please see Redis's <a href="https://redis.io/commands/scan/">SCAN documentation</a> for implementation details.
628
+ #
629
+ # @param match [String] a snippet or regexp to filter matches.
630
+ # @param count [Integer] number of elements to retrieve at a time, default 100
631
+ # @yieldparam [Sidekiq::SortedEntry] each entry
632
+ def scan(match, count = 100)
633
+ return to_enum(:scan, match, count) unless block_given?
634
+
635
+ match = "*#{match}*" unless match.include?("*")
636
+ Sidekiq.redis do |conn|
637
+ conn.zscan(name, match: match, count: count) do |entry, score|
638
+ yield SortedEntry.new(self, score, entry)
639
+ end
640
+ end
641
+ end
642
+
643
+ # @return [Boolean] always true
644
+ def clear
645
+ Sidekiq.redis do |conn|
646
+ conn.unlink(name)
647
+ end
648
+ true
649
+ end
650
+ alias_method :💣, :clear
651
+
652
+ # :nodoc:
653
+ # @api private
654
+ def as_json(options = nil)
655
+ {name: name} # 5336
656
+ end
657
+ end
658
+
659
+ # Base class for all sorted sets which contain jobs, e.g. scheduled, retry and dead.
660
+ # Sidekiq Pro and Enterprise add additional sorted sets which do not contain job data,
661
+ # e.g. Batches.
662
+ class JobSet < SortedSet
663
+ # Add a job with the associated timestamp to this set.
664
+ # @param timestamp [Time] the score for the job
665
+ # @param job [Hash] the job data
666
+ def schedule(timestamp, job)
667
+ Sidekiq.redis do |conn|
668
+ conn.zadd(name, timestamp.to_f.to_s, Sidekiq.dump_json(job))
669
+ end
670
+ end
671
+
672
+ def each
673
+ initial_size = @_size
674
+ offset_size = 0
675
+ page = -1
676
+ page_size = 50
677
+
678
+ loop do
679
+ range_start = page * page_size + offset_size
680
+ range_end = range_start + page_size - 1
681
+ elements = Sidekiq.redis { |conn|
682
+ conn.zrange name, range_start, range_end, withscores: true
683
+ }
684
+ break if elements.empty?
685
+ page -= 1
686
+ elements.reverse_each do |element, score|
687
+ yield SortedEntry.new(self, score, element)
688
+ end
689
+ offset_size = initial_size - @_size
690
+ end
691
+ end
692
+
693
+ ##
694
+ # Fetch jobs that match a given time or Range. Job ID is an
695
+ # optional second argument.
696
+ #
697
+ # @param score [Time,Range] a specific timestamp or range
698
+ # @param jid [String, optional] find a specific JID within the score
699
+ # @return [Array<SortedEntry>] any results found, can be empty
700
+ def fetch(score, jid = nil)
701
+ begin_score, end_score =
702
+ if score.is_a?(Range)
703
+ [score.first, score.last]
704
+ else
705
+ [score, score]
706
+ end
707
+
708
+ elements = Sidekiq.redis { |conn|
709
+ conn.zrange(name, begin_score, end_score, "BYSCORE", withscores: true)
710
+ }
711
+
712
+ elements.each_with_object([]) do |element, result|
713
+ data, job_score = element
714
+ entry = SortedEntry.new(self, job_score, data)
715
+ result << entry if jid.nil? || entry.jid == jid
716
+ end
717
+ end
718
+
719
+ ##
720
+ # Find the job with the given JID within this sorted set.
721
+ # *This is a slow O(n) operation*. Do not use for app logic.
722
+ #
723
+ # @param jid [String] the job identifier
724
+ # @return [SortedEntry] the record or nil
725
+ def find_job(jid)
726
+ Sidekiq.redis do |conn|
727
+ conn.zscan(name, match: "*#{jid}*", count: 100) do |entry, score|
728
+ job = Sidekiq.load_json(entry)
729
+ matched = job["jid"] == jid
730
+ return SortedEntry.new(self, score, entry) if matched
731
+ end
732
+ end
733
+ nil
734
+ end
735
+
736
+ # :nodoc:
737
+ # @api private
738
+ def delete_by_value(name, value)
739
+ Sidekiq.redis do |conn|
740
+ ret = conn.zrem(name, value)
741
+ @_size -= 1 if ret
742
+ ret
743
+ end
744
+ end
745
+
746
+ # :nodoc:
747
+ # @api private
748
+ def delete_by_jid(score, jid)
749
+ Sidekiq.redis do |conn|
750
+ elements = conn.zrange(name, score, score, "BYSCORE")
751
+ elements.each do |element|
752
+ if element.index(jid)
753
+ message = Sidekiq.load_json(element)
754
+ if message["jid"] == jid
755
+ ret = conn.zrem(name, element)
756
+ @_size -= 1 if ret
757
+ break ret
758
+ end
759
+ end
760
+ end
761
+ end
762
+ end
763
+
764
+ alias_method :delete, :delete_by_jid
765
+ end
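
A brief sketch of the JobSet lookup methods above, shown against the RetrySet subclass defined just below; the job class name and JID are placeholders.

    require "sidekiq/api"

    rs = Sidekiq::RetrySet.new

    # All entries whose score falls within the next hour:
    window = Time.now.to_f..(Time.now.to_f + 3600)
    rs.fetch(window).size

    # SCAN-based substring match over the serialized payloads:
    rs.scan("WelcomeEmailJob") { |entry| entry.jid }

    # Slow O(n) lookup of a single job:
    rs.find_job("abcdef1234567890")
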
766
+
767
+ ##
768
+ # The set of scheduled jobs within Sidekiq.
769
+ # Based on this, you can search/filter for jobs. Here's an
770
+ # example where I'm selecting jobs based on some complex logic
771
+ # and deleting them from the scheduled set.
772
+ #
773
+ # See the API wiki page for usage notes and examples.
774
+ #
775
+ class ScheduledSet < JobSet
776
+ def initialize
777
+ super "schedule"
778
+ end
779
+ end
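
Following on from the comment above, a sketch of selecting and deleting scheduled jobs; the class name, argument value, and JID are placeholders.

    require "sidekiq/api"

    ss = Sidekiq::ScheduledSet.new

    # Delete scheduled jobs matching some application-specific condition:
    ss.each do |entry|
      entry.delete if entry.klass == "SomeJob" && entry.args[0] == 42
    end

    # Or push one straight onto its queue instead of waiting for its timestamp:
    ss.find_job("abcdef1234567890")&.add_to_queue
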
780
+
781
+ ##
782
+ # The set of retries within Sidekiq.
783
+ # Based on this, you can search/filter for jobs. Here's an
784
+ # example where I'm selecting all jobs of a certain type
785
+ # and deleting them from the retry queue.
786
+ #
787
+ # See the API wiki page for usage notes and examples.
788
+ #
789
+ class RetrySet < JobSet
790
+ def initialize
791
+ super "retry"
792
+ end
793
+
794
+ # Enqueues all jobs pending within the retry set.
795
+ def retry_all
796
+ each(&:retry) while size > 0
797
+ end
798
+
799
+ # Kills all jobs pending within the retry set.
800
+ def kill_all
801
+ each(&:kill) while size > 0
802
+ end
803
+ end
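
A sketch of acting on retries, individually (via the SortedEntry methods above) or in bulk; the JID is a placeholder.

    require "sidekiq/api"

    rs = Sidekiq::RetrySet.new

    rs.each do |entry|
      entry.at       # when the next retry attempt is scheduled
      entry.error?   # true if the payload carries an error_class
    end

    entry = rs.find_job("abcdef1234567890")
    entry&.reschedule(Time.now + 3600)   # push the retry out an hour...
    # entry&.retry                       # ...or run it immediately
    # entry&.kill                        # ...or move it to the dead set

    rs.retry_all                         # drain the whole set back onto its queues
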
804
+
805
+ ##
806
+ # The set of dead jobs within Sidekiq. Dead jobs have failed all of
807
+ # their retries and are held in this set pending some sort of manual
808
+ # fix. They will be removed after 6 months (dead_timeout) if not.
809
+ #
810
+ class DeadSet < JobSet
811
+ def initialize
812
+ super "dead"
813
+ end
814
+
815
+ # Add the given job to the Dead set.
816
+ # @param message [String] the job data as JSON
817
+ def kill(message, opts = {})
818
+ now = Time.now.to_f
819
+ Sidekiq.redis do |conn|
820
+ conn.multi do |transaction|
821
+ transaction.zadd(name, now.to_s, message)
822
+ transaction.zremrangebyscore(name, "-inf", now - Sidekiq::Config::DEFAULTS[:dead_timeout_in_seconds])
823
+ transaction.zremrangebyrank(name, 0, - Sidekiq::Config::DEFAULTS[:dead_max_jobs])
824
+ end
825
+ end
826
+
827
+ if opts[:notify_failure] != false
828
+ job = Sidekiq.load_json(message)
829
+ r = RuntimeError.new("Job killed by API")
830
+ r.set_backtrace(caller)
831
+ Sidekiq.default_configuration.death_handlers.each do |handle|
832
+ handle.call(job, r)
833
+ end
834
+ end
835
+ true
836
+ end
837
+
838
+ # Enqueue all dead jobs
839
+ def retry_all
840
+ each(&:retry) while size > 0
841
+ end
842
+ end
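
A sketch of DeadSet usage; the JID and the raw payload are placeholders.

    require "sidekiq/api"

    ds = Sidekiq::DeadSet.new
    ds.size

    ds.find_job("abcdef1234567890")&.retry   # resurrect one entry
    ds.retry_all                             # or everything

    # Push a raw job payload straight into the dead set, skipping death handlers:
    payload = Sidekiq.dump_json({"class" => "SomeJob", "args" => [], "jid" => "abc123"})
    ds.kill(payload, notify_failure: false)
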
843
+
844
+ ##
845
+ # Enumerates the set of Sidekiq processes which are actively working
846
+ # right now. Each process sends a heartbeat to Redis every 5 seconds
847
+ # so this set should be relatively accurate, barring network partitions.
848
+ #
849
+ # @yieldparam [Sidekiq::Process]
850
+ #
851
+ class ProcessSet
852
+ include Enumerable
853
+
854
+ def self.[](identity)
855
+ exists, (info, busy, beat, quiet, rss, rtt_us) = Sidekiq.redis { |conn|
856
+ conn.multi { |transaction|
857
+ transaction.sismember("processes", identity)
858
+ transaction.hmget(identity, "info", "busy", "beat", "quiet", "rss", "rtt_us")
859
+ }
860
+ }
861
+
862
+ return nil if exists == 0 || info.nil?
863
+
864
+ hash = Sidekiq.load_json(info)
865
+ Process.new(hash.merge("busy" => busy.to_i,
866
+ "beat" => beat.to_f,
867
+ "quiet" => quiet,
868
+ "rss" => rss.to_i,
869
+ "rtt_us" => rtt_us.to_i))
870
+ end
871
+
872
+ # :nodoc:
873
+ # @api private
874
+ def initialize(clean_plz = true)
875
+ cleanup if clean_plz
876
+ end
877
+
878
+ # Cleans up dead processes recorded in Redis.
879
+ # Returns the number of processes cleaned.
880
+ # :nodoc:
881
+ # @api private
882
+ def cleanup
883
+ # don't run cleanup more than once per minute
884
+ return 0 unless Sidekiq.redis { |conn| conn.set("process_cleanup", "1", nx: true, ex: 60) }
885
+
886
+ count = 0
887
+ Sidekiq.redis do |conn|
888
+ procs = conn.sscan("processes").to_a
889
+ heartbeats = conn.pipelined { |pipeline|
890
+ procs.each do |key|
891
+ pipeline.hget(key, "info")
892
+ end
893
+ }
894
+
895
+ # the hash named key has an expiry of 60 seconds.
896
+ # if it's not found, that means the process has not reported
897
+ # in to Redis and probably died.
898
+ to_prune = procs.select.with_index { |proc, i|
899
+ heartbeats[i].nil?
900
+ }
901
+ count = conn.srem("processes", to_prune) unless to_prune.empty?
902
+ end
903
+ count
904
+ end
905
+
906
+ def each
907
+ result = Sidekiq.redis { |conn|
908
+ procs = conn.sscan("processes").to_a.sort
909
+
910
+ # We're making a tradeoff here: consuming more memory instead of
911
+ # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
912
+ # you'll be happier this way
913
+ conn.pipelined do |pipeline|
914
+ procs.each do |key|
915
+ pipeline.hmget(key, "info", "busy", "beat", "quiet", "rss", "rtt_us")
916
+ end
917
+ end
918
+ }
919
+
920
+ result.each do |info, busy, beat, quiet, rss, rtt_us|
921
+ # If a process is stopped between when we query Redis for `procs` and
922
+ # when we query for `result`, we will have an item in `result` that is
923
+ # composed of `nil` values.
924
+ next if info.nil?
925
+
926
+ hash = Sidekiq.load_json(info)
927
+ yield Process.new(hash.merge("busy" => busy.to_i,
928
+ "beat" => beat.to_f,
929
+ "quiet" => quiet,
930
+ "rss" => rss.to_i,
931
+ "rtt_us" => rtt_us.to_i))
932
+ end
933
+ end
934
+
935
+ # This method is not guaranteed accurate since it does not prune the set
936
+ # based on current heartbeat. #each does that and ensures the set only
937
+ # contains Sidekiq processes which have sent a heartbeat within the last
938
+ # 60 seconds.
939
+ # @return [Integer] current number of registered Sidekiq processes
940
+ def size
941
+ Sidekiq.redis { |conn| conn.scard("processes") }
942
+ end
943
+
944
+ # Total number of threads available to execute jobs.
945
+ # For Sidekiq Enterprise customers this number (in production) must be
946
+ # less than or equal to your licensed concurrency.
947
+ # @return [Integer] the sum of process concurrency
948
+ def total_concurrency
949
+ sum { |x| x["concurrency"].to_i }
950
+ end
951
+
952
+ # @return [Integer] total amount of RSS memory consumed by Sidekiq processes
953
+ def total_rss_in_kb
954
+ sum { |x| x["rss"].to_i }
955
+ end
956
+ alias_method :total_rss, :total_rss_in_kb
957
+
958
+ # Returns the identity of the current cluster leader or "" if no leader.
959
+ # This is a Sidekiq Enterprise feature; it will always return "" in Sidekiq
960
+ # or Sidekiq Pro.
961
+ # @return [String] Identity of cluster leader
962
+ # @return [String] empty string if no leader
963
+ def leader
964
+ @leader ||= begin
965
+ x = Sidekiq.redis { |c| c.get("dear-leader") }
966
+ # need a non-falsy value so we can memoize
967
+ x ||= ""
968
+ x
969
+ end
970
+ end
971
+ end
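
A sketch of inspecting the process set; the identity string at the end is a placeholder.

    require "sidekiq/api"

    ps = Sidekiq::ProcessSet.new
    ps.size                # registered processes
    ps.total_concurrency   # sum of every process's thread count
    ps.total_rss_in_kb

    ps.each do |process|
      process["hostname"]
      process["busy"]
      process.quiet! unless process.embedded?   # stop fetching new jobs, finish current ones
    end

    # Direct lookup by identity (placeholder value); returns nil if unknown:
    Sidekiq::ProcessSet["app-1.example.com:12345:deadbeef"]
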
972
+
973
+ #
974
+ # Sidekiq::Process represents an active Sidekiq process talking with Redis.
975
+ # Each process has a set of attributes which look like this:
976
+ #
977
+ # {
978
+ # 'hostname' => 'app-1.example.com',
979
+ # 'started_at' => <process start time>,
980
+ # 'pid' => 12345,
981
+ # 'tag' => 'myapp'
982
+ # 'concurrency' => 25,
983
+ # 'queues' => ['default', 'low'],
984
+ # 'busy' => 10,
985
+ # 'beat' => <last heartbeat>,
986
+ # 'identity' => <unique string identifying the process>,
987
+ # 'embedded' => true,
988
+ # }
989
+ class Process
990
+ # :nodoc:
991
+ # @api private
992
+ def initialize(hash)
993
+ @attribs = hash
994
+ end
995
+
996
+ def tag
997
+ self["tag"]
998
+ end
999
+
1000
+ def labels
1001
+ self["labels"].to_a
1002
+ end
1003
+
1004
+ def [](key)
1005
+ @attribs[key]
1006
+ end
1007
+
1008
+ def identity
1009
+ self["identity"]
1010
+ end
1011
+
1012
+ def queues
1013
+ self["queues"]
1014
+ end
1015
+
1016
+ def weights
1017
+ self["weights"]
1018
+ end
1019
+
1020
+ def version
1021
+ self["version"]
1022
+ end
1023
+
1024
+ def embedded?
1025
+ self["embedded"]
1026
+ end
1027
+
1028
+ # Signal this process to stop processing new jobs.
1029
+ # It will continue to execute jobs it has already fetched.
1030
+ # This method is *asynchronous* and it can take 5-10
1031
+ # seconds for the process to quiet.
1032
+ def quiet!
1033
+ raise "Can't quiet an embedded process" if embedded?
1034
+
1035
+ signal("TSTP")
1036
+ end
1037
+
1038
+ # Signal this process to shutdown.
1039
+ # It will shutdown within its configured :timeout value, default 25 seconds.
1040
+ # This method is *asynchronous* and it can take 5-10
1041
+ # seconds for the process to start shutting down.
1042
+ def stop!
1043
+ raise "Can't stop an embedded process" if embedded?
1044
+
1045
+ signal("TERM")
1046
+ end
1047
+
1048
+ # Signal this process to log backtraces for all threads.
1049
+ # Useful if you have a frozen or deadlocked process which is
1050
+ # still sending a heartbeat.
1051
+ # This method is *asynchronous* and it can take 5-10 seconds.
1052
+ def dump_threads
1053
+ signal("TTIN")
1054
+ end
1055
+
1056
+ # @return [Boolean] true if this process is quiet or shutting down
1057
+ def stopping?
1058
+ self["quiet"] == "true"
1059
+ end
1060
+
1061
+ private
1062
+
1063
+ def signal(sig)
1064
+ key = "#{identity}-signals"
1065
+ Sidekiq.redis do |c|
1066
+ c.multi do |transaction|
1067
+ transaction.lpush(key, sig)
1068
+ transaction.expire(key, 60)
1069
+ end
1070
+ end
1071
+ end
1072
+ end
1073
+
1074
+ ##
1075
+ # The WorkSet stores the work being done by this Sidekiq cluster.
1076
+ # It tracks the process and thread working on each job.
1077
+ #
1078
+ # WARNING WARNING WARNING
1079
+ #
1080
+ # This is live data that can change every millisecond.
1081
+ # If you call #size => 5 and then expect #each to be
1082
+ # called 5 times, you're going to have a bad time.
1083
+ #
1084
+ # works = Sidekiq::WorkSet.new
1085
+ # works.size => 2
1086
+ # works.each do |process_id, thread_id, work|
1087
+ # # process_id is a unique identifier per Sidekiq process
1088
+ # # thread_id is a unique identifier per thread
1089
+ # # work is a Hash which looks like:
1090
+ # # { 'queue' => name, 'run_at' => timestamp, 'payload' => job_hash }
1091
+ # # run_at is an epoch Integer.
1092
+ # end
1093
+ #
1094
+ class WorkSet
1095
+ include Enumerable
1096
+
1097
+ def each(&block)
1098
+ results = []
1099
+ procs = nil
1100
+ all_works = nil
1101
+
1102
+ Sidekiq.redis do |conn|
1103
+ procs = conn.sscan("processes").to_a.sort
1104
+ all_works = conn.pipelined do |pipeline|
1105
+ procs.each do |key|
1106
+ pipeline.hgetall("#{key}:work")
1107
+ end
1108
+ end
1109
+ end
1110
+
1111
+ procs.zip(all_works).each do |key, workers|
1112
+ workers.each_pair do |tid, json|
1113
+ results << [key, tid, Sidekiq.load_json(json)] unless json.empty?
1114
+ end
1115
+ end
1116
+
1117
+ results.sort_by { |(_, _, hsh)| hsh["run_at"] }.each(&block)
1118
+ end
1119
+
1120
+ # Note that #size is only as accurate as Sidekiq's heartbeat,
1121
+ # which happens every 5 seconds. It is NOT real-time.
1122
+ #
1123
+ # Not very efficient if you have lots of Sidekiq
1124
+ # processes but the alternative is a global counter
1125
+ # which can easily get out of sync with crashy processes.
1126
+ def size
1127
+ Sidekiq.redis do |conn|
1128
+ procs = conn.sscan("processes").to_a
1129
+ if procs.empty?
1130
+ 0
1131
+ else
1132
+ conn.pipelined { |pipeline|
1133
+ procs.each do |key|
1134
+ pipeline.hget(key, "busy")
1135
+ end
1136
+ }.sum(&:to_i)
1137
+ end
1138
+ end
1139
+ end
1140
+ end
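
Building on the WorkSet example in the comment above, a sketch that flags long-running work; the one-hour threshold is arbitrary.

    require "sidekiq/api"

    Sidekiq::WorkSet.new.each do |process_id, thread_id, work|
      runtime = Time.now.to_i - work["run_at"]
      puts "#{process_id} #{thread_id} #{work["queue"]} running for #{runtime}s" if runtime > 3600
    end
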
1141
+ # Since "worker" is a nebulous term, we've deprecated the use of this class name.
1142
+ # Is "worker" a process, a type of job, a thread? Undefined!
1143
+ # WorkSet better describes the data.
1144
+ Workers = WorkSet
1145
+ end