sidekiq 7.1.6 → 7.3.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. checksums.yaml +4 -4
  2. data/Changes.md +184 -0
  3. data/README.md +3 -3
  4. data/bin/multi_queue_bench +271 -0
  5. data/bin/sidekiqload +21 -12
  6. data/lib/active_job/queue_adapters/sidekiq_adapter.rb +75 -0
  7. data/lib/generators/sidekiq/job_generator.rb +2 -0
  8. data/lib/sidekiq/api.rb +139 -44
  9. data/lib/sidekiq/capsule.rb +8 -3
  10. data/lib/sidekiq/cli.rb +4 -1
  11. data/lib/sidekiq/client.rb +21 -1
  12. data/lib/sidekiq/component.rb +22 -0
  13. data/lib/sidekiq/config.rb +31 -7
  14. data/lib/sidekiq/deploy.rb +4 -2
  15. data/lib/sidekiq/embedded.rb +2 -0
  16. data/lib/sidekiq/fetch.rb +1 -1
  17. data/lib/sidekiq/iterable_job.rb +55 -0
  18. data/lib/sidekiq/job/interrupt_handler.rb +24 -0
  19. data/lib/sidekiq/job/iterable/active_record_enumerator.rb +53 -0
  20. data/lib/sidekiq/job/iterable/csv_enumerator.rb +47 -0
  21. data/lib/sidekiq/job/iterable/enumerators.rb +135 -0
  22. data/lib/sidekiq/job/iterable.rb +294 -0
  23. data/lib/sidekiq/job.rb +14 -3
  24. data/lib/sidekiq/job_logger.rb +7 -6
  25. data/lib/sidekiq/job_retry.rb +9 -4
  26. data/lib/sidekiq/job_util.rb +2 -0
  27. data/lib/sidekiq/launcher.rb +7 -5
  28. data/lib/sidekiq/logger.rb +1 -1
  29. data/lib/sidekiq/metrics/query.rb +6 -1
  30. data/lib/sidekiq/metrics/shared.rb +15 -4
  31. data/lib/sidekiq/metrics/tracking.rb +20 -8
  32. data/lib/sidekiq/middleware/current_attributes.rb +46 -13
  33. data/lib/sidekiq/middleware/modules.rb +2 -0
  34. data/lib/sidekiq/monitor.rb +2 -1
  35. data/lib/sidekiq/paginator.rb +8 -2
  36. data/lib/sidekiq/processor.rb +21 -11
  37. data/lib/sidekiq/rails.rb +19 -3
  38. data/lib/sidekiq/redis_client_adapter.rb +24 -5
  39. data/lib/sidekiq/redis_connection.rb +36 -8
  40. data/lib/sidekiq/ring_buffer.rb +2 -0
  41. data/lib/sidekiq/scheduled.rb +2 -2
  42. data/lib/sidekiq/systemd.rb +2 -0
  43. data/lib/sidekiq/testing.rb +14 -8
  44. data/lib/sidekiq/transaction_aware_client.rb +7 -0
  45. data/lib/sidekiq/version.rb +5 -1
  46. data/lib/sidekiq/web/action.rb +26 -4
  47. data/lib/sidekiq/web/application.rb +53 -64
  48. data/lib/sidekiq/web/csrf_protection.rb +8 -5
  49. data/lib/sidekiq/web/helpers.rb +73 -27
  50. data/lib/sidekiq/web/router.rb +5 -2
  51. data/lib/sidekiq/web.rb +54 -2
  52. data/lib/sidekiq.rb +5 -3
  53. data/sidekiq.gemspec +3 -2
  54. data/web/assets/javascripts/application.js +26 -0
  55. data/web/assets/javascripts/dashboard-charts.js +37 -11
  56. data/web/assets/javascripts/dashboard.js +14 -10
  57. data/web/assets/javascripts/metrics.js +34 -0
  58. data/web/assets/stylesheets/application-rtl.css +10 -0
  59. data/web/assets/stylesheets/application.css +38 -3
  60. data/web/locales/en.yml +3 -1
  61. data/web/locales/fr.yml +0 -1
  62. data/web/locales/gd.yml +0 -1
  63. data/web/locales/it.yml +32 -1
  64. data/web/locales/ja.yml +0 -1
  65. data/web/locales/pt-br.yml +1 -2
  66. data/web/locales/tr.yml +100 -0
  67. data/web/locales/uk.yml +24 -1
  68. data/web/locales/zh-cn.yml +0 -1
  69. data/web/locales/zh-tw.yml +0 -1
  70. data/web/views/_footer.erb +12 -1
  71. data/web/views/_metrics_period_select.erb +1 -1
  72. data/web/views/_summary.erb +7 -7
  73. data/web/views/busy.erb +7 -7
  74. data/web/views/dashboard.erb +29 -36
  75. data/web/views/filtering.erb +4 -5
  76. data/web/views/layout.erb +6 -6
  77. data/web/views/metrics.erb +38 -30
  78. data/web/views/metrics_for_job.erb +29 -38
  79. data/web/views/morgue.erb +2 -2
  80. data/web/views/queue.erb +1 -1
  81. data/web/views/queues.erb +6 -2
  82. metadata +33 -13
data/lib/sidekiq/job/iterable/csv_enumerator.rb ADDED
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module Sidekiq
+  module Job
+    module Iterable
+      # @api private
+      class CsvEnumerator
+        def initialize(csv)
+          unless defined?(CSV) && csv.instance_of?(CSV)
+            raise ArgumentError, "CsvEnumerator.new takes CSV object"
+          end
+
+          @csv = csv
+        end
+
+        def rows(cursor:)
+          @csv.lazy
+            .each_with_index
+            .drop(cursor || 0)
+            .to_enum { count_of_rows_in_file }
+        end
+
+        def batches(cursor:, batch_size: 100)
+          @csv.lazy
+            .each_slice(batch_size)
+            .with_index
+            .drop(cursor || 0)
+            .to_enum { (count_of_rows_in_file.to_f / batch_size).ceil }
+        end
+
+        private
+
+        def count_of_rows_in_file
+          filepath = @csv.path
+          return unless filepath
+
+          count = IO.popen(["wc", "-l", filepath]) do |out|
+            out.read.strip.to_i
+          end
+
+          count -= 1 if @csv.headers
+          count
+        end
+      end
+    end
+  end
+end
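For orientation, here is a minimal standalone sketch of how this enumerator behaves (the file name is hypothetical; jobs normally reach it through the csv_enumerator helpers added in the next file). rows(cursor:) yields [row, index] pairs so the index can be persisted as the cursor, and batches(cursor:) does the same for slices of rows.

require "csv"
require "sidekiq/job/iterable/csv_enumerator"

csv = CSV.open("import.csv", headers: true)           # assumed local file
enum = Sidekiq::Job::Iterable::CsvEnumerator.new(csv)

# Take the first two [row, index] pairs; the index is what gets stored
# as the iteration cursor between interruptions.
enum.rows(cursor: 0).first(2).each do |row, index|
  puts "cursor=#{index} row=#{row.to_h}"
end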
data/lib/sidekiq/job/iterable/enumerators.rb ADDED
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+require_relative "active_record_enumerator"
+require_relative "csv_enumerator"
+
+module Sidekiq
+  module Job
+    module Iterable
+      module Enumerators
+        # Builds Enumerator object from a given array, using +cursor+ as an offset.
+        #
+        # @param array [Array]
+        # @param cursor [Integer] offset to start iteration from
+        #
+        # @return [Enumerator]
+        #
+        # @example
+        #   array_enumerator(['build', 'enumerator', 'from', 'any', 'array'], cursor: cursor)
+        #
+        def array_enumerator(array, cursor:)
+          raise ArgumentError, "array must be an Array" unless array.is_a?(Array)
+
+          x = array.each_with_index.drop(cursor || 0)
+          x.to_enum { x.size }
+        end
+
+        # Builds Enumerator from `ActiveRecord::Relation`.
+        # Each Enumerator tick moves the cursor one row forward.
+        #
+        # @param relation [ActiveRecord::Relation] relation to iterate
+        # @param cursor [Object] offset id to start iteration from
+        # @param options [Hash] additional options that will be passed to relevant
+        #   ActiveRecord batching methods
+        #
+        # @return [ActiveRecordEnumerator]
+        #
+        # @example
+        #   def build_enumerator(cursor:)
+        #     active_record_records_enumerator(User.all, cursor: cursor)
+        #   end
+        #
+        #   def each_iteration(user)
+        #     user.notify_about_something
+        #   end
+        #
+        def active_record_records_enumerator(relation, cursor:, **options)
+          ActiveRecordEnumerator.new(relation, cursor: cursor, **options).records
+        end
+
+        # Builds Enumerator from `ActiveRecord::Relation` and enumerates on batches of records.
+        # Each Enumerator tick moves the cursor `:batch_size` rows forward.
+        # @see #active_record_records_enumerator
+        #
+        # @example
+        #   def build_enumerator(product_id, cursor:)
+        #     active_record_batches_enumerator(
+        #       Comment.where(product_id: product_id).select(:id),
+        #       cursor: cursor,
+        #       batch_size: 100
+        #     )
+        #   end
+        #
+        #   def each_iteration(batch_of_comments, product_id)
+        #     comment_ids = batch_of_comments.map(&:id)
+        #     CommentService.call(comment_ids: comment_ids)
+        #   end
+        #
+        def active_record_batches_enumerator(relation, cursor:, **options)
+          ActiveRecordEnumerator.new(relation, cursor: cursor, **options).batches
+        end
+
+        # Builds Enumerator from `ActiveRecord::Relation` and enumerates on batches,
+        # yielding `ActiveRecord::Relation`s.
+        # @see #active_record_records_enumerator
+        #
+        # @example
+        #   def build_enumerator(product_id, cursor:)
+        #     active_record_relations_enumerator(
+        #       Product.find(product_id).comments,
+        #       cursor: cursor,
+        #       batch_size: 100,
+        #     )
+        #   end
+        #
+        #   def each_iteration(batch_of_comments, product_id)
+        #     # batch_of_comments will be a Comment::ActiveRecord_Relation
+        #     batch_of_comments.update_all(deleted: true)
+        #   end
+        #
+        def active_record_relations_enumerator(relation, cursor:, **options)
+          ActiveRecordEnumerator.new(relation, cursor: cursor, **options).relations
+        end
+
+        # Builds Enumerator from a CSV file.
+        #
+        # @param csv [CSV] an instance of CSV object
+        # @param cursor [Integer] offset to start iteration from
+        #
+        # @example
+        #   def build_enumerator(import_id, cursor:)
+        #     import = Import.find(import_id)
+        #     csv_enumerator(import.csv, cursor: cursor)
+        #   end
+        #
+        #   def each_iteration(csv_row)
+        #     # insert csv_row into database
+        #   end
+        #
+        def csv_enumerator(csv, cursor:)
+          CsvEnumerator.new(csv).rows(cursor: cursor)
+        end
+
+        # Builds Enumerator from a CSV file and enumerates on batches of records.
+        #
+        # @param csv [CSV] an instance of CSV object
+        # @param cursor [Integer] offset to start iteration from
+        # @option options :batch_size [Integer] (100) size of the batch
+        #
+        # @example
+        #   def build_enumerator(import_id, cursor:)
+        #     import = Import.find(import_id)
+        #     csv_batches_enumerator(import.csv, cursor: cursor)
+        #   end
+        #
+        #   def each_iteration(batch_of_csv_rows)
+        #     # ...
+        #   end
+        #
+        def csv_batches_enumerator(csv, cursor:, **options)
+          CsvEnumerator.new(csv).batches(cursor: cursor, **options)
+        end
+      end
+    end
+  end
+end
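As a sketch of how these helpers compose inside a job (class and model names are hypothetical; Sidekiq::IterableJob is the convenience module added in this release, see file 17 above), array_enumerator pairs each element with its index, and that index is what gets persisted as the cursor:

class BackfillTagsJob
  include Sidekiq::IterableJob

  def build_enumerator(tags, cursor:)
    array_enumerator(tags, cursor: cursor)
  end

  # Receives the yielded element plus the original job arguments.
  def each_iteration(tag, _tags)
    Tag.find_or_create_by!(name: tag)   # hypothetical ActiveRecord model
  end
end

# BackfillTagsJob.perform_async(["ruby", "rails", "sidekiq"])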
data/lib/sidekiq/job/iterable.rb ADDED
@@ -0,0 +1,294 @@
+# frozen_string_literal: true
+
+require_relative "iterable/enumerators"
+
+module Sidekiq
+  module Job
+    class Interrupted < ::RuntimeError; end
+
+    module Iterable
+      include Enumerators
+
+      # @api private
+      def self.included(base)
+        base.extend(ClassMethods)
+      end
+
+      # @api private
+      module ClassMethods
+        def method_added(method_name)
+          raise "#{self} is an iterable job and must not define #perform" if method_name == :perform
+          super
+        end
+      end
+
+      # @api private
+      def initialize
+        super
+
+        @_executions = 0
+        @_cursor = nil
+        @_start_time = nil
+        @_runtime = 0
+        @_args = nil
+        @_cancelled = nil
+      end
+
+      def arguments
+        @_args
+      end
+
+      # Three days is the longest period you generally need to wait for a retry to
+      # execute when using the default retry scheme. We don't want to "forget" the job
+      # is cancelled before it has a chance to execute and cancel itself.
+      CANCELLATION_PERIOD = (3 * 86_400).to_s
+
+      # Set a flag in Redis to mark this job as cancelled.
+      # Cancellation is asynchronous and is checked at the start of iteration
+      # and every 5 seconds thereafter as part of the recurring state flush.
+      def cancel!
+        return @_cancelled if cancelled?
+
+        key = "it-#{jid}"
+        _, result, _ = Sidekiq.redis do |c|
+          c.pipelined do |p|
+            p.hsetnx(key, "cancelled", Time.now.to_i)
+            p.hget(key, "cancelled")
+            # TODO When Redis 7.2 is required
+            # p.expire(key, Sidekiq::Job::Iterable::STATE_TTL, "nx")
+            p.expire(key, Sidekiq::Job::Iterable::STATE_TTL)
+          end
+        end
+        @_cancelled = result.to_i
+      end
+
+      def cancelled?
+        @_cancelled
+      end
+
+      # A hook to override that will be called when the job starts iterating.
+      #
+      # It is called only once, for the first time.
+      #
+      def on_start
+      end
+
+      # A hook to override that will be called around each iteration.
+      #
+      # Can be useful for some metrics collection, performance tracking etc.
+      #
+      def around_iteration
+        yield
+      end
+
+      # A hook to override that will be called when the job resumes iterating.
+      #
+      def on_resume
+      end
+
+      # A hook to override that will be called each time the job is interrupted.
+      #
+      # This can be due to interruption or sidekiq stopping.
+      #
+      def on_stop
+      end
+
+      # A hook to override that will be called when the job finished iterating.
+      #
+      def on_complete
+      end
+
+      # The enumerator to be iterated over.
+      #
+      # @return [Enumerator]
+      #
+      # @raise [NotImplementedError] with a message advising subclasses to
+      #   implement an override for this method.
+      #
+      def build_enumerator(*)
+        raise NotImplementedError, "#{self.class.name} must implement a '#build_enumerator' method"
+      end
+
+      # The action to be performed on each item from the enumerator.
+      #
+      # @return [void]
+      #
+      # @raise [NotImplementedError] with a message advising subclasses to
+      #   implement an override for this method.
+      #
+      def each_iteration(*)
+        raise NotImplementedError, "#{self.class.name} must implement an '#each_iteration' method"
+      end
+
+      def iteration_key
+        "it-#{jid}"
+      end
+
+      # @api private
+      def perform(*args)
+        @_args = args.dup.freeze
+        fetch_previous_iteration_state
+
+        @_executions += 1
+        @_start_time = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+
+        enumerator = build_enumerator(*args, cursor: @_cursor)
+        unless enumerator
+          logger.info("'#build_enumerator' returned nil, skipping the job.")
+          return
+        end
+
+        assert_enumerator!(enumerator)
+
+        if @_executions == 1
+          on_start
+        else
+          on_resume
+        end
+
+        completed = catch(:abort) do
+          iterate_with_enumerator(enumerator, args)
+        end
+
+        on_stop
+        completed = handle_completed(completed)
+
+        if completed
+          on_complete
+          cleanup
+        else
+          reenqueue_iteration_job
+        end
+      end
+
+      private
+
+      def is_cancelled?
+        @_cancelled = Sidekiq.redis { |c| c.hget("it-#{jid}", "cancelled") }
+      end
+
+      def fetch_previous_iteration_state
+        state = Sidekiq.redis { |conn| conn.hgetall(iteration_key) }
+
+        unless state.empty?
+          @_executions = state["ex"].to_i
+          @_cursor = Sidekiq.load_json(state["c"])
+          @_runtime = state["rt"].to_f
+        end
+      end
+
+      STATE_FLUSH_INTERVAL = 5 # seconds
+      # we need to keep the state around as long as the job
+      # might be retrying
+      STATE_TTL = 30 * 24 * 60 * 60 # one month
+
+      def iterate_with_enumerator(enumerator, arguments)
+        if is_cancelled?
+          logger.info { "Job cancelled" }
+          return true
+        end
+
+        time_limit = Sidekiq.default_configuration[:timeout]
+        found_record = false
+        state_flushed_at = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+
+        enumerator.each do |object, cursor|
+          found_record = true
+          @_cursor = cursor
+
+          is_interrupted = interrupted?
+          if ::Process.clock_gettime(::Process::CLOCK_MONOTONIC) - state_flushed_at >= STATE_FLUSH_INTERVAL || is_interrupted
+            _, _, cancelled = flush_state
+            state_flushed_at = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+            if cancelled
+              @_cancelled = true
+              logger.info { "Job cancelled" }
+              return true
+            end
+          end
+
+          return false if is_interrupted
+
+          verify_iteration_time(time_limit, object) do
+            around_iteration do
+              each_iteration(object, *arguments)
+            end
+          end
+        end
+
+        logger.debug("Enumerator found nothing to iterate!") unless found_record
+        true
+      ensure
+        @_runtime += (::Process.clock_gettime(::Process::CLOCK_MONOTONIC) - @_start_time)
+      end
+
+      def verify_iteration_time(time_limit, object)
+        start = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+        yield
+        finish = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
+        total = finish - start
+        if total > time_limit
+          logger.warn { "Iteration took longer (%.2f) than Sidekiq's shutdown timeout (%d) when processing `%s`. This can lead to job processing problems during deploys" % [total, time_limit, object] }
+        end
+      end
+
+      def reenqueue_iteration_job
+        flush_state
+        logger.debug { "Interrupting job (cursor=#{@_cursor.inspect})" }
+
+        raise Interrupted
+      end
+
+      def assert_enumerator!(enum)
+        unless enum.is_a?(Enumerator)
+          raise ArgumentError, <<~MSG
+            #build_enumerator must return an Enumerator, but returned #{enum.class}.
+            Example:
+              def build_enumerator(params, cursor:)
+                active_record_records_enumerator(
+                  Shop.find(params["shop_id"]).products,
+                  cursor: cursor
+                )
+              end
+          MSG
+        end
+      end
+
+      def flush_state
+        key = iteration_key
+        state = {
+          "ex" => @_executions,
+          "c" => Sidekiq.dump_json(@_cursor),
+          "rt" => @_runtime
+        }
+
+        Sidekiq.redis do |conn|
+          conn.multi do |pipe|
+            pipe.hset(key, state)
+            pipe.expire(key, STATE_TTL)
+            pipe.hget(key, "cancelled")
+          end
+        end
+      end
+
+      def cleanup
+        logger.debug {
+          format("Completed iteration. executions=%d runtime=%.3f", @_executions, @_runtime)
+        }
+        Sidekiq.redis { |conn| conn.unlink(iteration_key) }
+      end
+
+      def handle_completed(completed)
+        case completed
+        when nil, # someone aborted the job but wants to call the on_complete callback
+             true
+          true
+        when false
+          false
+        else
+          raise "Unexpected thrown value: #{completed.inspect}"
+        end
+      end
+    end
+  end
+end
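Putting the lifecycle together, here is a hedged sketch of a job built on this module (class and model names are illustrative; Sidekiq::IterableJob bundles Sidekiq::Job with Iterable, see file 17 above). On the first run on_start fires, the cursor is flushed to the it-<jid> Redis hash every 5 seconds, an interrupted run re-enqueues itself and resumes from the stored cursor via on_resume, and a cancellation flag is noticed at the next flush.

class ArchiveOrdersJob
  include Sidekiq::IterableJob

  def on_start
    logger.info { "starting archive run" }
  end

  def on_resume
    logger.info { "resuming a previously interrupted run" }
  end

  def build_enumerator(cursor:)
    # yields [record, cursor] pairs; the cursor is persisted between runs
    active_record_records_enumerator(Order.where(state: "stale"), cursor: cursor)
  end

  def each_iteration(order)
    order.archive!   # hypothetical model method
  end
end

jid = ArchiveOrdersJob.perform_async

# Cancellation sketch based on the "it-#{jid}" hash this module reads;
# the running job sees the flag at its next state flush.
Sidekiq.redis { |c| c.hset("it-#{jid}", "cancelled", Time.now.to_i) }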
data/lib/sidekiq/job.rb CHANGED
@@ -69,7 +69,11 @@ module Sidekiq
     # In practice, any option is allowed. This is the main mechanism to configure the
     # options for a specific job.
     def sidekiq_options(opts = {})
-      opts = opts.transform_keys(&:to_s) # stringify
+      # stringify 2 levels of keys
+      opts = opts.to_h do |k, v|
+        [k.to_s, (Hash === v) ? v.transform_keys(&:to_s) : v]
+      end
+
       self.sidekiq_options_hash = get_sidekiq_options.merge(opts)
     end
 
@@ -109,7 +113,7 @@ module Sidekiq
         m = "#{name}="
         undef_method(m) if method_defined?(m) || private_method_defined?(m)
       end
-      define_singleton_method("#{name}=") do |val|
+      define_singleton_method(:"#{name}=") do |val|
         singleton_class.class_eval do
           ACCESSOR_MUTEX.synchronize do
             undef_method(synchronized_getter) if method_defined?(synchronized_getter) || private_method_defined?(synchronized_getter)
@@ -155,6 +159,9 @@ module Sidekiq
 
     attr_accessor :jid
 
+    # This attribute is implementation-specific and not a public API
+    attr_accessor :_context
+
     def self.included(base)
       raise ArgumentError, "Sidekiq::Job cannot be included in an ActiveJob: #{base.name}" if base.ancestors.any? { |c| c.name == "ActiveJob::Base" }
 
@@ -166,6 +173,10 @@ module Sidekiq
       Sidekiq.logger
     end
 
+    def interrupted?
+      @_context&.stopping?
+    end
+
     # This helper class encapsulates the set options for `set`, e.g.
     #
     #   SomeJob.set(queue: 'foo').perform_async(....)
@@ -366,7 +377,7 @@ module Sidekiq
 
     def build_client # :nodoc:
       pool = Thread.current[:sidekiq_redis_pool] || get_sidekiq_options["pool"] || Sidekiq.default_configuration.redis_pool
-      client_class = get_sidekiq_options["client_class"] || Sidekiq::Client
+      client_class = Thread.current[:sidekiq_client_class] || get_sidekiq_options["client_class"] || Sidekiq::Client
       client_class.new(pool: pool)
     end
   end
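The sidekiq_options change above now stringifies keys one level into nested hashes, so symbol-keyed sub-hashes are stored consistently. A small sketch of the effect (the custom option name is illustrative):

class HardJob
  include Sidekiq::Job
  # both the top-level keys and the keys inside the nested hash end up as strings
  sidekiq_options retry: 3, custom: {tenant: "acme", priority: 2}
end

HardJob.get_sidekiq_options["custom"]   #=> {"tenant" => "acme", "priority" => 2}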
data/lib/sidekiq/job_logger.rb CHANGED
@@ -2,22 +2,23 @@
 
 module Sidekiq
   class JobLogger
-    def initialize(logger)
-      @logger = logger
+    def initialize(config)
+      @config = config
+      @logger = @config.logger
+      @skip = !!@config[:skip_default_job_logging]
     end
 
     def call(item, queue)
       start = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
-      @logger.info("start")
+      @logger.info { "start" } unless @skip
 
       yield
 
       Sidekiq::Context.add(:elapsed, elapsed(start))
-      @logger.info("done")
+      @logger.info { "done" } unless @skip
     rescue Exception
       Sidekiq::Context.add(:elapsed, elapsed(start))
-      @logger.info("fail")
-
+      @logger.info { "fail" } unless @skip
       raise
     end
 
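The rewritten JobLogger now takes the whole config object and honors a new skip_default_job_logging option, which silences the per-job "start"/"done"/"fail" lines. A hedged sketch of enabling it, assuming the usual configure_server entry point:

Sidekiq.configure_server do |config|
  # suppress the default start/done/fail log lines around each job
  config[:skip_default_job_logging] = true
end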
data/lib/sidekiq/job_retry.rb CHANGED
@@ -1,7 +1,6 @@
 # frozen_string_literal: true
 
 require "zlib"
-require "base64"
 require "sidekiq/component"
 
 module Sidekiq
@@ -60,8 +59,13 @@ module Sidekiq
   #   end
   #
   class JobRetry
+    # Handled means the job failed but has been dealt with
+    # (by creating a retry, rescheduling it, etc). It still
+    # needs to be logged and dispatched to error_handlers.
     class Handled < ::RuntimeError; end
 
+    # Skip means the job failed but Sidekiq does not need to
+    # create a retry, log it or send to error_handlers.
     class Skip < Handled; end
 
     include Sidekiq::Component
@@ -130,7 +134,7 @@ module Sidekiq
       process_retry(jobinst, msg, queue, e)
       # We've handled this error associated with this job, don't
       # need to handle it at the global level
-      raise Skip
+      raise Handled
     end
 
     private
@@ -226,7 +230,7 @@ module Sidekiq
     end
 
     def retries_exhausted(jobinst, msg, exception)
-      begin
+      rv = begin
         block = jobinst&.sidekiq_retries_exhausted_block
 
         # the sidekiq_retries_exhausted_block can be defined in a wrapped class (ActiveJob for instance)
@@ -239,6 +243,7 @@ module Sidekiq
         handle_exception(e, {context: "Error calling retries_exhausted", job: msg})
       end
 
+      return if rv == :discard # poof!
      send_to_morgue(msg) unless msg["dead"] == false
 
      @capsule.config.death_handlers.each do |handler|
@@ -294,7 +299,7 @@ module Sidekiq
     def compress_backtrace(backtrace)
       serialized = Sidekiq.dump_json(backtrace)
       compressed = Zlib::Deflate.deflate(serialized)
-      Base64.encode64(compressed)
+      [compressed].pack("m0") # Base64.strict_encode64
     end
   end
 end
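The rv = begin ... end change means a sidekiq_retries_exhausted block can now return :discard to drop the job outright instead of sending it to the Dead set. A sketch (job and error class are hypothetical):

class ImportJob
  include Sidekiq::Job
  sidekiq_options retry: 5

  sidekiq_retries_exhausted do |msg, exception|
    # Returning :discard skips send_to_morgue, so the job never reaches the Dead set.
    next :discard if exception.is_a?(RecordGone)
    Sidekiq.logger.error("Giving up on #{msg["class"]} #{msg["jid"]}")
  end

  def perform(import_id)
    # ...
  end
end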
data/lib/sidekiq/job_util.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "securerandom"
 require "time"
 
data/lib/sidekiq/launcher.rb CHANGED
@@ -36,8 +36,8 @@ module Sidekiq
     # has a heartbeat thread, caller can use `async_beat: false`
     # and instead have thread call Launcher#heartbeat every N seconds.
     def run(async_beat: true)
-      Sidekiq.freeze!
       logger.debug { @config.merge!({}) }
+      Sidekiq.freeze!
       @thread = safe_thread("heartbeat", &method(:start_heartbeat)) if async_beat
       @poller.start
       @managers.each(&:start)
@@ -145,15 +145,17 @@ module Sidekiq
       flush_stats
 
       curstate = Processor::WORK_STATE.dup
+      curstate.transform_values! { |val| Sidekiq.dump_json(val) }
+
       redis do |conn|
         # work is the current set of executing jobs
         work_key = "#{key}:work"
-        conn.pipelined do |transaction|
+        conn.multi do |transaction|
           transaction.unlink(work_key)
-          curstate.each_pair do |tid, hash|
-            transaction.hset(work_key, tid, Sidekiq.dump_json(hash))
+          if curstate.size > 0
+            transaction.hset(work_key, curstate)
+            transaction.expire(work_key, 60)
           end
-          transaction.expire(work_key, 60)
         end
       end
 
data/lib/sidekiq/logger.rb CHANGED
@@ -36,7 +36,7 @@ module Sidekiq
     end
 
     LEVELS.each do |level, numeric_level|
-      define_method("#{level}?") do
+      define_method(:"#{level}?") do
        local_level.nil? ? super() : local_level <= numeric_level
      end
    end
data/lib/sidekiq/metrics/query.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "sidekiq"
 require "date"
 require "set"
@@ -20,7 +22,8 @@ module Sidekiq
       end
 
       # Get metric data for all jobs from the last hour
-      def top_jobs(minutes: 60)
+      # +class_filter+: return only results for classes matching filter
+      def top_jobs(class_filter: nil, minutes: 60)
         result = Result.new
 
         time = @time
@@ -39,6 +42,7 @@ module Sidekiq
         redis_results.each do |hash|
           hash.each do |k, v|
             kls, metric = k.split("|")
+            next if class_filter && !class_filter.match?(kls)
             result.job_results[kls].add_metric metric, time, v.to_i
           end
           time -= 60
@@ -117,6 +121,7 @@ module Sidekiq
 
       def total_avg(metric = "ms")
         completed = totals["p"] - totals["f"]
+        return 0 if completed.zero?
         totals[metric].to_f / completed
       end
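The new class_filter: keyword narrows the metrics query to job classes matching the filter; it is checked with match?, so a Regexp works. A hedged sketch, assuming Query can be built with its default constructor arguments:

require "sidekiq/metrics/query"

# Per-minute metrics for the last 30 minutes, only for classes matching /Order/.
query = Sidekiq::Metrics::Query.new
result = query.top_jobs(class_filter: /Order/, minutes: 30)
result.job_results.keys   #=> e.g. ["OrderSyncJob", "OrderMailerJob"]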