sidekiq-unique-jobs 7.1.20 → 7.1.30

Files changed (48)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +135 -1
  3. data/README.md +4 -2
  4. data/lib/sidekiq_unique_jobs/batch_delete.rb +2 -1
  5. data/lib/sidekiq_unique_jobs/cli.rb +33 -8
  6. data/lib/sidekiq_unique_jobs/config.rb +5 -0
  7. data/lib/sidekiq_unique_jobs/constants.rb +1 -0
  8. data/lib/sidekiq_unique_jobs/core_ext.rb +1 -1
  9. data/lib/sidekiq_unique_jobs/digests.rb +2 -2
  10. data/lib/sidekiq_unique_jobs/exceptions.rb +3 -3
  11. data/lib/sidekiq_unique_jobs/expiring_digests.rb +14 -0
  12. data/lib/sidekiq_unique_jobs/job.rb +5 -0
  13. data/lib/sidekiq_unique_jobs/key.rb +13 -8
  14. data/lib/sidekiq_unique_jobs/lock/until_executed.rb +1 -0
  15. data/lib/sidekiq_unique_jobs/lock/until_executing.rb +4 -0
  16. data/lib/sidekiq_unique_jobs/lock/while_executing.rb +1 -2
  17. data/lib/sidekiq_unique_jobs/lock.rb +18 -1
  18. data/lib/sidekiq_unique_jobs/lock_args.rb +18 -14
  19. data/lib/sidekiq_unique_jobs/lock_config.rb +4 -4
  20. data/lib/sidekiq_unique_jobs/lock_digest.rb +7 -7
  21. data/lib/sidekiq_unique_jobs/lock_timeout.rb +4 -4
  22. data/lib/sidekiq_unique_jobs/lock_ttl.rb +4 -4
  23. data/lib/sidekiq_unique_jobs/lock_type.rb +37 -0
  24. data/lib/sidekiq_unique_jobs/locksmith.rb +40 -10
  25. data/lib/sidekiq_unique_jobs/logging.rb +14 -0
  26. data/lib/sidekiq_unique_jobs/lua/lock.lua +15 -9
  27. data/lib/sidekiq_unique_jobs/lua/lock_until_expired.lua +92 -0
  28. data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +31 -3
  29. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_process_set.lua +1 -1
  30. data/lib/sidekiq_unique_jobs/lua/unlock.lua +5 -0
  31. data/lib/sidekiq_unique_jobs/middleware/client.rb +2 -0
  32. data/lib/sidekiq_unique_jobs/middleware/server.rb +2 -0
  33. data/lib/sidekiq_unique_jobs/middleware.rb +4 -4
  34. data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +3 -3
  35. data/lib/sidekiq_unique_jobs/options_with_fallback.rb +4 -4
  36. data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +1 -1
  37. data/lib/sidekiq_unique_jobs/orphans/manager.rb +2 -2
  38. data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +30 -8
  39. data/lib/sidekiq_unique_jobs/script/caller.rb +7 -7
  40. data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +13 -3
  41. data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +33 -24
  42. data/lib/sidekiq_unique_jobs/testing.rb +31 -13
  43. data/lib/sidekiq_unique_jobs/version.rb +1 -1
  44. data/lib/sidekiq_unique_jobs/web/helpers.rb +10 -0
  45. data/lib/sidekiq_unique_jobs/web/views/lock.erb +5 -3
  46. data/lib/sidekiq_unique_jobs/web.rb +22 -3
  47. data/lib/sidekiq_unique_jobs.rb +2 -0
  48. metadata +22 -5
@@ -33,8 +33,8 @@ module SidekiqUniqueJobs
     # @option item [String] :class the class of the sidekiq worker
     # @option item [Float] :at the unix time the job is scheduled at
     def initialize(item)
-      @item         = item
-      @worker_class = item[CLASS]
+      @item = item
+      self.job_class = item[CLASS]
     end

     #
@@ -67,9 +67,9 @@ module SidekiqUniqueJobs
     #
     def calculate
       ttl = item[LOCK_TTL]
-      ttl ||= worker_options[LOCK_TTL]
+      ttl ||= job_options[LOCK_TTL]
       ttl ||= item[LOCK_EXPIRATION] # TODO: Deprecate at some point
-      ttl ||= worker_options[LOCK_EXPIRATION] # TODO: Deprecate at some point
+      ttl ||= job_options[LOCK_EXPIRATION] # TODO: Deprecate at some point
       ttl ||= SidekiqUniqueJobs.config.lock_ttl
       ttl && (ttl.to_i + time_until_scheduled)
     end
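The TTL resolution above prefers an explicit `lock_ttl` on the job hash, then the worker's `sidekiq_options`, then the deprecated `lock_expiration` variants, then the global `SidekiqUniqueJobs.config.lock_ttl`, and finally adds the time until the scheduled run. A rough worked example (worker name and numbers are illustrative, not from the diff):

```ruby
# Hypothetical worker whose lock should outlive execution by 10 minutes.
class ReportWorker
  include Sidekiq::Worker
  sidekiq_options lock: :until_executed, lock_ttl: 600
end

# Scheduling the job 300 seconds out:
ReportWorker.perform_in(300, 42)
# calculate => job_options["lock_ttl"].to_i + time_until_scheduled
#           => 600 + 300 = 900 seconds of lock TTL
```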
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module SidekiqUniqueJobs
+  # Calculates the lock type
+  #
+  class LockType
+    # includes "SidekiqUniqueJobs::SidekiqWorkerMethods"
+    # @!parse include SidekiqUniqueJobs::SidekiqWorkerMethods
+    include SidekiqUniqueJobs::SidekiqWorkerMethods
+
+    #
+    # Computes lock type from job arguments, sidekiq_options.
+    #
+    # @return [Symbol] the lock type
+    # @return [NilClass] if no lock type is found.
+    #
+    def self.call(item)
+      new(item).call
+    end
+
+    # @!attribute [r] item
+    #   @return [Hash] the Sidekiq job hash
+    attr_reader :item
+
+    # @param [Hash] item the Sidekiq job hash
+    # @option item [Symbol, nil] :lock the type of lock to use.
+    # @option item [String] :class the class of the sidekiq worker
+    def initialize(item)
+      @item = item
+      self.job_class = item[CLASS]
+    end
+
+    def call
+      item[LOCK] || job_options[LOCK] || default_job_options[LOCK]
+    end
+  end
+end
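The resolution order in `LockType#call` is: an explicit `lock` on the job hash, then the worker's `sidekiq_options`, then Sidekiq's default job options. A minimal sketch of that precedence (the worker here is hypothetical):

```ruby
class NotificationWorker
  include Sidekiq::Worker
  sidekiq_options lock: :until_executed # surfaced through job_options[LOCK]
end

# An explicit lock on the item wins over sidekiq_options:
SidekiqUniqueJobs::LockType.call("class" => "NotificationWorker", "lock" => "until_expired")
# => "until_expired"

# Without one, the worker's sidekiq_options decide:
SidekiqUniqueJobs::LockType.call("class" => "NotificationWorker")
# => :until_executed
```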
@@ -32,6 +32,7 @@ module SidekiqUniqueJobs
     #
     # @return [Float] used to take into consideration the inaccuracy of redis timestamps
     CLOCK_DRIFT_FACTOR = 0.01
+    NETWORK_FACTOR = 0.04

     #
     # @!attribute [r] key
@@ -126,7 +127,10 @@ module SidekiqUniqueJobs
     #
     def unlock!(conn = nil)
       call_script(:unlock, key.to_a, argv, conn) do |unlocked_jid|
-        reflect(:debug, :unlocked, item, unlocked_jid) if unlocked_jid == job_id
+        if unlocked_jid == job_id
+          reflect(:debug, :unlocked, item, unlocked_jid)
+          reflect(:unlocked, item)
+        end

         unlocked_jid
       end
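Besides the debug reflection, `unlock!` now emits a plain `:unlocked` reflection, so the event can be observed from application code. A hedged sketch, assuming the 7.x `SidekiqUniqueJobs.reflect` subscription API:

```ruby
SidekiqUniqueJobs.reflect do |on|
  # Invoked after a lock is released; `item` is the sidekiq job hash.
  on.unlocked do |item|
    Sidekiq.logger.info("unique lock released for #{item['lock_digest']}")
  end
end
```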
@@ -184,22 +188,23 @@ module SidekiqUniqueJobs
     #
     # @param [Sidekiq::RedisConnection, ConnectionPool] conn the redis connection
     # @param [Method] primed_method reference to the method to use for getting a primed token
+    # @param [nil, Integer, Float] time to wait before timeout
     #
     # @yieldparam [string] job_id the sidekiq JID
     # @yieldreturn [void] whatever the calling block returns
     def lock!(conn, primed_method, wait = nil)
-      return yield job_id if locked?(conn)
+      return yield if locked?(conn)

       enqueue(conn) do |queued_jid|
-        reflect(:debug, item, queued_jid)
+        reflect(:debug, :queued, item, queued_jid)

         primed_method.call(conn, wait) do |primed_jid|
           reflect(:debug, :primed, item, primed_jid)
-
           locked_jid = call_script(:lock, key.to_a, argv, conn)
+
           if locked_jid
             reflect(:debug, :locked, item, locked_jid)
-            return yield job_id
+            return yield
           end
         end
       end
@@ -239,9 +244,22 @@ module SidekiqUniqueJobs
     # @return [Object] whatever the block returns when lock was acquired
     #
     def primed_async(conn, wait = nil, &block)
+      timeout = (wait || config.timeout).to_i
+      timeout = 1 if timeout.zero?
+
+      brpoplpush_timeout = timeout
+      concurrent_timeout = add_drift(timeout)
+
+      reflect(:debug, :timeouts, item,
+              timeouts: {
+                brpoplpush_timeout: brpoplpush_timeout,
+                concurrent_timeout: concurrent_timeout,
+              })
+
+      # NOTE: When debugging, change .value to .value!
       primed_jid = Concurrent::Promises
-                   .future(conn) { |red_con| pop_queued(red_con, wait) }
-                   .value(add_drift(wait || config.timeout))
+                   .future(conn) { |red_con| pop_queued(red_con, timeout) }
+                   .value

       handle_primed(primed_jid, &block)
     end
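The wait is normalised into a non-zero integer before it reaches the blocking Redis call, because a timeout of 0 would block forever. A small illustration of the clamping (assuming the gem's default lock timeout of 0):

```ruby
wait    = nil                 # caller did not ask to wait for the lock
timeout = (wait || 0).to_i    # stand-in for config.timeout, assumed 0 by default
timeout = 1 if timeout.zero?  # never hand 0 to BRPOPLPUSH/BLMOVE

timeout # => 1: used for the blocking pop, while the surrounding promise
        #    is given add_drift(timeout) as extra headroom
```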
@@ -273,7 +291,7 @@ module SidekiqUniqueJobs
     #
     # @return [String] a previously enqueued token (now taken off the queue)
     #
-    def pop_queued(conn, wait = nil)
+    def pop_queued(conn, wait = 1)
       wait ||= config.timeout if config.wait_for_lock?

       if wait.nil?
@@ -287,10 +305,18 @@ module SidekiqUniqueJobs
     # @api private
     #
     def brpoplpush(conn, wait)
+      # passing timeout 0 to brpoplpush causes it to block indefinitely
       raise InvalidArgument, "wait must be an integer" unless wait.is_a?(Integer)

-      # passing timeout 0 to brpoplpush causes it to block indefinitely
-      conn.brpoplpush(key.queued, key.primed, timeout: wait)
+      if defined?(::Redis::Namespace) && conn.instance_of?(::Redis::Namespace)
+        return conn.brpoplpush(key.queued, key.primed, wait)
+      end
+
+      if VersionCheck.satisfied?(redis_version, ">= 6.2.0") && conn.respond_to?(:blmove)
+        conn.blmove(key.queued, key.primed, "RIGHT", "LEFT", timeout: wait)
+      else
+        conn.brpoplpush(key.queued, key.primed, timeout: wait)
+      end
     end

     #
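The primed token is therefore popped with `BLMOVE` on Redis >= 6.2 when the client exposes it, with the older `BRPOPLPUSH` otherwise, and `Redis::Namespace` connections keep the legacy positional-argument call. A hedged sketch of the same gating outside the gem (method and key names are illustrative):

```ruby
# Assumes a plain redis-rb connection; queued/primed are list keys.
def pop_primed(conn, queued, primed, wait)
  if conn.respond_to?(:blmove)
    # Redis >= 6.2: BLMOVE supersedes the deprecated BRPOPLPUSH
    conn.blmove(queued, primed, "RIGHT", "LEFT", timeout: wait)
  else
    conn.brpoplpush(queued, primed, timeout: wait)
  end
end
```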
@@ -359,5 +385,9 @@ module SidekiqUniqueJobs
         TIME => now_f,
       )
     end
+
+    def redis_version
+      @redis_version ||= SidekiqUniqueJobs.config.redis_version
+    end
   end
 end
@@ -30,6 +30,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_debug(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.debug(message, &block)
       nil
@@ -45,6 +47,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_info(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.info(message, &block)
       nil
@@ -60,6 +64,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_warn(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.warn(message, &block)
       nil
@@ -75,6 +81,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_error(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.error(message, &block)
       nil
@@ -90,6 +98,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_fatal(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.fatal(message, &block)

@@ -218,5 +228,9 @@ module SidekiqUniqueJobs

       yield
     end
+
+    def logging?
+      SidekiqUniqueJobs.logging?
+    end
   end
 end
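Each helper now bails out before building the message when the gem's logging is switched off, so no formatting work is done for suppressed levels. A hedged example of turning it off globally (the exact option name on `SidekiqUniqueJobs::Config` should be verified; `logger_enabled` is an assumption):

```ruby
SidekiqUniqueJobs.configure do |config|
  config.logger_enabled = false # assumed toggle backing SidekiqUniqueJobs.logging?
end

SidekiqUniqueJobs.logging? # => false, so log_debug/log_info/... return early
```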
@@ -1,11 +1,12 @@
 -------- BEGIN keys ---------
-local digest    = KEYS[1]
-local queued    = KEYS[2]
-local primed    = KEYS[3]
-local locked    = KEYS[4]
-local info      = KEYS[5]
-local changelog = KEYS[6]
-local digests   = KEYS[7]
+local digest           = KEYS[1]
+local queued           = KEYS[2]
+local primed           = KEYS[3]
+local locked           = KEYS[4]
+local info             = KEYS[5]
+local changelog        = KEYS[6]
+local digests          = KEYS[7]
+local expiring_digests = KEYS[8]
 -------- END keys ---------


@@ -57,8 +58,13 @@ if limit_exceeded then
   return nil
 end

-log_debug("ZADD", digests, current_time, digest)
-redis.call("ZADD", digests, current_time, digest)
+if lock_type == "until_expired" and pttl and pttl > 0 then
+  log_debug("ZADD", expiring_digests, current_time + pttl, digest)
+  redis.call("ZADD", expiring_digests, current_time + pttl, digest)
+else
+  log_debug("ZADD", digests, current_time, digest)
+  redis.call("ZADD", digests, current_time, digest)
+end

 log_debug("HSET", locked, job_id, current_time)
 redis.call("HSET", locked, job_id, current_time)
@@ -0,0 +1,92 @@
+-------- BEGIN keys ---------
+local digest           = KEYS[1]
+local queued           = KEYS[2]
+local primed           = KEYS[3]
+local locked           = KEYS[4]
+local info             = KEYS[5]
+local changelog        = KEYS[6]
+local digests          = KEYS[7]
+local expiring_digests = KEYS[8]
+-------- END keys ---------
+
+
+-------- BEGIN lock arguments ---------
+local job_id    = ARGV[1]
+local pttl      = tonumber(ARGV[2])
+local lock_type = ARGV[3]
+local limit     = tonumber(ARGV[4])
+-------- END lock arguments -----------
+
+
+-------- BEGIN injected arguments --------
+local current_time = tonumber(ARGV[5])
+local debug_lua    = ARGV[6] == "true"
+local max_history  = tonumber(ARGV[7])
+local script_name  = tostring(ARGV[8]) .. ".lua"
+local redisversion = ARGV[9]
+--------- END injected arguments ---------
+
+
+-------- BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+---------- END local functions ----------
+
+
+--------- BEGIN lock.lua ---------
+log_debug("BEGIN lock digest:", digest, "job_id:", job_id)
+
+if redis.call("HEXISTS", locked, job_id) == 1 then
+  log_debug(locked, "already locked with job_id:", job_id)
+  log("Duplicate")
+
+  log_debug("LREM", queued, -1, job_id)
+  redis.call("LREM", queued, -1, job_id)
+
+  log_debug("LREM", primed, 1, job_id)
+  redis.call("LREM", primed, 1, job_id)
+
+  return job_id
+end
+
+local locked_count   = redis.call("HLEN", locked)
+local within_limit   = limit > locked_count
+local limit_exceeded = not within_limit
+
+if limit_exceeded then
+  log_debug("Limit exceeded:", digest, "(", locked_count, "of", limit, ")")
+  log("Limited")
+  return nil
+end
+
+log_debug("ZADD", expiring_digests, current_time + pttl, digest)
+redis.call("ZADD", expiring_digests, current_time + pttl, digest)
+
+log_debug("HSET", locked, job_id, current_time)
+redis.call("HSET", locked, job_id, current_time)
+
+log_debug("LREM", queued, -1, job_id)
+redis.call("LREM", queued, -1, job_id)
+
+log_debug("LREM", primed, 1, job_id)
+redis.call("LREM", primed, 1, job_id)
+
+-- The Sidekiq client sets pttl
+log_debug("PEXPIRE", digest, pttl)
+redis.call("PEXPIRE", digest, pttl)
+
+log_debug("PEXPIRE", locked, pttl)
+redis.call("PEXPIRE", locked, pttl)
+
+log_debug("PEXPIRE", info, pttl)
+redis.call("PEXPIRE", info, pttl)
+
+log_debug("PEXPIRE", queued, 1000)
+redis.call("PEXPIRE", queued, 1000)
+
+log_debug("PEXPIRE", primed, 1000)
+redis.call("PEXPIRE", primed, 1000)
+
+log("Locked")
+log_debug("END lock digest:", digest, "job_id:", job_id)
+return job_id
+---------- END lock.lua ----------
@@ -1,9 +1,10 @@
 redis.replicate_commands()

 -------- BEGIN keys ---------
-local digests_set  = KEYS[1]
-local schedule_set = KEYS[2]
-local retry_set    = KEYS[3]
+local digests_set          = KEYS[1]
+local expiring_digests_set = KEYS[2]
+local schedule_set         = KEYS[3]
+local retry_set            = KEYS[4]
 -------- END keys ---------

 -------- BEGIN argv ---------
@@ -90,5 +91,32 @@ repeat
   index = index + per
 until index >= total or del_count >= reaper_count

+if del_count < reaper_count then
+  index = 0
+  total = redis.call("ZCOUNT", expiring_digests_set, 0, current_time)
+  repeat
+    local digests = redis.call("ZRANGEBYSCORE", expiring_digests_set, 0, current_time, "LIMIT", index, index + per -1)
+
+    for _, digest in pairs(digests) do
+      local queued     = digest .. ":QUEUED"
+      local primed     = digest .. ":PRIMED"
+      local locked     = digest .. ":LOCKED"
+      local info       = digest .. ":INFO"
+      local run_digest = digest .. ":RUN"
+      local run_queued = digest .. ":RUN:QUEUED"
+      local run_primed = digest .. ":RUN:PRIMED"
+      local run_locked = digest .. ":RUN:LOCKED"
+      local run_info   = digest .. ":RUN:INFO"
+
+      redis.call(del_cmd, digest, queued, primed, locked, info, run_digest, run_queued, run_primed, run_locked, run_info)
+
+      redis.call("ZREM", expiring_digests_set, digest)
+      del_count = del_count + 1
+    end
+
+    index = index + per
+  until index >= total or del_count >= reaper_count
+end
+
 log_debug("END")
 return del_count
@@ -15,7 +15,7 @@ local function find_digest_in_process_set(digest, threshold)
   log_debug("Found number of processes:", #processes, "next cursor:", next_process_cursor)

   for _, process in ipairs(processes) do
-    local workers_key = process .. ":workers"
+    local workers_key = process .. ":work"
     log_debug("searching in process set:", process,
               "for digest:", digest,
               "cursor:", process_cursor)
@@ -85,6 +85,11 @@ if locked_count and locked_count < 1 then
   redis.call(del_cmd, locked)
 end

+if redis.call("LLEN", primed) == 0 then
+  log_debug(del_cmd, primed)
+  redis.call(del_cmd, primed)
+end
+
 if limit and limit <= 1 and locked_count and locked_count <= 1 then
   log_debug("ZREM", digests, digest)
   redis.call("ZREM", digests, digest)
@@ -6,6 +6,8 @@ module SidekiqUniqueJobs
     #
     # @author Mikael Henriksson <mikael@mhenrixon.com>
     class Client
+      include Sidekiq::ClientMiddleware if defined?(Sidekiq::ClientMiddleware)
+
       # prepend "SidekiqUniqueJobs::Middleware"
       # @!parse prepends SidekiqUniqueJobs::Middleware
       prepend SidekiqUniqueJobs::Middleware
@@ -6,6 +6,8 @@ module SidekiqUniqueJobs
     #
     # @author Mikael Henriksson <mikael@mhenrixon.com>
     class Server
+      include Sidekiq::ServerMiddleware if defined?(Sidekiq::ServerMiddleware)
+
       # prepend "SidekiqUniqueJobs::Middleware"
       # @!parse prepends SidekiqUniqueJobs::Middleware
       prepend SidekiqUniqueJobs::Middleware
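The conditional `include` picks up the `Sidekiq::ClientMiddleware` / `Sidekiq::ServerMiddleware` marker modules on newer Sidekiq versions while keeping the classes loadable on older ones; registration itself is unchanged. A typical (hedged) initializer:

```ruby
Sidekiq.configure_server do |config|
  config.client_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Client
  end

  config.server_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Server
  end
end

Sidekiq.configure_client do |config|
  config.client_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Client
  end
end
```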
@@ -29,10 +29,10 @@ module SidekiqUniqueJobs
     # @yieldparam [<type>] if <description>
     # @yieldreturn [<type>] <describe what yield should return>
     def call(worker_class, item, queue, redis_pool = nil)
-      @worker_class = worker_class
-      @item         = item
-      @queue        = queue
-      @redis_pool   = redis_pool
+      @item       = item
+      @queue      = queue
+      @redis_pool = redis_pool
+      self.job_class = worker_class
       return yield if unique_disabled?

       SidekiqUniqueJobs::Job.prepare(item) unless item[LOCK_DIGEST]
@@ -14,14 +14,14 @@ module SidekiqUniqueJobs
       # @param [Hash] item sidekiq job hash
       def initialize(item, redis_pool = nil)
         super(item, redis_pool)
-        @worker_class = item[CLASS]
+        self.job_class = item[CLASS]
       end

       # Create a new job from the current one.
       # This will mess up sidekiq stats because a new job is created
       def call
-        if sidekiq_worker_class?
-          if worker_class.set(queue: item["queue"].to_sym).perform_in(5, *item[ARGS])
+        if sidekiq_job_class?
+          if job_class.set(queue: item["queue"].to_sym).perform_in(5, *item[ARGS])
             reflect(:rescheduled, item)
           else
             reflect(:reschedule_failed, item)
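The `reschedule` strategy is chosen through the worker's `on_conflict` option; when a duplicate is rejected it re-enqueues a copy five seconds later on the original queue (at the cost of skewing Sidekiq's stats, as the comment warns). A hedged example configuration:

```ruby
class SyncAccountJob
  include Sidekiq::Worker

  # When another job already holds the lock, push a copy back onto the
  # queue 5 seconds later instead of dropping the work.
  sidekiq_options lock: :until_executed, on_conflict: :reschedule
end
```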
@@ -6,7 +6,7 @@ module SidekiqUniqueJobs
   # Requires the following methods to be defined in the including class
   #   1. item (required)
   #   2. options (can be nil)
-  #   3. worker_class (required, can be anything)
+  #   3. job_class (required, can be anything)
   # @author Mikael Henriksson <mikael@mhenrixon.com>
   module OptionsWithFallback
     def self.included(base)
@@ -55,7 +55,7 @@ module SidekiqUniqueJobs
     # The type of lock for this worker
     #
     #
-    # @return [Symbol]
+    # @return [Symbol, NilClass]
     #
     def lock_type
       @lock_type ||= options[LOCK] || item[LOCK]
@@ -69,8 +69,8 @@ module SidekiqUniqueJobs
     #
     def options
       @options ||= begin
-        opts = default_worker_options.dup
-        opts.merge!(worker_options) if sidekiq_worker_class?
+        opts = default_job_options.dup
+        opts.merge!(job_options) if sidekiq_job_class?
         (opts || {}).stringify_keys
       end
     end
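The merge order means the worker's own `sidekiq_options` override the gem-wide defaults, and the result is string-keyed. A small sketch of the effective hash (keys and values are illustrative):

```ruby
default_job_options = { "lock_timeout" => 0, "lock_ttl" => nil }           # illustrative defaults
job_options         = { "lock" => :while_executing, "lock_timeout" => 10 } # from sidekiq_options

options = default_job_options.dup
options.merge!(job_options)
options # => { "lock_timeout" => 10, "lock_ttl" => nil, "lock" => :while_executing }
```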
@@ -20,7 +20,7 @@ module SidekiqUniqueJobs
         call_script(
           :reap_orphans,
           conn,
-          keys: [DIGESTS, SCHEDULE, RETRY, PROCESSES],
+          keys: [DIGESTS, EXPIRING_DIGESTS, SCHEDULE, RETRY, PROCESSES],
           argv: [reaper_count, (Time.now - reaper_timeout).to_f],
         )
       end
@@ -72,7 +72,7 @@ module SidekiqUniqueJobs
       # @return [<type>] <description>
       #
       def task
-        @task ||= default_task
+        @task ||= default_task # rubocop:disable ThreadSafety/InstanceVariableInClassMethod
       end

       #
@@ -100,7 +100,7 @@ module SidekiqUniqueJobs
       # @return [void]
       #
       def task=(task)
-        @task = task
+        @task = task # rubocop:disable ThreadSafety/InstanceVariableInClassMethod
       end

       #
@@ -34,9 +34,15 @@ module SidekiqUniqueJobs

       #
       # @!attribute [r] start_time
-      #   @return [Integer] The timestamp this execution started represented as integer
+      #   @return [Integer] The timestamp this execution started represented as Time (used for locks)
       attr_reader :start_time

+      #
+      # @!attribute [r] start_time
+      #   @return [Integer] The clock stamp this execution started represented as integer
+      #     (used for redis compatibility as it is more accurate than time)
+      attr_reader :start_source
+
       #
       # @!attribute [r] timeout_ms
       #   @return [Integer] The allowed ms before timeout
@@ -49,11 +55,12 @@ module SidekiqUniqueJobs
       #
       def initialize(conn)
         super(conn)
-        @digests    = SidekiqUniqueJobs::Digests.new
-        @scheduled  = Redis::SortedSet.new(SCHEDULE)
-        @retried    = Redis::SortedSet.new(RETRY)
-        @start_time = time_source.call
-        @timeout_ms = SidekiqUniqueJobs.config.reaper_timeout * 1000
+        @digests      = SidekiqUniqueJobs::Digests.new
+        @scheduled    = Redis::SortedSet.new(SCHEDULE)
+        @retried      = Redis::SortedSet.new(RETRY)
+        @start_time   = Time.now
+        @start_source = time_source.call
+        @timeout_ms   = SidekiqUniqueJobs.config.reaper_timeout * 1000
       end

       #
@@ -65,9 +72,20 @@ module SidekiqUniqueJobs
       def call
         return if queues_very_full?

+        BatchDelete.call(expired_digests, conn)
         BatchDelete.call(orphans, conn)
       end

+      def expired_digests
+        max_score = (start_time - reaper_timeout).to_f
+
+        if VersionCheck.satisfied?(redis_version, ">= 6.2.0") && VersionCheck.satisfied?(::Redis::VERSION, ">= 4.6.0")
+          conn.zrange(EXPIRING_DIGESTS, 0, max_score, byscore: true)
+        else
+          conn.zrangebyscore(EXPIRING_DIGESTS, 0, max_score)
+        end
+      end
+
       #
       # Find orphaned digests
       #
@@ -104,7 +122,7 @@ module SidekiqUniqueJobs
       end

       def elapsed_ms
-        time_source.call - start_time
+        time_source.call - start_source
       end

       #
@@ -177,7 +195,7 @@ module SidekiqUniqueJobs
             else
               pipeline.exists(key)
             end
-            pipeline.hgetall("#{key}:workers")
+            pipeline.hgetall("#{key}:work")
           end

           next unless valid
@@ -229,6 +247,7 @@ module SidekiqUniqueJobs

         loop do
           range_start = (page * page_size) - deleted_size
+
           range_end   = range_start + page_size - 1
           entries     = conn.lrange(queue_key, range_start, range_end)
           page += 1
@@ -238,6 +257,9 @@ module SidekiqUniqueJobs
           entries.each(&block)

           deleted_size = initial_size - conn.llen(queue_key)
+
+          # The queue is growing, not shrinking, just keep looping
+          deleted_size = 0 if deleted_size.negative?
         end
       end

@@ -54,13 +54,13 @@ module SidekiqUniqueJobs
       # Only used to reduce a little bit of duplication
       # @see call_script
       def do_call(file_name, conn, keys, argv)
-        argv = argv.dup.concat([
-          now_f,
-          debug_lua,
-          max_history,
-          file_name,
-          redis_version,
-        ])
+        argv = argv.dup.push(
+          now_f,
+          debug_lua,
+          max_history,
+          file_name,
+          redis_version,
+        )
         Script.execute(file_name, conn, keys: keys, argv: argv)
       end