sidekiq-unique-jobs 7.1.6 → 7.1.30

Potentially problematic release: this version of sidekiq-unique-jobs might be problematic.

Files changed (66)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +312 -3
  3. data/README.md +19 -15
  4. data/lib/sidekiq_unique_jobs/batch_delete.rb +8 -7
  5. data/lib/sidekiq_unique_jobs/changelog.rb +3 -3
  6. data/lib/sidekiq_unique_jobs/cli.rb +33 -8
  7. data/lib/sidekiq_unique_jobs/config.rb +48 -4
  8. data/lib/sidekiq_unique_jobs/constants.rb +45 -44
  9. data/lib/sidekiq_unique_jobs/core_ext.rb +1 -1
  10. data/lib/sidekiq_unique_jobs/deprecation.rb +30 -0
  11. data/lib/sidekiq_unique_jobs/digests.rb +8 -11
  12. data/lib/sidekiq_unique_jobs/exceptions.rb +4 -3
  13. data/lib/sidekiq_unique_jobs/expiring_digests.rb +14 -0
  14. data/lib/sidekiq_unique_jobs/job.rb +5 -0
  15. data/lib/sidekiq_unique_jobs/json.rb +7 -0
  16. data/lib/sidekiq_unique_jobs/key.rb +13 -8
  17. data/lib/sidekiq_unique_jobs/lock/base_lock.rb +30 -23
  18. data/lib/sidekiq_unique_jobs/lock/until_and_while_executing.rb +9 -3
  19. data/lib/sidekiq_unique_jobs/lock/until_executed.rb +15 -4
  20. data/lib/sidekiq_unique_jobs/lock/until_executing.rb +14 -4
  21. data/lib/sidekiq_unique_jobs/lock/until_expired.rb +13 -5
  22. data/lib/sidekiq_unique_jobs/lock/while_executing.rb +10 -6
  23. data/lib/sidekiq_unique_jobs/lock.rb +27 -10
  24. data/lib/sidekiq_unique_jobs/lock_args.rb +19 -15
  25. data/lib/sidekiq_unique_jobs/lock_config.rb +6 -6
  26. data/lib/sidekiq_unique_jobs/lock_digest.rb +7 -7
  27. data/lib/sidekiq_unique_jobs/lock_info.rb +2 -2
  28. data/lib/sidekiq_unique_jobs/lock_timeout.rb +4 -4
  29. data/lib/sidekiq_unique_jobs/lock_ttl.rb +5 -5
  30. data/lib/sidekiq_unique_jobs/lock_type.rb +37 -0
  31. data/lib/sidekiq_unique_jobs/locksmith.rb +41 -10
  32. data/lib/sidekiq_unique_jobs/logging.rb +23 -0
  33. data/lib/sidekiq_unique_jobs/lua/lock.lua +18 -12
  34. data/lib/sidekiq_unique_jobs/lua/lock_until_expired.lua +92 -0
  35. data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +31 -3
  36. data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +1 -1
  37. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_process_set.lua +1 -1
  38. data/lib/sidekiq_unique_jobs/lua/unlock.lua +17 -5
  39. data/lib/sidekiq_unique_jobs/middleware/client.rb +3 -1
  40. data/lib/sidekiq_unique_jobs/middleware/server.rb +2 -0
  41. data/lib/sidekiq_unique_jobs/middleware.rb +4 -4
  42. data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +4 -4
  43. data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +3 -3
  44. data/lib/sidekiq_unique_jobs/options_with_fallback.rb +6 -8
  45. data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +1 -1
  46. data/lib/sidekiq_unique_jobs/orphans/manager.rb +40 -12
  47. data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +99 -14
  48. data/lib/sidekiq_unique_jobs/redis/string.rb +3 -1
  49. data/lib/sidekiq_unique_jobs/reflectable.rb +11 -2
  50. data/lib/sidekiq_unique_jobs/reflections.rb +12 -1
  51. data/lib/sidekiq_unique_jobs/script/caller.rb +7 -7
  52. data/lib/sidekiq_unique_jobs/server.rb +13 -1
  53. data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +1 -1
  54. data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +13 -3
  55. data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +40 -21
  56. data/lib/sidekiq_unique_jobs/testing.rb +53 -21
  57. data/lib/sidekiq_unique_jobs/timer_task.rb +266 -45
  58. data/lib/sidekiq_unique_jobs/timing.rb +1 -1
  59. data/lib/sidekiq_unique_jobs/upgrade_locks.rb +6 -6
  60. data/lib/sidekiq_unique_jobs/version.rb +1 -1
  61. data/lib/sidekiq_unique_jobs/web/helpers.rb +11 -1
  62. data/lib/sidekiq_unique_jobs/web/views/lock.erb +5 -3
  63. data/lib/sidekiq_unique_jobs/web.rb +22 -3
  64. data/lib/sidekiq_unique_jobs.rb +2 -0
  65. data/lib/tasks/changelog.rake +16 -16
  66. metadata +26 -13
@@ -62,13 +62,13 @@ module SidekiqUniqueJobs
     #
     def lock(job_id, lock_info = {})
       redis do |conn|
-        conn.multi do
-          conn.set(key.digest, job_id)
-          conn.hset(key.locked, job_id, now_f)
-          info.set(lock_info)
-          conn.zadd(key.digests, now_f, key.digest)
-          conn.zadd(key.changelog, now_f, changelog_json(job_id, "queue.lua", "Queued"))
-          conn.zadd(key.changelog, now_f, changelog_json(job_id, "lock.lua", "Locked"))
+        conn.multi do |pipeline|
+          pipeline.set(key.digest, job_id)
+          pipeline.hset(key.locked, job_id, now_f)
+          info.set(lock_info, pipeline)
+          add_digest_to_set(pipeline, lock_info)
+          pipeline.zadd(key.changelog, now_f, changelog_json(job_id, "queue.lua", "Queued"))
+          pipeline.zadd(key.changelog, now_f, changelog_json(job_id, "lock.lua", "Locked"))
         end
       end
     end
@@ -123,9 +123,9 @@ module SidekiqUniqueJobs
     #
     def del
       redis do |conn|
-        conn.multi do
-          conn.zrem(DIGESTS, key.digest)
-          conn.del(key.digest, key.queued, key.primed, key.locked, key.info)
+        conn.multi do |pipeline|
+          pipeline.zrem(DIGESTS, key.digest)
+          pipeline.del(key.digest, key.queued, key.primed, key.locked, key.info)
         end
       end
     end
@@ -321,5 +321,22 @@ module SidekiqUniqueJobs
         time: now_f,
       )
     end
+
+    #
+    # Add the digest to the correct sorted set
+    #
+    # @param [Object] pipeline a redis pipeline object for issue commands
+    # @param [Hash] lock_info the lock info relevant to the digest
+    #
+    # @return [nil]
+    #
+    def add_digest_to_set(pipeline, lock_info)
+      digest_string = key.digest
+      if lock_info["lock"] == :until_expired
+        pipeline.zadd(key.expiring_digests, now_f + lock_info["ttl"], digest_string)
+      else
+        pipeline.zadd(key.digests, now_f, digest_string)
+      end
+    end
   end
 end
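Note on the `conn.multi do |pipeline|` change above: redis-rb 4.6+ deprecates issuing commands on the outer connection from inside `multi`/`pipelined` blocks, so the commands now go through the block argument. A minimal standalone sketch of the pattern (keys are illustrative, not the gem's exact key names):

```ruby
require "redis"

redis = Redis.new

# Commands are queued on the pipeline object and sent atomically on EXEC.
redis.multi do |pipeline|
  pipeline.set("uniquejobs:example:digest", "jid-123")
  pipeline.zadd("uniquejobs:digests", Time.now.to_f, "uniquejobs:example:digest")
end
```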
@@ -26,15 +26,15 @@ module SidekiqUniqueJobs
 
     # @param [Hash] item a Sidekiq job hash
     def initialize(item)
-      @item = item
-      @worker_class = item[CLASS]
-      @args = item[ARGS]
+      @item = item
+      @args = item[ARGS]
+      self.job_class = item[CLASS]
     end
 
     # The unique arguments to use for creating a lock
     # @return [Array] the arguments filters by the {#filtered_args} method if {#lock_args_enabled?}
     def lock_args
-      @lock_args ||= filtered_args
+      @lock_args ||= filtered_args || []
     end
 
     # Checks if the worker class has enabled lock_args
@@ -83,31 +83,31 @@ module SidekiqUniqueJobs
 
     # Filters unique arguments by method configured in the sidekiq worker
     # @param [Array] args the arguments passed to the sidekiq worker
-    # @return [Array] unfiltered unless {#worker_method_defined?}
+    # @return [Array] unfiltered unless {#job_method_defined?}
     # @return [Array] with the filtered arguments
     def filter_by_symbol(args)
-      return args unless worker_method_defined?(lock_args_method)
+      return args unless job_method_defined?(lock_args_method)
 
-      worker_class.send(lock_args_method, args)
+      job_class.send(lock_args_method, args)
     rescue ArgumentError
       raise SidekiqUniqueJobs::InvalidUniqueArguments,
             given: args,
-            worker_class: worker_class,
+            job_class: job_class,
             lock_args_method: lock_args_method
     end
 
     # The method to use for filtering unique arguments
     def lock_args_method
-      @lock_args_method ||= worker_options.slice(LOCK_ARGS_METHOD, UNIQUE_ARGS_METHOD).values.first
-      @lock_args_method ||= :lock_args if worker_method_defined?(:lock_args)
-      @lock_args_method ||= :unique_args if worker_method_defined?(:unique_args)
+      @lock_args_method ||= job_options.slice(LOCK_ARGS_METHOD, UNIQUE_ARGS_METHOD).values.first
+      @lock_args_method ||= :lock_args if job_method_defined?(:lock_args)
+      @lock_args_method ||= :unique_args if job_method_defined?(:unique_args)
       @lock_args_method ||= default_lock_args_method
     end
 
     # The global worker options defined in Sidekiq directly
     def default_lock_args_method
-      default_worker_options[LOCK_ARGS_METHOD] ||
-        default_worker_options[UNIQUE_ARGS_METHOD]
+      default_job_options[LOCK_ARGS_METHOD] ||
+        default_job_options[UNIQUE_ARGS_METHOD]
     end
 
     #
@@ -116,8 +116,12 @@ module SidekiqUniqueJobs
     #
     # @return [Hash<String, Object>]
     #
-    def default_worker_options
-      @default_worker_options ||= Sidekiq.default_worker_options.stringify_keys
+    def default_job_options
+      @default_job_options ||= if Sidekiq.respond_to?(:default_job_options)
+                                 Sidekiq.default_job_options.stringify_keys
+                               else
+                                 Sidekiq.default_worker_options.stringify_keys
+                               end
     end
   end
 end
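The renames above (worker_* to job_*, Sidekiq.default_worker_options to Sidekiq.default_job_options) track Sidekiq's own worker-to-job terminology change while keeping a fallback for older Sidekiq versions. For reference, a sketch of how a worker opts into filtered lock arguments via `lock_args_method` (class and argument shapes are made up for illustration):

```ruby
class UniqueJobWithFilterMethod
  include Sidekiq::Worker
  sidekiq_options lock: :until_executed, lock_args_method: :lock_args

  # Only the first positional argument participates in the lock digest, so jobs
  # that differ only in the remaining arguments still count as duplicates.
  def self.lock_args(args)
    [args[0]]
  end

  def perform(user_id, force = false); end
end
```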
@@ -13,9 +13,9 @@ module SidekiqUniqueJobs
     #   @return [Symbol] the type of lock
     attr_reader :type
     #
-    # @!attribute [r] worker
-    #   @return [Symbol] the worker class
-    attr_reader :worker
+    # @!attribute [r] job
+    #   @return [Symbol] the job class
+    attr_reader :job
     #
     # @!attribute [r] limit
     #   @return [Integer] the number of simultaneous locks
@@ -58,7 +58,7 @@ module SidekiqUniqueJobs
 
     def initialize(job_hash = {})
       @type = job_hash[LOCK]&.to_sym
-      @worker = SidekiqUniqueJobs.safe_constantize(job_hash[CLASS])
+      @job = SidekiqUniqueJobs.safe_constantize(job_hash[CLASS])
       @limit = job_hash.fetch(LOCK_LIMIT, 1)&.to_i
       @timeout = job_hash.fetch(LOCK_TIMEOUT, 0)&.to_i
       @ttl = job_hash.fetch(LOCK_TTL) { job_hash.fetch(LOCK_EXPIRATION, nil) }.to_i
@@ -113,13 +113,13 @@ module SidekiqUniqueJobs
 
     # the strategy to use as conflict resolution from sidekiq client
     def on_client_conflict
-      @on_client_conflict ||= on_conflict["client"] if on_conflict.is_a?(Hash)
+      @on_client_conflict ||= on_conflict["client"] || on_conflict[:client] if on_conflict.is_a?(Hash)
       @on_client_conflict ||= on_conflict
     end
 
     # the strategy to use as conflict resolution from sidekiq server
     def on_server_conflict
-      @on_server_conflict ||= on_conflict["server"] if on_conflict.is_a?(Hash)
+      @on_server_conflict ||= on_conflict["server"] || on_conflict[:server] if on_conflict.is_a?(Hash)
       @on_server_conflict ||= on_conflict
     end
   end
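With the change above, `on_conflict` hashes may use symbol keys as well as string keys. A hedged example of per-side conflict strategies (the worker is hypothetical; `:log` and `:reject` are built-in strategies):

```ruby
class SyncJob
  include Sidekiq::Worker
  # Client-side conflicts are only logged; server-side conflicts are rejected
  # (pushed to the dead set) by the reject strategy.
  sidekiq_options lock: :until_and_while_executing,
                  on_conflict: { client: :log, server: :reject }

  def perform(resource_id); end
end
```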
@@ -36,10 +36,10 @@ module SidekiqUniqueJobs
 
     # @param [Hash] item a Sidekiq job hash
     def initialize(item)
-      @item = item
-      @worker_class = item[CLASS]
-      @lock_args = item.slice(LOCK_ARGS, UNIQUE_ARGS).values.first # TODO: Deprecate UNIQUE_ARGS
-      @lock_prefix = item.slice(LOCK_PREFIX, UNIQUE_PREFIX).values.first # TODO: Deprecate UNIQUE_PREFIX
+      @item = item
+      @lock_args = item[LOCK_ARGS] || item[UNIQUE_ARGS] # TODO: Deprecate UNIQUE_ARGS
+      @lock_prefix = item[LOCK_PREFIX] || item[UNIQUE_PREFIX] # TODO: Deprecate UNIQUE_PREFIX
+      self.job_class = item[CLASS]
     end
 
     # Memoized lock_digest
@@ -51,7 +51,7 @@ module SidekiqUniqueJobs
     # Creates a namespaced unique digest based on the {#digestable_hash} and the {#lock_prefix}
     # @return [String] a unique digest
     def create_digest
-      digest = OpenSSL::Digest::MD5.hexdigest(dump_json(digestable_hash))
+      digest = OpenSSL::Digest::MD5.hexdigest(dump_json(digestable_hash.sort))
       "#{lock_prefix}:#{digest}"
     end
 
@@ -67,13 +67,13 @@ module SidekiqUniqueJobs
     # Checks if we should disregard the queue when creating the unique digest
     # @return [true, false]
     def unique_across_queues?
-      item[UNIQUE_ACROSS_QUEUES] || worker_options[UNIQUE_ACROSS_QUEUES]
+      item[UNIQUE_ACROSS_QUEUES] || job_options[UNIQUE_ACROSS_QUEUES]
    end
 
     # Checks if we should disregard the worker when creating the unique digest
     # @return [true, false]
     def unique_across_workers?
-      item[UNIQUE_ACROSS_WORKERS] || worker_options[UNIQUE_ACROSS_WORKERS]
+      item[UNIQUE_ACROSS_WORKERS] || job_options[UNIQUE_ACROSS_WORKERS]
     end
   end
 end
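The `digestable_hash.sort` change makes the digest independent of hash key order. A small illustration of the idea (using plain `JSON.dump` rather than the gem's `dump_json`, with made-up job attributes):

```ruby
require "json"
require "openssl"

a = { "class" => "MyJob", "queue" => "default", "lock_args" => [1] }
b = { "queue" => "default", "lock_args" => [1], "class" => "MyJob" }

# Hash#sort orders the key/value pairs, so the serialized form (and therefore
# the MD5 hexdigest) no longer depends on insertion order.
OpenSSL::Digest::MD5.hexdigest(JSON.dump(a.sort)) ==
  OpenSSL::Digest::MD5.hexdigest(JSON.dump(b.sort)) # => true
```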
@@ -55,13 +55,13 @@ module SidekiqUniqueJobs
     #
     # @return [Hash]
     #
-    def set(obj)
+    def set(obj, pipeline = nil)
       return unless SidekiqUniqueJobs.config.lock_info
       raise InvalidArgument, "argument `obj` (#{obj}) needs to be a hash" unless obj.is_a?(Hash)
 
       json = dump_json(obj)
       @value = load_json(json)
-      super(json)
+      super(json, pipeline)
       value
     end
   end
@@ -30,8 +30,8 @@ module SidekiqUniqueJobs
     # @option item [String] :class the class of the sidekiq worker
     # @option item [Float] :at the unix time the job is scheduled at
     def initialize(item)
-      @item = item
-      @worker_class = item[CLASS]
+      @item = item
+      self.job_class = item[CLASS]
     end
 
     #
@@ -42,9 +42,9 @@ module SidekiqUniqueJobs
     # @return [Integer, nil]
     #
     def calculate
-      timeout = default_worker_options[LOCK_TIMEOUT]
+      timeout = default_job_options[LOCK_TIMEOUT]
       timeout = default_lock_timeout if default_lock_timeout
-      timeout = worker_options[LOCK_TIMEOUT] if worker_options.key?(LOCK_TIMEOUT)
+      timeout = job_options[LOCK_TIMEOUT] if job_options.key?(LOCK_TIMEOUT)
       timeout
     end
 
@@ -33,8 +33,8 @@ module SidekiqUniqueJobs
     # @option item [String] :class the class of the sidekiq worker
     # @option item [Float] :at the unix time the job is scheduled at
     def initialize(item)
-      @item = item
-      @worker_class = item[CLASS]
+      @item = item
+      self.job_class = item[CLASS]
     end
 
     #
@@ -67,11 +67,11 @@ module SidekiqUniqueJobs
     #
     def calculate
       ttl = item[LOCK_TTL]
-      ttl ||= worker_options[LOCK_TTL]
+      ttl ||= job_options[LOCK_TTL]
       ttl ||= item[LOCK_EXPIRATION] # TODO: Deprecate at some point
-      ttl ||= worker_options[LOCK_EXPIRATION] # TODO: Deprecate at some point
+      ttl ||= job_options[LOCK_EXPIRATION] # TODO: Deprecate at some point
       ttl ||= SidekiqUniqueJobs.config.lock_ttl
-      ttl && ttl.to_i + time_until_scheduled
+      ttl && (ttl.to_i + time_until_scheduled)
     end
   end
 end
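The TTL resolution above prefers a `lock_ttl` on the job item, then the worker's `sidekiq_options`, then the deprecated `lock_expiration` keys, then the global config, and finally adds the time until a scheduled job runs. A minimal sketch of the per-worker and global settings (values are arbitrary):

```ruby
class ReportJob
  include Sidekiq::Worker
  # Used unless the individual job hash carries its own lock_ttl.
  sidekiq_options lock: :until_executed, lock_ttl: 600

  def perform(report_id); end
end

SidekiqUniqueJobs.configure do |config|
  config.lock_ttl = nil # global fallback
end
```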
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module SidekiqUniqueJobs
+  # Calculates the lock type
+  #
+  class LockType
+    # includes "SidekiqUniqueJobs::SidekiqWorkerMethods"
+    # @!parse include SidekiqUniqueJobs::SidekiqWorkerMethods
+    include SidekiqUniqueJobs::SidekiqWorkerMethods
+
+    #
+    # Computes lock type from job arguments, sidekiq_options.
+    #
+    # @return [Symbol] the lock type
+    # @return [NilClass] if no lock type is found.
+    #
+    def self.call(item)
+      new(item).call
+    end
+
+    # @!attribute [r] item
+    #   @return [Hash] the Sidekiq job hash
+    attr_reader :item
+
+    # @param [Hash] item the Sidekiq job hash
+    # @option item [Symbol, nil] :lock the type of lock to use.
+    # @option item [String] :class the class of the sidekiq worker
+    def initialize(item)
+      @item = item
+      self.job_class = item[CLASS]
+    end
+
+    def call
+      item[LOCK] || job_options[LOCK] || default_job_options[LOCK]
+    end
+  end
+end
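The new `LockType` helper resolves the lock type from the job hash, falling back to the worker's `sidekiq_options` and then Sidekiq's default options. Assuming the `CLASS` and `LOCK` constants map to the "class" and "lock" keys, usage looks roughly like:

```ruby
# Illustrative job hash; ReportJob is a hypothetical worker class.
item = { "class" => "ReportJob", "lock" => "until_executed" }

SidekiqUniqueJobs::LockType.call(item) # => "until_executed"
```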
@@ -32,6 +32,7 @@ module SidekiqUniqueJobs
     #
     # @return [Float] used to take into consideration the inaccuracy of redis timestamps
     CLOCK_DRIFT_FACTOR = 0.01
+    NETWORK_FACTOR = 0.04
 
     #
     # @!attribute [r] key
@@ -126,7 +127,10 @@
     #
     def unlock!(conn = nil)
       call_script(:unlock, key.to_a, argv, conn) do |unlocked_jid|
-        reflect(:debug, :unlocked, item, unlocked_jid) if unlocked_jid == job_id
+        if unlocked_jid == job_id
+          reflect(:debug, :unlocked, item, unlocked_jid)
+          reflect(:unlocked, item)
+        end
 
         unlocked_jid
       end
@@ -184,22 +188,23 @@
     #
     # @param [Sidekiq::RedisConnection, ConnectionPool] conn the redis connection
     # @param [Method] primed_method reference to the method to use for getting a primed token
+    # @param [nil, Integer, Float] time to wait before timeout
     #
     # @yieldparam [string] job_id the sidekiq JID
     # @yieldreturn [void] whatever the calling block returns
     def lock!(conn, primed_method, wait = nil)
-      return yield job_id if locked?(conn)
+      return yield if locked?(conn)
 
       enqueue(conn) do |queued_jid|
-        reflect(:debug, item, queued_jid)
+        reflect(:debug, :queued, item, queued_jid)
 
         primed_method.call(conn, wait) do |primed_jid|
           reflect(:debug, :primed, item, primed_jid)
-
           locked_jid = call_script(:lock, key.to_a, argv, conn)
+
           if locked_jid
             reflect(:debug, :locked, item, locked_jid)
-            return yield job_id
+            return yield
           end
         end
       end
@@ -239,9 +244,22 @@
     # @return [Object] whatever the block returns when lock was acquired
     #
     def primed_async(conn, wait = nil, &block)
+      timeout = (wait || config.timeout).to_i
+      timeout = 1 if timeout.zero?
+
+      brpoplpush_timeout = timeout
+      concurrent_timeout = add_drift(timeout)
+
+      reflect(:debug, :timeouts, item,
+              timeouts: {
+                brpoplpush_timeout: brpoplpush_timeout,
+                concurrent_timeout: concurrent_timeout,
+              })
+
+      # NOTE: When debugging, change .value to .value!
       primed_jid = Concurrent::Promises
-        .future(conn) { |red_con| pop_queued(red_con, wait) }
-        .value(add_drift(wait || config.timeout))
+        .future(conn) { |red_con| pop_queued(red_con, timeout) }
+        .value
 
       handle_primed(primed_jid, &block)
     end
@@ -273,7 +291,7 @@
     #
     # @return [String] a previously enqueued token (now taken off the queue)
     #
-    def pop_queued(conn, wait = nil)
+    def pop_queued(conn, wait = 1)
       wait ||= config.timeout if config.wait_for_lock?
 
       if wait.nil?
@@ -287,10 +305,18 @@
     # @api private
     #
     def brpoplpush(conn, wait)
+      # passing timeout 0 to brpoplpush causes it to block indefinitely
       raise InvalidArgument, "wait must be an integer" unless wait.is_a?(Integer)
 
-      # passing timeout 0 to brpoplpush causes it to block indefinitely
-      conn.brpoplpush(key.queued, key.primed, timeout: wait)
+      if defined?(::Redis::Namespace) && conn.instance_of?(::Redis::Namespace)
+        return conn.brpoplpush(key.queued, key.primed, wait)
+      end
+
+      if VersionCheck.satisfied?(redis_version, ">= 6.2.0") && conn.respond_to?(:blmove)
+        conn.blmove(key.queued, key.primed, "RIGHT", "LEFT", timeout: wait)
+      else
+        conn.brpoplpush(key.queued, key.primed, timeout: wait)
+      end
     end
 
     #
@@ -328,6 +354,7 @@
     end
 
     def add_drift(val)
+      val = val.to_f
       val + drift(val)
     end
 
@@ -358,5 +385,9 @@
         TIME => now_f,
       )
     end
+
+    def redis_version
+      @redis_version ||= SidekiqUniqueJobs.config.redis_version
+    end
   end
 end
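The `brpoplpush` change above reflects Redis deprecating BRPOPLPUSH in favour of BLMOVE as of Redis 6.2, while keeping the old call for Redis::Namespace users and older servers. The two redis-rb calls are equivalent when the server supports BLMOVE (keys below are illustrative):

```ruby
require "redis"

redis = Redis.new

# Both block until an element can be moved from the tail of "queued"
# to the head of "primed", waiting at most 5 seconds.
redis.brpoplpush("queued", "primed", timeout: 5)
redis.blmove("queued", "primed", "RIGHT", "LEFT", timeout: 5)
```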
@@ -30,6 +30,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_debug(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.debug(message, &block)
       nil
@@ -45,6 +47,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_info(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.info(message, &block)
       nil
@@ -60,6 +64,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_warn(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.warn(message, &block)
       nil
@@ -75,6 +81,8 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_error(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.error(message, &block)
       nil
@@ -90,11 +98,22 @@ module SidekiqUniqueJobs
     # @yield [String, Exception] the message or exception to use for log message
     #
     def log_fatal(message_or_exception = nil, item = nil, &block)
+      return unless logging?
+
       message = build_message(message_or_exception, item)
       logger.fatal(message, &block)
+
       nil
     end
 
+    #
+    # Build a log message
+    #
+    # @param [String, Exception] message_or_exception an entry to log
+    # @param [Hash] item the sidekiq job hash
+    #
+    # @return [String] a complete log entry
+    #
     def build_message(message_or_exception, item = nil)
       return nil if message_or_exception.nil?
       return message_or_exception if item.nil?
@@ -209,5 +228,9 @@ module SidekiqUniqueJobs
 
       yield
     end
+
+    def logging?
+      SidekiqUniqueJobs.logging?
+    end
   end
 end
@@ -1,11 +1,12 @@
 -------- BEGIN keys ---------
-local digest    = KEYS[1]
-local queued    = KEYS[2]
-local primed    = KEYS[3]
-local locked    = KEYS[4]
-local info      = KEYS[5]
-local changelog = KEYS[6]
-local digests   = KEYS[7]
+local digest           = KEYS[1]
+local queued           = KEYS[2]
+local primed           = KEYS[3]
+local locked           = KEYS[4]
+local info             = KEYS[5]
+local changelog        = KEYS[6]
+local digests          = KEYS[7]
+local expiring_digests = KEYS[8]
 -------- END keys ---------
 
 
@@ -57,8 +58,13 @@ if limit_exceeded then
   return nil
 end
 
-log_debug("ZADD", digests, current_time, digest)
-redis.call("ZADD", digests, current_time, digest)
+if lock_type == "until_expired" and pttl and pttl > 0 then
+  log_debug("ZADD", expiring_digests, current_time + pttl, digest)
+  redis.call("ZADD", expiring_digests, current_time + pttl, digest)
+else
+  log_debug("ZADD", digests, current_time, digest)
+  redis.call("ZADD", digests, current_time, digest)
+end
 
 log_debug("HSET", locked, job_id, current_time)
 redis.call("HSET", locked, job_id, current_time)
@@ -76,6 +82,9 @@ if pttl and pttl > 0 then
 
   log_debug("PEXPIRE", locked, pttl)
   redis.call("PEXPIRE", locked, pttl)
+
+  log_debug("PEXPIRE", info, pttl)
+  redis.call("PEXPIRE", info, pttl)
 end
 
 log_debug("PEXPIRE", queued, 1000)
@@ -84,9 +93,6 @@ redis.call("PEXPIRE", queued, 1000)
 log_debug("PEXPIRE", primed, 1000)
 redis.call("PEXPIRE", primed, 1000)
 
-log_debug("PEXPIRE", info, 1000)
-redis.call("PEXPIRE", info, 1000)
-
 log("Locked")
 log_debug("END lock digest:", digest, "job_id:", job_id)
 return job_id
@@ -0,0 +1,92 @@
+-------- BEGIN keys ---------
+local digest           = KEYS[1]
+local queued           = KEYS[2]
+local primed           = KEYS[3]
+local locked           = KEYS[4]
+local info             = KEYS[5]
+local changelog        = KEYS[6]
+local digests          = KEYS[7]
+local expiring_digests = KEYS[8]
+-------- END keys ---------
+
+
+-------- BEGIN lock arguments ---------
+local job_id    = ARGV[1]
+local pttl      = tonumber(ARGV[2])
+local lock_type = ARGV[3]
+local limit     = tonumber(ARGV[4])
+-------- END lock arguments -----------
+
+
+--------  BEGIN injected arguments --------
+local current_time = tonumber(ARGV[5])
+local debug_lua    = ARGV[6] == "true"
+local max_history  = tonumber(ARGV[7])
+local script_name  = tostring(ARGV[8]) .. ".lua"
+local redisversion = ARGV[9]
+---------  END injected arguments ---------
+
+
+--------  BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+----------  END local functions ----------
+
+
+---------  BEGIN lock.lua ---------
+log_debug("BEGIN lock digest:", digest, "job_id:", job_id)
+
+if redis.call("HEXISTS", locked, job_id) == 1 then
+  log_debug(locked, "already locked with job_id:", job_id)
+  log("Duplicate")
+
+  log_debug("LREM", queued, -1, job_id)
+  redis.call("LREM", queued, -1, job_id)
+
+  log_debug("LREM", primed, 1, job_id)
+  redis.call("LREM", primed, 1, job_id)
+
+  return job_id
+end
+
+local locked_count   = redis.call("HLEN", locked)
+local within_limit   = limit > locked_count
+local limit_exceeded = not within_limit
+
+if limit_exceeded then
+  log_debug("Limit exceeded:", digest, "(", locked_count, "of", limit, ")")
+  log("Limited")
+  return nil
+end
+
+log_debug("ZADD", expiring_digests, current_time + pttl, digest)
+redis.call("ZADD", expiring_digests, current_time + pttl, digest)
+
+log_debug("HSET", locked, job_id, current_time)
+redis.call("HSET", locked, job_id, current_time)
+
+log_debug("LREM", queued, -1, job_id)
+redis.call("LREM", queued, -1, job_id)
+
+log_debug("LREM", primed, 1, job_id)
+redis.call("LREM", primed, 1, job_id)
+
+-- The Sidekiq client sets pttl
+log_debug("PEXPIRE", digest, pttl)
+redis.call("PEXPIRE", digest, pttl)
+
+log_debug("PEXPIRE", locked, pttl)
+redis.call("PEXPIRE", locked, pttl)
+
+log_debug("PEXPIRE", info, pttl)
+redis.call("PEXPIRE", info, pttl)
+
+log_debug("PEXPIRE", queued, 1000)
+redis.call("PEXPIRE", queued, 1000)
+
+log_debug("PEXPIRE", primed, 1000)
+redis.call("PEXPIRE", primed, 1000)
+
+log("Locked")
+log_debug("END lock digest:", digest, "job_id:", job_id)
+return job_id
+---------- END lock.lua ----------
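Taken together, the Lua changes route `until_expired` locks into the new `expiring_digests` sorted set, scored by `current_time + pttl`, so they can be tracked and expired separately from regular digests. A hedged example of a worker that would exercise this path (class name and TTL are made up):

```ruby
class TokenRefreshJob
  include Sidekiq::Worker
  # until_expired keeps the lock until the TTL elapses, even after the job finishes.
  sidekiq_options lock: :until_expired, lock_ttl: 30 * 60

  def perform(account_id); end
end
```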