sidekiq-unique-jobs 7.1.8 → 7.1.29

Potentially problematic release.


Files changed (53)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +283 -2
  3. data/README.md +17 -14
  4. data/lib/sidekiq_unique_jobs/batch_delete.rb +7 -6
  5. data/lib/sidekiq_unique_jobs/changelog.rb +3 -3
  6. data/lib/sidekiq_unique_jobs/cli.rb +33 -8
  7. data/lib/sidekiq_unique_jobs/config.rb +5 -0
  8. data/lib/sidekiq_unique_jobs/constants.rb +1 -0
  9. data/lib/sidekiq_unique_jobs/digests.rb +5 -5
  10. data/lib/sidekiq_unique_jobs/exceptions.rb +3 -3
  11. data/lib/sidekiq_unique_jobs/expiring_digests.rb +14 -0
  12. data/lib/sidekiq_unique_jobs/key.rb +13 -8
  13. data/lib/sidekiq_unique_jobs/lock/until_executed.rb +5 -1
  14. data/lib/sidekiq_unique_jobs/lock/until_executing.rb +4 -0
  15. data/lib/sidekiq_unique_jobs/lock/until_expired.rb +3 -1
  16. data/lib/sidekiq_unique_jobs/lock/while_executing.rb +4 -1
  17. data/lib/sidekiq_unique_jobs/lock.rb +27 -10
  18. data/lib/sidekiq_unique_jobs/lock_args.rb +19 -15
  19. data/lib/sidekiq_unique_jobs/lock_config.rb +6 -6
  20. data/lib/sidekiq_unique_jobs/lock_digest.rb +7 -7
  21. data/lib/sidekiq_unique_jobs/lock_info.rb +2 -2
  22. data/lib/sidekiq_unique_jobs/lock_timeout.rb +4 -4
  23. data/lib/sidekiq_unique_jobs/lock_ttl.rb +4 -4
  24. data/lib/sidekiq_unique_jobs/locksmith.rb +30 -9
  25. data/lib/sidekiq_unique_jobs/logging.rb +14 -0
  26. data/lib/sidekiq_unique_jobs/lua/lock.lua +15 -9
  27. data/lib/sidekiq_unique_jobs/lua/lock_until_expired.lua +92 -0
  28. data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +31 -3
  29. data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +1 -1
  30. data/lib/sidekiq_unique_jobs/lua/unlock.lua +11 -7
  31. data/lib/sidekiq_unique_jobs/middleware/client.rb +2 -0
  32. data/lib/sidekiq_unique_jobs/middleware/server.rb +2 -0
  33. data/lib/sidekiq_unique_jobs/middleware.rb +4 -4
  34. data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +4 -4
  35. data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +3 -3
  36. data/lib/sidekiq_unique_jobs/options_with_fallback.rb +3 -3
  37. data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +1 -1
  38. data/lib/sidekiq_unique_jobs/orphans/manager.rb +3 -10
  39. data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +96 -13
  40. data/lib/sidekiq_unique_jobs/redis/string.rb +3 -1
  41. data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +1 -1
  42. data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +10 -0
  43. data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +40 -21
  44. data/lib/sidekiq_unique_jobs/testing.rb +53 -21
  45. data/lib/sidekiq_unique_jobs/timer_task.rb +266 -45
  46. data/lib/sidekiq_unique_jobs/timing.rb +1 -1
  47. data/lib/sidekiq_unique_jobs/upgrade_locks.rb +6 -6
  48. data/lib/sidekiq_unique_jobs/version.rb +1 -1
  49. data/lib/sidekiq_unique_jobs/web/helpers.rb +11 -1
  50. data/lib/sidekiq_unique_jobs/web.rb +22 -3
  51. data/lib/sidekiq_unique_jobs.rb +1 -0
  52. data/lib/tasks/changelog.rake +1 -1
  53. metadata +23 -11
@@ -0,0 +1,14 @@
+ # frozen_string_literal: true
+
+ module SidekiqUniqueJobs
+   #
+   # Class ExpiringDigests provides access to the expiring digests used by until_expired locks
+   #
+   # @author Mikael Henriksson <mikael@mhenrixon.com>
+   #
+   class ExpiringDigests < Digests
+     def initialize
+       super(EXPIRING_DIGESTS)
+     end
+   end
+ end
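
Since ExpiringDigests only passes a different key into the Digests superclass, it can presumably be queried the same way the existing digests collection is. A minimal sketch, assuming the superclass exposes #entries as in prior versions:

    # Listing the digests tracked for until_expired locks (assumed Digests API).
    expiring = SidekiqUniqueJobs::ExpiringDigests.new
    expiring.entries.each { |digest| puts digest }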
@@ -33,6 +33,10 @@ module SidekiqUniqueJobs
    # @!attribute [r] digests
    # @return [String] the zset with locked digests
    attr_reader :digests
+   #
+   # @!attribute [r] expiring_digests
+   # @return [String] the zset with locked expiring_digests
+   attr_reader :expiring_digests

    #
    # Initialize a new Key
@@ -40,13 +44,14 @@ module SidekiqUniqueJobs
    # @param [String] digest the digest to use as key
    #
    def initialize(digest)
-     @digest    = digest
-     @queued    = suffixed_key("QUEUED")
-     @primed    = suffixed_key("PRIMED")
-     @locked    = suffixed_key("LOCKED")
-     @info      = suffixed_key("INFO")
-     @changelog = CHANGELOGS
-     @digests   = DIGESTS
+     @digest           = digest
+     @queued           = suffixed_key("QUEUED")
+     @primed           = suffixed_key("PRIMED")
+     @locked           = suffixed_key("LOCKED")
+     @info             = suffixed_key("INFO")
+     @changelog        = CHANGELOGS
+     @digests          = DIGESTS
+     @expiring_digests = EXPIRING_DIGESTS
    end

    #
@@ -81,7 +86,7 @@ module SidekiqUniqueJobs
    # @return [Array] an ordered array with all keys
    #
    def to_a
-     [digest, queued, primed, locked, info, changelog, digests]
+     [digest, queued, primed, locked, info, changelog, digests, expiring_digests]
    end

    private
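
Key#to_a is the KEYS array handed to the Lua scripts, so every script now receives an eighth key. Roughly what the array looks like — the literal constant values below are assumptions based on the gem's naming convention, not shown in this diff:

    key = SidekiqUniqueJobs::Key.new("uniquejobs:24c5b03e2d49d765e5dfb2d7c51c5929")
    key.to_a
    # => ["uniquejobs:24c5b03e2d49d765e5dfb2d7c51c5929",          # digest
    #     "uniquejobs:24c5b03e2d49d765e5dfb2d7c51c5929:QUEUED",
    #     "uniquejobs:24c5b03e2d49d765e5dfb2d7c51c5929:PRIMED",
    #     "uniquejobs:24c5b03e2d49d765e5dfb2d7c51c5929:LOCKED",
    #     "uniquejobs:24c5b03e2d49d765e5dfb2d7c51c5929:INFO",
    #     "uniquejobs:changelog",                                  # assumed value of CHANGELOGS
    #     "uniquejobs:digests",                                    # assumed value of DIGESTS
    #     "uniquejobs:expiring_digests"]                           # assumed value of EXPIRING_DIGESTS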
@@ -33,10 +33,14 @@ module SidekiqUniqueJobs
      # Executes in the Sidekiq server process
      # @yield to the worker class perform method
      def execute
-       locksmith.execute do
+       executed = locksmith.execute do
          yield
          unlock_and_callback
        end
+
+       reflect(:execution_failed, item) unless executed
+
+       nil
      end
    end
  end
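
The new :execution_failed reflection surfaces the case where the locksmith could not execute the job. An application can subscribe to it through the gem's reflection API; a sketch, assuming the block receives the Sidekiq job hash like other reflections do:

    SidekiqUniqueJobs.reflect do |on|
      on.execution_failed do |item|
        Sidekiq.logger.warn("unique job was not executed: #{item["jid"]}")
      end
    end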
@@ -33,6 +33,10 @@ module SidekiqUniqueJobs
      def execute
        callback_safely if locksmith.unlock
        yield
+     rescue StandardError => ex
+       reflect(:execution_failed, item, ex)
+       locksmith.lock(wait: 1)
+       raise
      end
    end
  end
@@ -33,7 +33,9 @@ module SidekiqUniqueJobs
      # Executes in the Sidekiq server process
      # @yield to the worker class perform method
      def execute(&block)
-       locksmith.execute(&block)
+       executed = locksmith.execute(&block)
+
+       reflect(:execution_failed, item) unless executed
      end
    end
  end
@@ -47,7 +47,10 @@ module SidekiqUniqueJobs
          locksmith.unlock
        end

-       call_strategy(origin: :server, &block) unless executed
+       unless executed
+         reflect(:execution_failed, item)
+         call_strategy(origin: :server, &block)
+       end
      end
    end

@@ -62,13 +62,13 @@ module SidekiqUniqueJobs
    #
    def lock(job_id, lock_info = {})
      redis do |conn|
-       conn.multi do
-         conn.set(key.digest, job_id)
-         conn.hset(key.locked, job_id, now_f)
-         info.set(lock_info)
-         conn.zadd(key.digests, now_f, key.digest)
-         conn.zadd(key.changelog, now_f, changelog_json(job_id, "queue.lua", "Queued"))
-         conn.zadd(key.changelog, now_f, changelog_json(job_id, "lock.lua", "Locked"))
+       conn.multi do |pipeline|
+         pipeline.set(key.digest, job_id)
+         pipeline.hset(key.locked, job_id, now_f)
+         info.set(lock_info, pipeline)
+         add_digest_to_set(pipeline, lock_info)
+         pipeline.zadd(key.changelog, now_f, changelog_json(job_id, "queue.lua", "Queued"))
+         pipeline.zadd(key.changelog, now_f, changelog_json(job_id, "lock.lua", "Locked"))
        end
      end
    end
@@ -123,9 +123,9 @@ module SidekiqUniqueJobs
    #
    def del
      redis do |conn|
-       conn.multi do
-         conn.zrem(DIGESTS, key.digest)
-         conn.del(key.digest, key.queued, key.primed, key.locked, key.info)
+       conn.multi do |pipeline|
+         pipeline.zrem(DIGESTS, key.digest)
+         pipeline.del(key.digest, key.queued, key.primed, key.locked, key.info)
        end
      end
    end
@@ -321,5 +321,22 @@ module SidekiqUniqueJobs
        time: now_f,
      )
    end
+
+   #
+   # Add the digest to the correct sorted set
+   #
+   # @param [Object] pipeline a redis pipeline object for issue commands
+   # @param [Hash] lock_info the lock info relevant to the digest
+   #
+   # @return [nil]
+   #
+   def add_digest_to_set(pipeline, lock_info)
+     digest_string = key.digest
+     if lock_info["lock"] == :until_expired
+       pipeline.zadd(key.expiring_digests, now_f + lock_info["ttl"], digest_string)
+     else
+       pipeline.zadd(key.digests, now_f, digest_string)
+     end
+   end
  end
end
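
The new add_digest_to_set branch only fires for until_expired locks, whose digests are scored at now + ttl so they can be cleaned up separately from ordinary digests. A worker that would exercise this path (class name illustrative):

    class RefreshTokenJob
      include Sidekiq::Worker
      # until_expired drops duplicates for the lifetime of the TTL;
      # the digest ends up in the expiring digests sorted set.
      sidekiq_options lock: :until_expired, lock_ttl: 10 * 60

      def perform(account_id)
        # refresh credentials for account_id
      end
    end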
@@ -26,15 +26,15 @@ module SidekiqUniqueJobs

    # @param [Hash] item a Sidekiq job hash
    def initialize(item)
-     @item         = item
-     @worker_class = item[CLASS]
-     @args         = item[ARGS]
+     @item = item
+     @args = item[ARGS]
+     self.job_class = item[CLASS]
    end

    # The unique arguments to use for creating a lock
    # @return [Array] the arguments filters by the {#filtered_args} method if {#lock_args_enabled?}
    def lock_args
-     @lock_args ||= filtered_args
+     @lock_args ||= filtered_args || []
    end

    # Checks if the worker class has enabled lock_args
@@ -83,31 +83,31 @@ module SidekiqUniqueJobs

    # Filters unique arguments by method configured in the sidekiq worker
    # @param [Array] args the arguments passed to the sidekiq worker
-   # @return [Array] unfiltered unless {#worker_method_defined?}
+   # @return [Array] unfiltered unless {#job_method_defined?}
    # @return [Array] with the filtered arguments
    def filter_by_symbol(args)
-     return args unless worker_method_defined?(lock_args_method)
+     return args unless job_method_defined?(lock_args_method)

-     worker_class.send(lock_args_method, args)
+     job_class.send(lock_args_method, args)
    rescue ArgumentError
      raise SidekiqUniqueJobs::InvalidUniqueArguments,
            given: args,
-           worker_class: worker_class,
+           job_class: job_class,
            lock_args_method: lock_args_method
    end

    # The method to use for filtering unique arguments
    def lock_args_method
-     @lock_args_method ||= worker_options.slice(LOCK_ARGS_METHOD, UNIQUE_ARGS_METHOD).values.first
-     @lock_args_method ||= :lock_args if worker_method_defined?(:lock_args)
-     @lock_args_method ||= :unique_args if worker_method_defined?(:unique_args)
+     @lock_args_method ||= job_options.slice(LOCK_ARGS_METHOD, UNIQUE_ARGS_METHOD).values.first
+     @lock_args_method ||= :lock_args if job_method_defined?(:lock_args)
+     @lock_args_method ||= :unique_args if job_method_defined?(:unique_args)
      @lock_args_method ||= default_lock_args_method
    end

    # The global worker options defined in Sidekiq directly
    def default_lock_args_method
-     default_worker_options[LOCK_ARGS_METHOD] ||
-       default_worker_options[UNIQUE_ARGS_METHOD]
+     default_job_options[LOCK_ARGS_METHOD] ||
+       default_job_options[UNIQUE_ARGS_METHOD]
    end

    #
@@ -116,8 +116,12 @@ module SidekiqUniqueJobs
    #
    # @return [Hash<String, Object>]
    #
-   def default_worker_options
-     @default_worker_options ||= Sidekiq.default_worker_options.stringify_keys
+   def default_job_options
+     @default_job_options ||= if Sidekiq.respond_to?(:default_job_options)
+                                Sidekiq.default_job_options.stringify_keys
+                              else
+                                Sidekiq.default_worker_options.stringify_keys
+                              end
    end
  end
end
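
The worker-to-job rename does not change how workers opt in to argument filtering; the typical setup from the gem's README still applies (worker name illustrative):

    class OrderJob
      include Sidekiq::Worker
      sidekiq_options lock: :until_executed, lock_args_method: :lock_args

      # Only the order id participates in the lock digest; the second
      # argument is ignored when deciding uniqueness.
      def self.lock_args(args)
        [args[0]]
      end

      def perform(order_id, force = false); end
    end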
@@ -13,9 +13,9 @@ module SidekiqUniqueJobs
    # @return [Symbol] the type of lock
    attr_reader :type
    #
-   # @!attribute [r] worker
-   # @return [Symbol] the worker class
-   attr_reader :worker
+   # @!attribute [r] job
+   # @return [Symbol] the job class
+   attr_reader :job
    #
    # @!attribute [r] limit
    # @return [Integer] the number of simultaneous locks
@@ -58,7 +58,7 @@ module SidekiqUniqueJobs

    def initialize(job_hash = {})
      @type = job_hash[LOCK]&.to_sym
-     @worker = SidekiqUniqueJobs.safe_constantize(job_hash[CLASS])
+     @job = SidekiqUniqueJobs.safe_constantize(job_hash[CLASS])
      @limit = job_hash.fetch(LOCK_LIMIT, 1)&.to_i
      @timeout = job_hash.fetch(LOCK_TIMEOUT, 0)&.to_i
      @ttl = job_hash.fetch(LOCK_TTL) { job_hash.fetch(LOCK_EXPIRATION, nil) }.to_i
@@ -113,13 +113,13 @@ module SidekiqUniqueJobs

    # the strategy to use as conflict resolution from sidekiq client
    def on_client_conflict
-     @on_client_conflict ||= on_conflict["client"] if on_conflict.is_a?(Hash)
+     @on_client_conflict ||= on_conflict["client"] || on_conflict[:client] if on_conflict.is_a?(Hash)
      @on_client_conflict ||= on_conflict
    end

    # the strategy to use as conflict resolution from sidekiq server
    def on_server_conflict
-     @on_server_conflict ||= on_conflict["server"] if on_conflict.is_a?(Hash)
+     @on_server_conflict ||= on_conflict["server"] || on_conflict[:server] if on_conflict.is_a?(Hash)
      @on_server_conflict ||= on_conflict
    end
  end
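
With the lookup accepting both string and symbol keys, per-side conflict strategies declared with symbol keys are no longer silently ignored:

    class ReportJob
      include Sidekiq::Worker
      sidekiq_options lock: :until_executed,
                      # Symbol keys now resolve the same as string keys.
                      on_conflict: { client: :log, server: :reject }

      def perform(report_id); end
    end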
@@ -36,10 +36,10 @@ module SidekiqUniqueJobs

    # @param [Hash] item a Sidekiq job hash
    def initialize(item)
-     @item         = item
-     @worker_class = item[CLASS]
-     @lock_args    = item.slice(LOCK_ARGS, UNIQUE_ARGS).values.first # TODO: Deprecate UNIQUE_ARGS
-     @lock_prefix  = item.slice(LOCK_PREFIX, UNIQUE_PREFIX).values.first # TODO: Deprecate UNIQUE_PREFIX
+     @item        = item
+     @lock_args   = item[LOCK_ARGS] || item[UNIQUE_ARGS] # TODO: Deprecate UNIQUE_ARGS
+     @lock_prefix = item[LOCK_PREFIX] || item[UNIQUE_PREFIX] # TODO: Deprecate UNIQUE_PREFIX
+     self.job_class = item[CLASS]
    end

    # Memoized lock_digest
@@ -51,7 +51,7 @@ module SidekiqUniqueJobs

    # Creates a namespaced unique digest based on the {#digestable_hash} and the {#lock_prefix}
    # @return [String] a unique digest
    def create_digest
-     digest = OpenSSL::Digest::MD5.hexdigest(dump_json(digestable_hash))
+     digest = OpenSSL::Digest::MD5.hexdigest(dump_json(digestable_hash.sort))
      "#{lock_prefix}:#{digest}"
    end

@@ -67,13 +67,13 @@ module SidekiqUniqueJobs

    # Checks if we should disregard the queue when creating the unique digest
    # @return [true, false]
    def unique_across_queues?
-     item[UNIQUE_ACROSS_QUEUES] || worker_options[UNIQUE_ACROSS_QUEUES]
+     item[UNIQUE_ACROSS_QUEUES] || job_options[UNIQUE_ACROSS_QUEUES]
    end

    # Checks if we should disregard the worker when creating the unique digest
    # @return [true, false]
    def unique_across_workers?
-     item[UNIQUE_ACROSS_WORKERS] || worker_options[UNIQUE_ACROSS_WORKERS]
+     item[UNIQUE_ACROSS_WORKERS] || job_options[UNIQUE_ACROSS_WORKERS]
    end
  end
end
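
job_options here replaces worker_options but reads the same sidekiq_options, so opting out of queue-scoped digests is unchanged:

    class NotifyJob
      include Sidekiq::Worker
      # The queue name is left out of the digest, so the same arguments are
      # treated as duplicates no matter which queue they are pushed to.
      sidekiq_options lock: :until_executed, unique_across_queues: true

      def perform(user_id); end
    end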
@@ -55,13 +55,13 @@ module SidekiqUniqueJobs
    #
    # @return [Hash]
    #
-   def set(obj)
+   def set(obj, pipeline = nil)
      return unless SidekiqUniqueJobs.config.lock_info
      raise InvalidArgument, "argument `obj` (#{obj}) needs to be a hash" unless obj.is_a?(Hash)

      json = dump_json(obj)
      @value = load_json(json)
-     super(json)
+     super(json, pipeline)
      value
    end
  end
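
Lock info is only written when it is enabled, and with this change the write happens inside the surrounding MULTI pipeline instead of on a separate call. Enabling it is unchanged:

    SidekiqUniqueJobs.configure do |config|
      # Stores a per-lock INFO hash alongside each digest.
      config.lock_info = true
    end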
@@ -30,8 +30,8 @@ module SidekiqUniqueJobs
    # @option item [String] :class the class of the sidekiq worker
    # @option item [Float] :at the unix time the job is scheduled at
    def initialize(item)
-     @item         = item
-     @worker_class = item[CLASS]
+     @item = item
+     self.job_class = item[CLASS]
    end

    #
@@ -42,9 +42,9 @@ module SidekiqUniqueJobs
    # @return [Integer, nil]
    #
    def calculate
-     timeout = default_worker_options[LOCK_TIMEOUT]
+     timeout = default_job_options[LOCK_TIMEOUT]
      timeout = default_lock_timeout if default_lock_timeout
-     timeout = worker_options[LOCK_TIMEOUT] if worker_options.key?(LOCK_TIMEOUT)
+     timeout = job_options[LOCK_TIMEOUT] if job_options.key?(LOCK_TIMEOUT)
      timeout
    end

@@ -33,8 +33,8 @@ module SidekiqUniqueJobs
    # @option item [String] :class the class of the sidekiq worker
    # @option item [Float] :at the unix time the job is scheduled at
    def initialize(item)
-     @item         = item
-     @worker_class = item[CLASS]
+     @item = item
+     self.job_class = item[CLASS]
    end

    #
@@ -67,9 +67,9 @@ module SidekiqUniqueJobs
    #
    def calculate
      ttl = item[LOCK_TTL]
-     ttl ||= worker_options[LOCK_TTL]
+     ttl ||= job_options[LOCK_TTL]
      ttl ||= item[LOCK_EXPIRATION] # TODO: Deprecate at some point
-     ttl ||= worker_options[LOCK_EXPIRATION] # TODO: Deprecate at some point
+     ttl ||= job_options[LOCK_EXPIRATION] # TODO: Deprecate at some point
      ttl ||= SidekiqUniqueJobs.config.lock_ttl
      ttl && (ttl.to_i + time_until_scheduled)
    end
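
The calculate chain above falls back to the global configuration last, so a project-wide default TTL can be set once (sketch using the config accessor referenced in the diff):

    SidekiqUniqueJobs.configure do |config|
      # Used only when neither the job hash nor sidekiq_options provide lock_ttl.
      config.lock_ttl = 10 * 60
    end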
@@ -32,6 +32,7 @@ module SidekiqUniqueJobs
    #
    # @return [Float] used to take into consideration the inaccuracy of redis timestamps
    CLOCK_DRIFT_FACTOR = 0.01
+   NETWORK_FACTOR = 0.04

    #
    # @!attribute [r] key
@@ -184,22 +185,23 @@ module SidekiqUniqueJobs
    #
    # @param [Sidekiq::RedisConnection, ConnectionPool] conn the redis connection
    # @param [Method] primed_method reference to the method to use for getting a primed token
+   # @param [nil, Integer, Float] time to wait before timeout
    #
    # @yieldparam [string] job_id the sidekiq JID
    # @yieldreturn [void] whatever the calling block returns
    def lock!(conn, primed_method, wait = nil)
-     return yield job_id if locked?(conn)
+     return yield if locked?(conn)

      enqueue(conn) do |queued_jid|
-       reflect(:debug, item, queued_jid)
+       reflect(:debug, :queued, item, queued_jid)

        primed_method.call(conn, wait) do |primed_jid|
          reflect(:debug, :primed, item, primed_jid)
-
          locked_jid = call_script(:lock, key.to_a, argv, conn)
+
          if locked_jid
            reflect(:debug, :locked, item, locked_jid)
-           return yield job_id
+           return yield
          end
        end
      end
@@ -239,9 +241,18 @@ module SidekiqUniqueJobs
    # @return [Object] whatever the block returns when lock was acquired
    #
    def primed_async(conn, wait = nil, &block)
+     timeout = (wait || config.timeout).to_i
+     timeout = 1 if timeout.zero?
+
+     brpoplpush_timeout = timeout
+     concurrent_timeout = add_drift(timeout)
+
+     reflect(:debug, :timeouts, item,
+             timeouts: { brpoplpush_timeout: brpoplpush_timeout, concurrent_timeout: concurrent_timeout })
+
      primed_jid = Concurrent::Promises
-       .future(conn) { |red_con| pop_queued(red_con, wait) }
-       .value(add_drift(wait || config.timeout))
+       .future(conn) { |red_con| pop_queued(red_con, timeout) }
+       .value

      handle_primed(primed_jid, &block)
    end
@@ -273,7 +284,7 @@ module SidekiqUniqueJobs
    #
    # @return [String] a previously enqueued token (now taken off the queue)
    #
-   def pop_queued(conn, wait = nil)
+   def pop_queued(conn, wait = 1)
      wait ||= config.timeout if config.wait_for_lock?

      if wait.nil?
@@ -287,10 +298,15 @@ module SidekiqUniqueJobs
    # @api private
    #
    def brpoplpush(conn, wait)
+     # passing timeout 0 to brpoplpush causes it to block indefinitely
      raise InvalidArgument, "wait must be an integer" unless wait.is_a?(Integer)
+     return conn.brpoplpush(key.queued, key.primed, wait) if conn.class.to_s == "Redis::Namespace"

-     # passing timeout 0 to brpoplpush causes it to block indefinitely
-     conn.brpoplpush(key.queued, key.primed, timeout: wait)
+     if VersionCheck.satisfied?(redis_version, ">= 6.2.0") && conn.respond_to?(:blmove)
+       conn.blmove(key.queued, key.primed, "RIGHT", "LEFT", timeout: wait)
+     else
+       conn.brpoplpush(key.queued, key.primed, timeout: wait)
+     end
    end

    #
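
BRPOPLPUSH is deprecated in favour of BLMOVE as of Redis 6.2, which is why the version-gated branch above prefers it when available. With redis-rb the two calls below move the same element; this sketch just mirrors the calls in the diff:

    # Atomically move a token from the queued list to the primed list,
    # blocking up to 5 seconds either way.
    conn.brpoplpush(key.queued, key.primed, timeout: 5)
    conn.blmove(key.queued, key.primed, "RIGHT", "LEFT", timeout: 5)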
@@ -328,6 +344,7 @@ module SidekiqUniqueJobs
    end

    def add_drift(val)
+     val = val.to_f
      val + drift(val)
    end

@@ -358,5 +375,9 @@ module SidekiqUniqueJobs
        TIME => now_f,
      )
    end
+
+   def redis_version
+     @redis_version ||= SidekiqUniqueJobs.config.redis_version
+   end
  end
end
@@ -30,6 +30,8 @@ module SidekiqUniqueJobs
    # @yield [String, Exception] the message or exception to use for log message
    #
    def log_debug(message_or_exception = nil, item = nil, &block)
+     return unless logging?
+
      message = build_message(message_or_exception, item)
      logger.debug(message, &block)
      nil
@@ -45,6 +47,8 @@ module SidekiqUniqueJobs
    # @yield [String, Exception] the message or exception to use for log message
    #
    def log_info(message_or_exception = nil, item = nil, &block)
+     return unless logging?
+
      message = build_message(message_or_exception, item)
      logger.info(message, &block)
      nil
@@ -60,6 +64,8 @@ module SidekiqUniqueJobs
    # @yield [String, Exception] the message or exception to use for log message
    #
    def log_warn(message_or_exception = nil, item = nil, &block)
+     return unless logging?
+
      message = build_message(message_or_exception, item)
      logger.warn(message, &block)
      nil
@@ -75,6 +81,8 @@ module SidekiqUniqueJobs
    # @yield [String, Exception] the message or exception to use for log message
    #
    def log_error(message_or_exception = nil, item = nil, &block)
+     return unless logging?
+
      message = build_message(message_or_exception, item)
      logger.error(message, &block)
      nil
@@ -90,6 +98,8 @@ module SidekiqUniqueJobs
    # @yield [String, Exception] the message or exception to use for log message
    #
    def log_fatal(message_or_exception = nil, item = nil, &block)
+     return unless logging?
+
      message = build_message(message_or_exception, item)
      logger.fatal(message, &block)

@@ -218,5 +228,9 @@ module SidekiqUniqueJobs

      yield
    end
+
+   def logging?
+     SidekiqUniqueJobs.logging?
+   end
  end
end
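
Applications that want to silence the gem entirely can now short-circuit these helpers. A sketch, assuming the flag backing SidekiqUniqueJobs.logging? is config.logger_enabled (the exact name is not shown in this diff):

    SidekiqUniqueJobs.configure do |config|
      # Assumed flag; all log_* helpers above return early when it is false.
      config.logger_enabled = false
    end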
@@ -1,11 +1,12 @@
  -------- BEGIN keys ---------
- local digest    = KEYS[1]
- local queued    = KEYS[2]
- local primed    = KEYS[3]
- local locked    = KEYS[4]
- local info      = KEYS[5]
- local changelog = KEYS[6]
- local digests   = KEYS[7]
+ local digest           = KEYS[1]
+ local queued           = KEYS[2]
+ local primed           = KEYS[3]
+ local locked           = KEYS[4]
+ local info             = KEYS[5]
+ local changelog        = KEYS[6]
+ local digests          = KEYS[7]
+ local expiring_digests = KEYS[8]
  -------- END keys ---------


@@ -57,8 +58,13 @@ if limit_exceeded then
    return nil
  end

- log_debug("ZADD", digests, current_time, digest)
- redis.call("ZADD", digests, current_time, digest)
+ if lock_type == "until_expired" and pttl and pttl > 0 then
+   log_debug("ZADD", expiring_digests, current_time + pttl, digest)
+   redis.call("ZADD", expiring_digests, current_time + pttl, digest)
+ else
+   log_debug("ZADD", digests, current_time, digest)
+   redis.call("ZADD", digests, current_time, digest)
+ end

  log_debug("HSET", locked, job_id, current_time)
  redis.call("HSET", locked, job_id, current_time)
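
Digests written by this branch can be inspected directly. A sketch, assuming the sorted set lives under "uniquejobs:expiring_digests" (the constant's actual value is not shown in this diff):

    Sidekiq.redis do |conn|
      # Scores encode when each digest is considered expired.
      conn.zrange("uniquejobs:expiring_digests", 0, -1, with_scores: true)
    end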
@@ -0,0 +1,92 @@
+ -------- BEGIN keys ---------
+ local digest           = KEYS[1]
+ local queued           = KEYS[2]
+ local primed           = KEYS[3]
+ local locked           = KEYS[4]
+ local info             = KEYS[5]
+ local changelog        = KEYS[6]
+ local digests          = KEYS[7]
+ local expiring_digests = KEYS[8]
+ -------- END keys ---------
+
+
+ -------- BEGIN lock arguments ---------
+ local job_id    = ARGV[1]
+ local pttl      = tonumber(ARGV[2])
+ local lock_type = ARGV[3]
+ local limit     = tonumber(ARGV[4])
+ -------- END lock arguments -----------
+
+
+ -------- BEGIN injected arguments --------
+ local current_time = tonumber(ARGV[5])
+ local debug_lua    = ARGV[6] == "true"
+ local max_history  = tonumber(ARGV[7])
+ local script_name  = tostring(ARGV[8]) .. ".lua"
+ local redisversion = ARGV[9]
+ --------- END injected arguments ---------
+
+
+ -------- BEGIN local functions --------
+ <%= include_partial "shared/_common.lua" %>
+ ---------- END local functions ----------
+
+
+ --------- BEGIN lock.lua ---------
+ log_debug("BEGIN lock digest:", digest, "job_id:", job_id)
+
+ if redis.call("HEXISTS", locked, job_id) == 1 then
+   log_debug(locked, "already locked with job_id:", job_id)
+   log("Duplicate")
+
+   log_debug("LREM", queued, -1, job_id)
+   redis.call("LREM", queued, -1, job_id)
+
+   log_debug("LREM", primed, 1, job_id)
+   redis.call("LREM", primed, 1, job_id)
+
+   return job_id
+ end
+
+ local locked_count   = redis.call("HLEN", locked)
+ local within_limit   = limit > locked_count
+ local limit_exceeded = not within_limit
+
+ if limit_exceeded then
+   log_debug("Limit exceeded:", digest, "(", locked_count, "of", limit, ")")
+   log("Limited")
+   return nil
+ end
+
+ log_debug("ZADD", expiring_digests, current_time + pttl, digest)
+ redis.call("ZADD", expiring_digests, current_time + pttl, digest)
+
+ log_debug("HSET", locked, job_id, current_time)
+ redis.call("HSET", locked, job_id, current_time)
+
+ log_debug("LREM", queued, -1, job_id)
+ redis.call("LREM", queued, -1, job_id)
+
+ log_debug("LREM", primed, 1, job_id)
+ redis.call("LREM", primed, 1, job_id)
+
+ -- The Sidekiq client sets pttl
+ log_debug("PEXPIRE", digest, pttl)
+ redis.call("PEXPIRE", digest, pttl)
+
+ log_debug("PEXPIRE", locked, pttl)
+ redis.call("PEXPIRE", locked, pttl)
+
+ log_debug("PEXPIRE", info, pttl)
+ redis.call("PEXPIRE", info, pttl)
+
+ log_debug("PEXPIRE", queued, 1000)
+ redis.call("PEXPIRE", queued, 1000)
+
+ log_debug("PEXPIRE", primed, 1000)
+ redis.call("PEXPIRE", primed, 1000)
+
+ log("Locked")
+ log_debug("END lock digest:", digest, "job_id:", job_id)
+ return job_id
+ ---------- END lock.lua ----------