sidekiq-unique-jobs 6.0.24 → 7.0.4
Potentially problematic release: this version of sidekiq-unique-jobs might be problematic.
- checksums.yaml +4 -4
- data/CHANGELOG.md +707 -25
- data/README.md +516 -105
- data/lib/sidekiq_unique_jobs.rb +48 -7
- data/lib/sidekiq_unique_jobs/batch_delete.rb +123 -0
- data/lib/sidekiq_unique_jobs/changelog.rb +78 -0
- data/lib/sidekiq_unique_jobs/cli.rb +34 -31
- data/lib/sidekiq_unique_jobs/config.rb +263 -0
- data/lib/sidekiq_unique_jobs/connection.rb +6 -5
- data/lib/sidekiq_unique_jobs/constants.rb +46 -24
- data/lib/sidekiq_unique_jobs/core_ext.rb +80 -0
- data/lib/sidekiq_unique_jobs/digests.rb +71 -100
- data/lib/sidekiq_unique_jobs/exceptions.rb +78 -12
- data/lib/sidekiq_unique_jobs/job.rb +41 -12
- data/lib/sidekiq_unique_jobs/json.rb +40 -0
- data/lib/sidekiq_unique_jobs/key.rb +93 -0
- data/lib/sidekiq_unique_jobs/lock.rb +325 -0
- data/lib/sidekiq_unique_jobs/lock/base_lock.rb +66 -50
- data/lib/sidekiq_unique_jobs/lock/client_validator.rb +28 -0
- data/lib/sidekiq_unique_jobs/lock/server_validator.rb +27 -0
- data/lib/sidekiq_unique_jobs/lock/until_and_while_executing.rb +7 -10
- data/lib/sidekiq_unique_jobs/lock/until_executed.rb +6 -6
- data/lib/sidekiq_unique_jobs/lock/until_executing.rb +1 -1
- data/lib/sidekiq_unique_jobs/lock/until_expired.rb +4 -21
- data/lib/sidekiq_unique_jobs/lock/validator.rb +96 -0
- data/lib/sidekiq_unique_jobs/lock/while_executing.rb +13 -9
- data/lib/sidekiq_unique_jobs/lock/while_executing_reject.rb +3 -3
- data/lib/sidekiq_unique_jobs/lock_args.rb +123 -0
- data/lib/sidekiq_unique_jobs/lock_config.rb +122 -0
- data/lib/sidekiq_unique_jobs/lock_digest.rb +79 -0
- data/lib/sidekiq_unique_jobs/lock_info.rb +68 -0
- data/lib/sidekiq_unique_jobs/lock_timeout.rb +62 -0
- data/lib/sidekiq_unique_jobs/lock_ttl.rb +77 -0
- data/lib/sidekiq_unique_jobs/locksmith.rb +261 -101
- data/lib/sidekiq_unique_jobs/logging.rb +149 -23
- data/lib/sidekiq_unique_jobs/logging/middleware_context.rb +44 -0
- data/lib/sidekiq_unique_jobs/lua/delete.lua +51 -0
- data/lib/sidekiq_unique_jobs/lua/delete_by_digest.lua +42 -0
- data/lib/sidekiq_unique_jobs/lua/delete_job_by_digest.lua +38 -0
- data/lib/sidekiq_unique_jobs/lua/find_digest_in_queues.lua +26 -0
- data/lib/sidekiq_unique_jobs/lua/lock.lua +93 -0
- data/lib/sidekiq_unique_jobs/lua/locked.lua +35 -0
- data/lib/sidekiq_unique_jobs/lua/queue.lua +87 -0
- data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +94 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +40 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_current_time.lua +8 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_queue.lua +22 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_sorted_set.lua +18 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_process_set.lua +53 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_queues.lua +43 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_sorted_set.lua +24 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_hgetall.lua +13 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_upgrades.lua +3 -0
- data/lib/sidekiq_unique_jobs/lua/unlock.lua +95 -0
- data/lib/sidekiq_unique_jobs/lua/update_version.lua +40 -0
- data/lib/sidekiq_unique_jobs/lua/upgrade.lua +68 -0
- data/lib/sidekiq_unique_jobs/middleware.rb +29 -31
- data/lib/sidekiq_unique_jobs/middleware/client.rb +42 -0
- data/lib/sidekiq_unique_jobs/middleware/server.rb +27 -0
- data/lib/sidekiq_unique_jobs/normalizer.rb +4 -4
- data/lib/sidekiq_unique_jobs/on_conflict.rb +23 -10
- data/lib/sidekiq_unique_jobs/on_conflict/log.rb +9 -5
- data/lib/sidekiq_unique_jobs/on_conflict/null_strategy.rb +1 -1
- data/lib/sidekiq_unique_jobs/on_conflict/raise.rb +1 -1
- data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +61 -15
- data/lib/sidekiq_unique_jobs/on_conflict/replace.rb +54 -14
- data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +12 -5
- data/lib/sidekiq_unique_jobs/on_conflict/strategy.rb +25 -6
- data/lib/sidekiq_unique_jobs/options_with_fallback.rb +41 -27
- data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +29 -0
- data/lib/sidekiq_unique_jobs/orphans/manager.rb +212 -0
- data/lib/sidekiq_unique_jobs/orphans/null_reaper.rb +24 -0
- data/lib/sidekiq_unique_jobs/orphans/observer.rb +42 -0
- data/lib/sidekiq_unique_jobs/orphans/reaper.rb +114 -0
- data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +201 -0
- data/lib/sidekiq_unique_jobs/redis.rb +11 -0
- data/lib/sidekiq_unique_jobs/redis/entity.rb +106 -0
- data/lib/sidekiq_unique_jobs/redis/hash.rb +56 -0
- data/lib/sidekiq_unique_jobs/redis/list.rb +32 -0
- data/lib/sidekiq_unique_jobs/redis/set.rb +32 -0
- data/lib/sidekiq_unique_jobs/redis/sorted_set.rb +86 -0
- data/lib/sidekiq_unique_jobs/redis/string.rb +49 -0
- data/lib/sidekiq_unique_jobs/rspec/matchers.rb +26 -0
- data/lib/sidekiq_unique_jobs/rspec/matchers/have_valid_sidekiq_options.rb +51 -0
- data/lib/sidekiq_unique_jobs/script.rb +15 -0
- data/lib/sidekiq_unique_jobs/script/caller.rb +125 -0
- data/lib/sidekiq_unique_jobs/server.rb +48 -0
- data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +92 -65
- data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +185 -34
- data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +11 -5
- data/lib/sidekiq_unique_jobs/testing.rb +62 -21
- data/lib/sidekiq_unique_jobs/timer_task.rb +78 -0
- data/lib/sidekiq_unique_jobs/timing.rb +58 -0
- data/lib/sidekiq_unique_jobs/unlockable.rb +20 -4
- data/lib/sidekiq_unique_jobs/update_version.rb +25 -0
- data/lib/sidekiq_unique_jobs/upgrade_locks.rb +155 -0
- data/lib/sidekiq_unique_jobs/version.rb +3 -1
- data/lib/sidekiq_unique_jobs/version_check.rb +23 -4
- data/lib/sidekiq_unique_jobs/web.rb +50 -27
- data/lib/sidekiq_unique_jobs/web/helpers.rb +125 -10
- data/lib/sidekiq_unique_jobs/web/views/changelogs.erb +54 -0
- data/lib/sidekiq_unique_jobs/web/views/lock.erb +108 -0
- data/lib/sidekiq_unique_jobs/web/views/locks.erb +52 -0
- data/lib/tasks/changelog.rake +5 -5
- metadata +117 -177
- data/lib/sidekiq_unique_jobs/client/middleware.rb +0 -56
- data/lib/sidekiq_unique_jobs/scripts.rb +0 -118
- data/lib/sidekiq_unique_jobs/server/middleware.rb +0 -46
- data/lib/sidekiq_unique_jobs/timeout.rb +0 -8
- data/lib/sidekiq_unique_jobs/timeout/calculator.rb +0 -63
- data/lib/sidekiq_unique_jobs/unique_args.rb +0 -150
- data/lib/sidekiq_unique_jobs/util.rb +0 -103
- data/lib/sidekiq_unique_jobs/web/views/unique_digest.erb +0 -28
- data/lib/sidekiq_unique_jobs/web/views/unique_digests.erb +0 -46
- data/redis/acquire_lock.lua +0 -21
- data/redis/convert_legacy_lock.lua +0 -13
- data/redis/delete.lua +0 -14
- data/redis/delete_by_digest.lua +0 -23
- data/redis/delete_job_by_digest.lua +0 -60
- data/redis/lock.lua +0 -62
- data/redis/release_stale_locks.lua +0 -90
- data/redis/unlock.lua +0 -35
data/lib/sidekiq_unique_jobs/connection.rb

@@ -3,7 +3,7 @@
 module SidekiqUniqueJobs
   # Shared module for dealing with redis connections
   #
-  # @author Mikael Henriksson <mikael@
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
   module Connection
     def self.included(base)
      base.send(:extend, self)
@@ -11,11 +11,12 @@ module SidekiqUniqueJobs
 
     # Creates a connection to redis
     # @return [Sidekiq::RedisConnection, ConnectionPool] a connection to redis
-    def redis(
-
-
+    def redis(r_pool = nil, &block)
+      r_pool ||= defined?(redis_pool) ? redis_pool : r_pool
+      if r_pool
+        r_pool.with(&block)
       else
-        Sidekiq.redis
+        Sidekiq.redis(&block)
       end
     end
   end
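
For orientation, here is a minimal sketch (not from the gem) of how a class including the updated `SidekiqUniqueJobs::Connection` might pass its own connection pool. The class name and pool are assumptions; only the `redis(r_pool = nil, &block)` signature comes from the hunk above.

```ruby
require "connection_pool"
require "redis"

# Hypothetical example class; only the Connection module and the new
# redis(r_pool = nil, &block) signature are taken from the diff above.
class DigestCounter
  include SidekiqUniqueJobs::Connection

  POOL = ConnectionPool.new(size: 5, timeout: 3) { Redis.new }

  def count_digests
    # Uses the given pool; falls back to Sidekiq.redis when no pool is passed.
    redis(POOL) { |conn| conn.zcard("uniquejobs:digests") }
  end
end
```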
data/lib/sidekiq_unique_jobs/constants.rb

@@ -3,30 +3,52 @@
 #
 # Module with constants to avoid string duplication
 #
-# @author Mikael Henriksson <mikael@
+# @author Mikael Henriksson <mikael@mhenrixon.com>
 #
 module SidekiqUniqueJobs
-  … (23 removed lines with the 6.0.24 constants; content not captured)
+  ARGS ||= "args"
+  APARTMENT ||= "apartment"
+  AT ||= "at"
+  CHANGELOGS ||= "uniquejobs:changelog"
+  CLASS ||= "class"
+  CREATED_AT ||= "created_at"
+  DEAD_VERSION ||= "uniquejobs:dead"
+  DIGESTS ||= "uniquejobs:digests"
+  ERRORS ||= "errors"
+  JID ||= "jid"
+  LIMIT ||= "limit"
+  LIVE_VERSION ||= "uniquejobs:live"
+  LOCK ||= "lock"
+  LOCK_ARGS ||= "lock_args"
+  LOCK_ARGS_METHOD ||= "lock_args_method"
+  LOCK_DIGEST ||= "lock_digest"
+  LOCK_EXPIRATION ||= "lock_expiration"
+  LOCK_INFO ||= "lock_info"
+  LOCK_LIMIT ||= "lock_limit"
+  LOCK_PREFIX ||= "lock_prefix"
+  LOCK_TIMEOUT ||= "lock_timeout"
+  LOCK_TTL ||= "lock_ttl"
+  LOCK_TYPE ||= "lock_type"
+  LOG_DUPLICATE ||= "log_duplicate"
+  ON_CLIENT_CONFLICT ||= "on_client_conflict"
+  ON_CONFLICT ||= "on_conflict"
+  ON_SERVER_CONFLICT ||= "on_server_conflict"
+  PAYLOAD ||= "payload"
+  PROCESSES ||= "processes"
+  QUEUE ||= "queue"
+  RETRY ||= "retry"
+  SCHEDULE ||= "schedule"
+  TIME ||= "time"
+  TIMEOUT ||= "timeout"
+  TTL ||= "ttl"
+  TYPE ||= "type"
+  UNIQUE ||= "unique"
+  UNIQUE_ACROSS_QUEUES ||= "unique_across_queues"
+  UNIQUE_ACROSS_WORKERS ||= "unique_across_workers"
+  UNIQUE_ARGS ||= "unique_args"
+  UNIQUE_ARGS_METHOD ||= "unique_args_method"
+  UNIQUE_DIGEST ||= "unique_digest"
+  UNIQUE_PREFIX ||= "unique_prefix"
+  UNIQUE_REAPER ||= "uniquejobs:reaper"
+  WORKER ||= "worker"
 end
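
Most of these strings are the `sidekiq_options` keys the 7.x middleware reads. A hedged sketch of a worker using a few of them; the worker itself is made up, and the `lock`, `lock_ttl`, `lock_timeout`, `on_conflict`, and `lock_args_method` keys are assumed to map onto the constants above.

```ruby
# Hypothetical worker; the option keys mirror the constants above.
class OrderSyncWorker
  include Sidekiq::Worker

  sidekiq_options lock: :until_executed,         # LOCK
                  lock_ttl: 3_600,               # LOCK_TTL (seconds)
                  lock_timeout: 0,               # LOCK_TIMEOUT
                  on_conflict: :log,             # ON_CONFLICT
                  lock_args_method: :lock_args   # LOCK_ARGS_METHOD

  # Only the order id contributes to the lock digest.
  def self.lock_args(args)
    [args[0]]
  end

  def perform(order_id, triggered_by = nil)
    # ...
  end
end
```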
data/lib/sidekiq_unique_jobs/core_ext.rb

@@ -2,21 +2,67 @@
 
 # :nocov:
 
+#
+# Monkey patches for the ruby Hash
+#
 class Hash
   unless {}.respond_to?(:slice)
+    #
+    # Returns only the matching keys in a new hash
+    #
+    # @param [Array<String>, Array<Symbol>] keys the keys to match
+    #
+    # @return [Hash]
+    #
     def slice(*keys)
       keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
       keys.each_with_object(self.class.new) { |k, hash| hash[k] = self[k] if key?(k) }
     end
   end
 
+  unless {}.respond_to?(:deep_stringify_keys)
+    #
+    # Depp converts all keys to string
+    #
+    #
+    # @return [Hash<String>]
+    #
+    def deep_stringify_keys
+      deep_transform_keys(&:to_s)
+    end
+  end
+
+  unless {}.respond_to?(:deep_transform_keys)
+    #
+    # Deep transfor all keys by yielding to the caller
+    #
+    #
+    # @return [Hash<String>]
+    #
+    def deep_transform_keys(&block)
+      _deep_transform_keys_in_object(self, &block)
+    end
+  end
+
   unless {}.respond_to?(:stringify_keys)
+    #
+    # Converts all keys to string
+    #
+    #
+    # @return [Hash<String>]
+    #
     def stringify_keys
       transform_keys(&:to_s)
     end
   end
 
   unless {}.respond_to?(:transform_keys)
+    #
+    # Transforms all keys by yielding to the caller
+    #
+    #
+    # @return [Hash]
+    #
     def transform_keys
       result = {}
       each_key do |key|
@@ -27,6 +73,13 @@ class Hash
     end
 
   unless {}.respond_to?(:slice!)
+    #
+    # Removes all keys not provided from the current hash and returns it
+    #
+    # @param [Array<String>, Array<Symbol>] keys the keys to match
+    #
+    # @return [Hash]
+    #
     def slice!(*keys)
       keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
       omit = slice(*self.keys - keys)
@@ -37,10 +90,37 @@ class Hash
       omit
     end
   end
+
+  private
+
+  unless {}.respond_to?(:_deep_transform_keys_in_object)
+    # support methods for deep transforming nested hashes and arrays
+    def _deep_transform_keys_in_object(object, &block)
+      case object
+      when Hash
+        object.each_with_object({}) do |(key, value), result|
+          result[yield(key)] = _deep_transform_keys_in_object(value, &block)
+        end
+      when Array
+        object.map { |element| _deep_transform_keys_in_object(element, &block) }
+      else
+        object
+      end
+    end
+  end
 end
 
+#
+# Monkey patches for the ruby Array
+#
 class Array
   unless [].respond_to?(:extract_options!)
+    #
+    # Extract the last argument if it is a hash
+    #
+    #
+    # @return [Hash]
+    #
     def extract_options!
       if last.is_a?(Hash) && last.instance_of?(Hash)
         pop
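
The behaviour of these backports follows directly from the definitions above; a few illustrative calls (return values inferred from the code shown, not run against the gem):

```ruby
# Only defined when the running Ruby/Rails does not already provide them.
{ user: { id: 1 } }.deep_stringify_keys    # => { "user" => { "id" => 1 } }
{ a: 1, b: 2 }.slice(:a)                   # => { a: 1 }
[1, 2, { retry: false }].extract_options!  # => { retry: false }
```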
data/lib/sidekiq_unique_jobs/digests.rb

@@ -1,87 +1,65 @@
 # frozen_string_literal: true
 
 module SidekiqUniqueJobs
-  # Utility module to help manage unique digests in redis.
   #
-  #
-
+  # Class Changelogs provides access to the changelog entries
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class Digests < Redis::SortedSet
+    #
+    # @return [Integer] the number of matches to return by default
     DEFAULT_COUNT = 1_000
+    #
+    # @return [String] the default pattern to use for matching
     SCAN_PATTERN = "*"
-    CHUNK_SIZE = 100
-
-    include SidekiqUniqueJobs::Logging
-    include SidekiqUniqueJobs::Connection
-    extend self
 
-
-
-    # @param [String] pattern a pattern to match with
-    # @param [Integer] count the maximum number to match
-    # @return [Array<String>] with unique digests
-    def all(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
-      redis { |conn| conn.sscan_each(UNIQUE_SET, match: pattern, count: count).to_a }
+    def initialize
+      super(DIGESTS)
     end
 
-    # Paginate unique digests
     #
-    #
-    # @param [Integer] cursor the maximum number to match
-    # @param [Integer] page_size the current cursor position
+    # Adds a digest
     #
-    # @
-    def page(pattern: SCAN_PATTERN, cursor: 0, page_size: 100)
-      redis do |conn|
-        total_size, digests = conn.multi do
-          conn.scard(UNIQUE_SET)
-          conn.sscan(UNIQUE_SET, cursor, match: pattern, count: page_size)
-        end
-
-        [total_size, digests[0], digests[1]]
-      end
-    end
-
-    # Get a total count of unique digests
+    # @param [String] digest the digest to add
     #
-
-
-      redis { |conn| conn.scard(UNIQUE_SET) }
+    def add(digest)
+      redis { |conn| conn.zadd(key, now_f, digest) }
     end
 
-    # Deletes unique
+    # Deletes unique digests by pattern
     #
-    # @param [String] digest the full digest to delete
     # @param [String] pattern a key pattern to match with
     # @param [Integer] count the maximum number
-    # @raise [ArgumentError] when both pattern and digest are nil
     # @return [Array<String>] with unique digests
-    def
-
+    def delete_by_pattern(pattern, count: DEFAULT_COUNT)
+      result, elapsed = timed do
+        digests = entries(pattern: pattern, count: count).keys
+        redis { |conn| BatchDelete.call(digests, conn) }
+      end
 
-
-      return delete_by_digest(digest) if digest
+      log_info("#{__method__}(#{pattern}, count: #{count}) completed in #{elapsed}ms")
 
-
+      result
     end
 
-    #
+    # Delete unique digests by digest
+    # Also deletes the :AVAILABLE, :EXPIRED etc keys
     #
-    # @param [String] digest
+    # @param [String] digest a unique digest to delete
     def delete_by_digest(digest) # rubocop:disable Metrics/MethodLength
       result, elapsed = timed do
-        … (11 removed lines not captured)
-        ])
-
-        count
+        call_script(:delete_by_digest, [
+          digest,
+          "#{digest}:QUEUED",
+          "#{digest}:PRIMED",
+          "#{digest}:LOCKED",
+          "#{digest}:RUN",
+          "#{digest}:RUN:QUEUED",
+          "#{digest}:RUN:PRIMED",
+          "#{digest}:RUN:LOCKED",
+          key,
+        ])
       end
 
       log_info("#{__method__}(#{digest}) completed in #{elapsed}ms")
@@ -89,55 +67,48 @@ module SidekiqUniqueJobs
 
       result
     end
 
-    # Deletes unique digests by pattern
     #
-    #
-    #
-    # @
-    … (6 removed lines not captured)
+    # The entries in this sorted set
+    #
+    # @param [String] pattern SCAN_PATTERN the match pattern to search for
+    # @param [Integer] count DEFAULT_COUNT the number of entries to return
+    #
+    # @return [Array<String>] an array of digests matching the given pattern
+    #
+    def entries(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
+      options = {}
+      options[:match] = pattern
+      options[:count] = count
 
-
+      result = redis { |conn| conn.zscan_each(key, **options).to_a }
 
-      result
+      result.each_with_object({}) do |entry, hash|
+        hash[entry[0]] = entry[1]
+      end
     end
 
-    … (3 removed lines not captured)
+    #
+    # Returns a paginated
+    #
+    # @param [Integer] cursor the cursor for this iteration
+    # @param [String] pattern SCAN_PATTERN the match pattern to search for
+    # @param [Integer] page_size 100 the size per page
+    #
+    # @return [Array<Integer, Integer, Array<Lock>>] total_size, next_cursor, locks
+    #
+    def page(cursor: 0, pattern: SCAN_PATTERN, page_size: 100)
       redis do |conn|
-        digests.
-        conn.
-
-        conn.del digest
-        conn.srem(UNIQUE_SET, digest)
-        conn.del("#{digest}:EXISTS")
-        conn.del("#{digest}:GRABBED")
-        conn.del("#{digest}:VERSION")
-        conn.del("#{digest}:AVAILABLE")
-        conn.del("#{digest}:RUN:EXISTS")
-        conn.del("#{digest}:RUN:GRABBED")
-        conn.del("#{digest}:RUN:VERSION")
-        conn.del("#{digest}:RUN:AVAILABLE")
-        end
-      end
+        total_size, digests = conn.multi do
+          conn.zcard(key)
+          conn.zscan(key, cursor, match: pattern, count: page_size)
         end
-      end
-    end
 
-    … (7 removed lines not captured)
-      def current_time
-        Time.now
+        [
+          total_size,
+          digests[0], # next_cursor
+          digests[1].map { |digest, score| Lock.new(digest, time: score) }, # entries
+        ]
+      end
     end
   end
 end
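
Based only on the methods visible in this hunk, the rewritten `Digests` class might be used roughly like this; the digest strings and scores are made-up examples.

```ruby
digests = SidekiqUniqueJobs::Digests.new

# Hash of digest => score (the time the digest was added), per `entries`.
digests.entries(pattern: "uniquejobs:*", count: 100)
# => { "uniquejobs:0f8e" => "1596000000.123" }  (example values)

# Paginated view backed by ZCARD + ZSCAN, per `page`.
total_size, next_cursor, locks = digests.page(cursor: 0, pattern: "*", page_size: 25)

digests.delete_by_pattern("uniquejobs:*")  # batch delete via BatchDelete
digests.delete_by_digest("uniquejobs:0f8e")  # also clears the :QUEUED, :PRIMED, :LOCKED, :RUN keys
```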
data/lib/sidekiq_unique_jobs/exceptions.rb

@@ -1,29 +1,95 @@
 # frozen_string_literal: true
 
 module SidekiqUniqueJobs
+  #
+  # Base class for all exceptions raised from the gem
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class UniqueJobsError < ::RuntimeError
+  end
+
   # Error raised when a Lua script fails to execute
   #
-  # @author Mikael Henriksson <mikael@
-  class Conflict <
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class Conflict < UniqueJobsError
     def initialize(item)
-      super("Item with the key: #{item[
+      super("Item with the key: #{item[LOCK_DIGEST]} is already scheduled or processing")
+    end
+  end
+
+  #
+  # Error raised when trying to add a duplicate lock
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class DuplicateLock < UniqueJobsError
+  end
+
+  #
+  # Error raised when trying to add a duplicate stragegy
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class DuplicateStrategy < UniqueJobsError
+  end
+
+  #
+  # Error raised when an invalid argument is given
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class InvalidArgument < UniqueJobsError
+  end
+
+  #
+  # Raised when a workers configuration is invalid
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class InvalidWorker < UniqueJobsError
+    def initialize(lock_config)
+      super(<<~FAILURE_MESSAGE)
+        Expected #{lock_config.worker} to have valid sidekiq options but found the following problems:
+        #{lock_config.errors_as_string}
+      FAILURE_MESSAGE
     end
   end
 
-  # Error raised
+  # Error raised when a Lua script fails to execute
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class InvalidUniqueArguments < UniqueJobsError
+    def initialize(options)
+      given = options[:given]
+      worker_class = options[:worker_class]
+      lock_args_method = options[:lock_args_method]
+      lock_args_meth = worker_class.method(lock_args_method)
+      num_args = lock_args_meth.arity
+      source_location = lock_args_meth.source_location
+
+      super(
+        "#{worker_class}##{lock_args_method} takes #{num_args} arguments, received #{given.inspect}" \
+        "\n\n" \
+        "  #{source_location.join(':')}"
+      )
+    end
+  end
+
+  #
+  # Raised when a workers configuration is invalid
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
   #
-    … (3 removed lines not captured)
-    # @param [Redis::CommandError] source_exception exception to handle
-    def initialize(file_name:, source_exception:)
-      super("Problem compiling #{file_name}. Message: #{source_exception.message}")
+  class NotUniqueWorker < UniqueJobsError
+    def initialize(options)
+      super("#{options[:class]} is not configured for uniqueness. Missing the key `:lock` in #{options.inspect}")
     end
   end
 
   # Error raised from {OptionsWithFallback#lock_class}
   #
-  # @author Mikael Henriksson <mikael@
-  class UnknownLock <
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class UnknownLock < UniqueJobsError
   end
 end