sidekiq-unique-jobs 6.0.24 → 7.0.4

This version of sidekiq-unique-jobs has been flagged as a potentially problematic release.

Files changed (122)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +707 -25
  3. data/README.md +516 -105
  4. data/lib/sidekiq_unique_jobs.rb +48 -7
  5. data/lib/sidekiq_unique_jobs/batch_delete.rb +123 -0
  6. data/lib/sidekiq_unique_jobs/changelog.rb +78 -0
  7. data/lib/sidekiq_unique_jobs/cli.rb +34 -31
  8. data/lib/sidekiq_unique_jobs/config.rb +263 -0
  9. data/lib/sidekiq_unique_jobs/connection.rb +6 -5
  10. data/lib/sidekiq_unique_jobs/constants.rb +46 -24
  11. data/lib/sidekiq_unique_jobs/core_ext.rb +80 -0
  12. data/lib/sidekiq_unique_jobs/digests.rb +71 -100
  13. data/lib/sidekiq_unique_jobs/exceptions.rb +78 -12
  14. data/lib/sidekiq_unique_jobs/job.rb +41 -12
  15. data/lib/sidekiq_unique_jobs/json.rb +40 -0
  16. data/lib/sidekiq_unique_jobs/key.rb +93 -0
  17. data/lib/sidekiq_unique_jobs/lock.rb +325 -0
  18. data/lib/sidekiq_unique_jobs/lock/base_lock.rb +66 -50
  19. data/lib/sidekiq_unique_jobs/lock/client_validator.rb +28 -0
  20. data/lib/sidekiq_unique_jobs/lock/server_validator.rb +27 -0
  21. data/lib/sidekiq_unique_jobs/lock/until_and_while_executing.rb +7 -10
  22. data/lib/sidekiq_unique_jobs/lock/until_executed.rb +6 -6
  23. data/lib/sidekiq_unique_jobs/lock/until_executing.rb +1 -1
  24. data/lib/sidekiq_unique_jobs/lock/until_expired.rb +4 -21
  25. data/lib/sidekiq_unique_jobs/lock/validator.rb +96 -0
  26. data/lib/sidekiq_unique_jobs/lock/while_executing.rb +13 -9
  27. data/lib/sidekiq_unique_jobs/lock/while_executing_reject.rb +3 -3
  28. data/lib/sidekiq_unique_jobs/lock_args.rb +123 -0
  29. data/lib/sidekiq_unique_jobs/lock_config.rb +122 -0
  30. data/lib/sidekiq_unique_jobs/lock_digest.rb +79 -0
  31. data/lib/sidekiq_unique_jobs/lock_info.rb +68 -0
  32. data/lib/sidekiq_unique_jobs/lock_timeout.rb +62 -0
  33. data/lib/sidekiq_unique_jobs/lock_ttl.rb +77 -0
  34. data/lib/sidekiq_unique_jobs/locksmith.rb +261 -101
  35. data/lib/sidekiq_unique_jobs/logging.rb +149 -23
  36. data/lib/sidekiq_unique_jobs/logging/middleware_context.rb +44 -0
  37. data/lib/sidekiq_unique_jobs/lua/delete.lua +51 -0
  38. data/lib/sidekiq_unique_jobs/lua/delete_by_digest.lua +42 -0
  39. data/lib/sidekiq_unique_jobs/lua/delete_job_by_digest.lua +38 -0
  40. data/lib/sidekiq_unique_jobs/lua/find_digest_in_queues.lua +26 -0
  41. data/lib/sidekiq_unique_jobs/lua/lock.lua +93 -0
  42. data/lib/sidekiq_unique_jobs/lua/locked.lua +35 -0
  43. data/lib/sidekiq_unique_jobs/lua/queue.lua +87 -0
  44. data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +94 -0
  45. data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +40 -0
  46. data/lib/sidekiq_unique_jobs/lua/shared/_current_time.lua +8 -0
  47. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_queue.lua +22 -0
  48. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_sorted_set.lua +18 -0
  49. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_process_set.lua +53 -0
  50. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_queues.lua +43 -0
  51. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_sorted_set.lua +24 -0
  52. data/lib/sidekiq_unique_jobs/lua/shared/_hgetall.lua +13 -0
  53. data/lib/sidekiq_unique_jobs/lua/shared/_upgrades.lua +3 -0
  54. data/lib/sidekiq_unique_jobs/lua/unlock.lua +95 -0
  55. data/lib/sidekiq_unique_jobs/lua/update_version.lua +40 -0
  56. data/lib/sidekiq_unique_jobs/lua/upgrade.lua +68 -0
  57. data/lib/sidekiq_unique_jobs/middleware.rb +29 -31
  58. data/lib/sidekiq_unique_jobs/middleware/client.rb +42 -0
  59. data/lib/sidekiq_unique_jobs/middleware/server.rb +27 -0
  60. data/lib/sidekiq_unique_jobs/normalizer.rb +4 -4
  61. data/lib/sidekiq_unique_jobs/on_conflict.rb +23 -10
  62. data/lib/sidekiq_unique_jobs/on_conflict/log.rb +9 -5
  63. data/lib/sidekiq_unique_jobs/on_conflict/null_strategy.rb +1 -1
  64. data/lib/sidekiq_unique_jobs/on_conflict/raise.rb +1 -1
  65. data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +61 -15
  66. data/lib/sidekiq_unique_jobs/on_conflict/replace.rb +54 -14
  67. data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +12 -5
  68. data/lib/sidekiq_unique_jobs/on_conflict/strategy.rb +25 -6
  69. data/lib/sidekiq_unique_jobs/options_with_fallback.rb +41 -27
  70. data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +29 -0
  71. data/lib/sidekiq_unique_jobs/orphans/manager.rb +212 -0
  72. data/lib/sidekiq_unique_jobs/orphans/null_reaper.rb +24 -0
  73. data/lib/sidekiq_unique_jobs/orphans/observer.rb +42 -0
  74. data/lib/sidekiq_unique_jobs/orphans/reaper.rb +114 -0
  75. data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +201 -0
  76. data/lib/sidekiq_unique_jobs/redis.rb +11 -0
  77. data/lib/sidekiq_unique_jobs/redis/entity.rb +106 -0
  78. data/lib/sidekiq_unique_jobs/redis/hash.rb +56 -0
  79. data/lib/sidekiq_unique_jobs/redis/list.rb +32 -0
  80. data/lib/sidekiq_unique_jobs/redis/set.rb +32 -0
  81. data/lib/sidekiq_unique_jobs/redis/sorted_set.rb +86 -0
  82. data/lib/sidekiq_unique_jobs/redis/string.rb +49 -0
  83. data/lib/sidekiq_unique_jobs/rspec/matchers.rb +26 -0
  84. data/lib/sidekiq_unique_jobs/rspec/matchers/have_valid_sidekiq_options.rb +51 -0
  85. data/lib/sidekiq_unique_jobs/script.rb +15 -0
  86. data/lib/sidekiq_unique_jobs/script/caller.rb +125 -0
  87. data/lib/sidekiq_unique_jobs/server.rb +48 -0
  88. data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +92 -65
  89. data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +185 -34
  90. data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +11 -5
  91. data/lib/sidekiq_unique_jobs/testing.rb +62 -21
  92. data/lib/sidekiq_unique_jobs/timer_task.rb +78 -0
  93. data/lib/sidekiq_unique_jobs/timing.rb +58 -0
  94. data/lib/sidekiq_unique_jobs/unlockable.rb +20 -4
  95. data/lib/sidekiq_unique_jobs/update_version.rb +25 -0
  96. data/lib/sidekiq_unique_jobs/upgrade_locks.rb +155 -0
  97. data/lib/sidekiq_unique_jobs/version.rb +3 -1
  98. data/lib/sidekiq_unique_jobs/version_check.rb +23 -4
  99. data/lib/sidekiq_unique_jobs/web.rb +50 -27
  100. data/lib/sidekiq_unique_jobs/web/helpers.rb +125 -10
  101. data/lib/sidekiq_unique_jobs/web/views/changelogs.erb +54 -0
  102. data/lib/sidekiq_unique_jobs/web/views/lock.erb +108 -0
  103. data/lib/sidekiq_unique_jobs/web/views/locks.erb +52 -0
  104. data/lib/tasks/changelog.rake +5 -5
  105. metadata +117 -177
  106. data/lib/sidekiq_unique_jobs/client/middleware.rb +0 -56
  107. data/lib/sidekiq_unique_jobs/scripts.rb +0 -118
  108. data/lib/sidekiq_unique_jobs/server/middleware.rb +0 -46
  109. data/lib/sidekiq_unique_jobs/timeout.rb +0 -8
  110. data/lib/sidekiq_unique_jobs/timeout/calculator.rb +0 -63
  111. data/lib/sidekiq_unique_jobs/unique_args.rb +0 -150
  112. data/lib/sidekiq_unique_jobs/util.rb +0 -103
  113. data/lib/sidekiq_unique_jobs/web/views/unique_digest.erb +0 -28
  114. data/lib/sidekiq_unique_jobs/web/views/unique_digests.erb +0 -46
  115. data/redis/acquire_lock.lua +0 -21
  116. data/redis/convert_legacy_lock.lua +0 -13
  117. data/redis/delete.lua +0 -14
  118. data/redis/delete_by_digest.lua +0 -23
  119. data/redis/delete_job_by_digest.lua +0 -60
  120. data/redis/lock.lua +0 -62
  121. data/redis/release_stale_locks.lua +0 -90
  122. data/redis/unlock.lua +0 -35
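The list above also shows the old client/server middleware (entries 106 and 108) being replaced by SidekiqUniqueJobs::Middleware::Client and SidekiqUniqueJobs::Middleware::Server (entries 58 and 59). Below is a minimal sketch of how an application might wire the new classes into Sidekiq; the class names come from the file list, but the canonical setup lives in the updated data/README.md, so treat the exact configuration as an assumption to verify.

```ruby
# Hedged sketch: wiring the v7 middleware classes named in the file list above.
# Verify against the shipped README before relying on this exact setup.
require "sidekiq"
require "sidekiq_unique_jobs"

Sidekiq.configure_client do |config|
  config.client_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Client
  end
end

Sidekiq.configure_server do |config|
  # Jobs enqueued from inside other jobs still pass through the client middleware.
  config.client_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Client
  end
  config.server_middleware do |chain|
    chain.add SidekiqUniqueJobs::Middleware::Server
  end
end
```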
data/lib/sidekiq_unique_jobs/connection.rb
@@ -3,7 +3,7 @@
 module SidekiqUniqueJobs
   # Shared module for dealing with redis connections
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
   module Connection
     def self.included(base)
       base.send(:extend, self)
@@ -11,11 +11,12 @@ module SidekiqUniqueJobs
 
     # Creates a connection to redis
     # @return [Sidekiq::RedisConnection, ConnectionPool] a connection to redis
-    def redis(redis_pool = nil)
-      if redis_pool
-        redis_pool.with { |conn| yield conn }
+    def redis(r_pool = nil, &block)
+      r_pool ||= defined?(redis_pool) ? redis_pool : r_pool
+      if r_pool
+        r_pool.with(&block)
       else
-        Sidekiq.redis { |conn| yield conn }
+        Sidekiq.redis(&block)
       end
     end
   end
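Connection#redis now prefers an explicit pool argument, then falls back to a redis_pool method on the including class, and finally to Sidekiq.redis. A small sketch of both code paths follows; the DigestCounter class is hypothetical and only illustrates the fallback order shown in the diff.

```ruby
# Hypothetical example of including SidekiqUniqueJobs::Connection.
# Fallback order per the diff: explicit argument, then a local redis_pool
# method (if defined), then Sidekiq.redis.
require "redis"
require "connection_pool"
require "sidekiq"
require "sidekiq_unique_jobs"

class DigestCounter
  include SidekiqUniqueJobs::Connection

  def initialize(pool = nil)
    @pool = pool
  end

  # Picked up by `defined?(redis_pool)` inside Connection#redis
  def redis_pool
    @pool
  end

  def digest_count
    redis { |conn| conn.zcard("uniquejobs:digests") }
  end
end

DigestCounter.new.digest_count                                   # goes through Sidekiq.redis
DigestCounter.new(ConnectionPool.new { Redis.new }).digest_count # goes through the pool
```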
data/lib/sidekiq_unique_jobs/constants.rb
@@ -3,30 +3,52 @@
 #
 # Module with constants to avoid string duplication
 #
-# @author Mikael Henriksson <mikael@zoolutions.se>
+# @author Mikael Henriksson <mikael@mhenrixon.com>
 #
 module SidekiqUniqueJobs
-  ARGS_KEY ||= "args"
-  AT_KEY ||= "at"
-  CLASS_KEY ||= "class"
-  JAVA ||= "java"
-  JID_KEY ||= "jid"
-  LOCK_DIGEST_KEY ||= "lock_digest"
-  LOCK_EXPIRATION_KEY ||= "lock_expiration"
-  LOCK_TIMEOUT_KEY ||= "lock_timeout"
-  LOCK_TTL_KEY ||= "lock_ttl"
-  LOG_DUPLICATE_KEY ||= "log_duplicate_payload"
-  QUEUE_KEY ||= "queue"
-  UNIQUE_ACROSS_QUEUES_KEY ||= "unique_across_queues"
-  UNIQUE_ACROSS_WORKERS_KEY ||= "unique_across_workers"
-  UNIQUE_ARGS_KEY ||= "unique_args"
-  UNIQUE_DIGEST_KEY ||= "unique_digest"
-  UNIQUE_KEY ||= "unique"
-  UNIQUE_SET ||= "unique:keys"
-  LOCK_KEY ||= "lock"
-  ON_CONFLICT_KEY ||= "on_conflict"
-  UNIQUE_ON_ALL_QUEUES_KEY ||= "unique_on_all_queues" # TODO: Remove in v6.1
-  UNIQUE_PREFIX_KEY ||= "unique_prefix"
-  RETRY_SET ||= "retry"
-  SCHEDULE_SET ||= "schedule"
+  ARGS ||= "args"
+  APARTMENT ||= "apartment"
+  AT ||= "at"
+  CHANGELOGS ||= "uniquejobs:changelog"
+  CLASS ||= "class"
+  CREATED_AT ||= "created_at"
+  DEAD_VERSION ||= "uniquejobs:dead"
+  DIGESTS ||= "uniquejobs:digests"
+  ERRORS ||= "errors"
+  JID ||= "jid"
+  LIMIT ||= "limit"
+  LIVE_VERSION ||= "uniquejobs:live"
+  LOCK ||= "lock"
+  LOCK_ARGS ||= "lock_args"
+  LOCK_ARGS_METHOD ||= "lock_args_method"
+  LOCK_DIGEST ||= "lock_digest"
+  LOCK_EXPIRATION ||= "lock_expiration"
+  LOCK_INFO ||= "lock_info"
+  LOCK_LIMIT ||= "lock_limit"
+  LOCK_PREFIX ||= "lock_prefix"
+  LOCK_TIMEOUT ||= "lock_timeout"
+  LOCK_TTL ||= "lock_ttl"
+  LOCK_TYPE ||= "lock_type"
+  LOG_DUPLICATE ||= "log_duplicate"
+  ON_CLIENT_CONFLICT ||= "on_client_conflict"
+  ON_CONFLICT ||= "on_conflict"
+  ON_SERVER_CONFLICT ||= "on_server_conflict"
+  PAYLOAD ||= "payload"
+  PROCESSES ||= "processes"
+  QUEUE ||= "queue"
+  RETRY ||= "retry"
+  SCHEDULE ||= "schedule"
+  TIME ||= "time"
+  TIMEOUT ||= "timeout"
+  TTL ||= "ttl"
+  TYPE ||= "type"
+  UNIQUE ||= "unique"
+  UNIQUE_ACROSS_QUEUES ||= "unique_across_queues"
+  UNIQUE_ACROSS_WORKERS ||= "unique_across_workers"
+  UNIQUE_ARGS ||= "unique_args"
+  UNIQUE_ARGS_METHOD ||= "unique_args_method"
+  UNIQUE_DIGEST ||= "unique_digest"
+  UNIQUE_PREFIX ||= "unique_prefix"
+  UNIQUE_REAPER ||= "uniquejobs:reaper"
+  WORKER ||= "worker"
 end
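Several of the renamed constants double as the Redis key names used by the v7 data structures (the digests sorted set, the changelog, the reaper). A short console sketch using values taken from the diff above; the zrange call is only illustrative.

```ruby
# Values copied from the constants diff above; the zrange call simply shows
# where digests now live (a sorted set, replacing the old "unique:keys" set).
require "sidekiq"
require "sidekiq_unique_jobs"

SidekiqUniqueJobs::DIGESTS     # => "uniquejobs:digests"
SidekiqUniqueJobs::CHANGELOGS  # => "uniquejobs:changelog"
SidekiqUniqueJobs::LOCK_DIGEST # => "lock_digest"

Sidekiq.redis do |conn|
  conn.zrange(SidekiqUniqueJobs::DIGESTS, 0, -1, withscores: true)
end
```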
data/lib/sidekiq_unique_jobs/core_ext.rb
@@ -2,21 +2,67 @@
 
 # :nocov:
 
+#
+# Monkey patches for the ruby Hash
+#
 class Hash
   unless {}.respond_to?(:slice)
+    #
+    # Returns only the matching keys in a new hash
+    #
+    # @param [Array<String>, Array<Symbol>] keys the keys to match
+    #
+    # @return [Hash]
+    #
     def slice(*keys)
       keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
       keys.each_with_object(self.class.new) { |k, hash| hash[k] = self[k] if key?(k) }
     end
   end
 
+  unless {}.respond_to?(:deep_stringify_keys)
+    #
+    # Depp converts all keys to string
+    #
+    #
+    # @return [Hash<String>]
+    #
+    def deep_stringify_keys
+      deep_transform_keys(&:to_s)
+    end
+  end
+
+  unless {}.respond_to?(:deep_transform_keys)
+    #
+    # Deep transfor all keys by yielding to the caller
+    #
+    #
+    # @return [Hash<String>]
+    #
+    def deep_transform_keys(&block)
+      _deep_transform_keys_in_object(self, &block)
+    end
+  end
+
   unless {}.respond_to?(:stringify_keys)
+    #
+    # Converts all keys to string
+    #
+    #
+    # @return [Hash<String>]
+    #
     def stringify_keys
       transform_keys(&:to_s)
     end
   end
 
   unless {}.respond_to?(:transform_keys)
+    #
+    # Transforms all keys by yielding to the caller
+    #
+    #
+    # @return [Hash]
+    #
     def transform_keys
       result = {}
       each_key do |key|
@@ -27,6 +73,13 @@ class Hash
   end
 
   unless {}.respond_to?(:slice!)
+    #
+    # Removes all keys not provided from the current hash and returns it
+    #
+    # @param [Array<String>, Array<Symbol>] keys the keys to match
+    #
+    # @return [Hash]
+    #
     def slice!(*keys)
       keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
       omit = slice(*self.keys - keys)
@@ -37,10 +90,37 @@ class Hash
       omit
     end
   end
+
+  private
+
+  unless {}.respond_to?(:_deep_transform_keys_in_object)
+    # support methods for deep transforming nested hashes and arrays
+    def _deep_transform_keys_in_object(object, &block)
+      case object
+      when Hash
+        object.each_with_object({}) do |(key, value), result|
+          result[yield(key)] = _deep_transform_keys_in_object(value, &block)
+        end
+      when Array
+        object.map { |element| _deep_transform_keys_in_object(element, &block) }
+      else
+        object
+      end
+    end
+  end
 end
 
+#
+# Monkey patches for the ruby Array
+#
 class Array
   unless [].respond_to?(:extract_options!)
+    #
+    # Extract the last argument if it is a hash
+    #
+    #
+    # @return [Hash]
+    #
     def extract_options!
       if last.is_a?(Hash) && last.instance_of?(Hash)
         pop
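These Hash and Array backfills are only defined when the methods are missing (for example outside ActiveSupport, where the respond_to? guards skip them). A short sketch of their behaviour, following the definitions in the diff above:

```ruby
# Behaviour follows the core_ext definitions above; on a Rails app these
# methods already exist and the monkey patches are skipped.
require "sidekiq_unique_jobs/core_ext" # loaded automatically with the gem

item = { lock: :until_executed, args: [1, { nested: true }] }

item.stringify_keys      # => { "lock" => :until_executed, "args" => [1, { nested: true }] }
item.deep_stringify_keys # => { "lock" => :until_executed, "args" => [1, { "nested" => true }] }
item.slice(:lock)        # => { lock: :until_executed }

args = [1, 2, { lock_ttl: 500 }]
args.extract_options!    # => { lock_ttl: 500 }
args                     # => [1, 2]
```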
data/lib/sidekiq_unique_jobs/digests.rb
@@ -1,87 +1,65 @@
 # frozen_string_literal: true
 
 module SidekiqUniqueJobs
-  # Utility module to help manage unique digests in redis.
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  module Digests
+  # Class Changelogs provides access to the changelog entries
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class Digests < Redis::SortedSet
+    #
+    # @return [Integer] the number of matches to return by default
     DEFAULT_COUNT = 1_000
+    #
+    # @return [String] the default pattern to use for matching
     SCAN_PATTERN = "*"
-    CHUNK_SIZE = 100
-
-    include SidekiqUniqueJobs::Logging
-    include SidekiqUniqueJobs::Connection
-    extend self
 
-    # Return unique digests matching pattern
-    #
-    # @param [String] pattern a pattern to match with
-    # @param [Integer] count the maximum number to match
-    # @return [Array<String>] with unique digests
-    def all(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
-      redis { |conn| conn.sscan_each(UNIQUE_SET, match: pattern, count: count).to_a }
+    def initialize
+      super(DIGESTS)
     end
 
-    # Paginate unique digests
     #
-    # @param [String] pattern a pattern to match with
-    # @param [Integer] cursor the maximum number to match
-    # @param [Integer] page_size the current cursor position
+    # Adds a digest
     #
-    # @return [Array<String>] with unique digests
-    def page(pattern: SCAN_PATTERN, cursor: 0, page_size: 100)
-      redis do |conn|
-        total_size, digests = conn.multi do
-          conn.scard(UNIQUE_SET)
-          conn.sscan(UNIQUE_SET, cursor, match: pattern, count: page_size)
-        end
-
-        [total_size, digests[0], digests[1]]
-      end
-    end
-
-    # Get a total count of unique digests
+    # @param [String] digest the digest to add
     #
-    # @return [Integer] number of digests
-    def count
-      redis { |conn| conn.scard(UNIQUE_SET) }
+    def add(digest)
+      redis { |conn| conn.zadd(key, now_f, digest) }
     end
 
-    # Deletes unique digest either by a digest or pattern
+    # Deletes unique digests by pattern
     #
-    # @param [String] digest the full digest to delete
     # @param [String] pattern a key pattern to match with
     # @param [Integer] count the maximum number
-    # @raise [ArgumentError] when both pattern and digest are nil
     # @return [Array<String>] with unique digests
-    def del(digest: nil, pattern: nil, count: DEFAULT_COUNT)
-      warn("#{self}.#{__method__} has been deprecated and will be removed in a future version")
+    def delete_by_pattern(pattern, count: DEFAULT_COUNT)
+      result, elapsed = timed do
+        digests = entries(pattern: pattern, count: count).keys
+        redis { |conn| BatchDelete.call(digests, conn) }
+      end
 
-      return delete_by_pattern(pattern, count: count) if pattern
-      return delete_by_digest(digest) if digest
+      log_info("#{__method__}(#{pattern}, count: #{count}) completed in #{elapsed}ms")
 
-      raise ArgumentError, "either digest or pattern need to be provided"
+      result
     end
 
-    # Deletes unique digest either by a digest or pattern
+    # Delete unique digests by digest
+    #   Also deletes the :AVAILABLE, :EXPIRED etc keys
     #
-    # @param [String] digest the full digest to delete
+    # @param [String] digest a unique digest to delete
     def delete_by_digest(digest) # rubocop:disable Metrics/MethodLength
       result, elapsed = timed do
-        Scripts.call(:delete_by_digest, nil, keys: [
-                       UNIQUE_SET,
-                       digest,
-                       "#{digest}:EXISTS",
-                       "#{digest}:GRABBED",
-                       "#{digest}:AVAILABLE",
-                       "#{digest}:VERSION",
-                       "#{digest}:RUN:EXISTS",
-                       "#{digest}:RUN:GRABBED",
-                       "#{digest}:RUN:AVAILABLE",
-                       "#{digest}:RUN:VERSION",
-                     ])
-
-        count
+        call_script(:delete_by_digest, [
+                      digest,
+                      "#{digest}:QUEUED",
+                      "#{digest}:PRIMED",
+                      "#{digest}:LOCKED",
+                      "#{digest}:RUN",
+                      "#{digest}:RUN:QUEUED",
+                      "#{digest}:RUN:PRIMED",
+                      "#{digest}:RUN:LOCKED",
+                      key,
+                    ])
      end
 
       log_info("#{__method__}(#{digest}) completed in #{elapsed}ms")
@@ -89,55 +67,48 @@ module SidekiqUniqueJobs
       result
     end
 
-    # Deletes unique digests by pattern
     #
-    # @param [String] pattern a key pattern to match with
-    # @param [Integer] count the maximum number
-    # @return [Array<String>] with unique digests
-    def delete_by_pattern(pattern, count: DEFAULT_COUNT)
-      result, elapsed = timed do
-        digests = all(pattern: pattern, count: count)
-        batch_delete(digests)
-        digests.size
-      end
+    # The entries in this sorted set
+    #
+    # @param [String] pattern SCAN_PATTERN the match pattern to search for
+    # @param [Integer] count DEFAULT_COUNT the number of entries to return
+    #
+    # @return [Array<String>] an array of digests matching the given pattern
+    #
+    def entries(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
+      options = {}
+      options[:match] = pattern
+      options[:count] = count
 
-      log_info("#{__method__}(#{pattern}, count: #{count}) completed in #{elapsed}ms")
+      result = redis { |conn| conn.zscan_each(key, **options).to_a }
 
-      result
+      result.each_with_object({}) do |entry, hash|
+        hash[entry[0]] = entry[1]
+      end
     end
 
-    private
-
-    def batch_delete(digests) # rubocop:disable Metrics/MethodLength
+    #
+    # Returns a paginated
+    #
+    # @param [Integer] cursor the cursor for this iteration
+    # @param [String] pattern SCAN_PATTERN the match pattern to search for
+    # @param [Integer] page_size 100 the size per page
+    #
+    # @return [Array<Integer, Integer, Array<Lock>>] total_size, next_cursor, locks
+    #
+    def page(cursor: 0, pattern: SCAN_PATTERN, page_size: 100)
      redis do |conn|
-        digests.each_slice(CHUNK_SIZE) do |chunk|
-          conn.pipelined do
-            chunk.each do |digest|
-              conn.del digest
-              conn.srem(UNIQUE_SET, digest)
-              conn.del("#{digest}:EXISTS")
-              conn.del("#{digest}:GRABBED")
-              conn.del("#{digest}:VERSION")
-              conn.del("#{digest}:AVAILABLE")
-              conn.del("#{digest}:RUN:EXISTS")
-              conn.del("#{digest}:RUN:GRABBED")
-              conn.del("#{digest}:RUN:VERSION")
-              conn.del("#{digest}:RUN:AVAILABLE")
-            end
-          end
+        total_size, digests = conn.multi do
+          conn.zcard(key)
+          conn.zscan(key, cursor, match: pattern, count: page_size)
         end
-      end
-    end
 
-    def timed
-      start = current_time
-      result = yield
-      elapsed = (current_time - start).round(2)
-      [result, elapsed]
-    end
-
-    def current_time
-      Time.now
+        [
+          total_size,
+          digests[0], # next_cursor
+          digests[1].map { |digest, score| Lock.new(digest, time: score) }, # entries
+        ]
+      end
     end
   end
 end
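Digests is no longer a module of singleton methods over the unique:keys set; in v7 it is an instantiable wrapper around the uniquejobs:digests sorted set, so the calls change shape. A console sketch using the method names from the diff above; the digest string and return values are purely illustrative.

```ruby
# Method names and return shapes taken from the v7 digests.rb diff above;
# the digest value and scores shown are illustrative only.
require "sidekiq"
require "sidekiq_unique_jobs"

digests = SidekiqUniqueJobs::Digests.new # wraps the "uniquejobs:digests" sorted set

digests.entries(pattern: "uniquejobs:*", count: 100)
# => { "uniquejobs:example_digest" => 1610000000.123, ... }

total_size, next_cursor, locks = digests.page(cursor: 0, pattern: "*", page_size: 25)

digests.delete_by_digest("uniquejobs:example_digest") # also removes :QUEUED/:PRIMED/:LOCKED keys
digests.delete_by_pattern("uniquejobs:*", count: 1_000)
```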
data/lib/sidekiq_unique_jobs/exceptions.rb
@@ -1,29 +1,95 @@
 # frozen_string_literal: true
 
 module SidekiqUniqueJobs
+  #
+  # Base class for all exceptions raised from the gem
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class UniqueJobsError < ::RuntimeError
+  end
+
   # Error raised when a Lua script fails to execute
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class Conflict < StandardError
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class Conflict < UniqueJobsError
     def initialize(item)
-      super("Item with the key: #{item[UNIQUE_DIGEST_KEY]} is already scheduled or processing")
+      super("Item with the key: #{item[LOCK_DIGEST]} is already scheduled or processing")
+    end
+  end
+
+  #
+  # Error raised when trying to add a duplicate lock
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class DuplicateLock < UniqueJobsError
+  end
+
+  #
+  # Error raised when trying to add a duplicate stragegy
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class DuplicateStrategy < UniqueJobsError
+  end
+
+  #
+  # Error raised when an invalid argument is given
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class InvalidArgument < UniqueJobsError
+  end
+
+  #
+  # Raised when a workers configuration is invalid
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class InvalidWorker < UniqueJobsError
+    def initialize(lock_config)
+      super(<<~FAILURE_MESSAGE)
+        Expected #{lock_config.worker} to have valid sidekiq options but found the following problems:
+        #{lock_config.errors_as_string}
+      FAILURE_MESSAGE
     end
   end
 
-  # Error raised from {OnConflict::Raise}
+  # Error raised when a Lua script fails to execute
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class InvalidUniqueArguments < UniqueJobsError
+    def initialize(options)
+      given = options[:given]
+      worker_class = options[:worker_class]
+      lock_args_method = options[:lock_args_method]
+      lock_args_meth = worker_class.method(lock_args_method)
+      num_args = lock_args_meth.arity
+      source_location = lock_args_meth.source_location
+
+      super(
+        "#{worker_class}##{lock_args_method} takes #{num_args} arguments, received #{given.inspect}" \
+        "\n\n" \
+        "   #{source_location.join(':')}"
+      )
+    end
+  end
+
+  #
+  # Raised when a workers configuration is invalid
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class ScriptError < StandardError
-    # @param [Symbol] file_name the name of the lua script
-    # @param [Redis::CommandError] source_exception exception to handle
-    def initialize(file_name:, source_exception:)
-      super("Problem compiling #{file_name}. Message: #{source_exception.message}")
+  class NotUniqueWorker < UniqueJobsError
+    def initialize(options)
+      super("#{options[:class]} is not configured for uniqueness. Missing the key `:lock` in #{options.inspect}")
     end
   end
 
   # Error raised from {OptionsWithFallback#lock_class}
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class UnknownLock < StandardError
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class UnknownLock < UniqueJobsError
   end
 end
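Every gem error now inherits from SidekiqUniqueJobs::UniqueJobsError (a RuntimeError) instead of subclassing StandardError directly, so a single rescue clause can cover the whole family. A hedged sketch follows; OrderWorker, order_id, and the logger calls are hypothetical.

```ruby
# Hypothetical worker; the rescue clauses demonstrate the new common ancestor
# from the exceptions diff above (UniqueJobsError < ::RuntimeError).
require "sidekiq_unique_jobs"

order_id = 42

begin
  OrderWorker.perform_async(order_id) # may raise Conflict with on_conflict: :raise
rescue SidekiqUniqueJobs::Conflict => ex
  Rails.logger.warn("duplicate order job skipped: #{ex.message}")
rescue SidekiqUniqueJobs::UniqueJobsError => ex
  # catches DuplicateLock, DuplicateStrategy, InvalidWorker, UnknownLock, ...
  Rails.logger.error(ex.message)
end
```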