sidekiq-unique-jobs 6.0.25 → 7.1.33

Files changed (130)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +1157 -41
  3. data/README.md +825 -291
  4. data/lib/sidekiq_unique_jobs/batch_delete.rb +124 -0
  5. data/lib/sidekiq_unique_jobs/changelog.rb +78 -0
  6. data/lib/sidekiq_unique_jobs/cli.rb +57 -29
  7. data/lib/sidekiq_unique_jobs/config.rb +319 -0
  8. data/lib/sidekiq_unique_jobs/connection.rb +6 -5
  9. data/lib/sidekiq_unique_jobs/constants.rb +46 -25
  10. data/lib/sidekiq_unique_jobs/core_ext.rb +80 -0
  11. data/lib/sidekiq_unique_jobs/deprecation.rb +65 -0
  12. data/lib/sidekiq_unique_jobs/digests.rb +70 -102
  13. data/lib/sidekiq_unique_jobs/exceptions.rb +88 -12
  14. data/lib/sidekiq_unique_jobs/expiring_digests.rb +14 -0
  15. data/lib/sidekiq_unique_jobs/job.rb +46 -12
  16. data/lib/sidekiq_unique_jobs/json.rb +47 -0
  17. data/lib/sidekiq_unique_jobs/key.rb +98 -0
  18. data/lib/sidekiq_unique_jobs/lock/base_lock.rb +111 -82
  19. data/lib/sidekiq_unique_jobs/lock/client_validator.rb +28 -0
  20. data/lib/sidekiq_unique_jobs/lock/server_validator.rb +27 -0
  21. data/lib/sidekiq_unique_jobs/lock/until_and_while_executing.rb +40 -15
  22. data/lib/sidekiq_unique_jobs/lock/until_executed.rb +30 -7
  23. data/lib/sidekiq_unique_jobs/lock/until_executing.rb +26 -2
  24. data/lib/sidekiq_unique_jobs/lock/until_expired.rb +27 -15
  25. data/lib/sidekiq_unique_jobs/lock/validator.rb +96 -0
  26. data/lib/sidekiq_unique_jobs/lock/while_executing.rb +26 -12
  27. data/lib/sidekiq_unique_jobs/lock/while_executing_reject.rb +3 -3
  28. data/lib/sidekiq_unique_jobs/lock.rb +342 -0
  29. data/lib/sidekiq_unique_jobs/lock_args.rb +127 -0
  30. data/lib/sidekiq_unique_jobs/lock_config.rb +126 -0
  31. data/lib/sidekiq_unique_jobs/lock_digest.rb +79 -0
  32. data/lib/sidekiq_unique_jobs/lock_info.rb +68 -0
  33. data/lib/sidekiq_unique_jobs/lock_timeout.rb +62 -0
  34. data/lib/sidekiq_unique_jobs/lock_ttl.rb +77 -0
  35. data/lib/sidekiq_unique_jobs/lock_type.rb +37 -0
  36. data/lib/sidekiq_unique_jobs/locksmith.rb +305 -101
  37. data/lib/sidekiq_unique_jobs/logging/middleware_context.rb +44 -0
  38. data/lib/sidekiq_unique_jobs/logging.rb +202 -33
  39. data/lib/sidekiq_unique_jobs/lua/delete.lua +51 -0
  40. data/lib/sidekiq_unique_jobs/lua/delete_by_digest.lua +42 -0
  41. data/lib/sidekiq_unique_jobs/lua/delete_job_by_digest.lua +38 -0
  42. data/lib/sidekiq_unique_jobs/lua/find_digest_in_queues.lua +26 -0
  43. data/lib/sidekiq_unique_jobs/lua/lock.lua +99 -0
  44. data/lib/sidekiq_unique_jobs/lua/lock_until_expired.lua +92 -0
  45. data/lib/sidekiq_unique_jobs/lua/locked.lua +35 -0
  46. data/lib/sidekiq_unique_jobs/lua/queue.lua +87 -0
  47. data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +122 -0
  48. data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +40 -0
  49. data/lib/sidekiq_unique_jobs/lua/shared/_current_time.lua +8 -0
  50. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_queue.lua +22 -0
  51. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_sorted_set.lua +18 -0
  52. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_process_set.lua +53 -0
  53. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_queues.lua +43 -0
  54. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_sorted_set.lua +24 -0
  55. data/lib/sidekiq_unique_jobs/lua/shared/_hgetall.lua +13 -0
  56. data/lib/sidekiq_unique_jobs/lua/shared/_upgrades.lua +3 -0
  57. data/lib/sidekiq_unique_jobs/lua/unlock.lua +107 -0
  58. data/lib/sidekiq_unique_jobs/lua/update_version.lua +40 -0
  59. data/lib/sidekiq_unique_jobs/lua/upgrade.lua +68 -0
  60. data/lib/sidekiq_unique_jobs/middleware/client.rb +42 -0
  61. data/lib/sidekiq_unique_jobs/middleware/server.rb +31 -0
  62. data/lib/sidekiq_unique_jobs/middleware.rb +29 -43
  63. data/lib/sidekiq_unique_jobs/normalizer.rb +4 -4
  64. data/lib/sidekiq_unique_jobs/on_conflict/log.rb +9 -5
  65. data/lib/sidekiq_unique_jobs/on_conflict/null_strategy.rb +1 -1
  66. data/lib/sidekiq_unique_jobs/on_conflict/raise.rb +1 -1
  67. data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +63 -17
  68. data/lib/sidekiq_unique_jobs/on_conflict/replace.rb +54 -14
  69. data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +16 -5
  70. data/lib/sidekiq_unique_jobs/on_conflict/strategy.rb +25 -6
  71. data/lib/sidekiq_unique_jobs/on_conflict.rb +23 -10
  72. data/lib/sidekiq_unique_jobs/options_with_fallback.rb +39 -36
  73. data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +29 -0
  74. data/lib/sidekiq_unique_jobs/orphans/manager.rb +241 -0
  75. data/lib/sidekiq_unique_jobs/orphans/null_reaper.rb +24 -0
  76. data/lib/sidekiq_unique_jobs/orphans/observer.rb +42 -0
  77. data/lib/sidekiq_unique_jobs/orphans/reaper.rb +114 -0
  78. data/lib/sidekiq_unique_jobs/orphans/reaper_resurrector.rb +170 -0
  79. data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +298 -0
  80. data/lib/sidekiq_unique_jobs/redis/entity.rb +112 -0
  81. data/lib/sidekiq_unique_jobs/redis/hash.rb +56 -0
  82. data/lib/sidekiq_unique_jobs/redis/list.rb +32 -0
  83. data/lib/sidekiq_unique_jobs/redis/set.rb +32 -0
  84. data/lib/sidekiq_unique_jobs/redis/sorted_set.rb +86 -0
  85. data/lib/sidekiq_unique_jobs/redis/string.rb +51 -0
  86. data/lib/sidekiq_unique_jobs/redis.rb +11 -0
  87. data/lib/sidekiq_unique_jobs/reflectable.rb +26 -0
  88. data/lib/sidekiq_unique_jobs/reflections.rb +79 -0
  89. data/lib/sidekiq_unique_jobs/rspec/matchers/have_valid_sidekiq_options.rb +51 -0
  90. data/lib/sidekiq_unique_jobs/rspec/matchers.rb +26 -0
  91. data/lib/sidekiq_unique_jobs/script/caller.rb +127 -0
  92. data/lib/sidekiq_unique_jobs/script.rb +15 -0
  93. data/lib/sidekiq_unique_jobs/server.rb +61 -0
  94. data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +114 -65
  95. data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +252 -36
  96. data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +47 -32
  97. data/lib/sidekiq_unique_jobs/testing.rb +102 -29
  98. data/lib/sidekiq_unique_jobs/timer_task.rb +299 -0
  99. data/lib/sidekiq_unique_jobs/timing.rb +58 -0
  100. data/lib/sidekiq_unique_jobs/unlockable.rb +20 -4
  101. data/lib/sidekiq_unique_jobs/update_version.rb +25 -0
  102. data/lib/sidekiq_unique_jobs/upgrade_locks.rb +155 -0
  103. data/lib/sidekiq_unique_jobs/version.rb +3 -1
  104. data/lib/sidekiq_unique_jobs/version_check.rb +23 -4
  105. data/lib/sidekiq_unique_jobs/web/helpers.rb +138 -13
  106. data/lib/sidekiq_unique_jobs/web/views/_paging.erb +4 -4
  107. data/lib/sidekiq_unique_jobs/web/views/changelogs.erb +54 -0
  108. data/lib/sidekiq_unique_jobs/web/views/lock.erb +110 -0
  109. data/lib/sidekiq_unique_jobs/web/views/locks.erb +54 -0
  110. data/lib/sidekiq_unique_jobs/web.rb +82 -32
  111. data/lib/sidekiq_unique_jobs.rb +54 -7
  112. data/lib/tasks/changelog.rake +16 -16
  113. metadata +134 -177
  114. data/lib/sidekiq_unique_jobs/client/middleware.rb +0 -56
  115. data/lib/sidekiq_unique_jobs/scripts.rb +0 -118
  116. data/lib/sidekiq_unique_jobs/server/middleware.rb +0 -46
  117. data/lib/sidekiq_unique_jobs/timeout/calculator.rb +0 -63
  118. data/lib/sidekiq_unique_jobs/timeout.rb +0 -8
  119. data/lib/sidekiq_unique_jobs/unique_args.rb +0 -150
  120. data/lib/sidekiq_unique_jobs/util.rb +0 -103
  121. data/lib/sidekiq_unique_jobs/web/views/unique_digest.erb +0 -28
  122. data/lib/sidekiq_unique_jobs/web/views/unique_digests.erb +0 -46
  123. data/redis/acquire_lock.lua +0 -21
  124. data/redis/convert_legacy_lock.lua +0 -13
  125. data/redis/delete.lua +0 -14
  126. data/redis/delete_by_digest.lua +0 -23
  127. data/redis/delete_job_by_digest.lua +0 -60
  128. data/redis/lock.lua +0 -62
  129. data/redis/release_stale_locks.lua +0 -90
  130. data/redis/unlock.lua +0 -35
data/lib/sidekiq_unique_jobs/constants.rb
@@ -3,31 +3,52 @@
 #
 # Module with constants to avoid string duplication
 #
-# @author Mikael Henriksson <mikael@zoolutions.se>
+# @author Mikael Henriksson <mikael@mhenrixon.com>
 #
 module SidekiqUniqueJobs
-  ARGS_KEY ||= "args"
-  APARTMENT ||= "apartment"
-  AT_KEY ||= "at"
-  CLASS_KEY ||= "class"
-  JAVA ||= "java"
-  JID_KEY ||= "jid"
-  LOCK_DIGEST_KEY ||= "lock_digest"
-  LOCK_EXPIRATION_KEY ||= "lock_expiration"
-  LOCK_TIMEOUT_KEY ||= "lock_timeout"
-  LOCK_TTL_KEY ||= "lock_ttl"
-  LOG_DUPLICATE_KEY ||= "log_duplicate_payload"
-  QUEUE_KEY ||= "queue"
-  UNIQUE_ACROSS_QUEUES_KEY ||= "unique_across_queues"
-  UNIQUE_ACROSS_WORKERS_KEY ||= "unique_across_workers"
-  UNIQUE_ARGS_KEY ||= "unique_args"
-  UNIQUE_DIGEST_KEY ||= "unique_digest"
-  UNIQUE_KEY ||= "unique"
-  UNIQUE_SET ||= "unique:keys"
-  LOCK_KEY ||= "lock"
-  ON_CONFLICT_KEY ||= "on_conflict"
-  UNIQUE_ON_ALL_QUEUES_KEY ||= "unique_on_all_queues" # TODO: Remove in v6.1
-  UNIQUE_PREFIX_KEY ||= "unique_prefix"
-  RETRY_SET ||= "retry"
-  SCHEDULE_SET ||= "schedule"
+  ARGS = "args"
+  APARTMENT = "apartment"
+  AT = "at"
+  CHANGELOGS = "uniquejobs:changelog"
+  CLASS = "class"
+  CREATED_AT = "created_at"
+  DEAD_VERSION = "uniquejobs:dead"
+  DIGESTS = "uniquejobs:digests"
+  EXPIRING_DIGESTS = "uniquejobs:expiring_digests"
+  ERRORS = "errors"
+  JID = "jid"
+  LIMIT = "limit"
+  LIVE_VERSION = "uniquejobs:live"
+  LOCK = "lock"
+  LOCK_ARGS = "lock_args"
+  LOCK_ARGS_METHOD = "lock_args_method"
+  LOCK_DIGEST = "lock_digest"
+  LOCK_EXPIRATION = "lock_expiration"
+  LOCK_INFO = "lock_info"
+  LOCK_LIMIT = "lock_limit"
+  LOCK_PREFIX = "lock_prefix"
+  LOCK_TIMEOUT = "lock_timeout"
+  LOCK_TTL = "lock_ttl"
+  LOCK_TYPE = "lock_type"
+  ON_CLIENT_CONFLICT = "on_client_conflict"
+  ON_CONFLICT = "on_conflict"
+  ON_SERVER_CONFLICT = "on_server_conflict"
+  PAYLOAD = "payload"
+  PROCESSES = "processes"
+  QUEUE = "queue"
+  RETRY = "retry"
+  SCHEDULE = "schedule"
+  TIME = "time"
+  TIMEOUT = "timeout"
+  TTL = "ttl"
+  TYPE = "type"
+  UNIQUE = "unique"
+  UNIQUE_ACROSS_QUEUES = "unique_across_queues"
+  UNIQUE_ACROSS_WORKERS = "unique_across_workers"
+  UNIQUE_ARGS = "unique_args"
+  UNIQUE_ARGS_METHOD = "unique_args_method"
+  UNIQUE_DIGEST = "unique_digest"
+  UNIQUE_PREFIX = "unique_prefix"
+  UNIQUE_REAPER = "uniquejobs:reaper"
+  WORKER = "worker"
 end
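
In v7 the constants drop the _KEY suffix and gain keys for the new lock bookkeeping (digests, changelog, reaper). A minimal sketch of how calling code reads them from a Sidekiq job hash; the digest value here is invented:

# Illustrative only: a fragment of a Sidekiq job hash.
item = { "lock_digest" => "uniquejobs:4e9f0a" }

item[SidekiqUniqueJobs::LOCK_DIGEST] # => "uniquejobs:4e9f0a" (UNIQUE_DIGEST_KEY in v6)
item[SidekiqUniqueJobs::LOCK_TTL]    # => nil until the job hash has been prepared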
data/lib/sidekiq_unique_jobs/core_ext.rb
@@ -2,21 +2,67 @@
 
 # :nocov:
 
+#
+# Monkey patches for the ruby Hash
+#
 class Hash
   unless {}.respond_to?(:slice)
+    #
+    # Returns only the matching keys in a new hash
+    #
+    # @param [Array<String>, Array<Symbol>] keys the keys to match
+    #
+    # @return [Hash]
+    #
     def slice(*keys)
       keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
       keys.each_with_object(self.class.new) { |k, hash| hash[k] = self[k] if key?(k) }
     end
   end
 
+  unless {}.respond_to?(:deep_stringify_keys)
+    #
+    # Depp converts all keys to string
+    #
+    #
+    # @return [Hash<String>]
+    #
+    def deep_stringify_keys
+      deep_transform_keys(&:to_s)
+    end
+  end
+
+  unless {}.respond_to?(:deep_transform_keys)
+    #
+    # Deep transfor all keys by yielding to the caller
+    #
+    #
+    # @return [Hash<String>]
+    #
+    def deep_transform_keys(&block)
+      _deep_transform_keys_in_object(self, &block)
+    end
+  end
+
   unless {}.respond_to?(:stringify_keys)
+    #
+    # Converts all keys to string
+    #
+    #
+    # @return [Hash<String>]
+    #
     def stringify_keys
       transform_keys(&:to_s)
     end
   end
 
   unless {}.respond_to?(:transform_keys)
+    #
+    # Transforms all keys by yielding to the caller
+    #
+    #
+    # @return [Hash]
+    #
     def transform_keys
       result = {}
       each_key do |key|
@@ -27,6 +73,13 @@ class Hash
   end
 
   unless {}.respond_to?(:slice!)
+    #
+    # Removes all keys not provided from the current hash and returns it
+    #
+    # @param [Array<String>, Array<Symbol>] keys the keys to match
+    #
+    # @return [Hash]
+    #
     def slice!(*keys)
       keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
       omit = slice(*self.keys - keys)
@@ -37,10 +90,37 @@ class Hash
       omit
     end
   end
+
+  private
+
+  unless {}.respond_to?(:_deep_transform_keys_in_object)
+    # support methods for deep transforming nested hashes and arrays
+    def _deep_transform_keys_in_object(object, &block)
+      case object
+      when Hash
+        object.each_with_object(self.class.new) do |(key, value), result|
+          result[yield(key)] = _deep_transform_keys_in_object(value, &block)
+        end
+      when Array
+        object.map { |element| _deep_transform_keys_in_object(element, &block) }
+      else
+        object
+      end
+    end
+  end
 end
 
+#
+# Monkey patches for the ruby Array
+#
 class Array
   unless [].respond_to?(:extract_options!)
+    #
+    # Extract the last argument if it is a hash
+    #
+    #
+    # @return [Hash]
+    #
    def extract_options!
      if last.is_a?(Hash) && last.instance_of?(Hash)
        pop
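
These Hash and Array helpers are only defined when the methods are missing, for example when ActiveSupport is not loaded. A small sketch of what the backfills do, following the definitions above:

{ lock: { timeout: 0 } }.deep_stringify_keys
# => { "lock" => { "timeout" => 0 } }

["argument", { queue: "default" }].extract_options!
# => { queue: "default" } (the trailing options hash is popped off the array)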
data/lib/sidekiq_unique_jobs/deprecation.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module SidekiqUniqueJobs
+  #
+  # Class Deprecation provides logging of deprecations
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class Deprecation
+    #
+    # Mute warnings from this gem in a threaded context
+    #
+    #
+    # @return [void] <description>
+    #
+    # @yieldreturn [void]
+    def self.muted
+      orig_val = Thread.current[:uniquejobs_mute_deprecations]
+      Thread.current[:uniquejobs_mute_deprecations] = true
+      yield
+    ensure
+      Thread.current[:uniquejobs_mute_deprecations] = orig_val
+    end
+
+    #
+    # Check if deprecation warnings have been muted
+    #
+    #
+    # @return [true,false]
+    #
+    def self.muted?
+      Thread.current[:uniquejobs_mute_deprecations] == true
+    end
+
+    #
+    # Warn about deprecation
+    #
+    # @param [String] msg a descriptive reason for why the deprecation
+    #
+    # @return [void]
+    #
+    def self.warn(msg)
+      return if SidekiqUniqueJobs::Deprecation.muted?
+
+      warn "DEPRECATION WARNING: #{msg}"
+      nil
+    end
+
+    #
+    # Warn about deprecation and provide a context
+    #
+    # @param [String] msg a descriptive reason for why the deprecation
+    #
+    # @return [void]
+    #
+    def self.warn_with_backtrace(msg)
+      return if SidekiqUniqueJobs::Deprecation.muted?
+
+      trace = "\n\nCALLED FROM:\n#{caller.join("\n")}"
+      warn(msg + trace)
+
+      nil
+    end
+  end
+end
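
A short usage sketch of the new Deprecation helper; the message text and the block body are made up for illustration:

# Prints "DEPRECATION WARNING: ..." unless deprecations are muted on this thread.
SidekiqUniqueJobs::Deprecation.warn("unique_args is deprecated, use lock_args")

# Silences deprecation output for the duration of the block (per thread).
SidekiqUniqueJobs::Deprecation.muted do
  legacy_configuration # hypothetical code that still triggers deprecation warnings
end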
data/lib/sidekiq_unique_jobs/digests.rb
@@ -1,87 +1,66 @@
 # frozen_string_literal: true
 
 module SidekiqUniqueJobs
-  # Utility module to help manage unique digests in redis.
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  module Digests
+  # Class Changelogs provides access to the changelog entries
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class Digests < Redis::SortedSet
+    #
+    # @return [Integer] the number of matches to return by default
     DEFAULT_COUNT = 1_000
+    #
+    # @return [String] the default pattern to use for matching
     SCAN_PATTERN = "*"
-    CHUNK_SIZE = 100
-
-    include SidekiqUniqueJobs::Logging
-    include SidekiqUniqueJobs::Connection
-    extend self
 
-    # Return unique digests matching pattern
-    #
-    # @param [String] pattern a pattern to match with
-    # @param [Integer] count the maximum number to match
-    # @return [Array<String>] with unique digests
-    def all(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
-      redis { |conn| conn.sscan_each(UNIQUE_SET, match: pattern, count: count).to_a }
+    def initialize(digests_key = DIGESTS)
+      super(digests_key)
     end
 
-    # Paginate unique digests
     #
-    # @param [String] pattern a pattern to match with
-    # @param [Integer] cursor the maximum number to match
-    # @param [Integer] page_size the current cursor position
+    # Adds a digest
     #
-    # @return [Array<String>] with unique digests
-    def page(pattern: SCAN_PATTERN, cursor: 0, page_size: 100)
-      redis do |conn|
-        total_size, digests = conn.multi do
-          conn.scard(UNIQUE_SET)
-          conn.sscan(UNIQUE_SET, cursor, match: pattern, count: page_size)
-        end
-
-        [total_size, digests[0], digests[1]]
-      end
-    end
-
-    # Get a total count of unique digests
+    # @param [String] digest the digest to add
     #
-    # @return [Integer] number of digests
-    def count
-      redis { |conn| conn.scard(UNIQUE_SET) }
+    def add(digest)
+      redis { |conn| conn.zadd(key, now_f, digest) }
     end
 
-    # Deletes unique digest either by a digest or pattern
+    # Deletes unique digests by pattern
     #
-    # @param [String] digest the full digest to delete
     # @param [String] pattern a key pattern to match with
     # @param [Integer] count the maximum number
-    # @raise [ArgumentError] when both pattern and digest are nil
-    # @return [Array<String>] with unique digests
-    def del(digest: nil, pattern: nil, count: DEFAULT_COUNT)
-      warn("#{self}.#{__method__} has been deprecated and will be removed in a future version")
+    # @return [Hash<String,Float>] Hash mapping of digest matching the given pattern and score
+
+    def delete_by_pattern(pattern, count: DEFAULT_COUNT)
+      result, elapsed = timed do
+        digests = entries(pattern: pattern, count: count).keys
+        redis { |conn| BatchDelete.call(digests, conn) }
+      end
 
-      return delete_by_pattern(pattern, count: count) if pattern
-      return delete_by_digest(digest) if digest
+      log_info("#{__method__}(#{pattern}, count: #{count}) completed in #{elapsed}ms")
 
-      raise ArgumentError, "either digest or pattern need to be provided"
+      result
     end
 
-    # Deletes unique digest either by a digest or pattern
+    # Delete unique digests by digest
+    # Also deletes the :AVAILABLE, :EXPIRED etc keys
     #
-    # @param [String] digest the full digest to delete
+    # @param [String] digest a unique digest to delete
     def delete_by_digest(digest) # rubocop:disable Metrics/MethodLength
       result, elapsed = timed do
-        Scripts.call(:delete_by_digest, nil, keys: [
-          UNIQUE_SET,
-          digest,
-          "#{digest}:EXISTS",
-          "#{digest}:GRABBED",
-          "#{digest}:AVAILABLE",
-          "#{digest}:VERSION",
-          "#{digest}:RUN:EXISTS",
-          "#{digest}:RUN:GRABBED",
-          "#{digest}:RUN:AVAILABLE",
-          "#{digest}:RUN:VERSION",
-        ])
-
-        count
+        call_script(:delete_by_digest, [
+          digest,
+          "#{digest}:QUEUED",
+          "#{digest}:PRIMED",
+          "#{digest}:LOCKED",
+          "#{digest}:RUN",
+          "#{digest}:RUN:QUEUED",
+          "#{digest}:RUN:PRIMED",
+          "#{digest}:RUN:LOCKED",
+          key,
+        ])
       end
 
       log_info("#{__method__}(#{digest}) completed in #{elapsed}ms")
@@ -89,55 +68,44 @@ module SidekiqUniqueJobs
       result
     end
 
-    # Deletes unique digests by pattern
     #
-    # @param [String] pattern a key pattern to match with
-    # @param [Integer] count the maximum number
-    # @return [Array<String>] with unique digests
-    def delete_by_pattern(pattern, count: DEFAULT_COUNT)
-      result, elapsed = timed do
-        digests = all(pattern: pattern, count: count)
-        batch_delete(digests)
-        digests.size
-      end
-
-      log_info("#{__method__}(#{pattern}, count: #{count}) completed in #{elapsed}ms")
+    # The entries in this sorted set
+    #
+    # @param [String] pattern SCAN_PATTERN the match pattern to search for
+    # @param [Integer] count DEFAULT_COUNT the number of entries to return
+    #
+    # @return [Array<String>] an array of digests matching the given pattern
+    #
+    def entries(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
+      options = {}
+      options[:match] = pattern
+      options[:count] = count
 
-      result
+      redis { |conn| conn.zscan_each(key, **options).to_a }.to_h
     end
 
-    private
-
-    def batch_delete(digests) # rubocop:disable Metrics/MethodLength
+    #
+    # Returns a paginated
+    #
+    # @param [Integer] cursor the cursor for this iteration
+    # @param [String] pattern SCAN_PATTERN the match pattern to search for
+    # @param [Integer] page_size 100 the size per page
+    #
+    # @return [Array<Integer, Integer, Array<Lock>>] total_size, next_cursor, locks
+    #
+    def page(cursor: 0, pattern: SCAN_PATTERN, page_size: 100)
       redis do |conn|
-        digests.each_slice(CHUNK_SIZE) do |chunk|
-          conn.pipelined do
-            chunk.each do |digest|
-              conn.del digest
-              conn.srem(UNIQUE_SET, digest)
-              conn.del("#{digest}:EXISTS")
-              conn.del("#{digest}:GRABBED")
-              conn.del("#{digest}:VERSION")
-              conn.del("#{digest}:AVAILABLE")
-              conn.del("#{digest}:RUN:EXISTS")
-              conn.del("#{digest}:RUN:GRABBED")
-              conn.del("#{digest}:RUN:VERSION")
-              conn.del("#{digest}:RUN:AVAILABLE")
-            end
-          end
+        total_size, digests = conn.multi do |pipeline|
+          pipeline.zcard(key)
+          pipeline.zscan(key, cursor, match: pattern, count: page_size)
         end
-      end
-    end
 
-    def timed
-      start = current_time
-      result = yield
-      elapsed = (current_time - start).round(2)
-      [result, elapsed]
-    end
-
-    def current_time
-      Time.now
+        [
+          total_size.to_i,
+          digests[0].to_i, # next_cursor
+          digests[1].map { |digest, score| Lock.new(digest, time: score) }, # entries
+        ]
+      end
     end
   end
 end
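
Digests is now backed by a sorted set (uniquejobs:digests) whose scores are the times the digests were added. A usage sketch assembled from the methods above; the digest and the return values are illustrative:

digests = SidekiqUniqueJobs::Digests.new

digests.add("uniquejobs:4e9f0a")              # ZADD with the current time as score
digests.entries(pattern: "uniquejobs:*")      # => { "uniquejobs:4e9f0a" => 1650000000.0 }
digests.delete_by_digest("uniquejobs:4e9f0a") # also removes the :QUEUED, :PRIMED and :LOCKED keys
digests.delete_by_pattern("uniquejobs:*", count: 1_000)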
data/lib/sidekiq_unique_jobs/exceptions.rb
@@ -1,29 +1,105 @@
 # frozen_string_literal: true
 
 module SidekiqUniqueJobs
+  #
+  # Base class for all exceptions raised from the gem
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class UniqueJobsError < ::RuntimeError
+  end
+
   # Error raised when a Lua script fails to execute
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class Conflict < StandardError
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class Conflict < UniqueJobsError
     def initialize(item)
-      super("Item with the key: #{item[UNIQUE_DIGEST_KEY]} is already scheduled or processing")
+      super("Item with the key: #{item[LOCK_DIGEST]} is already scheduled or processing")
+    end
+  end
+
+  #
+  # Raised when no block was given
+  #
+  class NoBlockGiven < SidekiqUniqueJobs::UniqueJobsError; end
+
+  #
+  # Raised when a notification has been mistyped
+  #
+  class NoSuchNotificationError < UniqueJobsError; end
+
+  #
+  # Error raised when trying to add a duplicate lock
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class DuplicateLock < UniqueJobsError
+  end
+
+  #
+  # Error raised when trying to add a duplicate stragegy
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class DuplicateStrategy < UniqueJobsError
+  end
+
+  #
+  # Error raised when an invalid argument is given
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class InvalidArgument < UniqueJobsError
+  end
+
+  #
+  # Raised when a workers configuration is invalid
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class InvalidWorker < UniqueJobsError
+    def initialize(lock_config)
+      super(<<~FAILURE_MESSAGE)
+        Expected #{lock_config.worker} to have valid sidekiq options but found the following problems:
+        #{lock_config.errors_as_string}
+      FAILURE_MESSAGE
+    end
+  end
+
+  # Error raised when a Lua script fails to execute
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class InvalidUniqueArguments < UniqueJobsError
+    def initialize(options)
+      given = options[:given]
+      job_class = options[:job_class]
+      lock_args_method = options[:lock_args_method]
+      lock_args_meth = job_class.method(lock_args_method)
+      num_args = lock_args_meth.arity
+      source_location = lock_args_meth.source_location
+
+      super(
+        "#{job_class}##{lock_args_method} takes #{num_args} arguments, received #{given.inspect}" \
+        "\n\n" \
+        " #{source_location.join(':')}"
+      )
     end
   end
 
-  # Error raised from {OnConflict::Raise}
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class ScriptError < StandardError
-    # @param [Symbol] file_name the name of the lua script
-    # @param [Redis::CommandError] source_exception exception to handle
-    def initialize(file_name:, source_exception:)
-      super("Problem compiling #{file_name}. Message: #{source_exception.message}")
+  # Raised when a workers configuration is invalid
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class NotUniqueWorker < UniqueJobsError
+    def initialize(options)
+      super("#{options[:class]} is not configured for uniqueness. Missing the key `:lock` in #{options.inspect}")
     end
   end
 
   # Error raised from {OptionsWithFallback#lock_class}
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class UnknownLock < StandardError
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class UnknownLock < UniqueJobsError
   end
 end
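
Because every error now inherits from UniqueJobsError, callers can rescue the whole family in one place. A sketch; MyJob is a hypothetical worker configured with the :raise conflict strategy:

begin
  MyJob.perform_async(42)
rescue SidekiqUniqueJobs::UniqueJobsError => error
  Sidekiq.logger.warn("unique job error: #{error.message}")
end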
data/lib/sidekiq_unique_jobs/expiring_digests.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module SidekiqUniqueJobs
+  #
+  # Class ExpiringDigests provides access to the expiring digests used by until_expired locks
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class ExpiringDigests < Digests
+    def initialize
+      super(EXPIRING_DIGESTS)
+    end
+  end
+end
data/lib/sidekiq_unique_jobs/job.rb
@@ -3,27 +3,61 @@
 module SidekiqUniqueJobs
   # Utility class to append uniqueness to the sidekiq job hash
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
   module Job
     extend self
 
-    # Adds timeout, expiration, unique_args, unique_prefix and unique_digest to the sidekiq job hash
-    # @return [void] nothing returned here matters
-    def add_uniqueness(item)
-      add_timeout_and_expiration(item)
-      add_unique_args_and_digest(item)
+    # Adds timeout, expiration, lock_args, lock_prefix and lock_digest to the sidekiq job hash
+    # @return [Hash] the job hash
+    def prepare(item)
+      stringify_on_conflict_hash(item)
+      add_lock_type(item)
+      add_lock_timeout(item)
+      add_lock_ttl(item)
+      add_digest(item)
+    end
+
+    # Adds lock_args, lock_prefix and lock_digest to the sidekiq job hash
+    # @return [Hash] the job hash
+    def add_digest(item)
+      add_lock_prefix(item)
+      add_lock_args(item)
+      add_lock_digest(item)
+
+      item
     end
 
     private
 
-    def add_timeout_and_expiration(item)
-      calculator = SidekiqUniqueJobs::Timeout::Calculator.new(item)
-      item[LOCK_TIMEOUT_KEY] = calculator.lock_timeout
-      item[LOCK_EXPIRATION_KEY] = calculator.lock_expiration
+    def stringify_on_conflict_hash(item)
+      on_conflict = item[ON_CONFLICT]
+      return unless on_conflict.is_a?(Hash)
+
+      item[ON_CONFLICT] = on_conflict.deep_stringify_keys
+    end
+
+    def add_lock_ttl(item)
+      item[LOCK_TTL] = SidekiqUniqueJobs::LockTTL.calculate(item)
+    end
+
+    def add_lock_timeout(item)
+      item[LOCK_TIMEOUT] ||= SidekiqUniqueJobs::LockTimeout.calculate(item)
+    end
+
+    def add_lock_args(item)
+      item[LOCK_ARGS] ||= SidekiqUniqueJobs::LockArgs.call(item)
+    end
+
+    def add_lock_digest(item)
+      item[LOCK_DIGEST] ||= SidekiqUniqueJobs::LockDigest.call(item)
+    end
+
+    def add_lock_prefix(item)
+      item[LOCK_PREFIX] ||= SidekiqUniqueJobs.config.lock_prefix
     end
 
-    def add_unique_args_and_digest(item)
-      SidekiqUniqueJobs::UniqueArgs.digest(item)
+    def add_lock_type(item)
+      item[LOCK] ||= SidekiqUniqueJobs::LockType.call(item)
     end
   end
 end
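
Job.prepare replaces add_uniqueness and returns the enriched job hash instead of nothing. An illustrative call, assuming MyJob is a defined worker using lock: :until_executed; the digest shown is invented:

item = { "class" => "MyJob", "args" => [42], "lock" => "until_executed" }
SidekiqUniqueJobs::Job.prepare(item)

item["lock_timeout"] # => the calculated lock timeout for MyJob
item["lock_digest"]  # => "uniquejobs:4e9f0a" for example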