sidekiq-unique-jobs 6.0.25 → 7.1.5

Note: this version of sidekiq-unique-jobs has been flagged as potentially problematic.

Files changed (127)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +840 -41
  3. data/README.md +814 -284
  4. data/lib/sidekiq_unique_jobs/batch_delete.rb +123 -0
  5. data/lib/sidekiq_unique_jobs/changelog.rb +78 -0
  6. data/lib/sidekiq_unique_jobs/cli.rb +34 -31
  7. data/lib/sidekiq_unique_jobs/config.rb +275 -0
  8. data/lib/sidekiq_unique_jobs/connection.rb +6 -5
  9. data/lib/sidekiq_unique_jobs/constants.rb +45 -25
  10. data/lib/sidekiq_unique_jobs/core_ext.rb +80 -0
  11. data/lib/sidekiq_unique_jobs/deprecation.rb +35 -0
  12. data/lib/sidekiq_unique_jobs/digests.rb +71 -100
  13. data/lib/sidekiq_unique_jobs/exceptions.rb +87 -12
  14. data/lib/sidekiq_unique_jobs/job.rb +41 -12
  15. data/lib/sidekiq_unique_jobs/json.rb +40 -0
  16. data/lib/sidekiq_unique_jobs/key.rb +93 -0
  17. data/lib/sidekiq_unique_jobs/lock/base_lock.rb +100 -79
  18. data/lib/sidekiq_unique_jobs/lock/client_validator.rb +28 -0
  19. data/lib/sidekiq_unique_jobs/lock/server_validator.rb +27 -0
  20. data/lib/sidekiq_unique_jobs/lock/until_and_while_executing.rb +34 -15
  21. data/lib/sidekiq_unique_jobs/lock/until_executed.rb +19 -7
  22. data/lib/sidekiq_unique_jobs/lock/until_executing.rb +16 -2
  23. data/lib/sidekiq_unique_jobs/lock/until_expired.rb +20 -16
  24. data/lib/sidekiq_unique_jobs/lock/validator.rb +96 -0
  25. data/lib/sidekiq_unique_jobs/lock/while_executing.rb +19 -10
  26. data/lib/sidekiq_unique_jobs/lock/while_executing_reject.rb +3 -3
  27. data/lib/sidekiq_unique_jobs/lock.rb +325 -0
  28. data/lib/sidekiq_unique_jobs/lock_args.rb +123 -0
  29. data/lib/sidekiq_unique_jobs/lock_config.rb +126 -0
  30. data/lib/sidekiq_unique_jobs/lock_digest.rb +79 -0
  31. data/lib/sidekiq_unique_jobs/lock_info.rb +68 -0
  32. data/lib/sidekiq_unique_jobs/lock_timeout.rb +62 -0
  33. data/lib/sidekiq_unique_jobs/lock_ttl.rb +77 -0
  34. data/lib/sidekiq_unique_jobs/locksmith.rb +275 -102
  35. data/lib/sidekiq_unique_jobs/logging/middleware_context.rb +44 -0
  36. data/lib/sidekiq_unique_jobs/logging.rb +179 -33
  37. data/lib/sidekiq_unique_jobs/lua/delete.lua +51 -0
  38. data/lib/sidekiq_unique_jobs/lua/delete_by_digest.lua +42 -0
  39. data/lib/sidekiq_unique_jobs/lua/delete_job_by_digest.lua +38 -0
  40. data/lib/sidekiq_unique_jobs/lua/find_digest_in_queues.lua +26 -0
  41. data/lib/sidekiq_unique_jobs/lua/lock.lua +93 -0
  42. data/lib/sidekiq_unique_jobs/lua/locked.lua +35 -0
  43. data/lib/sidekiq_unique_jobs/lua/queue.lua +87 -0
  44. data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +94 -0
  45. data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +40 -0
  46. data/lib/sidekiq_unique_jobs/lua/shared/_current_time.lua +8 -0
  47. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_queue.lua +22 -0
  48. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_sorted_set.lua +18 -0
  49. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_process_set.lua +53 -0
  50. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_queues.lua +43 -0
  51. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_sorted_set.lua +24 -0
  52. data/lib/sidekiq_unique_jobs/lua/shared/_hgetall.lua +13 -0
  53. data/lib/sidekiq_unique_jobs/lua/shared/_upgrades.lua +3 -0
  54. data/lib/sidekiq_unique_jobs/lua/unlock.lua +95 -0
  55. data/lib/sidekiq_unique_jobs/lua/update_version.lua +40 -0
  56. data/lib/sidekiq_unique_jobs/lua/upgrade.lua +68 -0
  57. data/lib/sidekiq_unique_jobs/middleware/client.rb +40 -0
  58. data/lib/sidekiq_unique_jobs/middleware/server.rb +29 -0
  59. data/lib/sidekiq_unique_jobs/middleware.rb +29 -43
  60. data/lib/sidekiq_unique_jobs/normalizer.rb +4 -4
  61. data/lib/sidekiq_unique_jobs/on_conflict/log.rb +9 -5
  62. data/lib/sidekiq_unique_jobs/on_conflict/null_strategy.rb +1 -1
  63. data/lib/sidekiq_unique_jobs/on_conflict/raise.rb +1 -1
  64. data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +61 -15
  65. data/lib/sidekiq_unique_jobs/on_conflict/replace.rb +54 -14
  66. data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +16 -5
  67. data/lib/sidekiq_unique_jobs/on_conflict/strategy.rb +25 -6
  68. data/lib/sidekiq_unique_jobs/on_conflict.rb +23 -10
  69. data/lib/sidekiq_unique_jobs/options_with_fallback.rb +34 -29
  70. data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +29 -0
  71. data/lib/sidekiq_unique_jobs/orphans/manager.rb +213 -0
  72. data/lib/sidekiq_unique_jobs/orphans/null_reaper.rb +24 -0
  73. data/lib/sidekiq_unique_jobs/orphans/observer.rb +42 -0
  74. data/lib/sidekiq_unique_jobs/orphans/reaper.rb +114 -0
  75. data/lib/sidekiq_unique_jobs/orphans/reaper_resurrector.rb +170 -0
  76. data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +213 -0
  77. data/lib/sidekiq_unique_jobs/redis/entity.rb +112 -0
  78. data/lib/sidekiq_unique_jobs/redis/hash.rb +56 -0
  79. data/lib/sidekiq_unique_jobs/redis/list.rb +32 -0
  80. data/lib/sidekiq_unique_jobs/redis/set.rb +32 -0
  81. data/lib/sidekiq_unique_jobs/redis/sorted_set.rb +86 -0
  82. data/lib/sidekiq_unique_jobs/redis/string.rb +49 -0
  83. data/lib/sidekiq_unique_jobs/redis.rb +11 -0
  84. data/lib/sidekiq_unique_jobs/reflectable.rb +17 -0
  85. data/lib/sidekiq_unique_jobs/reflections.rb +68 -0
  86. data/lib/sidekiq_unique_jobs/rspec/matchers/have_valid_sidekiq_options.rb +51 -0
  87. data/lib/sidekiq_unique_jobs/rspec/matchers.rb +26 -0
  88. data/lib/sidekiq_unique_jobs/script/caller.rb +127 -0
  89. data/lib/sidekiq_unique_jobs/script.rb +15 -0
  90. data/lib/sidekiq_unique_jobs/server.rb +49 -0
  91. data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +92 -65
  92. data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +241 -35
  93. data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +11 -15
  94. data/lib/sidekiq_unique_jobs/testing.rb +62 -21
  95. data/lib/sidekiq_unique_jobs/timer_task.rb +78 -0
  96. data/lib/sidekiq_unique_jobs/timing.rb +58 -0
  97. data/lib/sidekiq_unique_jobs/unlockable.rb +20 -4
  98. data/lib/sidekiq_unique_jobs/update_version.rb +25 -0
  99. data/lib/sidekiq_unique_jobs/upgrade_locks.rb +155 -0
  100. data/lib/sidekiq_unique_jobs/version.rb +3 -1
  101. data/lib/sidekiq_unique_jobs/version_check.rb +23 -4
  102. data/lib/sidekiq_unique_jobs/web/helpers.rb +128 -13
  103. data/lib/sidekiq_unique_jobs/web/views/_paging.erb +4 -4
  104. data/lib/sidekiq_unique_jobs/web/views/changelogs.erb +54 -0
  105. data/lib/sidekiq_unique_jobs/web/views/lock.erb +108 -0
  106. data/lib/sidekiq_unique_jobs/web/views/locks.erb +54 -0
  107. data/lib/sidekiq_unique_jobs/web.rb +57 -27
  108. data/lib/sidekiq_unique_jobs.rb +52 -7
  109. data/lib/tasks/changelog.rake +5 -5
  110. metadata +121 -177
  111. data/lib/sidekiq_unique_jobs/client/middleware.rb +0 -56
  112. data/lib/sidekiq_unique_jobs/scripts.rb +0 -118
  113. data/lib/sidekiq_unique_jobs/server/middleware.rb +0 -46
  114. data/lib/sidekiq_unique_jobs/timeout/calculator.rb +0 -63
  115. data/lib/sidekiq_unique_jobs/timeout.rb +0 -8
  116. data/lib/sidekiq_unique_jobs/unique_args.rb +0 -150
  117. data/lib/sidekiq_unique_jobs/util.rb +0 -103
  118. data/lib/sidekiq_unique_jobs/web/views/unique_digest.erb +0 -28
  119. data/lib/sidekiq_unique_jobs/web/views/unique_digests.erb +0 -46
  120. data/redis/acquire_lock.lua +0 -21
  121. data/redis/convert_legacy_lock.lua +0 -13
  122. data/redis/delete.lua +0 -14
  123. data/redis/delete_by_digest.lua +0 -23
  124. data/redis/delete_job_by_digest.lua +0 -60
  125. data/redis/lock.lua +0 -62
  126. data/redis/release_stale_locks.lua +0 -90
  127. data/redis/unlock.lua +0 -35
data/lib/sidekiq_unique_jobs/core_ext.rb

@@ -2,21 +2,67 @@
 
 # :nocov:
 
+#
+# Monkey patches for the ruby Hash
+#
 class Hash
   unless {}.respond_to?(:slice)
+    #
+    # Returns only the matching keys in a new hash
+    #
+    # @param [Array<String>, Array<Symbol>] keys the keys to match
+    #
+    # @return [Hash]
+    #
     def slice(*keys)
       keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
       keys.each_with_object(self.class.new) { |k, hash| hash[k] = self[k] if key?(k) }
     end
   end
 
+  unless {}.respond_to?(:deep_stringify_keys)
+    #
+    # Deep converts all keys to strings
+    #
+    #
+    # @return [Hash<String>]
+    #
+    def deep_stringify_keys
+      deep_transform_keys(&:to_s)
+    end
+  end
+
+  unless {}.respond_to?(:deep_transform_keys)
+    #
+    # Deep transforms all keys by yielding to the caller
+    #
+    #
+    # @return [Hash<String>]
+    #
+    def deep_transform_keys(&block)
+      _deep_transform_keys_in_object(self, &block)
+    end
+  end
+
   unless {}.respond_to?(:stringify_keys)
+    #
+    # Converts all keys to strings
+    #
+    #
+    # @return [Hash<String>]
+    #
     def stringify_keys
       transform_keys(&:to_s)
     end
   end
 
   unless {}.respond_to?(:transform_keys)
+    #
+    # Transforms all keys by yielding to the caller
+    #
+    #
+    # @return [Hash]
+    #
     def transform_keys
       result = {}
       each_key do |key|
@@ -27,6 +73,13 @@ class Hash
     end
   end
 
   unless {}.respond_to?(:slice!)
+    #
+    # Removes all keys not provided from the current hash and returns it
+    #
+    # @param [Array<String>, Array<Symbol>] keys the keys to match
+    #
+    # @return [Hash]
+    #
     def slice!(*keys)
       keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
       omit = slice(*self.keys - keys)
@@ -37,10 +90,37 @@ class Hash
       omit
     end
   end
+
+  private
+
+  unless {}.respond_to?(:_deep_transform_keys_in_object)
+    # support methods for deep transforming nested hashes and arrays
+    def _deep_transform_keys_in_object(object, &block)
+      case object
+      when Hash
+        object.each_with_object({}) do |(key, value), result|
+          result[yield(key)] = _deep_transform_keys_in_object(value, &block)
+        end
+      when Array
+        object.map { |element| _deep_transform_keys_in_object(element, &block) }
+      else
+        object
+      end
+    end
+  end
 end
 
+#
+# Monkey patches for the ruby Array
+#
 class Array
   unless [].respond_to?(:extract_options!)
+    #
+    # Extract the last argument if it is a hash
+    #
+    #
+    # @return [Hash]
+    #
     def extract_options!
       if last.is_a?(Hash) && last.instance_of?(Hash)
         pop
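
For orientation, a quick sketch of what the backported Hash helpers above provide when the running Ruby/ActiveSupport does not already define them (the sample hash is made up):

    h = { lock: { on_conflict: :log }, args: [1, 2] }

    h.slice(:args)         # => { args: [1, 2] }
    h.deep_stringify_keys  # => { "lock" => { "on_conflict" => :log }, "args" => [1, 2] }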
data/lib/sidekiq_unique_jobs/deprecation.rb (new file)

@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module SidekiqUniqueJobs
+  #
+  # Class Deprecation provides logging of deprecations
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class Deprecation
+    def self.muted
+      orig_val = Thread.current[:uniquejobs_mute_deprecations]
+      Thread.current[:uniquejobs_mute_deprecations] = true
+      yield
+    ensure
+      Thread.current[:uniquejobs_mute_deprecations] = orig_val
+    end
+
+    def self.muted?
+      Thread.current[:uniquejobs_mute_deprecations] == true
+    end
+
+    def self.warn(msg)
+      return if SidekiqUniqueJobs::Deprecation.muted?
+
+      warn "DEPRECATION WARNING: #{msg}"
+    end
+
+    def self.warn_with_backtrace(msg)
+      return if SidekiqUniqueJobs::Deprecation.muted?
+
+      trace = "\n\nCALLED FROM:\n#{caller.join("\n")}"
+      warn(msg + trace)
+    end
+  end
+end
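
A hedged usage sketch of the new Deprecation helper (the warning text and the call inside the block are made up for illustration):

    # Emit a deprecation warning unless warnings are muted on this thread
    SidekiqUniqueJobs::Deprecation.warn("this API is deprecated, use the new one")

    # Silence deprecation warnings for the duration of the block
    SidekiqUniqueJobs::Deprecation.muted do
      some_deprecated_call  # hypothetical
    end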
data/lib/sidekiq_unique_jobs/digests.rb

@@ -1,87 +1,65 @@
 # frozen_string_literal: true
 
 module SidekiqUniqueJobs
-  # Utility module to help manage unique digests in redis.
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  module Digests
+  # Class Digests provides access to the unique digests
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class Digests < Redis::SortedSet
+    #
+    # @return [Integer] the number of matches to return by default
     DEFAULT_COUNT = 1_000
+    #
+    # @return [String] the default pattern to use for matching
     SCAN_PATTERN = "*"
-    CHUNK_SIZE = 100
-
-    include SidekiqUniqueJobs::Logging
-    include SidekiqUniqueJobs::Connection
-    extend self
 
-    # Return unique digests matching pattern
-    #
-    # @param [String] pattern a pattern to match with
-    # @param [Integer] count the maximum number to match
-    # @return [Array<String>] with unique digests
-    def all(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
-      redis { |conn| conn.sscan_each(UNIQUE_SET, match: pattern, count: count).to_a }
+    def initialize
+      super(DIGESTS)
     end
 
-    # Paginate unique digests
     #
-    # @param [String] pattern a pattern to match with
-    # @param [Integer] cursor the maximum number to match
-    # @param [Integer] page_size the current cursor position
+    # Adds a digest
     #
-    # @return [Array<String>] with unique digests
-    def page(pattern: SCAN_PATTERN, cursor: 0, page_size: 100)
-      redis do |conn|
-        total_size, digests = conn.multi do
-          conn.scard(UNIQUE_SET)
-          conn.sscan(UNIQUE_SET, cursor, match: pattern, count: page_size)
-        end
-
-        [total_size, digests[0], digests[1]]
-      end
-    end
-
-    # Get a total count of unique digests
+    # @param [String] digest the digest to add
     #
-    # @return [Integer] number of digests
-    def count
-      redis { |conn| conn.scard(UNIQUE_SET) }
+    def add(digest)
+      redis { |conn| conn.zadd(key, now_f, digest) }
     end
 
-    # Deletes unique digest either by a digest or pattern
+    # Deletes unique digests by pattern
     #
-    # @param [String] digest the full digest to delete
     # @param [String] pattern a key pattern to match with
     # @param [Integer] count the maximum number
-    # @raise [ArgumentError] when both pattern and digest are nil
     # @return [Array<String>] with unique digests
-    def del(digest: nil, pattern: nil, count: DEFAULT_COUNT)
-      warn("#{self}.#{__method__} has been deprecated and will be removed in a future version")
+    def delete_by_pattern(pattern, count: DEFAULT_COUNT)
+      result, elapsed = timed do
+        digests = entries(pattern: pattern, count: count).keys
+        redis { |conn| BatchDelete.call(digests, conn) }
+      end
 
-      return delete_by_pattern(pattern, count: count) if pattern
-      return delete_by_digest(digest) if digest
+      log_info("#{__method__}(#{pattern}, count: #{count}) completed in #{elapsed}ms")
 
-      raise ArgumentError, "either digest or pattern need to be provided"
+      result
     end
 
-    # Deletes unique digest either by a digest or pattern
+    # Delete unique digests by digest
+    # Also deletes the :AVAILABLE, :EXPIRED etc keys
     #
-    # @param [String] digest the full digest to delete
+    # @param [String] digest a unique digest to delete
     def delete_by_digest(digest) # rubocop:disable Metrics/MethodLength
       result, elapsed = timed do
-        Scripts.call(:delete_by_digest, nil, keys: [
-          UNIQUE_SET,
-          digest,
-          "#{digest}:EXISTS",
-          "#{digest}:GRABBED",
-          "#{digest}:AVAILABLE",
-          "#{digest}:VERSION",
-          "#{digest}:RUN:EXISTS",
-          "#{digest}:RUN:GRABBED",
-          "#{digest}:RUN:AVAILABLE",
-          "#{digest}:RUN:VERSION",
-        ])
-
-        count
+        call_script(:delete_by_digest, [
+          digest,
+          "#{digest}:QUEUED",
+          "#{digest}:PRIMED",
+          "#{digest}:LOCKED",
+          "#{digest}:RUN",
+          "#{digest}:RUN:QUEUED",
+          "#{digest}:RUN:PRIMED",
+          "#{digest}:RUN:LOCKED",
+          key,
+        ])
       end
 
       log_info("#{__method__}(#{digest}) completed in #{elapsed}ms")
@@ -89,55 +67,48 @@ module SidekiqUniqueJobs
       result
     end
 
-    # Deletes unique digests by pattern
     #
-    # @param [String] pattern a key pattern to match with
-    # @param [Integer] count the maximum number
-    # @return [Array<String>] with unique digests
-    def delete_by_pattern(pattern, count: DEFAULT_COUNT)
-      result, elapsed = timed do
-        digests = all(pattern: pattern, count: count)
-        batch_delete(digests)
-        digests.size
-      end
+    # The entries in this sorted set
+    #
+    # @param [String] pattern SCAN_PATTERN the match pattern to search for
+    # @param [Integer] count DEFAULT_COUNT the number of entries to return
+    #
+    # @return [Array<String>] an array of digests matching the given pattern
+    #
+    def entries(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
+      options = {}
+      options[:match] = pattern
+      options[:count] = count
 
-      log_info("#{__method__}(#{pattern}, count: #{count}) completed in #{elapsed}ms")
+      result = redis { |conn| conn.zscan_each(key, **options).to_a }
 
-      result
+      result.each_with_object({}) do |entry, hash|
+        hash[entry[0]] = entry[1]
+      end
     end
 
-    private
-
-    def batch_delete(digests) # rubocop:disable Metrics/MethodLength
+    #
+    # Returns a paginated collection of digests
+    #
+    # @param [Integer] cursor the cursor for this iteration
+    # @param [String] pattern SCAN_PATTERN the match pattern to search for
+    # @param [Integer] page_size 100 the size per page
+    #
+    # @return [Array<Integer, Integer, Array<Lock>>] total_size, next_cursor, locks
+    #
+    def page(cursor: 0, pattern: SCAN_PATTERN, page_size: 100)
      redis do |conn|
-        digests.each_slice(CHUNK_SIZE) do |chunk|
-          conn.pipelined do
-            chunk.each do |digest|
-              conn.del digest
-              conn.srem(UNIQUE_SET, digest)
-              conn.del("#{digest}:EXISTS")
-              conn.del("#{digest}:GRABBED")
-              conn.del("#{digest}:VERSION")
-              conn.del("#{digest}:AVAILABLE")
-              conn.del("#{digest}:RUN:EXISTS")
-              conn.del("#{digest}:RUN:GRABBED")
-              conn.del("#{digest}:RUN:VERSION")
-              conn.del("#{digest}:RUN:AVAILABLE")
-            end
-          end
+        total_size, digests = conn.multi do
+          conn.zcard(key)
+          conn.zscan(key, cursor, match: pattern, count: page_size)
         end
-      end
-    end
 
-    def timed
-      start = current_time
-      result = yield
-      elapsed = (current_time - start).round(2)
-      [result, elapsed]
-    end
-
-    def current_time
-      Time.now
+        [
+          total_size.to_i,
+          digests[0].to_i, # next_cursor
+          digests[1].map { |digest, score| Lock.new(digest, time: score) }, # entries
+        ]
+      end
     end
   end
 end
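
Taken together, the class above replaces the old set-based helpers with a sorted-set API. A rough usage sketch (the digest value is invented):

    digests = SidekiqUniqueJobs::Digests.new

    digests.add("uniquejobs:4e49bdbf45f8ea30b3a02b74bccb05e7")   # scored with the current time
    digests.entries(pattern: "uniquejobs:*", count: 100)          # => { digest => score, ... }
    digests.delete_by_pattern("uniquejobs:*")                     # batch-deletes the matching digests
    digests.delete_by_digest("uniquejobs:4e49bdbf45f8ea30b3a02b74bccb05e7")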
data/lib/sidekiq_unique_jobs/exceptions.rb

@@ -1,29 +1,104 @@
 # frozen_string_literal: true
 
 module SidekiqUniqueJobs
+  #
+  # Base class for all exceptions raised from the gem
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class UniqueJobsError < ::RuntimeError
+  end
+
   # Error raised when a Lua script fails to execute
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class Conflict < StandardError
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class Conflict < UniqueJobsError
     def initialize(item)
-      super("Item with the key: #{item[UNIQUE_DIGEST_KEY]} is already scheduled or processing")
+      super("Item with the key: #{item[LOCK_DIGEST]} is already scheduled or processing")
     end
   end
 
-  # Error raised from {OnConflict::Raise}
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class ScriptError < StandardError
-    # @param [Symbol] file_name the name of the lua script
-    # @param [Redis::CommandError] source_exception exception to handle
-    def initialize(file_name:, source_exception:)
-      super("Problem compiling #{file_name}. Message: #{source_exception.message}")
+  # Raised when no block was given
+  #
+  class NoBlockGiven < SidekiqUniqueJobs::UniqueJobsError; end
+  #
+  # Raised when a notification has been mistyped
+  #
+  class NoSuchNotificationError < UniqueJobsError; end
+
+  #
+  # Error raised when trying to add a duplicate lock
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class DuplicateLock < UniqueJobsError
+  end
+
+  #
+  # Error raised when trying to add a duplicate strategy
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class DuplicateStrategy < UniqueJobsError
+  end
+
+  #
+  # Error raised when an invalid argument is given
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class InvalidArgument < UniqueJobsError
+  end
+
+  #
+  # Raised when a worker's configuration is invalid
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class InvalidWorker < UniqueJobsError
+    def initialize(lock_config)
+      super(<<~FAILURE_MESSAGE)
+        Expected #{lock_config.worker} to have valid sidekiq options but found the following problems:
+        #{lock_config.errors_as_string}
+      FAILURE_MESSAGE
+    end
+  end
+
+  # Error raised when the unique arguments are invalid
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class InvalidUniqueArguments < UniqueJobsError
+    def initialize(options)
+      given = options[:given]
+      worker_class = options[:worker_class]
+      lock_args_method = options[:lock_args_method]
+      lock_args_meth = worker_class.method(lock_args_method)
+      num_args = lock_args_meth.arity
+      source_location = lock_args_meth.source_location
+
+      super(
+        "#{worker_class}##{lock_args_method} takes #{num_args} arguments, received #{given.inspect}" \
+        "\n\n" \
+        "   #{source_location.join(':')}"
+      )
+    end
+  end
+
+  #
+  # Raised when a worker is not configured for uniqueness
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  #
+  class NotUniqueWorker < UniqueJobsError
+    def initialize(options)
+      super("#{options[:class]} is not configured for uniqueness. Missing the key `:lock` in #{options.inspect}")
     end
   end
 
   # Error raised from {OptionsWithFallback#lock_class}
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
-  class UnknownLock < StandardError
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class UnknownLock < UniqueJobsError
   end
 end
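
Since every error above now inherits from UniqueJobsError, callers can rescue the whole family with a single clause; a minimal sketch (the worker name is hypothetical):

    begin
      MyUniqueWorker.perform_async(42)  # hypothetical worker configured with on_conflict: :raise
    rescue SidekiqUniqueJobs::UniqueJobsError => error
      Sidekiq.logger.warn("unique jobs error: #{error.message}")
    end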
data/lib/sidekiq_unique_jobs/job.rb

@@ -3,27 +3,56 @@
 module SidekiqUniqueJobs
   # Utility class to append uniqueness to the sidekiq job hash
   #
-  # @author Mikael Henriksson <mikael@zoolutions.se>
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
   module Job
     extend self
 
-    # Adds timeout, expiration, unique_args, unique_prefix and unique_digest to the sidekiq job hash
-    # @return [void] nothing returned here matters
-    def add_uniqueness(item)
-      add_timeout_and_expiration(item)
-      add_unique_args_and_digest(item)
+    # Adds timeout, expiration, lock_args, lock_prefix and lock_digest to the sidekiq job hash
+    # @return [Hash] the job hash
+    def prepare(item)
+      stringify_on_conflict_hash(item)
+      add_lock_timeout(item)
+      add_lock_ttl(item)
+      add_digest(item)
+    end
+
+    # Adds lock_args, lock_prefix and lock_digest to the sidekiq job hash
+    # @return [Hash] the job hash
+    def add_digest(item)
+      add_lock_prefix(item)
+      add_lock_args(item)
+      add_lock_digest(item)
+
+      item
     end
 
     private
 
-    def add_timeout_and_expiration(item)
-      calculator = SidekiqUniqueJobs::Timeout::Calculator.new(item)
-      item[LOCK_TIMEOUT_KEY] = calculator.lock_timeout
-      item[LOCK_EXPIRATION_KEY] = calculator.lock_expiration
+    def stringify_on_conflict_hash(item)
+      on_conflict = item[ON_CONFLICT]
+      return unless on_conflict.is_a?(Hash)
+
+      item[ON_CONFLICT] = on_conflict.deep_stringify_keys
+    end
+
+    def add_lock_ttl(item)
+      item[LOCK_TTL] = SidekiqUniqueJobs::LockTTL.calculate(item)
+    end
+
+    def add_lock_timeout(item)
+      item[LOCK_TIMEOUT] ||= SidekiqUniqueJobs::LockTimeout.calculate(item)
+    end
+
+    def add_lock_args(item)
+      item[LOCK_ARGS] ||= SidekiqUniqueJobs::LockArgs.call(item)
+    end
+
+    def add_lock_digest(item)
+      item[LOCK_DIGEST] ||= SidekiqUniqueJobs::LockDigest.call(item)
     end
 
-    def add_unique_args_and_digest(item)
-      SidekiqUniqueJobs::UniqueArgs.digest(item)
+    def add_lock_prefix(item)
+      item[LOCK_PREFIX] ||= SidekiqUniqueJobs.config.lock_prefix
     end
   end
 end
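
In practice Job.prepare decorates the Sidekiq item hash with the lock metadata before the job is pushed. A sketch of the effect, assuming the constants referenced above (LOCK_TIMEOUT, LOCK_TTL, LOCK_ARGS, LOCK_DIGEST) resolve at the SidekiqUniqueJobs level as defined in constants.rb:

    item = { "class" => "MyUniqueWorker", "queue" => "default", "args" => [1] }  # hypothetical job hash

    SidekiqUniqueJobs::Job.prepare(item)

    item[SidekiqUniqueJobs::LOCK_TIMEOUT]  # set via LockTimeout.calculate
    item[SidekiqUniqueJobs::LOCK_TTL]      # set via LockTTL.calculate
    item[SidekiqUniqueJobs::LOCK_ARGS]     # set via LockArgs.call
    item[SidekiqUniqueJobs::LOCK_DIGEST]   # set via LockDigest.call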
data/lib/sidekiq_unique_jobs/json.rb (new file)

@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module SidekiqUniqueJobs
+  # Handles loading and dumping of json
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  module JSON
+    module_function
+
+    #
+    # Parses a JSON string into an object
+    #
+    # @param [String] string the object to parse
+    #
+    # @return [Object]
+    #
+    def load_json(string)
+      return if string.nil? || string.empty?
+
+      ::JSON.parse(string)
+    end
+
+    def safe_load_json(string)
+      return string if string.is_a?(Hash)
+
+      load_json(string)
+    end
+
+    #
+    # Dumps an object into a JSON string
+    #
+    # @param [Object] object a JSON convertible object
+    #
+    # @return [String] a JSON string
+    #
+    def dump_json(object)
+      ::JSON.generate(object)
+    end
+  end
+end
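
The JSON module is a thin wrapper around ::JSON; for example:

    SidekiqUniqueJobs::JSON.dump_json({ "class" => "MyWorker", "args" => [1] })
    # => "{\"class\":\"MyWorker\",\"args\":[1]}"

    SidekiqUniqueJobs::JSON.load_json('{"class":"MyWorker","args":[1]}')
    # => { "class" => "MyWorker", "args" => [1] }

    SidekiqUniqueJobs::JSON.safe_load_json({ "class" => "MyWorker" })
    # => { "class" => "MyWorker" } (hashes pass through untouched)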
data/lib/sidekiq_unique_jobs/key.rb (new file)

@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module SidekiqUniqueJobs
+  # Key class wraps logic dealing with various lock keys
+  #
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
+  class Key
+    #
+    # @!attribute [r] digest
+    #   @return [String] the digest key for which keys are created
+    attr_reader :digest
+    #
+    # @!attribute [r] queued
+    #   @return [String] the list key with queued job_id's
+    attr_reader :queued
+    #
+    # @!attribute [r] primed
+    #   @return [String] the list key with primed job_id's
+    attr_reader :primed
+    #
+    # @!attribute [r] locked
+    #   @return [String] the hash key with locked job_id's
+    attr_reader :locked
+    #
+    # @!attribute [r] info
+    #   @return [String] information about the lock
+    attr_reader :info
+    #
+    # @!attribute [r] changelog
+    #   @return [String] the zset with changelog entries
+    attr_reader :changelog
+    #
+    # @!attribute [r] digests
+    #   @return [String] the zset with locked digests
+    attr_reader :digests
+
+    #
+    # Initialize a new Key
+    #
+    # @param [String] digest the digest to use as key
+    #
+    def initialize(digest)
+      @digest    = digest
+      @queued    = suffixed_key("QUEUED")
+      @primed    = suffixed_key("PRIMED")
+      @locked    = suffixed_key("LOCKED")
+      @info      = suffixed_key("INFO")
+      @changelog = CHANGELOGS
+      @digests   = DIGESTS
+    end
+
+    #
+    # Provides the only important information about this key
+    #
+    #
+    # @return [String]
+    #
+    def to_s
+      digest
+    end
+
+    # @see to_s
+    def inspect
+      digest
+    end
+
+    #
+    # Compares keys by digest
+    #
+    # @param [Key] other the key to compare with
+    #
+    # @return [true, false]
+    #
+    def ==(other)
+      digest == other.digest
+    end
+
+    #
+    # Returns all keys as an ordered array
+    #
+    # @return [Array] an ordered array with all keys
+    #
+    def to_a
+      [digest, queued, primed, locked, info, changelog, digests]
+    end
+
+    private
+
+    def suffixed_key(variable)
+      "#{digest}:#{variable}"
+    end
+  end
+end
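
Given a digest, the Key class above simply derives the related Redis key names; for instance (the digest value is invented, and changelog/digests resolve to the gem's CHANGELOGS and DIGESTS constants):

    key = SidekiqUniqueJobs::Key.new("uniquejobs:e739dadcbc0e536b2a7f0f2dbc84fdba")

    key.digest  # => "uniquejobs:e739dadcbc0e536b2a7f0f2dbc84fdba"
    key.queued  # => "uniquejobs:e739dadcbc0e536b2a7f0f2dbc84fdba:QUEUED"
    key.primed  # => "uniquejobs:e739dadcbc0e536b2a7f0f2dbc84fdba:PRIMED"
    key.locked  # => "uniquejobs:e739dadcbc0e536b2a7f0f2dbc84fdba:LOCKED"
    key.info    # => "uniquejobs:e739dadcbc0e536b2a7f0f2dbc84fdba:INFO"
    key.to_a    # => [digest, queued, primed, locked, info, changelog, digests]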