sidekiq-unique-jobs 6.0.25 → 7.0.0.beta2

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of sidekiq-unique-jobs might be problematic. Click here for more details.

Files changed (113) hide show
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +155 -20
  3. data/README.md +349 -112
  4. data/lib/sidekiq-unique-jobs.rb +2 -0
  5. data/lib/sidekiq_unique_jobs.rb +43 -6
  6. data/lib/sidekiq_unique_jobs/batch_delete.rb +121 -0
  7. data/lib/sidekiq_unique_jobs/changelog.rb +71 -0
  8. data/lib/sidekiq_unique_jobs/cli.rb +20 -29
  9. data/lib/sidekiq_unique_jobs/config.rb +193 -0
  10. data/lib/sidekiq_unique_jobs/connection.rb +5 -4
  11. data/lib/sidekiq_unique_jobs/constants.rb +36 -24
  12. data/lib/sidekiq_unique_jobs/core_ext.rb +38 -0
  13. data/lib/sidekiq_unique_jobs/digests.rb +78 -93
  14. data/lib/sidekiq_unique_jobs/exceptions.rb +152 -8
  15. data/lib/sidekiq_unique_jobs/job.rb +3 -3
  16. data/lib/sidekiq_unique_jobs/json.rb +34 -0
  17. data/lib/sidekiq_unique_jobs/key.rb +93 -0
  18. data/lib/sidekiq_unique_jobs/lock.rb +295 -0
  19. data/lib/sidekiq_unique_jobs/lock/base_lock.rb +49 -43
  20. data/lib/sidekiq_unique_jobs/lock/client_validator.rb +28 -0
  21. data/lib/sidekiq_unique_jobs/lock/server_validator.rb +27 -0
  22. data/lib/sidekiq_unique_jobs/lock/until_and_while_executing.rb +8 -17
  23. data/lib/sidekiq_unique_jobs/lock/until_executed.rb +5 -5
  24. data/lib/sidekiq_unique_jobs/lock/until_expired.rb +1 -23
  25. data/lib/sidekiq_unique_jobs/lock/validator.rb +65 -0
  26. data/lib/sidekiq_unique_jobs/lock/while_executing.rb +12 -8
  27. data/lib/sidekiq_unique_jobs/lock/while_executing_reject.rb +1 -1
  28. data/lib/sidekiq_unique_jobs/lock_config.rb +95 -0
  29. data/lib/sidekiq_unique_jobs/lock_info.rb +68 -0
  30. data/lib/sidekiq_unique_jobs/locksmith.rb +255 -99
  31. data/lib/sidekiq_unique_jobs/logging.rb +148 -22
  32. data/lib/sidekiq_unique_jobs/logging/middleware_context.rb +44 -0
  33. data/lib/sidekiq_unique_jobs/lua/delete.lua +51 -0
  34. data/lib/sidekiq_unique_jobs/lua/delete_by_digest.lua +46 -0
  35. data/lib/sidekiq_unique_jobs/lua/delete_job_by_digest.lua +38 -0
  36. data/lib/sidekiq_unique_jobs/lua/find_digest_in_queues.lua +26 -0
  37. data/lib/sidekiq_unique_jobs/lua/find_digest_in_sorted_set.lua +24 -0
  38. data/lib/sidekiq_unique_jobs/lua/lock.lua +91 -0
  39. data/lib/sidekiq_unique_jobs/lua/locked.lua +35 -0
  40. data/lib/sidekiq_unique_jobs/lua/queue.lua +83 -0
  41. data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +86 -0
  42. data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +40 -0
  43. data/lib/sidekiq_unique_jobs/lua/shared/_current_time.lua +8 -0
  44. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_queue.lua +19 -0
  45. data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_sorted_set.lua +18 -0
  46. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_queues.lua +46 -0
  47. data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_sorted_set.lua +24 -0
  48. data/lib/sidekiq_unique_jobs/lua/shared/_hgetall.lua +13 -0
  49. data/lib/sidekiq_unique_jobs/lua/shared/_upgrades.lua +3 -0
  50. data/lib/sidekiq_unique_jobs/lua/shared/find_digest_in_sorted_set.lua +24 -0
  51. data/lib/sidekiq_unique_jobs/lua/unlock.lua +99 -0
  52. data/lib/sidekiq_unique_jobs/lua/update_version.lua +40 -0
  53. data/lib/sidekiq_unique_jobs/lua/upgrade.lua +68 -0
  54. data/lib/sidekiq_unique_jobs/middleware.rb +62 -31
  55. data/lib/sidekiq_unique_jobs/middleware/client.rb +42 -0
  56. data/lib/sidekiq_unique_jobs/middleware/server.rb +27 -0
  57. data/lib/sidekiq_unique_jobs/normalizer.rb +3 -3
  58. data/lib/sidekiq_unique_jobs/on_conflict.rb +22 -9
  59. data/lib/sidekiq_unique_jobs/on_conflict/log.rb +8 -4
  60. data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +59 -13
  61. data/lib/sidekiq_unique_jobs/on_conflict/replace.rb +42 -13
  62. data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +4 -4
  63. data/lib/sidekiq_unique_jobs/on_conflict/strategy.rb +24 -5
  64. data/lib/sidekiq_unique_jobs/options_with_fallback.rb +47 -23
  65. data/lib/sidekiq_unique_jobs/orphans/manager.rb +100 -0
  66. data/lib/sidekiq_unique_jobs/orphans/observer.rb +42 -0
  67. data/lib/sidekiq_unique_jobs/orphans/reaper.rb +201 -0
  68. data/lib/sidekiq_unique_jobs/profiler.rb +51 -0
  69. data/lib/sidekiq_unique_jobs/redis.rb +11 -0
  70. data/lib/sidekiq_unique_jobs/redis/entity.rb +94 -0
  71. data/lib/sidekiq_unique_jobs/redis/hash.rb +56 -0
  72. data/lib/sidekiq_unique_jobs/redis/list.rb +32 -0
  73. data/lib/sidekiq_unique_jobs/redis/set.rb +32 -0
  74. data/lib/sidekiq_unique_jobs/redis/sorted_set.rb +59 -0
  75. data/lib/sidekiq_unique_jobs/redis/string.rb +49 -0
  76. data/lib/sidekiq_unique_jobs/rspec/matchers.rb +19 -0
  77. data/lib/sidekiq_unique_jobs/rspec/matchers/have_valid_sidekiq_options.rb +43 -0
  78. data/lib/sidekiq_unique_jobs/{scripts.rb → script.rb} +43 -29
  79. data/lib/sidekiq_unique_jobs/script/caller.rb +125 -0
  80. data/lib/sidekiq_unique_jobs/script/template.rb +41 -0
  81. data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +92 -65
  82. data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +166 -28
  83. data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +10 -11
  84. data/lib/sidekiq_unique_jobs/testing.rb +47 -15
  85. data/lib/sidekiq_unique_jobs/time_calculator.rb +103 -0
  86. data/lib/sidekiq_unique_jobs/timing.rb +58 -0
  87. data/lib/sidekiq_unique_jobs/unique_args.rb +19 -21
  88. data/lib/sidekiq_unique_jobs/unlockable.rb +11 -2
  89. data/lib/sidekiq_unique_jobs/update_version.rb +25 -0
  90. data/lib/sidekiq_unique_jobs/upgrade_locks.rb +151 -0
  91. data/lib/sidekiq_unique_jobs/version.rb +3 -1
  92. data/lib/sidekiq_unique_jobs/version_check.rb +1 -1
  93. data/lib/sidekiq_unique_jobs/web.rb +25 -19
  94. data/lib/sidekiq_unique_jobs/web/helpers.rb +98 -6
  95. data/lib/sidekiq_unique_jobs/web/views/lock.erb +108 -0
  96. data/lib/sidekiq_unique_jobs/web/views/locks.erb +52 -0
  97. data/lib/tasks/changelog.rake +4 -3
  98. metadata +70 -35
  99. data/lib/sidekiq_unique_jobs/client/middleware.rb +0 -56
  100. data/lib/sidekiq_unique_jobs/server/middleware.rb +0 -46
  101. data/lib/sidekiq_unique_jobs/timeout.rb +0 -8
  102. data/lib/sidekiq_unique_jobs/timeout/calculator.rb +0 -63
  103. data/lib/sidekiq_unique_jobs/util.rb +0 -103
  104. data/lib/sidekiq_unique_jobs/web/views/unique_digest.erb +0 -28
  105. data/lib/sidekiq_unique_jobs/web/views/unique_digests.erb +0 -46
  106. data/redis/acquire_lock.lua +0 -21
  107. data/redis/convert_legacy_lock.lua +0 -13
  108. data/redis/delete.lua +0 -14
  109. data/redis/delete_by_digest.lua +0 -23
  110. data/redis/delete_job_by_digest.lua +0 -60
  111. data/redis/lock.lua +0 -62
  112. data/redis/release_stale_locks.lua +0 -90
  113. data/redis/unlock.lua +0 -35
@@ -11,11 +11,12 @@ module SidekiqUniqueJobs
11
11
 
12
12
  # Creates a connection to redis
13
13
  # @return [Sidekiq::RedisConnection, ConnectionPool] a connection to redis
14
- def redis(redis_pool = nil)
15
- if redis_pool
16
- redis_pool.with { |conn| yield conn }
14
+ def redis(r_pool = nil, &block)
15
+ r_pool ||= defined?(redis_pool) ? redis_pool : r_pool
16
+ if r_pool
17
+ r_pool.with(&block)
17
18
  else
18
- Sidekiq.redis { |conn| yield conn }
19
+ Sidekiq.redis(&block)
19
20
  end
20
21
  end
21
22
  end
@@ -6,28 +6,40 @@
6
6
  # @author Mikael Henriksson <mikael@zoolutions.se>
7
7
  #
8
8
  module SidekiqUniqueJobs
9
- ARGS_KEY ||= "args"
10
- APARTMENT ||= "apartment"
11
- AT_KEY ||= "at"
12
- CLASS_KEY ||= "class"
13
- JAVA ||= "java"
14
- JID_KEY ||= "jid"
15
- LOCK_DIGEST_KEY ||= "lock_digest"
16
- LOCK_EXPIRATION_KEY ||= "lock_expiration"
17
- LOCK_TIMEOUT_KEY ||= "lock_timeout"
18
- LOCK_TTL_KEY ||= "lock_ttl"
19
- LOG_DUPLICATE_KEY ||= "log_duplicate_payload"
20
- QUEUE_KEY ||= "queue"
21
- UNIQUE_ACROSS_QUEUES_KEY ||= "unique_across_queues"
22
- UNIQUE_ACROSS_WORKERS_KEY ||= "unique_across_workers"
23
- UNIQUE_ARGS_KEY ||= "unique_args"
24
- UNIQUE_DIGEST_KEY ||= "unique_digest"
25
- UNIQUE_KEY ||= "unique"
26
- UNIQUE_SET ||= "unique:keys"
27
- LOCK_KEY ||= "lock"
28
- ON_CONFLICT_KEY ||= "on_conflict"
29
- UNIQUE_ON_ALL_QUEUES_KEY ||= "unique_on_all_queues" # TODO: Remove in v6.1
30
- UNIQUE_PREFIX_KEY ||= "unique_prefix"
31
- RETRY_SET ||= "retry"
32
- SCHEDULE_SET ||= "schedule"
9
+ ARGS ||= "args"
10
+ AT ||= "at"
11
+ CHANGELOGS ||= "uniquejobs:changelog"
12
+ CLASS ||= "class"
13
+ DEAD_VERSION ||= "uniquejobs:dead"
14
+ DIGESTS ||= "uniquejobs:digests"
15
+ ERRORS ||= "errors"
16
+ JID ||= "jid"
17
+ LIMIT ||= "limit"
18
+ LIVE_VERSION ||= "uniquejobs:live"
19
+ LOCK ||= "lock"
20
+ LOCK_EXPIRATION ||= "lock_expiration"
21
+ LOCK_INFO ||= "lock_info"
22
+ LOCK_LIMIT ||= "lock_limit"
23
+ LOCK_PREFIX ||= "lock_prefix"
24
+ LOCK_TIMEOUT ||= "lock_timeout"
25
+ LOCK_TTL ||= "lock_ttl"
26
+ LOCK_TYPE ||= "lock_type"
27
+ LOG_DUPLICATE ||= "log_duplicate"
28
+ ON_CLIENT_CONFLICT ||= "on_client_conflict"
29
+ ON_CONFLICT ||= "on_conflict"
30
+ ON_SERVER_CONFLICT ||= "on_server_conflict"
31
+ QUEUE ||= "queue"
32
+ RETRY ||= "retry"
33
+ SCHEDULE ||= "schedule"
34
+ TIME ||= "time"
35
+ TIMEOUT ||= "timeout"
36
+ TTL ||= "ttl"
37
+ TYPE ||= "type"
38
+ UNIQUE ||= "unique"
39
+ UNIQUE_ACROSS_QUEUES ||= "unique_across_queues"
40
+ UNIQUE_ACROSS_WORKERS ||= "unique_across_workers"
41
+ UNIQUE_ARGS ||= "unique_args"
42
+ UNIQUE_DIGEST ||= "unique_digest"
43
+ UNIQUE_PREFIX ||= "unique_prefix"
44
+ WORKER ||= "worker"
33
45
  end
@@ -2,8 +2,18 @@
2
2
 
3
3
  # :nocov:
4
4
 
5
+ #
6
+ # Monkey patches for the ruby Hash
7
+ #
5
8
  class Hash
6
9
  unless {}.respond_to?(:slice)
10
+ #
11
+ # Returns only the matching keys in a new hash
12
+ #
13
+ # @param [Array<String>, Array<Symbol>] keys the keys to match
14
+ #
15
+ # @return [Hash]
16
+ #
7
17
  def slice(*keys)
8
18
  keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
9
19
  keys.each_with_object(self.class.new) { |k, hash| hash[k] = self[k] if key?(k) }
@@ -11,12 +21,24 @@ class Hash
11
21
  end
12
22
 
13
23
  unless {}.respond_to?(:stringify_keys)
24
+ #
25
+ # Converts all keys to string
26
+ #
27
+ #
28
+ # @return [Hash<String>]
29
+ #
14
30
  def stringify_keys
15
31
  transform_keys(&:to_s)
16
32
  end
17
33
  end
18
34
 
19
35
  unless {}.respond_to?(:transform_keys)
36
+ #
37
+ # Transforms all keys by yielding to the caller
38
+ #
39
+ #
40
+ # @return [Hash]
41
+ #
20
42
  def transform_keys
21
43
  result = {}
22
44
  each_key do |key|
@@ -27,6 +49,13 @@ class Hash
27
49
  end
28
50
 
29
51
  unless {}.respond_to?(:slice!)
52
+ #
53
+ # Removes all keys not provided from the current hash and returns it
54
+ #
55
+ # @param [Array<String>, Array<Symbol>] keys the keys to match
56
+ #
57
+ # @return [Hash]
58
+ #
30
59
  def slice!(*keys)
31
60
  keys.map! { |key| convert_key(key) } if respond_to?(:convert_key, true)
32
61
  omit = slice(*self.keys - keys)
@@ -39,8 +68,17 @@ class Hash
39
68
  end
40
69
  end
41
70
 
71
+ #
72
+ # Monkey patches for the ruby Array
73
+ #
42
74
  class Array
43
75
  unless [].respond_to?(:extract_options!)
76
+ #
77
+ # Extract the last argument if it is a hash
78
+ #
79
+ #
80
+ # @return [Hash]
81
+ #
44
82
  def extract_options!
45
83
  if last.is_a?(Hash) && last.instance_of?(Hash)
46
84
  pop
@@ -1,94 +1,100 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module SidekiqUniqueJobs
4
- # Utility module to help manage unique digests in redis.
4
+ #
5
+ # Class Changelogs provides access to the changelog entries
5
6
  #
6
7
  # @author Mikael Henriksson <mikael@zoolutions.se>
7
- module Digests
8
+ #
9
+ class Digests < Redis::SortedSet
10
+ #
11
+ # @return [Integer] the number of matches to return by default
8
12
  DEFAULT_COUNT = 1_000
13
+ #
14
+ # @return [String] the default pattern to use for matching
9
15
  SCAN_PATTERN = "*"
10
- CHUNK_SIZE = 100
11
16
 
12
- include SidekiqUniqueJobs::Logging
13
- include SidekiqUniqueJobs::Connection
14
- extend self
15
-
16
- # Return unique digests matching pattern
17
- #
18
- # @param [String] pattern a pattern to match with
19
- # @param [Integer] count the maximum number to match
20
- # @return [Array<String>] with unique digests
21
- def all(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
22
- redis { |conn| conn.sscan_each(UNIQUE_SET, match: pattern, count: count).to_a }
17
+ def initialize
18
+ super(DIGESTS)
23
19
  end
24
20
 
25
- # Paginate unique digests
26
21
  #
27
- # @param [String] pattern a pattern to match with
28
- # @param [Integer] cursor the maximum number to match
29
- # @param [Integer] page_size the current cursor position
22
+ # Adds a digest
30
23
  #
31
- # @return [Array<String>] with unique digests
32
- def page(pattern: SCAN_PATTERN, cursor: 0, page_size: 100)
33
- redis do |conn|
34
- total_size, digests = conn.multi do
35
- conn.scard(UNIQUE_SET)
36
- conn.sscan(UNIQUE_SET, cursor, match: pattern, count: page_size)
37
- end
38
-
39
- [total_size, digests[0], digests[1]]
40
- end
41
- end
42
-
43
- # Get a total count of unique digests
24
+ # @param [String] digest the digest to add
44
25
  #
45
- # @return [Integer] number of digests
46
- def count
47
- redis { |conn| conn.scard(UNIQUE_SET) }
26
+ def add(digest)
27
+ redis { |conn| conn.zadd(key, now_f, digest) }
48
28
  end
49
29
 
30
+ #
50
31
  # Deletes unique digest either by a digest or pattern
51
32
  #
52
- # @param [String] digest the full digest to delete
53
- # @param [String] pattern a key pattern to match with
54
- # @param [Integer] count the maximum number
55
- # @raise [ArgumentError] when both pattern and digest are nil
33
+ # @overload call_script(digest: "abcdefab")
34
+ # Call script with digest
35
+ # @param [String] digest: a digest to delete
36
+ # @overload call_script(pattern: "*", count: 1_000)
37
+ # Call script with pattern
38
+ # @param [String] pattern: "*" a pattern to match
39
+ # @param [String] count: DEFAULT_COUNT the number of keys to delete
40
+ #
41
+ # @raise [ArgumentError] when given neither pattern nor digest
42
+ #
56
43
  # @return [Array<String>] with unique digests
44
+ #
57
45
  def del(digest: nil, pattern: nil, count: DEFAULT_COUNT)
58
- warn("#{self}.#{__method__} has been deprecated and will be removed in a future version")
59
-
60
46
  return delete_by_pattern(pattern, count: count) if pattern
61
47
  return delete_by_digest(digest) if digest
62
48
 
63
- raise ArgumentError, "either digest or pattern need to be provided"
49
+ raise ArgumentError, "##{__method__} requires either a :digest or a :pattern"
64
50
  end
65
51
 
66
- # Deletes unique digest either by a digest or pattern
67
52
  #
68
- # @param [String] digest the full digest to delete
69
- def delete_by_digest(digest) # rubocop:disable Metrics/MethodLength
70
- result, elapsed = timed do
71
- Scripts.call(:delete_by_digest, nil, keys: [
72
- UNIQUE_SET,
73
- digest,
74
- "#{digest}:EXISTS",
75
- "#{digest}:GRABBED",
76
- "#{digest}:AVAILABLE",
77
- "#{digest}:VERSION",
78
- "#{digest}:RUN:EXISTS",
79
- "#{digest}:RUN:GRABBED",
80
- "#{digest}:RUN:AVAILABLE",
81
- "#{digest}:RUN:VERSION",
82
- ])
83
-
84
- count
53
+ # The entries in this sorted set
54
+ #
55
+ # @param [String] pattern SCAN_PATTERN the match pattern to search for
56
+ # @param [Integer] count DEFAULT_COUNT the number of entries to return
57
+ #
58
+ # @return [Array<String>] an array of digests matching the given pattern
59
+ #
60
+ def entries(pattern: SCAN_PATTERN, count: DEFAULT_COUNT)
61
+ options = {}
62
+ options[:match] = pattern
63
+ options[:count] = count if count
64
+
65
+ result = redis { |conn| conn.zscan_each(key, options).to_a }
66
+
67
+ result.each_with_object({}) do |entry, hash|
68
+ hash[entry[0]] = entry[1]
85
69
  end
70
+ end
86
71
 
87
- log_info("#{__method__}(#{digest}) completed in #{elapsed}ms")
72
+ #
73
+ # Returns a paginated
74
+ #
75
+ # @param [Integer] cursor the cursor for this iteration
76
+ # @param [String] pattern SCAN_PATTERN the match pattern to search for
77
+ # @param [Integer] page_size 100 the size per page
78
+ #
79
+ # @return [Array<Integer, Integer, Array<Lock>>] total_size, next_cursor, locks
80
+ #
81
+ def page(cursor: 0, pattern: SCAN_PATTERN, page_size: 100)
82
+ redis do |conn|
83
+ total_size, digests = conn.multi do
84
+ conn.zcard(key)
85
+ conn.zscan(key, cursor, match: pattern, count: page_size)
86
+ end
88
87
 
89
- result
88
+ [
89
+ total_size,
90
+ digests[0], # next_cursor
91
+ digests[1].map { |digest, score| Lock.new(digest, time: score) }, # entries
92
+ ]
93
+ end
90
94
  end
91
95
 
96
+ private
97
+
92
98
  # Deletes unique digests by pattern
93
99
  #
94
100
  # @param [String] pattern a key pattern to match with
@@ -96,9 +102,8 @@ module SidekiqUniqueJobs
96
102
  # @return [Array<String>] with unique digests
97
103
  def delete_by_pattern(pattern, count: DEFAULT_COUNT)
98
104
  result, elapsed = timed do
99
- digests = all(pattern: pattern, count: count)
100
- batch_delete(digests)
101
- digests.size
105
+ digests = entries(pattern: pattern, count: count).keys
106
+ redis { |conn| BatchDelete.call(digests, conn) }
102
107
  end
103
108
 
104
109
  log_info("#{__method__}(#{pattern}, count: #{count}) completed in #{elapsed}ms")
@@ -106,38 +111,18 @@ module SidekiqUniqueJobs
106
111
  result
107
112
  end
108
113
 
109
- private
110
-
111
- def batch_delete(digests) # rubocop:disable Metrics/MethodLength
112
- redis do |conn|
113
- digests.each_slice(CHUNK_SIZE) do |chunk|
114
- conn.pipelined do
115
- chunk.each do |digest|
116
- conn.del digest
117
- conn.srem(UNIQUE_SET, digest)
118
- conn.del("#{digest}:EXISTS")
119
- conn.del("#{digest}:GRABBED")
120
- conn.del("#{digest}:VERSION")
121
- conn.del("#{digest}:AVAILABLE")
122
- conn.del("#{digest}:RUN:EXISTS")
123
- conn.del("#{digest}:RUN:GRABBED")
124
- conn.del("#{digest}:RUN:VERSION")
125
- conn.del("#{digest}:RUN:AVAILABLE")
126
- end
127
- end
128
- end
114
+ # Delete unique digests by digest
115
+ # Also deletes the :AVAILABLE, :EXPIRED etc keys
116
+ #
117
+ # @param [String] digest a unique digest to delete
118
+ def delete_by_digest(digest)
119
+ result, elapsed = timed do
120
+ call_script(:delete_by_digest, [digest, key])
129
121
  end
130
- end
131
122
 
132
- def timed
133
- start = current_time
134
- result = yield
135
- elapsed = (current_time - start).round(2)
136
- [result, elapsed]
137
- end
123
+ log_info("#{__method__}(#{digest}) completed in #{elapsed}ms")
138
124
 
139
- def current_time
140
- Time.now
125
+ result
141
126
  end
142
127
  end
143
128
  end
@@ -1,29 +1,173 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module SidekiqUniqueJobs
4
+ #
5
+ # Base class for all exceptions raised from the gem
6
+ #
7
+ # @author Mikael Henriksson <mikael@zoolutions.se>
8
+ #
9
+ class UniqueJobsError < ::RuntimeError
10
+ end
11
+
4
12
  # Error raised when a Lua script fails to execute
5
13
  #
6
14
  # @author Mikael Henriksson <mikael@zoolutions.se>
7
- class Conflict < StandardError
15
+ class Conflict < UniqueJobsError
8
16
  def initialize(item)
9
- super("Item with the key: #{item[UNIQUE_DIGEST_KEY]} is already scheduled or processing")
17
+ super("Item with the key: #{item[UNIQUE_DIGEST]} is already scheduled or processing")
18
+ end
19
+ end
20
+
21
+ #
22
+ # Error raised when trying to add a duplicate lock
23
+ #
24
+ # @author Mikael Henriksson <mikael@zoolutions.se>
25
+ #
26
+ class DuplicateLock < UniqueJobsError
27
+ end
28
+
29
+ #
30
+ # Error raised when trying to add a duplicate strategy
31
+ #
32
+ # @author Mikael Henriksson <mikael@zoolutions.se>
33
+ #
34
+ class DuplicateStrategy < UniqueJobsError
35
+ end
36
+
37
+ #
38
+ # Error raised when an invalid argument is given
39
+ #
40
+ # @author Mikael Henriksson <mikael@zoolutions.se>
41
+ #
42
+ class InvalidArgument < UniqueJobsError
43
+ end
44
+
45
+ #
46
+ # Raised when a workers configuration is invalid
47
+ #
48
+ # @author Mikael Henriksson <mikael@zoolutions.se>
49
+ #
50
+ class InvalidWorker < UniqueJobsError
51
+ def initialize(lock_config)
52
+ super(<<~FAILURE_MESSAGE)
53
+ Expected #{lock_config.worker} to have valid sidekiq options but found the following problems:
54
+ #{lock_config.errors_as_string}
55
+ FAILURE_MESSAGE
56
+ end
57
+ end
58
+
59
+ # Error raised when a Lua script fails to execute
60
+ #
61
+ # @author Mikael Henriksson <mikael@zoolutions.se>
62
+ class InvalidUniqueArguments < UniqueJobsError
63
+ def initialize(given:, worker_class:, unique_args_method:)
64
+ uniq_args_meth = worker_class.method(unique_args_method)
65
+ num_args = uniq_args_meth.arity
66
+ # source_location = uniq_args_meth.source_location
67
+
68
+ super(
69
+ "#{worker_class}#unique_args takes #{num_args} arguments, received #{given.inspect}"
70
+ )
71
+ end
72
+ end
73
+
74
+ #
75
+ # Raised when a workers configuration is invalid
76
+ #
77
+ # @author Mikael Henriksson <mikael@zoolutions.se>
78
+ #
79
+ class NotUniqueWorker < UniqueJobsError
80
+ def initialize(options: {})
81
+ super("#{options[:class]} is not configured for uniqueness. Missing the key `:lock` in #{options.inspect}")
10
82
  end
11
83
  end
12
84
 
13
85
  # Error raised from {OnConflict::Raise}
14
86
  #
15
87
  # @author Mikael Henriksson <mikael@zoolutions.se>
16
- class ScriptError < StandardError
17
- # @param [Symbol] file_name the name of the lua script
18
- # @param [Redis::CommandError] source_exception exception to handle
19
- def initialize(file_name:, source_exception:)
20
- super("Problem compiling #{file_name}. Message: #{source_exception.message}")
88
+ class ScriptError < UniqueJobsError
89
+ # Reformats errors raised by redis representing failures while executing
90
+ # a lua script. The default errors have confusing messages and backtraces,
91
+ # and a type of +RuntimeError+. This class improves the message and
92
+ # modifies the backtrace to include the lua script itself in a reasonable
93
+ # way.
94
+
95
+ PATTERN = /ERR Error (compiling|running) script \(.*?\): .*?:(\d+): (.*)/.freeze
96
+ LIB_PATH = File.expand_path("..", __dir__)
97
+ CONTEXT_LINE_NUMBER = 3
98
+
99
+ attr_reader :error, :file, :content
100
+
101
+ # Is this error one that should be reformatted?
102
+ #
103
+ # @param error [StandardError] the original error raised by redis
104
+ # @return [Boolean] is this an error that should be reformatted?
105
+ def self.intercepts?(error)
106
+ error.message =~ PATTERN
107
+ end
108
+
109
+ # Initialize a new {ScriptError} from an existing redis error, adjusting
110
+ # the message and backtrace in the process.
111
+ #
112
+ # @param error [StandardError] the original error raised by redis
113
+ # @param file [Pathname] full path to the lua file the error occurred in
114
+ # @param content [String] lua file content the error occurred in
115
+ # :nocov:
116
+ def initialize(error, file, content)
117
+ @error = error
118
+ @file = file
119
+ @content = content
120
+ @backtrace = @error.backtrace
121
+
122
+ @error.message.match(PATTERN) do |regexp_match|
123
+ line_number = regexp_match[2].to_i
124
+ message = regexp_match[3]
125
+ error_context = generate_error_context(content, line_number)
126
+
127
+ super("#{message}\n\n#{error_context}\n\n")
128
+ set_backtrace(generate_backtrace(file, line_number))
129
+ end
130
+ end
131
+
132
+ private
133
+
134
+ # :nocov:
135
+ def generate_error_context(content, line_number)
136
+ lines = content.lines.to_a
137
+ beginning_line_number = [1, line_number - CONTEXT_LINE_NUMBER].max
138
+ ending_line_number = [lines.count, line_number + CONTEXT_LINE_NUMBER].min
139
+ line_number_width = ending_line_number.to_s.length
140
+
141
+ (beginning_line_number..ending_line_number).map do |number|
142
+ indicator = (number == line_number) ? "=>" : " "
143
+ formatted_number = format("%#{line_number_width}d", number)
144
+ " #{indicator} #{formatted_number}: #{lines[number - 1]}"
145
+ end.join.chomp
146
+ end
147
+
148
+ # :nocov:
149
+ def generate_backtrace(file, line_number)
150
+ pre_gem = backtrace_before_entering_gem(@backtrace)
151
+ index_of_first_unique_jobs_line = (@backtrace.size - pre_gem.size - 1)
152
+ pre_gem.unshift(@backtrace[index_of_first_unique_jobs_line])
153
+ pre_gem.unshift("#{file}:#{line_number}")
154
+ pre_gem
155
+ end
156
+
157
+ # :nocov:
158
+ def backtrace_before_entering_gem(backtrace)
159
+ backtrace.reverse.take_while { |line| !line_from_gem(line) }.reverse
160
+ end
161
+
162
+ # :nocov:
163
+ def line_from_gem(line)
164
+ line.split(":").first.include?(LIB_PATH)
21
165
  end
22
166
  end
23
167
 
24
168
  # Error raised from {OptionsWithFallback#lock_class}
25
169
  #
26
170
  # @author Mikael Henriksson <mikael@zoolutions.se>
27
- class UnknownLock < StandardError
171
+ class UnknownLock < UniqueJobsError
28
172
  end
29
173
  end