sidekiq-unique-jobs 6.0.23 → 7.1.12
Potentially problematic release: this version of sidekiq-unique-jobs might be problematic.
- checksums.yaml +4 -4
- data/CHANGELOG.md +922 -41
- data/README.md +821 -284
- data/lib/sidekiq_unique_jobs/batch_delete.rb +123 -0
- data/lib/sidekiq_unique_jobs/changelog.rb +78 -0
- data/lib/sidekiq_unique_jobs/cli.rb +34 -31
- data/lib/sidekiq_unique_jobs/config.rb +314 -0
- data/lib/sidekiq_unique_jobs/connection.rb +6 -5
- data/lib/sidekiq_unique_jobs/constants.rb +45 -24
- data/lib/sidekiq_unique_jobs/core_ext.rb +80 -0
- data/lib/sidekiq_unique_jobs/deprecation.rb +65 -0
- data/lib/sidekiq_unique_jobs/digests.rb +70 -102
- data/lib/sidekiq_unique_jobs/exceptions.rb +88 -12
- data/lib/sidekiq_unique_jobs/job.rb +41 -12
- data/lib/sidekiq_unique_jobs/json.rb +47 -0
- data/lib/sidekiq_unique_jobs/key.rb +93 -0
- data/lib/sidekiq_unique_jobs/lock/base_lock.rb +111 -82
- data/lib/sidekiq_unique_jobs/lock/client_validator.rb +28 -0
- data/lib/sidekiq_unique_jobs/lock/server_validator.rb +27 -0
- data/lib/sidekiq_unique_jobs/lock/until_and_while_executing.rb +40 -15
- data/lib/sidekiq_unique_jobs/lock/until_executed.rb +25 -7
- data/lib/sidekiq_unique_jobs/lock/until_executing.rb +22 -2
- data/lib/sidekiq_unique_jobs/lock/until_expired.rb +26 -16
- data/lib/sidekiq_unique_jobs/lock/validator.rb +96 -0
- data/lib/sidekiq_unique_jobs/lock/while_executing.rb +23 -12
- data/lib/sidekiq_unique_jobs/lock/while_executing_reject.rb +3 -3
- data/lib/sidekiq_unique_jobs/lock.rb +325 -0
- data/lib/sidekiq_unique_jobs/lock_args.rb +123 -0
- data/lib/sidekiq_unique_jobs/lock_config.rb +126 -0
- data/lib/sidekiq_unique_jobs/lock_digest.rb +79 -0
- data/lib/sidekiq_unique_jobs/lock_info.rb +68 -0
- data/lib/sidekiq_unique_jobs/lock_timeout.rb +62 -0
- data/lib/sidekiq_unique_jobs/lock_ttl.rb +77 -0
- data/lib/sidekiq_unique_jobs/locksmith.rb +275 -102
- data/lib/sidekiq_unique_jobs/logging/middleware_context.rb +44 -0
- data/lib/sidekiq_unique_jobs/logging.rb +188 -33
- data/lib/sidekiq_unique_jobs/lua/delete.lua +51 -0
- data/lib/sidekiq_unique_jobs/lua/delete_by_digest.lua +42 -0
- data/lib/sidekiq_unique_jobs/lua/delete_job_by_digest.lua +38 -0
- data/lib/sidekiq_unique_jobs/lua/find_digest_in_queues.lua +26 -0
- data/lib/sidekiq_unique_jobs/lua/lock.lua +93 -0
- data/lib/sidekiq_unique_jobs/lua/locked.lua +35 -0
- data/lib/sidekiq_unique_jobs/lua/queue.lua +87 -0
- data/lib/sidekiq_unique_jobs/lua/reap_orphans.lua +94 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_common.lua +40 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_current_time.lua +8 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_queue.lua +22 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_delete_from_sorted_set.lua +18 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_process_set.lua +53 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_queues.lua +43 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_find_digest_in_sorted_set.lua +24 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_hgetall.lua +13 -0
- data/lib/sidekiq_unique_jobs/lua/shared/_upgrades.lua +3 -0
- data/lib/sidekiq_unique_jobs/lua/unlock.lua +102 -0
- data/lib/sidekiq_unique_jobs/lua/update_version.lua +40 -0
- data/lib/sidekiq_unique_jobs/lua/upgrade.lua +68 -0
- data/lib/sidekiq_unique_jobs/middleware/client.rb +40 -0
- data/lib/sidekiq_unique_jobs/middleware/server.rb +29 -0
- data/lib/sidekiq_unique_jobs/middleware.rb +29 -31
- data/lib/sidekiq_unique_jobs/normalizer.rb +4 -4
- data/lib/sidekiq_unique_jobs/on_conflict/log.rb +9 -5
- data/lib/sidekiq_unique_jobs/on_conflict/null_strategy.rb +1 -1
- data/lib/sidekiq_unique_jobs/on_conflict/raise.rb +1 -1
- data/lib/sidekiq_unique_jobs/on_conflict/reject.rb +61 -15
- data/lib/sidekiq_unique_jobs/on_conflict/replace.rb +54 -14
- data/lib/sidekiq_unique_jobs/on_conflict/reschedule.rb +16 -5
- data/lib/sidekiq_unique_jobs/on_conflict/strategy.rb +25 -6
- data/lib/sidekiq_unique_jobs/on_conflict.rb +23 -10
- data/lib/sidekiq_unique_jobs/options_with_fallback.rb +35 -32
- data/lib/sidekiq_unique_jobs/orphans/lua_reaper.rb +29 -0
- data/lib/sidekiq_unique_jobs/orphans/manager.rb +248 -0
- data/lib/sidekiq_unique_jobs/orphans/null_reaper.rb +24 -0
- data/lib/sidekiq_unique_jobs/orphans/observer.rb +42 -0
- data/lib/sidekiq_unique_jobs/orphans/reaper.rb +114 -0
- data/lib/sidekiq_unique_jobs/orphans/reaper_resurrector.rb +170 -0
- data/lib/sidekiq_unique_jobs/orphans/ruby_reaper.rb +231 -0
- data/lib/sidekiq_unique_jobs/redis/entity.rb +112 -0
- data/lib/sidekiq_unique_jobs/redis/hash.rb +56 -0
- data/lib/sidekiq_unique_jobs/redis/list.rb +32 -0
- data/lib/sidekiq_unique_jobs/redis/set.rb +32 -0
- data/lib/sidekiq_unique_jobs/redis/sorted_set.rb +86 -0
- data/lib/sidekiq_unique_jobs/redis/string.rb +49 -0
- data/lib/sidekiq_unique_jobs/redis.rb +11 -0
- data/lib/sidekiq_unique_jobs/reflectable.rb +26 -0
- data/lib/sidekiq_unique_jobs/reflections.rb +79 -0
- data/lib/sidekiq_unique_jobs/rspec/matchers/have_valid_sidekiq_options.rb +51 -0
- data/lib/sidekiq_unique_jobs/rspec/matchers.rb +26 -0
- data/lib/sidekiq_unique_jobs/script/caller.rb +127 -0
- data/lib/sidekiq_unique_jobs/script.rb +15 -0
- data/lib/sidekiq_unique_jobs/server.rb +61 -0
- data/lib/sidekiq_unique_jobs/sidekiq_unique_ext.rb +114 -65
- data/lib/sidekiq_unique_jobs/sidekiq_unique_jobs.rb +241 -35
- data/lib/sidekiq_unique_jobs/sidekiq_worker_methods.rb +18 -16
- data/lib/sidekiq_unique_jobs/testing.rb +62 -21
- data/lib/sidekiq_unique_jobs/timer_task.rb +78 -0
- data/lib/sidekiq_unique_jobs/timing.rb +58 -0
- data/lib/sidekiq_unique_jobs/unlockable.rb +20 -4
- data/lib/sidekiq_unique_jobs/update_version.rb +25 -0
- data/lib/sidekiq_unique_jobs/upgrade_locks.rb +155 -0
- data/lib/sidekiq_unique_jobs/version.rb +3 -1
- data/lib/sidekiq_unique_jobs/version_check.rb +23 -4
- data/lib/sidekiq_unique_jobs/web/helpers.rb +128 -13
- data/lib/sidekiq_unique_jobs/web/views/_paging.erb +4 -4
- data/lib/sidekiq_unique_jobs/web/views/changelogs.erb +54 -0
- data/lib/sidekiq_unique_jobs/web/views/lock.erb +108 -0
- data/lib/sidekiq_unique_jobs/web/views/locks.erb +54 -0
- data/lib/sidekiq_unique_jobs/web.rb +57 -27
- data/lib/sidekiq_unique_jobs.rb +52 -7
- data/lib/tasks/changelog.rake +15 -15
- metadata +124 -184
- data/lib/sidekiq_unique_jobs/client/middleware.rb +0 -56
- data/lib/sidekiq_unique_jobs/scripts.rb +0 -118
- data/lib/sidekiq_unique_jobs/server/middleware.rb +0 -46
- data/lib/sidekiq_unique_jobs/timeout/calculator.rb +0 -63
- data/lib/sidekiq_unique_jobs/timeout.rb +0 -8
- data/lib/sidekiq_unique_jobs/unique_args.rb +0 -150
- data/lib/sidekiq_unique_jobs/util.rb +0 -103
- data/lib/sidekiq_unique_jobs/web/views/unique_digest.erb +0 -28
- data/lib/sidekiq_unique_jobs/web/views/unique_digests.erb +0 -46
- data/redis/acquire_lock.lua +0 -21
- data/redis/convert_legacy_lock.lua +0 -13
- data/redis/delete.lua +0 -14
- data/redis/delete_by_digest.lua +0 -23
- data/redis/delete_job_by_digest.lua +0 -60
- data/redis/lock.lua +0 -62
- data/redis/release_stale_locks.lua +0 -90
- data/redis/unlock.lua +0 -35
data/lib/sidekiq_unique_jobs/logging.rb

```diff
@@ -3,65 +3,220 @@
 module SidekiqUniqueJobs
   # Utility module for reducing the number of uses of logger.
   #
-  # @author Mikael Henriksson <mikael@
+  # @author Mikael Henriksson <mikael@mhenrixon.com>
   module Logging
-
+    def self.included(base)
+      base.send(:extend, self)
+    end
+
+    #
+    # A convenience method for using the configured gem logger
+    #
+    # @see SidekiqUniqueJobs#.logger
+    #
+    # @return [Logger]
+    #
     def logger
       SidekiqUniqueJobs.logger
     end
 
+    #
     # Logs a message at debug level
-    #
-    # @
-    #
-
-
+    #
+    # @param [String, Exception] message_or_exception the message or exception to log
+    #
+    # @return [void]
+    #
+    # @yield [String, Exception] the message or exception to use for log message
+    #
+    def log_debug(message_or_exception = nil, item = nil, &block)
+      message = build_message(message_or_exception, item)
+      logger.debug(message, &block)
       nil
     end
 
+    #
     # Logs a message at info level
-    #
-    # @
-    #
-
-
+    #
+    # @param [String, Exception] message_or_exception the message or exception to log
+    #
+    # @return [void]
+    #
+    # @yield [String, Exception] the message or exception to use for log message
+    #
+    def log_info(message_or_exception = nil, item = nil, &block)
+      message = build_message(message_or_exception, item)
+      logger.info(message, &block)
       nil
     end
 
+    #
     # Logs a message at warn level
-    #
-    # @
-    #
-
-
+    #
+    # @param [String, Exception] message_or_exception the message or exception to log
+    #
+    # @return [void]
+    #
+    # @yield [String, Exception] the message or exception to use for log message
+    #
+    def log_warn(message_or_exception = nil, item = nil, &block)
+      message = build_message(message_or_exception, item)
+      logger.warn(message, &block)
       nil
     end
 
+    #
     # Logs a message at error level
-    #
-    # @
-    #
-
-
+    #
+    # @param [String, Exception] message_or_exception the message or exception to log
+    #
+    # @return [void]
+    #
+    # @yield [String, Exception] the message or exception to use for log message
+    #
+    def log_error(message_or_exception = nil, item = nil, &block)
+      message = build_message(message_or_exception, item)
+      logger.error(message, &block)
       nil
     end
 
+    #
     # Logs a message at fatal level
-    #
-    # @
-    #
-
-
+    #
+    # @param [String, Exception] message_or_exception the message or exception to log
+    #
+    # @return [void]
+    #
+    # @yield [String, Exception] the message or exception to use for log message
+    #
+    def log_fatal(message_or_exception = nil, item = nil, &block)
+      message = build_message(message_or_exception, item)
+      logger.fatal(message, &block)
+
       nil
     end
 
-
-
-
-
-
-
+    #
+    # Build a log message
+    #
+    # @param [String, Exception] message_or_exception an entry to log
+    # @param [Hash] item the sidekiq job hash
+    #
+    # @return [String] a complete log entry
+    #
+    def build_message(message_or_exception, item = nil)
+      return nil if message_or_exception.nil?
+      return message_or_exception if item.nil?
+
+      message = message_or_exception.dup
+      details = item.slice(LOCK, QUEUE, CLASS, JID, LOCK_DIGEST).each_with_object([]) do |(key, value), memo|
+        memo << "#{key}=#{value}"
+      end
+      message << " ("
+      message << details.join(" ")
+      message << ")"
+
+      message
+    end
+
+    #
+    # Wraps the middleware logic with context aware logging
+    #
+    #
+    # @return [void]
+    #
+    # @yieldreturn [void] yield to the middleware instance
+    #
+    def with_logging_context
+      with_configured_loggers_context do
+        return yield
       end
+
+      nil # Need to make sure we don't return anything here
+    end
+
+    #
+    # Attempt to setup context aware logging for the given logger
+    #
+    #
+    # @return [void]
+    #
+    # @yield
+    #
+    def with_configured_loggers_context(&block)
+      logger_method.call(logging_context, &block)
+    end
+
+    #
+    # Setup some variables to add to each log line
+    #
+    #
+    # @return [Hash] the context to use for each log line
+    #
+    def logging_context
+      raise NotImplementedError, "#{__method__} needs to be implemented in #{self.class}"
+    end
+
+    private
+
+    #
+    # A memoized method to use for setting up a logging context
+    #
+    #
+    # @return [proc] the method to call
+    #
+    def logger_method
+      @logger_method ||= sidekiq_context_method
+      @logger_method ||= sidekiq_logger_context_method
+      @logger_method ||= sidekiq_logging_context_method
+      @logger_method ||= no_sidekiq_context_method
+    end
+
+    #
+    # Checks if the logger respond to `with_context`.
+    #
+    # @note only used to remove the need for explicitly ignoring manual dispatch in other places.
+    #
+    #
+    # @return [true,false]
+    #
+    def logger_respond_to_with_context?
+      logger.respond_to?(:with_context)
+    end
+
+    #
+    # Checks if the logger context takes a hash argument
+    #
+    # @note only used to remove the need for explicitly ignoring manual dispatch in other places.
+    #
+    #
+    # @return [true,false]
+    #
+    def logger_context_hash?
+      defined?(Sidekiq::Context) || logger_respond_to_with_context?
+    end
+
+    def sidekiq_context_method
+      Sidekiq::Context.method(:with) if defined?(Sidekiq::Context)
+    end
+
+    def sidekiq_logger_context_method
+      logger.method(:with_context) if logger_respond_to_with_context?
+    end
+
+    def sidekiq_logging_context_method
+      Sidekiq::Logging.method(:with_context) if defined?(Sidekiq::Logging)
+    end
+
+    def no_sidekiq_context_method
+      method(:fake_logger_context)
+    end
+
+    def fake_logger_context(_context)
+      logger.warn "Don't know how to setup the logging context. Please open a feature request:" \
+        " https://github.com/mhenrixon/sidekiq-unique-jobs/issues/new?template=feature_request.md"
+
+      yield
     end
   end
 end
```
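The reworked `Logging` mixin above gives every includer leveled helpers (`log_debug` through `log_fatal`) that accept an optional Sidekiq job hash, with `build_message` appending selected job fields to the log line. A minimal usage sketch, assuming the gem is loaded and that the `LOCK`, `QUEUE`, `CLASS` and `JID` constants resolve to the string keys used below (an assumption, not something shown in this diff):

```ruby
require "sidekiq_unique_jobs"

# LockInspector is a hypothetical class used only for illustration.
class LockInspector
  include SidekiqUniqueJobs::Logging # also extends the class, per self.included above

  def call(item)
    # Logs something like:
    #   "Checking lock (lock=until_executed queue=default class=MyJob jid=abc123)"
    # provided LOCK/QUEUE/CLASS/JID map to the string keys passed in below (assumed).
    log_info("Checking lock", item)
  end
end

LockInspector.new.call(
  "lock"  => "until_executed",
  "queue" => "default",
  "class" => "MyJob",
  "jid"   => "abc123"
)
```

Inside the gem these helpers back, for example, the middleware logging context (`logging/middleware_context.rb` in the file list above).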
data/lib/sidekiq_unique_jobs/lua/delete.lua (new file)

```diff
@@ -0,0 +1,51 @@
+-------- BEGIN keys ---------
+local digest = KEYS[1]
+local queued = KEYS[2]
+local primed = KEYS[3]
+local locked = KEYS[4]
+local info = KEYS[5]
+local changelog = KEYS[6]
+local digests = KEYS[7]
+-------- END keys ---------
+
+-------- BEGIN lock arguments ---------
+local job_id = ARGV[1]
+local pttl = tonumber(ARGV[2])
+local lock_type = ARGV[3]
+local limit = tonumber(ARGV[4])
+-------- END lock arguments -----------
+
+-------- BEGIN injected arguments --------
+local current_time = tonumber(ARGV[5])
+local debug_lua = tostring(ARGV[6]) == "true"
+local max_history = tonumber(ARGV[7])
+local script_name = tostring(ARGV[8]) .. ".lua"
+local redisversion = tostring(ARGV[9])
+--------- END injected arguments ---------
+
+-------- BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+---------- END local functions ----------
+
+
+-------- BEGIN delete.lua --------
+log_debug("BEGIN delete", digest)
+
+local redis_version = toversion(redisversion)
+local count = 0
+local del_cmd = "DEL"
+
+log_debug("ZREM", digests, digest)
+count = count + redis.call("ZREM", digests, digest)
+
+if redis_version["major"] >= 4 then del_cmd = "UNLINK"; end
+
+log_debug(del_cmd, digest, queued, primed, locked, info)
+count = count + redis.call(del_cmd, digest, queued, primed, locked, info)
+
+
+log("Deleted (" .. count .. ") keys")
+log_debug("END delete (" .. count .. ") keys for:", digest)
+
+return count
+-------- END delete.lua --------
```
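`delete.lua` removes every per-digest key (queued, primed, locked, info) and drops the digest from the digests sorted set, switching from `DEL` to `UNLINK` on Redis >= 4. The scripts are ERB templates (note the `include_partial` lines), so they must be rendered before Redis can execute them; the gem does that internally. The sketch below only illustrates the KEYS/ARGV contract from the script header using plain redis-rb; the rendering helper, the digest value and the key-name suffixes are assumptions for illustration, not gem API:

```ruby
require "redis"

redis  = Redis.new
script = render_delete_lua # hypothetical helper that ERB-renders delete.lua plus shared/_common.lua

digest = "uniquejobs:0f7f1727d48d0f8e73a3f2c8" # hypothetical digest value
keys = [
  digest,                 # KEYS[1] digest
  "#{digest}:QUEUED",     # KEYS[2] queued  (suffix names assumed)
  "#{digest}:PRIMED",     # KEYS[3] primed
  "#{digest}:LOCKED",     # KEYS[4] locked
  "#{digest}:INFO",       # KEYS[5] info
  "uniquejobs:changelog", # KEYS[6] changelog (assumed global key)
  "uniquejobs:digests",   # KEYS[7] digests   (assumed global key)
]
argv = [
  "abc123",                    # ARGV[1] job_id
  0,                           # ARGV[2] pttl
  "until_executed",            # ARGV[3] lock_type
  1,                           # ARGV[4] limit
  Time.now.to_f,               # ARGV[5] current_time
  "false",                     # ARGV[6] debug_lua
  1000,                        # ARGV[7] max_history
  "delete",                    # ARGV[8] script_name (".lua" is appended by the script)
  redis.info["redis_version"], # ARGV[9] redisversion
]

deleted = redis.eval(script, keys: keys, argv: argv)
puts "deleted #{deleted} keys for #{digest}"
```

The remaining Lua scripts below follow the same injected-argument convention (current time, debug flag, history size, script name, Redis version appended after the lock arguments).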
data/lib/sidekiq_unique_jobs/lua/delete_by_digest.lua (new file)

```diff
@@ -0,0 +1,42 @@
+-------- BEGIN keys ---------
+local digest = KEYS[1]
+local queued = KEYS[2]
+local primed = KEYS[3]
+local locked = KEYS[4]
+local run_digest = KEYS[5]
+local run_queued = KEYS[6]
+local run_primed = KEYS[7]
+local run_locked = KEYS[8]
+local digests = KEYS[9]
+-------- END keys ---------
+
+-------- BEGIN injected arguments --------
+local current_time = tonumber(ARGV[1])
+local debug_lua = ARGV[2] == "true"
+local max_history = tonumber(ARGV[3])
+local script_name = tostring(ARGV[4]) .. ".lua"
+local redisversion = tostring(ARGV[5])
+--------- END injected arguments ---------
+
+-------- BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+---------- END local functions ----------
+
+-------- BEGIN delete_by_digest.lua --------
+local counter = 0
+local redis_version = toversion(redisversion)
+local del_cmd = "DEL"
+
+log_debug("BEGIN delete_by_digest:", digest)
+
+if redis_version["major"] >= 4 then del_cmd = "UNLINK"; end
+
+log_debug(del_cmd, digest, queued, primed, locked, run_digest, run_queued, run_primed, run_locked)
+counter = redis.call(del_cmd, digest, queued, primed, locked, run_digest, run_queued, run_primed, run_locked)
+
+log_debug("ZREM", digests, digest)
+redis.call("ZREM", digests, digest)
+
+log_debug("END delete_by_digest:", digest, "(deleted " .. counter .. " keys)")
+return counter
+-------- END delete_by_digest.lua --------
```
data/lib/sidekiq_unique_jobs/lua/delete_job_by_digest.lua (new file)

```diff
@@ -0,0 +1,38 @@
+-------- BEGIN keys ---------
+local queue = KEYS[1]
+local schedule_set = KEYS[2]
+local retry_set = KEYS[3]
+-------- END keys ---------
+
+-------- BEGIN Arguments ---------
+local digest = ARGV[1]
+-------- END Arguments ---------
+
+-------- BEGIN injected arguments --------
+local current_time = tonumber(ARGV[2])
+local debug_lua = ARGV[3] == "true"
+local max_history = tonumber(ARGV[4])
+local script_name = tostring(ARGV[5]) .. ".lua"
+--------- END injected arguments ---------
+
+-------- BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+<%= include_partial "shared/_delete_from_queue.lua" %>
+<%= include_partial "shared/_delete_from_sorted_set.lua" %>
+---------- END local functions ----------
+
+
+-------- BEGIN delete_job_by_digest.lua --------
+local result = delete_from_queue(queue, digest)
+if result then
+  return result
+end
+
+result = delete_from_sorted_set(schedule_set, digest)
+if result then
+  return result
+end
+
+result = delete_from_sorted_set(retry_set, digest)
+return result
+-------- END delete_job_by_digest.lua --------
```
data/lib/sidekiq_unique_jobs/lua/find_digest_in_queues.lua (new file)

```diff
@@ -0,0 +1,26 @@
+-------- BEGIN keys ---------
+local digest = KEYS[1]
+-------- END keys ---------
+
+-------- BEGIN injected arguments --------
+local current_time = tonumber(ARGV[2])
+local debug_lua = ARGV[3] == "true"
+local max_history = tonumber(ARGV[4])
+local script_name = tostring(ARGV[5]) .. ".lua"
+--------- END injected arguments ---------
+
+
+-------- BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+<%= include_partial "shared/_find_digest_in_queues.lua" %>
+---------- END local functions ----------
+
+
+-------- BEGIN delete_orphaned.lua --------
+log_debug("BEGIN")
+local result = find_digest_in_queues(digest)
+log_debug("END")
+if result and result ~= nil then
+  return result
+end
+-------- END delete_orphaned.lua --------
```
data/lib/sidekiq_unique_jobs/lua/lock.lua (new file)

```diff
@@ -0,0 +1,93 @@
+-------- BEGIN keys ---------
+local digest = KEYS[1]
+local queued = KEYS[2]
+local primed = KEYS[3]
+local locked = KEYS[4]
+local info = KEYS[5]
+local changelog = KEYS[6]
+local digests = KEYS[7]
+-------- END keys ---------
+
+
+-------- BEGIN lock arguments ---------
+local job_id = ARGV[1]
+local pttl = tonumber(ARGV[2])
+local lock_type = ARGV[3]
+local limit = tonumber(ARGV[4])
+-------- END lock arguments -----------
+
+
+-------- BEGIN injected arguments --------
+local current_time = tonumber(ARGV[5])
+local debug_lua = ARGV[6] == "true"
+local max_history = tonumber(ARGV[7])
+local script_name = tostring(ARGV[8]) .. ".lua"
+local redisversion = ARGV[9]
+--------- END injected arguments ---------
+
+
+-------- BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+---------- END local functions ----------
+
+
+--------- BEGIN lock.lua ---------
+log_debug("BEGIN lock digest:", digest, "job_id:", job_id)
+
+if redis.call("HEXISTS", locked, job_id) == 1 then
+  log_debug(locked, "already locked with job_id:", job_id)
+  log("Duplicate")
+
+  log_debug("LREM", queued, -1, job_id)
+  redis.call("LREM", queued, -1, job_id)
+
+  log_debug("LREM", primed, 1, job_id)
+  redis.call("LREM", primed, 1, job_id)
+
+  return job_id
+end
+
+local locked_count = redis.call("HLEN", locked)
+local within_limit = limit > locked_count
+local limit_exceeded = not within_limit
+
+if limit_exceeded then
+  log_debug("Limit exceeded:", digest, "(", locked_count, "of", limit, ")")
+  log("Limited")
+  return nil
+end
+
+log_debug("ZADD", digests, current_time, digest)
+redis.call("ZADD", digests, current_time, digest)
+
+log_debug("HSET", locked, job_id, current_time)
+redis.call("HSET", locked, job_id, current_time)
+
+log_debug("LREM", queued, -1, job_id)
+redis.call("LREM", queued, -1, job_id)
+
+log_debug("LREM", primed, 1, job_id)
+redis.call("LREM", primed, 1, job_id)
+
+-- The Sidekiq client sets pttl
+if pttl and pttl > 0 then
+  log_debug("PEXPIRE", digest, pttl)
+  redis.call("PEXPIRE", digest, pttl)
+
+  log_debug("PEXPIRE", locked, pttl)
+  redis.call("PEXPIRE", locked, pttl)
+
+  log_debug("PEXPIRE", info, pttl)
+  redis.call("PEXPIRE", info, pttl)
+end
+
+log_debug("PEXPIRE", queued, 1000)
+redis.call("PEXPIRE", queued, 1000)
+
+log_debug("PEXPIRE", primed, 1000)
+redis.call("PEXPIRE", primed, 1000)
+
+log("Locked")
+log_debug("END lock digest:", digest, "job_id:", job_id)
+return job_id
+---------- END lock.lua ----------
```
data/lib/sidekiq_unique_jobs/lua/locked.lua (new file)

```diff
@@ -0,0 +1,35 @@
+-------- BEGIN keys ---------
+local digest = KEYS[1]
+local queued = KEYS[2]
+local primed = KEYS[3]
+local locked = KEYS[4]
+local info = KEYS[5]
+local changelog = KEYS[6]
+local digests = KEYS[7]
+-------- END keys ---------
+
+-------- BEGIN lock arguments ---------
+local job_id = ARGV[1]
+-------- END lock arguments -----------
+
+-------- BEGIN injected arguments --------
+local current_time = tonumber(ARGV[2])
+local debug_lua = ARGV[3] == "true"
+local max_history = tonumber(ARGV[4])
+local script_name = tostring(ARGV[5]) .. ".lua"
+--------- END injected arguments ---------
+
+-------- BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+--------- END local functions ---------
+
+
+-------- BEGIN locked.lua --------
+if redis.call("HEXISTS", locked, job_id) == 1 then
+  log_debug("Locked", digest, "job_id:", job_id)
+  return 1
+else
+  log_debug("NOT Locked", digest, "job_id:", job_id)
+  return -1
+end
+--------- END locked.lua ---------
```
data/lib/sidekiq_unique_jobs/lua/queue.lua (new file)

```diff
@@ -0,0 +1,87 @@
+-------- BEGIN keys ---------
+local digest = KEYS[1]
+local queued = KEYS[2]
+local primed = KEYS[3]
+local locked = KEYS[4]
+local info = KEYS[5]
+local changelog = KEYS[6]
+local digests = KEYS[7]
+-------- END keys ---------
+
+
+-------- BEGIN lock arguments ---------
+local job_id = ARGV[1] -- The job_id that was previously primed
+local pttl = tonumber(ARGV[2])
+local lock_type = ARGV[3]
+local limit = tonumber(ARGV[4])
+-------- END lock arguments -----------
+
+
+-------- BEGIN injected arguments --------
+local current_time = tonumber(ARGV[5])
+local debug_lua = ARGV[6] == "true"
+local max_history = tonumber(ARGV[7])
+local script_name = tostring(ARGV[8]) .. ".lua"
+--------- END injected arguments ---------
+
+
+-------- BEGIN Variables --------
+local queued_count = redis.call("LLEN", queued)
+local locked_count = redis.call("HLEN", locked)
+local within_limit = limit > locked_count
+local limit_exceeded = not within_limit
+-------- END Variables --------
+
+
+-------- BEGIN local functions --------
+<%= include_partial "shared/_common.lua" %>
+---------- END local functions ----------
+
+
+-------- BEGIN queue.lua --------
+log_debug("BEGIN queue with key:", digest, "for job:", job_id)
+
+if redis.call("HEXISTS", locked, job_id) == 1 then
+  log_debug("HEXISTS", locked, job_id, "== 1")
+  log("Duplicate")
+  return job_id
+end
+
+local prev_jid = redis.call("GET", digest)
+log_debug("job_id:", job_id, "prev_jid:", prev_jid)
+if not prev_jid or prev_jid == false then
+  log_debug("SET", digest, job_id)
+  redis.call("SET", digest, job_id)
+elseif prev_jid == job_id then
+  log_debug(digest, "already queued with job_id:", job_id)
+  log("Duplicate")
+  return job_id
+else
+  -- TODO: Consider constraining the total count of both locked and queued?
+  if within_limit and queued_count < limit then
+    log_debug("Within limit:", digest, "(", locked_count, "of", limit, ")", "queued (", queued_count, "of", limit, ")")
+    log_debug("SET", digest, job_id, "(was", prev_jid, ")")
+    redis.call("SET", digest, job_id)
+  else
+    log_debug("Limit exceeded:", digest, "(", locked_count, "of", limit, ")")
+    log("Limit exceeded", prev_jid)
+    return prev_jid
+  end
+end
+
+log_debug("LPUSH", queued, job_id)
+redis.call("LPUSH", queued, job_id)
+
+-- The Sidekiq client should only set pttl for until_expired
+-- The Sidekiq server should set pttl for all other jobs
+if pttl and pttl > 0 then
+  log_debug("PEXPIRE", digest, pttl)
+  redis.call("PEXPIRE", digest, pttl)
+  log_debug("PEXPIRE", queued, pttl)
+  redis.call("PEXPIRE", queued, pttl)
+end
+
+log("Queued")
+log_debug("END queue with key:", digest, "for job:", job_id)
+return job_id
+-------- END queue.lua --------
```
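Taken together, `queue.lua` and `lock.lua` implement a queued → primed → locked handshake: `queue.lua` reserves the digest (`SET digest job_id`) and pushes the jid onto the queued list, the caller then moves that jid to the primed list, and `lock.lua` promotes it into the locked hash while trimming both lists and registering the digest in the digests sorted set. A rough redis-rb walkthrough of that flow; the list-move step (`RPOPLPUSH`) and the key suffixes are assumptions about how the gem's locksmith drives these scripts, not an excerpt from it:

```ruby
require "redis"

redis  = Redis.new
digest = "uniquejobs:0f7f1727d48d0f8e73a3f2c8" # hypothetical digest
queued = "#{digest}:QUEUED"                    # key suffixes assumed
primed = "#{digest}:PRIMED"
locked = "#{digest}:LOCKED"
jid    = "abc123"

# 1. queue.lua: SET digest => jid when the digest is free or within limit,
#    then LPUSH jid onto the queued list.
# 2. The caller waits for its jid and moves it from queued to primed (assumed step):
redis.rpoplpush(queued, primed)

# 3. lock.lua: LREM jid from queued and primed, HSET locked[jid] = now,
#    ZADD the digest into the digests set, and PEXPIRE keys when a pttl was given.

# 4. locked.lua answers "does this jid hold the lock?":
redis.hexists(locked, jid) # => true once lock.lua has run for this jid
```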