sidekiq-cron 1.5.1 → 1.12.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +161 -0
- data/Gemfile +3 -0
- data/README.md +74 -66
- data/lib/sidekiq/cron/job.rb +191 -132
- data/lib/sidekiq/cron/launcher.rb +17 -15
- data/lib/sidekiq/cron/locales/it.yml +23 -0
- data/lib/sidekiq/cron/poller.rb +14 -18
- data/lib/sidekiq/cron/schedule_loader.rb +20 -0
- data/lib/sidekiq/cron/support.rb +8 -1
- data/lib/sidekiq/cron/version.rb +1 -1
- data/lib/sidekiq/cron/views/cron.erb +1 -1
- data/lib/sidekiq/cron/web_extension.rb +6 -9
- data/lib/sidekiq/cron.rb +1 -0
- data/lib/sidekiq/options.rb +29 -0
- data/sidekiq-cron.gemspec +12 -13
- metadata +45 -32
- data/Changes.md +0 -111
- data/test/integration/performance_test.rb +0 -49
- data/test/test_helper.rb +0 -92
- data/test/unit/job_test.rb +0 -1124
- data/test/unit/poller_test.rb +0 -150
- data/test/unit/web_extension_test.rb +0 -156
data/lib/sidekiq/cron/job.rb
CHANGED
@@ -1,41 +1,46 @@
 require 'fugit'
+require 'globalid'
 require 'sidekiq'
 require 'sidekiq/cron/support'
+require 'sidekiq/options'

 module Sidekiq
   module Cron
     class Job
-      #
+      # How long we would like to store informations about previous enqueues.
       REMEMBER_THRESHOLD = 24 * 60 * 60
+
+      # Time format for enqueued jobs.
       LAST_ENQUEUE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S %z'

-      # Use the exists? method if we're on a newer version of
-      REDIS_EXISTS_METHOD = Gem.loaded_specs['redis'].version < Gem::Version.new('4.2') ? :exists : :exists?
+      # Use the exists? method if we're on a newer version of Redis.
+      REDIS_EXISTS_METHOD = Gem::Version.new(Sidekiq::VERSION) >= Gem::Version.new("7.0.0") || Gem.loaded_specs['redis'].version < Gem::Version.new('4.2') ? :exists : :exists?
+
+      # Use serialize/deserialize key of GlobalID.
+      GLOBALID_KEY = "_sc_globalid"

-      #
+      # Crucial part of whole enqueuing job.
       def should_enque? time
-
+        return false unless status == "enabled"
+        return false unless not_past_scheduled_time?(time)
+        return false unless not_enqueued_after?(time)
+
         enqueue = Sidekiq.redis do |conn|
-
-          not_past_scheduled_time?(time) &&
-          not_enqueued_after?(time) &&
-          conn.zadd(job_enqueued_key, formated_enqueue_time(time), formated_last_time(time))
+          conn.zadd(job_enqueued_key, formatted_enqueue_time(time), formatted_last_time(time))
         end
-        enqueue
+        enqueue == true || enqueue == 1
       end

-      #
-      # this will clear
-      # not overflow with memory
+      # Remove previous information about run times,
+      # this will clear Redis and make sure that Redis will not overflow with memory.
       def remove_previous_enques time
         Sidekiq.redis do |conn|
           conn.zremrangebyscore(job_enqueued_key, 0, "(#{(time.to_f - REMEMBER_THRESHOLD).to_s}")
         end
       end

-      #
+      # Test if job should be enqueued.
       def test_and_enque_for_time! time
-        #should this job be enqued?
         if should_enque?(time)
           enque!

@@ -43,9 +48,9 @@ module Sidekiq
         end
       end

-      #
+      # Enqueue cron job to queue.
       def enque! time = Time.now.utc
-        @last_enqueue_time = time
+        @last_enqueue_time = time

         klass_const =
           begin
@@ -56,7 +61,7 @@ module Sidekiq

         jid =
           if klass_const
-            if
+            if is_active_job?(klass_const)
               enqueue_active_job(klass_const).try :provider_job_id
             else
               enqueue_sidekiq_worker(klass_const)
@@ -74,23 +79,34 @@ module Sidekiq
         Sidekiq.logger.debug { "enqueued #{@name}: #{@message}" }
       end

-      def is_active_job?
-        @active_job || defined?(ActiveJob::Base) && Sidekiq::Cron::Support.constantize(@klass.to_s) < ActiveJob::Base
+      def is_active_job?(klass = nil)
+        @active_job || defined?(ActiveJob::Base) && (klass || Sidekiq::Cron::Support.constantize(@klass.to_s)) < ActiveJob::Base
       rescue NameError
         false
       end

+      def date_as_argument?
+        !!@date_as_argument
+      end
+
+      def enqueue_args
+        args = date_as_argument? ? @args + [Time.now.to_f] : @args
+        deserialize_argument(args)
+      end
+
       def enqueue_active_job(klass_const)
-        klass_const.set(queue: @queue).perform_later(
+        klass_const.set(queue: @queue).perform_later(*enqueue_args)
       end

       def enqueue_sidekiq_worker(klass_const)
-        klass_const.set(queue: queue_name_with_prefix).perform_async(
+        klass_const.set(queue: queue_name_with_prefix).perform_async(*enqueue_args)
       end

-      #
+      # Sidekiq worker message.
       def sidekiq_worker_message
-        @message.is_a?(String) ? Sidekiq.load_json(@message) : @message
+        message = @message.is_a?(String) ? Sidekiq.load_json(@message) : @message
+        message["args"] = enqueue_args
+        message
       end

       def queue_name_with_prefix
@@ -115,8 +131,8 @@ module Sidekiq
         queue_name
       end

-      #
-      # queue, it
+      # Active Job has different structure how it is loading data from Sidekiq
+      # queue, it creates a wrapper around job.
       def active_job_message
         {
           'class' => 'ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper',
@@ -127,13 +143,13 @@ module Sidekiq
             'job_class' => @klass,
             'job_id' => SecureRandom.uuid,
             'queue_name' => @queue_name_with_prefix,
-            'arguments' =>
+            'arguments' => enqueue_args
           }]
         }
       end

-      #
-      #
+      # Load cron jobs from Hash.
+      # Input structure should look like:
       # {
       #   'name_of_job' => {
       #     'class' => 'MyClass',
@@ -147,23 +163,23 @@ module Sidekiq
       #   }
       # }
       #
-      def self.load_from_hash
-        array = hash.
+      def self.load_from_hash(hash, options = {})
+        array = hash.map do |key, job|
           job['name'] = key
-
+          job
         end
-        load_from_array
+        load_from_array(array, options)
       end

-      #
-      # If exists old jobs in
-      def self.load_from_hash!
+      # Like #load_from_hash.
+      # If exists old jobs in Redis but removed from args, destroy old jobs.
+      def self.load_from_hash!(hash, options = {})
         destroy_removed_jobs(hash.keys)
-        load_from_hash(hash)
+        load_from_hash(hash, options)
       end

-      #
-      #
+      # Load cron jobs from Array.
+      # Input structure should look like:
       # [
       #   {
       #     'name' => 'name_of_job',
@@ -179,24 +195,24 @@ module Sidekiq
       #   }
       # ]
       #
-      def self.load_from_array
+      def self.load_from_array(array, options = {})
         errors = {}
         array.each do |job_data|
-          job = new(job_data)
+          job = new(job_data.merge(options))
           errors[job.name] = job.errors unless job.save
         end
         errors
       end

-      #
-      # If exists old jobs in
-      def self.load_from_array!
+      # Like #load_from_array.
+      # If exists old jobs in Redis but removed from args, destroy old jobs.
+      def self.load_from_array!(array, options = {})
         job_names = array.map { |job| job["name"] }
         destroy_removed_jobs(job_names)
-        load_from_array(array)
+        load_from_array(array, options)
       end

-      #
+      # Get all cron jobs.
       def self.all
         job_hashes = nil
         Sidekiq.redis do |conn|
@@ -208,7 +224,7 @@ module Sidekiq
           end
         end
         job_hashes.compact.reject(&:empty?).collect do |h|
-          #
+          # No need to fetch missing args from Redis since we just got this hash from there
           Sidekiq::Cron::Job.new(h.merge(fetch_missing_args: false))
         end
       end
@@ -222,26 +238,25 @@ module Sidekiq
       end

       def self.find name
-        #
+        # If name is hash try to get name from it.
         name = name[:name] || name['name'] if name.is_a?(Hash)
+        return unless exists? name

         output = nil
         Sidekiq.redis do |conn|
-
-          output = Job.new conn.hgetall( redis_key(name) )
-          end
+          output = Job.new conn.hgetall( redis_key(name) )
         end
         output if output && output.valid?
       end

-      #
+      # Create new instance of cron job.
       def self.create hash
         new(hash).save
       end

-      #
+      # Destroy job by name.
       def self.destroy name
-        #
+        # If name is hash try to get name from it.
         name = name[:name] || name['name'] if name.is_a?(Hash)

         if job = find(name)
@@ -252,7 +267,7 @@ module Sidekiq
       end

       attr_accessor :name, :cron, :description, :klass, :args, :message
-      attr_reader :last_enqueue_time, :fetch_missing_args
+      attr_reader :last_enqueue_time, :fetch_missing_args, :source

       def initialize input_args = {}
         args = Hash[input_args.map{ |k, v| [k.to_s, v] }]
@@ -262,24 +277,26 @@ module Sidekiq
         @name = args["name"]
         @cron = args["cron"]
         @description = args["description"] if args["description"]
+        @source = args["source"] == "schedule" ? "schedule" : "dynamic"

-        #
+        # Get class from klass or class.
         @klass = args["klass"] || args["class"]

-        #
+        # Set status of job.
         @status = args['status'] || status_from_redis

-        #
+        # Set last enqueue time - from args or from existing job.
         if args['last_enqueue_time'] && !args['last_enqueue_time'].empty?
           @last_enqueue_time = parse_enqueue_time(args['last_enqueue_time'])
         else
           @last_enqueue_time = last_enqueue_time_from_redis
         end

-        #
+        # Get right arguments for job.
         @symbolize_args = args["symbolize_args"] == true || ("#{args["symbolize_args"]}" =~ (/^(true|t|yes|y|1)$/i)) == 0 || false
-        @args =
-
+        @args = parse_args(args["args"])
+
+        @date_as_argument = args["date_as_argument"] == true || ("#{args["date_as_argument"]}" =~ (/^(true|t|yes|y|1)$/i)) == 0 || false

         @active_job = args["active_job"] == true || ("#{args["active_job"]}" =~ (/^(true|t|yes|y|1)$/i)) == 0 || false
         @active_job_queue_name_prefix = args["queue_name_prefix"]
@@ -295,8 +312,8 @@ module Sidekiq
           "args" => @args,
         }

-        #
-        #only if message wasn't specified before
+        # Get right data for message,
+        # only if message wasn't specified before.
         klass_data = case @klass
         when Class
           @klass.get_sidekiq_options
@@ -304,21 +321,21 @@ module Sidekiq
           begin
             Sidekiq::Cron::Support.constantize(@klass).get_sidekiq_options
           rescue Exception => e
-            #Unknown class
+            # Unknown class
             {"queue"=>"default"}
           end
         end

         message_data = klass_data.merge(message_data)
-
-        #
+
+        # Override queue if setted in config,
+        # only if message is hash - can be string (dumped JSON).
         if args['queue']
           @queue = message_data['queue'] = args['queue']
         else
           @queue = message_data['queue'] || "default"
         end

-        #dump message as json
         @message = message_data
       end

@@ -380,28 +397,34 @@ module Sidekiq
           conn.lrange(jid_history_key, 0, -1) rescue nil
         end

-        # returns nil if out nil
         out && out.map do |jid_history_raw|
           Sidekiq.load_json jid_history_raw
         end
       end

-      #
+      # Export job data to hash.
       def to_hash
-        {
+        hash = {
           name: @name,
-          klass: @klass,
+          klass: @klass.to_s,
           cron: @cron,
           description: @description,
+          source: @source,
           args: @args.is_a?(String) ? @args : Sidekiq.dump_json(@args || []),
           message: @message.is_a?(String) ? @message : Sidekiq.dump_json(@message || {}),
           status: @status,
-          active_job: @active_job,
+          active_job: @active_job ? "1" : "0",
           queue_name_prefix: @active_job_queue_name_prefix,
           queue_name_delimiter: @active_job_queue_name_delimiter,
-          last_enqueue_time:
-          symbolize_args:
+          last_enqueue_time: serialized_last_enqueue_time,
+          symbolize_args: symbolize_args? ? "1" : "0",
         }
+
+        if date_as_argument?
+          hash.merge!(date_as_argument: "1")
+        end
+
+        hash
       end

       def errors
@@ -409,7 +432,7 @@ module Sidekiq
       end

       def valid?
-        #
+        # Clear previous errors.
         @errors = []

         errors << "'name' must be set" if @name.nil? || @name.size == 0
@@ -417,7 +440,7 @@ module Sidekiq
           errors << "'cron' must be set"
         else
           begin
-            @parsed_cron = Fugit.
+            @parsed_cron = Fugit.do_parse_cronish(@cron)
           rescue => e
             errors << "'cron' -> #{@cron.inspect} -> #{e.class}: #{e.message}"
           end
@@ -438,38 +461,30 @@ module Sidekiq
         end
       end

-      # add job to cron jobs
-      # input:
-      #   name: (string) - name of job
-      #   cron: (string: '* * * * *' - cron specification when to run job
-      #   class: (string|class) - which class to perform
-      # optional input:
-      #   queue: (string) - which queue to use for enquing (will override class queue)
-      #   args: (array|hash|nil) - arguments for permorm method
-
       def save
-        #
+        # If job is invalid, return false.
         return false unless valid?

         Sidekiq.redis do |conn|

-          #
-          conn.sadd self.class.jobs_key, redis_key
+          # Add to set of all jobs
+          conn.sadd self.class.jobs_key, [redis_key]

-          #
-          conn.
+          # Add informations for this job!
+          conn.hset redis_key, to_hash.transform_values! { |v| v || "" }

-          #
+          # Add information about last time! - don't enque right after scheduler poller starts!
           time = Time.now.utc
-
+          exists = conn.public_send(REDIS_EXISTS_METHOD, job_enqueued_key)
+          conn.zadd(job_enqueued_key, time.to_f.to_s, formatted_last_time(time).to_s) unless exists == true || exists == 1
         end
         Sidekiq.logger.info { "Cron Jobs - added job with name: #{@name}" }
       end

       def save_last_enqueue_time
         Sidekiq.redis do |conn|
-          #
-          conn.hset redis_key, 'last_enqueue_time',
+          # Update last enqueue time.
+          conn.hset redis_key, 'last_enqueue_time', serialized_last_enqueue_time
         end
       end

@@ -478,36 +493,35 @@ module Sidekiq
           jid: jid,
           enqueued: @last_enqueue_time
         }
-
+
+        @history_size ||= (Sidekiq::Options[:cron_history_size] || 10).to_i - 1
         Sidekiq.redis do |conn|
           conn.lpush jid_history_key,
                      Sidekiq.dump_json(jid_history)
-          #
+          # Keep only last 10 entries in a fifo manner.
           conn.ltrim jid_history_key, 0, @history_size
         end
       end

-      # remove job from cron jobs by name
-      # input:
-      #   first arg: name (string) - name of job (must be same - case sensitive)
       def destroy
         Sidekiq.redis do |conn|
-          #
-          conn.srem self.class.jobs_key, redis_key
+          # Delete from set.
+          conn.srem self.class.jobs_key, [redis_key]

-          #
+          # Delete runned timestamps.
           conn.del job_enqueued_key

-          #
+          # Delete jid_history.
           conn.del jid_history_key

-          #
+          # Delete main job.
           conn.del redis_key
         end
+
         Sidekiq.logger.info { "Cron Jobs - deleted job with name: #{@name}" }
       end

-      #
+      # Remove all job from cron.
       def self.destroy_all!
         all.each do |job|
           job.destroy
@@ -515,9 +529,9 @@ module Sidekiq
         Sidekiq.logger.info { "Cron Jobs - deleted all jobs" }
       end

-      #
+      # Remove "removed jobs" between current jobs and new jobs
       def self.destroy_removed_jobs new_job_names
-        current_job_names = Sidekiq::Cron::Job.all.
+        current_job_names = Sidekiq::Cron::Job.all.filter_map { |j| j.name if j.source == "schedule" }
         removed_job_names = current_job_names - new_job_names
         removed_job_names.each { |j| Sidekiq::Cron::Job.destroy(j) }
         removed_job_names
@@ -529,20 +543,19 @@ module Sidekiq
         parsed_cron.previous_time(now.utc).utc
       end

-      def
+      def formatted_enqueue_time now = Time.now.utc
         last_time(now).getutc.to_f.to_s
       end

-      def
+      def formatted_last_time now = Time.now.utc
         last_time(now).getutc.iso8601
       end

       def self.exists? name
-        out =
-
-          out = conn.public_send(REDIS_EXISTS_METHOD, redis_key(name))
+        out = Sidekiq.redis do |conn|
+          conn.public_send(REDIS_EXISTS_METHOD, redis_key(name))
         end
-        out
+        out == true || out == 1
       end

       def exists?
@@ -553,10 +566,14 @@ module Sidekiq
         "#{status == "enabled" ? 0 : 1}_#{name}".downcase
       end

+      def args=(args)
+        @args = parse_args(args)
+      end
+
       private

       def parsed_cron
-        @parsed_cron ||= Fugit.
+        @parsed_cron ||= Fugit.parse_cronish(@cron)
       end

       def not_enqueued_after?(time)
@@ -564,24 +581,27 @@ module Sidekiq
       end

       # Try parsing inbound args into an array.
-      #
-      # try to load JSON, then failover
-      # to string array.
+      # Args from Redis will be encoded JSON,
+      # try to load JSON, then failover to string array.
       def parse_args(args)
         case args
+        when GlobalID::Identification
+          [convert_to_global_id_hash(args)]
         when String
           begin
             parsed_args = Sidekiq.load_json(args)
             symbolize_args? ? symbolize_args(parsed_args) : parsed_args
           rescue JSON::ParserError
-            [*args]
+            [*args]
           end
         when Hash
+          args = serialize_argument(args)
           symbolize_args? ? [symbolize_args(args)] : [args]
         when Array
+          args = serialize_argument(args)
           symbolize_args? ? symbolize_args(args) : args
         else
-          [*args]
+          [*args]
         end
       end

@@ -613,29 +633,26 @@ module Sidekiq

       def not_past_scheduled_time?(current_time)
         last_cron_time = parsed_cron.previous_time(current_time).utc
-        # or could it be?
-        #last_cron_time = last_time(current_time)
         return false if (current_time.to_i - last_cron_time.to_i) > 60
         true
       end

-      # Redis key for set of all cron jobs
+      # Redis key for set of all cron jobs.
       def self.jobs_key
         "cron_jobs"
       end

-      # Redis key for storing one cron job
+      # Redis key for storing one cron job.
       def self.redis_key name
         "cron_job:#{name}"
       end

-      # Redis key for storing one cron job
+      # Redis key for storing one cron job.
       def redis_key
         self.class.redis_key @name
       end

-      # Redis key for storing one cron job run times
-      # (when poller added job to queue)
+      # Redis key for storing one cron job run times (when poller added job to queue)
       def self.job_enqueued_key name
         "cron_job:#{name}:enqueued"
       end
@@ -644,8 +661,6 @@ module Sidekiq
         "cron_job:#{name}:jid_history"
       end

-      # Redis key for storing one cron job run times
-      # (when poller added job to queue)
       def job_enqueued_key
         self.class.job_enqueued_key @name
       end
@@ -654,12 +669,56 @@ module Sidekiq
         self.class.jid_history_key @name
       end

-
-
-
-
+      def serialized_last_enqueue_time
+        @last_enqueue_time&.strftime(LAST_ENQUEUE_TIME_FORMAT)
+      end
+
+      def convert_to_global_id_hash(argument)
+        { GLOBALID_KEY => argument.to_global_id.to_s }
+      rescue URI::GID::MissingModelIdError
+        raise "Unable to serialize #{argument.class} " \
+          "without an id. (Maybe you forgot to call save?)"
+      end
+
+      def deserialize_argument(argument)
+        case argument
+        when String
+          argument
+        when Array
+          argument.map { |arg| deserialize_argument(arg) }
+        when Hash
+          if serialized_global_id?(argument)
+            deserialize_global_id argument
+          else
+            argument.transform_values { |v| deserialize_argument(v) }
+          end
+        else
+          argument
+        end
       end

+      def serialized_global_id?(hash)
+        hash.size == 1 && hash.include?(GLOBALID_KEY)
+      end
+
+      def deserialize_global_id(hash)
+        GlobalID::Locator.locate hash[GLOBALID_KEY]
+      end
+
+      def serialize_argument(argument)
+        case argument
+        when GlobalID::Identification
+          convert_to_global_id_hash(argument)
+        when Array
+          argument.map { |arg| serialize_argument(arg) }
+        when Hash
+          argument.each_with_object({}) do |(key, value), hash|
+            hash[key] = serialize_argument(value)
+          end
+        else
+          argument
+        end
+      end
     end
   end
 end