sidekiq-cron 0.6.3 → 1.10.0

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (47)
  1. checksums.yaml +5 -5
  2. data/CHANGELOG.md +145 -0
  3. data/Gemfile +3 -29
  4. data/README.md +175 -121
  5. data/Rakefile +3 -42
  6. data/lib/sidekiq/cron/job.rb +273 -144
  7. data/lib/sidekiq/cron/launcher.rb +39 -43
  8. data/lib/sidekiq/cron/locales/de.yml +2 -2
  9. data/lib/sidekiq/cron/locales/en.yml +6 -2
  10. data/lib/sidekiq/cron/locales/it.yml +23 -0
  11. data/lib/sidekiq/cron/locales/ja.yml +18 -0
  12. data/lib/sidekiq/cron/locales/pt.yml +22 -0
  13. data/lib/sidekiq/cron/locales/ru.yml +2 -2
  14. data/lib/sidekiq/cron/locales/zh-CN.yml +19 -0
  15. data/lib/sidekiq/cron/poller.rb +22 -12
  16. data/lib/sidekiq/cron/schedule_loader.rb +22 -0
  17. data/lib/sidekiq/cron/support.rb +8 -1
  18. data/lib/sidekiq/cron/version.rb +7 -0
  19. data/lib/sidekiq/cron/views/cron.erb +38 -28
  20. data/lib/sidekiq/cron/views/cron_show.erb +88 -0
  21. data/lib/sidekiq/cron/web.rb +1 -7
  22. data/lib/sidekiq/cron/web_extension.rb +19 -15
  23. data/lib/sidekiq/cron.rb +1 -0
  24. data/lib/sidekiq/options.rb +25 -0
  25. data/sidekiq-cron.gemspec +23 -108
  26. data/test/integration/performance_test.rb +13 -19
  27. data/test/models/person.rb +21 -0
  28. data/test/test_helper.rb +37 -38
  29. data/test/unit/fixtures/schedule_array.yml +13 -0
  30. data/test/unit/fixtures/schedule_erb.yml +6 -0
  31. data/test/unit/fixtures/schedule_hash.yml +12 -0
  32. data/test/unit/fixtures/schedule_string.yml +1 -0
  33. data/test/unit/job_test.rb +450 -35
  34. data/test/unit/launcher_test.rb +33 -0
  35. data/test/unit/poller_test.rb +28 -37
  36. data/test/unit/schedule_loader_test.rb +58 -0
  37. data/test/unit/web_extension_test.rb +59 -41
  38. metadata +72 -191
  39. data/.document +0 -5
  40. data/.travis.yml +0 -19
  41. data/Changes.md +0 -50
  42. data/Dockerfile +0 -32
  43. data/VERSION +0 -1
  44. data/config.ru +0 -14
  45. data/docker-compose.yml +0 -21
  46. data/examples/web-cron-ui.png +0 -0
  47. data/lib/sidekiq/cron/views/cron.slim +0 -69
data/lib/sidekiq/cron/job.rb

@@ -1,42 +1,45 @@
+ require 'fugit'
+ require 'globalid'
  require 'sidekiq'
- require 'sidekiq/util'
- require 'rufus-scheduler'
  require 'sidekiq/cron/support'
+ require 'sidekiq/options'

  module Sidekiq
  module Cron
-
  class Job
- include Util
- extend Util
-
- #how long we would like to store informations about previous enqueues
+ # How long we would like to store informations about previous enqueues.
  REMEMBER_THRESHOLD = 24 * 60 * 60

- #crucial part of whole enquing job
+ # Time format for enqueued jobs.
+ LAST_ENQUEUE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S %z'
+
+ # Use the exists? method if we're on a newer version of Redis.
+ REDIS_EXISTS_METHOD = Gem::Version.new(Sidekiq::VERSION) >= Gem::Version.new("7.0.0") || Gem.loaded_specs['redis'].version < Gem::Version.new('4.2') ? :exists : :exists?
+
+ # Use serialize/deserialize key of GlobalID.
+ GLOBALID_KEY = "_sc_globalid"
+
+ # Crucial part of whole enqueuing job.
  def should_enque? time
- enqueue = false
  enqueue = Sidekiq.redis do |conn|
  status == "enabled" &&
  not_past_scheduled_time?(time) &&
  not_enqueued_after?(time) &&
- conn.zadd(job_enqueued_key, formated_enqueue_time(time), formated_last_time(time))
+ conn.zadd(job_enqueued_key, formatted_enqueue_time(time), formatted_last_time(time))
  end
- enqueue
+ enqueue == true || enqueue == 1
  end

- # remove previous informations about run times
- # this will clear redis and make sure that redis will
- # not overflow with memory
+ # Remove previous information about run times,
+ # this will clear Redis and make sure that Redis will not overflow with memory.
  def remove_previous_enques time
  Sidekiq.redis do |conn|
  conn.zremrangebyscore(job_enqueued_key, 0, "(#{(time.to_f - REMEMBER_THRESHOLD).to_s}")
  end
  end

- #test if job should be enqued If yes add it to queue
+ # Test if job should be enqueued.
  def test_and_enque_for_time! time
- #should this job be enqued?
  if should_enque?(time)
  enque!

@@ -44,7 +47,7 @@ module Sidekiq
  end
  end

- #enque cron job to queue
+ # Enqueue cron job to queue.
  def enque! time = Time.now.utc
  @last_enqueue_time = time

@@ -55,45 +58,54 @@ module Sidekiq
  nil
  end

- if klass_const
- if defined?(ActiveJob::Base) && klass_const < ActiveJob::Base
- enqueue_active_job(klass_const)
- else
- enqueue_sidekiq_worker(klass_const)
- end
- else
- if @active_job
- Sidekiq::Client.push(active_job_message)
+ jid =
+ if klass_const
+ if is_active_job?(klass_const)
+ enqueue_active_job(klass_const).try :provider_job_id
+ else
+ enqueue_sidekiq_worker(klass_const)
+ end
  else
- Sidekiq::Client.push(sidekiq_worker_message)
+ if @active_job
+ Sidekiq::Client.push(active_job_message)
+ else
+ Sidekiq::Client.push(sidekiq_worker_message)
+ end
  end
- end

  save_last_enqueue_time
- logger.debug { "enqueued #{@name}: #{@message}" }
+ add_jid_history jid
+ Sidekiq.logger.debug { "enqueued #{@name}: #{@message}" }
  end

- def is_active_job?
- @active_job || defined?(ActiveJob::Base) && Sidekiq::Cron::Support.constantize(@klass.to_s) < ActiveJob::Base
+ def is_active_job?(klass = nil)
+ @active_job || defined?(ActiveJob::Base) && (klass || Sidekiq::Cron::Support.constantize(@klass.to_s)) < ActiveJob::Base
  rescue NameError
  false
  end

- def enqueue_active_job(klass_const)
- klass_const.set(queue: @queue).perform_later(*@args)
+ def date_as_argument?
+ !!@date_as_argument
+ end

- true
+ def enqueue_args
+ args = date_as_argument? ? @args + [Time.now.to_f] : @args
+ deserialize_argument(args)
  end

- def enqueue_sidekiq_worker(klass_const)
- klass_const.set(queue: queue_name_with_prefix).perform_async(*@args)
+ def enqueue_active_job(klass_const)
+ klass_const.set(queue: @queue).perform_later(*enqueue_args)
+ end

- true
+ def enqueue_sidekiq_worker(klass_const)
+ klass_const.set(queue: queue_name_with_prefix).perform_async(*enqueue_args)
  end

- # siodekiq worker message
+ # Sidekiq worker message.
  def sidekiq_worker_message
- @message.is_a?(String) ? Sidekiq.load_json(@message) : @message
+ message = @message.is_a?(String) ? Sidekiq.load_json(@message) : @message
+ message["args"] = enqueue_args
+ message
  end

  def queue_name_with_prefix
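Usage sketch (not part of this diff): with the new date_as_argument option shown above, enqueue_args appends the enqueue timestamp (Time.now.to_f) as the last perform argument. HourlyReport is a hypothetical worker used only for illustration.

# Hypothetical worker; the trailing argument is the scheduled enqueue time.
class HourlyReport
  include Sidekiq::Worker

  def perform(region, enqueued_at = nil)
    puts "Building #{region} report scheduled at #{Time.at(enqueued_at)}" if enqueued_at
  end
end

Sidekiq::Cron::Job.create(
  name: 'Hourly report',
  cron: '0 * * * *',
  class: 'HourlyReport',
  args: ['eu'],
  date_as_argument: true # Time.now.to_f is appended to args at enqueue time
)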
@@ -118,24 +130,25 @@ module Sidekiq
  queue_name
  end

- # active job has different structure how it is loading data from sidekiq
- # queue, it createaswrapper arround job
+ # Active Job has different structure how it is loading data from Sidekiq
+ # queue, it creates a wrapper around job.
  def active_job_message
  {
  'class' => 'ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper',
+ 'wrapped' => @klass,
  'queue' => @queue_name_with_prefix,
  'description' => @description,
  'args' => [{
  'job_class' => @klass,
  'job_id' => SecureRandom.uuid,
  'queue_name' => @queue_name_with_prefix,
- 'arguments' => @args
+ 'arguments' => enqueue_args
  }]
  }
  end

- # load cron jobs from Hash
- # input structure should look like:
+ # Load cron jobs from Hash.
+ # Input structure should look like:
  # {
  # 'name_of_job' => {
  # 'class' => 'MyClass',
@@ -150,22 +163,22 @@ module Sidekiq
  # }
  #
  def self.load_from_hash hash
- array = hash.inject([]) do |out,(key, job)|
+ array = hash.map do |key, job|
  job['name'] = key
- out << job
+ job
  end
  load_from_array array
  end

- # like to {#load_from_hash}
- # If exists old jobs in redis but removed from args, destroy old jobs
+ # Like #load_from_hash.
+ # If exists old jobs in Redis but removed from args, destroy old jobs.
  def self.load_from_hash! hash
  destroy_removed_jobs(hash.keys)
  load_from_hash(hash)
  end

- # load cron jobs from Array
- # input structure should look like:
+ # Load cron jobs from Array.
+ # Input structure should look like:
  # [
  # {
  # 'name' => 'name_of_job',
@@ -190,27 +203,27 @@ module Sidekiq
  errors
  end

- # like to {#load_from_array}
- # If exists old jobs in redis but removed from args, destroy old jobs
+ # Like #load_from_array.
+ # If exists old jobs in Redis but removed from args, destroy old jobs.
  def self.load_from_array! array
  job_names = array.map { |job| job["name"] }
  destroy_removed_jobs(job_names)
  load_from_array(array)
  end

- # get all cron jobs
+ # Get all cron jobs.
  def self.all
  job_hashes = nil
  Sidekiq.redis do |conn|
  set_members = conn.smembers(jobs_key)
- job_hashes = conn.pipelined do
+ job_hashes = conn.pipelined do |pipeline|
  set_members.each do |key|
- conn.hgetall(key)
+ pipeline.hgetall(key)
  end
  end
  end
  job_hashes.compact.reject(&:empty?).collect do |h|
- # no need to fetch missing args from redis since we just got this hash from there
+ # No need to fetch missing args from Redis since we just got this hash from there
  Sidekiq::Cron::Job.new(h.merge(fetch_missing_args: false))
  end
  end
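Usage sketch (not part of this diff): feeding a schedule to the loaders above, with the hash keys mirroring the structure documented in the load_from_hash / load_from_array comments. The file name and worker class are placeholders; the return value is the collection of validation errors built by load_from_array (empty when every job saved).

# Sketch: loading a cron schedule at boot (e.g. on the Sidekiq server).
schedule = {
  'hourly_report' => {
    'cron'  => '0 * * * *',
    'class' => 'HourlyReport',  # placeholder worker
    'queue' => 'reports'
  }
}

errors = Sidekiq::Cron::Job.load_from_hash(schedule)
puts "cron schedule errors: #{errors.inspect}" unless errors.empty?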
@@ -224,7 +237,7 @@ module Sidekiq
  end

  def self.find name
- #if name is hash try to get name from it
+ # If name is hash try to get name from it.
  name = name[:name] || name['name'] if name.is_a?(Hash)

  output = nil
@@ -233,17 +246,17 @@ module Sidekiq
  output = Job.new conn.hgetall( redis_key(name) )
  end
  end
- output
+ output if output && output.valid?
  end

- # create new instance of cron job
+ # Create new instance of cron job.
  def self.create hash
  new(hash).save
  end

- #destroy job by name
+ # Destroy job by name.
  def self.destroy name
- #if name is hash try to get name from it
+ # If name is hash try to get name from it.
  name = name[:name] || name['name'] if name.is_a?(Hash)

  if job = find(name)
@@ -265,22 +278,25 @@ module Sidekiq
  @cron = args["cron"]
  @description = args["description"] if args["description"]

- #get class from klass or class
+ # Get class from klass or class.
  @klass = args["klass"] || args["class"]

- #set status of job
+ # Set status of job.
  @status = args['status'] || status_from_redis

- #set last enqueue time - from args or from existing job
+ # Set last enqueue time - from args or from existing job.
  if args['last_enqueue_time'] && !args['last_enqueue_time'].empty?
- @last_enqueue_time = Time.parse(args['last_enqueue_time'])
+ @last_enqueue_time = parse_enqueue_time(args['last_enqueue_time'])
  else
  @last_enqueue_time = last_enqueue_time_from_redis
  end

- #get right arguments for job
+ # Get right arguments for job.
+ @symbolize_args = args["symbolize_args"] == true || ("#{args["symbolize_args"]}" =~ (/^(true|t|yes|y|1)$/i)) == 0 || false
  @args = args["args"].nil? ? [] : parse_args( args["args"] )

+ @date_as_argument = args["date_as_argument"] == true || ("#{args["date_as_argument"]}" =~ (/^(true|t|yes|y|1)$/i)) == 0 || false
+
  @active_job = args["active_job"] == true || ("#{args["active_job"]}" =~ (/^(true|t|yes|y|1)$/i)) == 0 || false
  @active_job_queue_name_prefix = args["queue_name_prefix"]
  @active_job_queue_name_delimiter = args["queue_name_delimiter"]
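Usage sketch (not part of this diff): the flag parsing above accepts booleans as well as "truthy" strings or numbers matching true/t/yes/y/1. CleanupWorker and the argument values are placeholders.

# Sketch: these spellings all enable the corresponding flags.
Sidekiq::Cron::Job.new(
  'name'  => 'cleanup',
  'cron'  => '0 3 * * *',
  'class' => 'CleanupWorker',          # placeholder class name
  'args'  => { 'dry_run' => false },   # symbolized to { dry_run: false } for perform
  'symbolize_args'   => 'yes',         # equivalent to true, "t", "y" or "1"
  'date_as_argument' => 1              # also parsed as true
)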
@@ -295,8 +311,8 @@ module Sidekiq
  "args" => @args,
  }

- #get right data for message
- #only if message wasn't specified before
+ # Get right data for message,
+ # only if message wasn't specified before.
  klass_data = case @klass
  when Class
  @klass.get_sidekiq_options
@@ -304,21 +320,21 @@ module Sidekiq
  begin
  Sidekiq::Cron::Support.constantize(@klass).get_sidekiq_options
  rescue Exception => e
- #Unknown class
+ # Unknown class
  {"queue"=>"default"}
  end
  end

  message_data = klass_data.merge(message_data)
- #override queue if setted in config
- #only if message is hash - can be string (dumped JSON)
+
+ # Override queue if setted in config,
+ # only if message is hash - can be string (dumped JSON).
  if args['queue']
  @queue = message_data['queue'] = args['queue']
  else
  @queue = message_data['queue'] || "default"
  end

- #dump message as json
  @message = message_data
  end

@@ -347,6 +363,12 @@ module Sidekiq
  !enabled?
  end

+ def pretty_message
+ JSON.pretty_generate Sidekiq.load_json(message)
+ rescue JSON::ParserError
+ message
+ end
+
  def status_from_redis
  out = "enabled"
  if fetch_missing_args
@@ -362,27 +384,45 @@ module Sidekiq
  out = nil
  if fetch_missing_args
  Sidekiq.redis do |conn|
- out = Time.parse(conn.hget(redis_key, "last_enqueue_time")) rescue nil
+ out = parse_enqueue_time(conn.hget(redis_key, "last_enqueue_time")) rescue nil
  end
  end
  out
  end

- #export job data to hash
+ def jid_history_from_redis
+ out =
+ Sidekiq.redis do |conn|
+ conn.lrange(jid_history_key, 0, -1) rescue nil
+ end
+
+ out && out.map do |jid_history_raw|
+ Sidekiq.load_json jid_history_raw
+ end
+ end
+
+ # Export job data to hash.
  def to_hash
- {
+ hash = {
  name: @name,
- klass: @klass,
+ klass: @klass.to_s,
  cron: @cron,
  description: @description,
  args: @args.is_a?(String) ? @args : Sidekiq.dump_json(@args || []),
  message: @message.is_a?(String) ? @message : Sidekiq.dump_json(@message || {}),
  status: @status,
- active_job: @active_job,
+ active_job: @active_job ? "1" : "0",
  queue_name_prefix: @active_job_queue_name_prefix,
  queue_name_delimiter: @active_job_queue_name_delimiter,
- last_enqueue_time: @last_enqueue_time,
+ last_enqueue_time: serialized_last_enqueue_time,
+ symbolize_args: symbolize_args? ? "1" : "0",
  }
+
+ if date_as_argument?
+ hash.merge!(date_as_argument: "1")
+ end
+
+ hash
  end

  def errors
@@ -390,7 +430,7 @@ module Sidekiq
  end

  def valid?
- #clear previos errors
+ # Clear previous errors.
  @errors = []

  errors << "'name' must be set" if @name.nil? || @name.size == 0
@@ -398,21 +438,15 @@ module Sidekiq
  errors << "'cron' must be set"
  else
  begin
- cron = Rufus::Scheduler::CronLine.new(@cron)
- cron.next_time(Time.now.utc).utc
- rescue Exception => e
- #fix for different versions of cron-parser
- if e.message == "Bad Vixie-style specification bad"
- errors << "'cron' -> #{@cron}: not a valid cronline"
- else
- errors << "'cron' -> #{@cron}: #{e.message}"
- end
+ @parsed_cron = Fugit.do_parse_cronish(@cron)
+ rescue => e
+ errors << "'cron' -> #{@cron.inspect} -> #{e.class}: #{e.message}"
  end
  end

  errors << "'klass' (or class) must be set" unless klass_valid

- !errors.any?
+ errors.empty?
  end

  def klass_valid
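Usage sketch (not part of this diff): cron validation now delegates to fugit (required at the top of the file) instead of rufus-scheduler. A minimal sketch of the parser calls the job class relies on; the natural-language form is a fugit feature and is an assumption here, not something this diff documents.

require 'fugit'

# Standard five-field cron strings parse to a Fugit::Cron.
cron = Fugit.do_parse_cronish('*/5 * * * *')
puts cron.previous_time(Time.now.utc).utc   # same call Job#last_time makes

# fugit also accepts some natural-language schedules (assumption: fugit behaviour).
puts Fugit.parse_cronish('every day at noon')&.to_cron_s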
@@ -425,67 +459,75 @@ module Sidekiq
  end
  end

- # add job to cron jobs
- # input:
- # name: (string) - name of job
- # cron: (string: '* * * * *' - cron specification when to run job
- # class: (string|class) - which class to perform
- # optional input:
- # queue: (string) - which queue to use for enquing (will override class queue)
- # args: (array|hash|nil) - arguments for permorm method
-
  def save
- #if job is invalid return false
+ # If job is invalid, return false.
  return false unless valid?

  Sidekiq.redis do |conn|

- #add to set of all jobs
- conn.sadd self.class.jobs_key, redis_key
+ # Add to set of all jobs
+ conn.sadd self.class.jobs_key, [redis_key]

- #add informations for this job!
+ # Add informations for this job!
  conn.hmset redis_key, *hash_to_redis(to_hash)

- #add information about last time! - don't enque right after scheduler poller starts!
+ # Add information about last time! - don't enque right after scheduler poller starts!
  time = Time.now.utc
- conn.zadd(job_enqueued_key, time.to_f.to_s, formated_last_time(time).to_s) unless conn.exists(job_enqueued_key)
+ exists = conn.public_send(REDIS_EXISTS_METHOD, job_enqueued_key)
+ conn.zadd(job_enqueued_key, time.to_f.to_s, formatted_last_time(time).to_s) unless exists == true || exists == 1
  end
- logger.info { "Cron Jobs - add job with name: #{@name}" }
+ Sidekiq.logger.info { "Cron Jobs - added job with name: #{@name}" }
  end

  def save_last_enqueue_time
  Sidekiq.redis do |conn|
- # update last enqueue time
- conn.hset redis_key, 'last_enqueue_time', @last_enqueue_time
+ # Update last enqueue time.
+ conn.hset redis_key, 'last_enqueue_time', serialized_last_enqueue_time
+ end
+ end
+
+ def add_jid_history(jid)
+ jid_history = {
+ jid: jid,
+ enqueued: @last_enqueue_time
+ }
+
+ @history_size ||= (Sidekiq::Options[:cron_history_size] || 10).to_i - 1
+ Sidekiq.redis do |conn|
+ conn.lpush jid_history_key,
+ Sidekiq.dump_json(jid_history)
+ # Keep only last 10 entries in a fifo manner.
+ conn.ltrim jid_history_key, 0, @history_size
  end
  end

- # remove job from cron jobs by name
- # input:
- # first arg: name (string) - name of job (must be same - case sensitive)
  def destroy
  Sidekiq.redis do |conn|
- #delete from set
- conn.srem self.class.jobs_key, redis_key
+ # Delete from set.
+ conn.srem self.class.jobs_key, [redis_key]

- #delete runned timestamps
+ # Delete runned timestamps.
  conn.del job_enqueued_key

- #delete main job
+ # Delete jid_history.
+ conn.del jid_history_key
+
+ # Delete main job.
  conn.del redis_key
  end
- logger.info { "Cron Jobs - deleted job with name: #{@name}" }
+
+ Sidekiq.logger.info { "Cron Jobs - deleted job with name: #{@name}" }
  end

- # remove all job from cron
+ # Remove all job from cron.
  def self.destroy_all!
  all.each do |job|
  job.destroy
  end
- logger.info { "Cron Jobs - deleted all jobs" }
+ Sidekiq.logger.info { "Cron Jobs - deleted all jobs" }
  end

- # remove "removed jobs" between current jobs and new jobs
+ # Remove "removed jobs" between current jobs and new jobs
  def self.destroy_removed_jobs new_job_names
  current_job_names = Sidekiq::Cron::Job.all.map(&:name)
  removed_job_names = current_job_names - new_job_names
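Usage sketch (not part of this diff): the jid history written by add_jid_history above can be read back with jid_history_from_redis (added earlier in this diff); the list is trimmed after each LPUSH to Sidekiq::Options[:cron_history_size] entries (default 10). The job name below is made up.

# Sketch: inspecting the enqueue history of a job.
job = Sidekiq::Cron::Job.find('hard_worker')

job.jid_history_from_redis.each do |entry|
  # Each entry is a JSON-decoded hash, roughly { "jid" => "...", "enqueued" => "..." }.
  puts "#{entry['enqueued']} -> #{entry['jid']}"
end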
@@ -496,23 +538,22 @@ module Sidekiq
  # Parse cron specification '* * * * *' and returns
  # time when last run should be performed
  def last_time now = Time.now.utc
- Rufus::Scheduler::CronLine.new(@cron).previous_time(now.utc).utc
+ parsed_cron.previous_time(now.utc).utc
  end

- def formated_enqueue_time now = Time.now.utc
+ def formatted_enqueue_time now = Time.now.utc
  last_time(now).getutc.to_f.to_s
  end

- def formated_last_time now = Time.now.utc
+ def formatted_last_time now = Time.now.utc
  last_time(now).getutc.iso8601
  end

  def self.exists? name
- out = false
- Sidekiq.redis do |conn|
- out = conn.exists redis_key name
+ out = Sidekiq.redis do |conn|
+ conn.public_send(REDIS_EXISTS_METHOD, redis_key(name))
  end
- out
+ out == true || out == 1
  end

  def exists?
@@ -525,70 +566,158 @@ module Sidekiq

  private

+ def parsed_cron
+ @parsed_cron ||= Fugit.parse_cronish(@cron)
+ end
+
  def not_enqueued_after?(time)
  @last_enqueue_time.nil? || @last_enqueue_time.to_i < last_time(time).to_i
  end

  # Try parsing inbound args into an array.
- # args from Redis will be encoded JSON;
- # try to load JSON, then failover
- # to string array.
+ # Args from Redis will be encoded JSON,
+ # try to load JSON, then failover to string array.
  def parse_args(args)
  case args
+ when GlobalID::Identification
+ [convert_to_global_id_hash(args)]
  when String
  begin
- Sidekiq.load_json(args)
+ parsed_args = Sidekiq.load_json(args)
+ symbolize_args? ? symbolize_args(parsed_args) : parsed_args
  rescue JSON::ParserError
- [*args] # cast to string array
+ [*args]
  end
  when Hash
- [args] # just put hash into array
+ args = serialize_argument(args)
+ symbolize_args? ? [symbolize_args(args)] : [args]
  when Array
- args # do nothing, already array
+ args = serialize_argument(args)
+ symbolize_args? ? symbolize_args(args) : args
+ else
+ [*args]
+ end
+ end
+
+ def symbolize_args?
+ @symbolize_args
+ end
+
+ def symbolize_args(input)
+ if input.is_a?(Array)
+ input.map do |arg|
+ if arg.respond_to?(:symbolize_keys)
+ arg.symbolize_keys
+ else
+ arg
+ end
+ end
+ elsif input.is_a?(Hash) && input.respond_to?(:symbolize_keys)
+ input.symbolize_keys
  else
- [*args] # cast to string array
+ input
  end
  end

+ def parse_enqueue_time(timestamp)
+ DateTime.strptime(timestamp, LAST_ENQUEUE_TIME_FORMAT).to_time.utc
+ rescue ArgumentError
+ DateTime.parse(timestamp).to_time.utc
+ end
+
  def not_past_scheduled_time?(current_time)
- last_cron_time = Rufus::Scheduler::CronLine.new(@cron).previous_time(current_time).utc
+ last_cron_time = parsed_cron.previous_time(current_time).utc
  return false if (current_time.to_i - last_cron_time.to_i) > 60
  true
  end

- # Redis key for set of all cron jobs
+ # Redis key for set of all cron jobs.
  def self.jobs_key
  "cron_jobs"
  end

- # Redis key for storing one cron job
+ # Redis key for storing one cron job.
  def self.redis_key name
  "cron_job:#{name}"
  end

- # Redis key for storing one cron job
+ # Redis key for storing one cron job.
  def redis_key
  self.class.redis_key @name
  end

- # Redis key for storing one cron job run times
- # (when poller added job to queue)
+ # Redis key for storing one cron job run times (when poller added job to queue)
  def self.job_enqueued_key name
  "cron_job:#{name}:enqueued"
  end

- # Redis key for storing one cron job run times
- # (when poller added job to queue)
+ def self.jid_history_key name
+ "cron_job:#{name}:jid_history"
+ end
+
  def job_enqueued_key
  self.class.job_enqueued_key @name
  end

- # Give Hash
- # returns array for using it for redis.hmset
+ def jid_history_key
+ self.class.jid_history_key @name
+ end
+
+ # Give Hash returns array for using it for redis.hmset
  def hash_to_redis hash
- hash.inject([]){ |arr,kv| arr + [kv[0], kv[1]] }
+ hash.flat_map{ |key, value| [key, value || ""] }
  end

+ def serialized_last_enqueue_time
+ @last_enqueue_time&.strftime(LAST_ENQUEUE_TIME_FORMAT)
+ end
+
+ def convert_to_global_id_hash(argument)
+ { GLOBALID_KEY => argument.to_global_id.to_s }
+ rescue URI::GID::MissingModelIdError
+ raise "Unable to serialize #{argument.class} " \
+ "without an id. (Maybe you forgot to call save?)"
+ end
+
+ def deserialize_argument(argument)
+ case argument
+ when String
+ argument
+ when Array
+ argument.map { |arg| deserialize_argument(arg) }
+ when Hash
+ if serialized_global_id?(argument)
+ deserialize_global_id argument
+ else
+ argument.transform_values { |v| deserialize_argument(v) }
+ end
+ else
+ argument
+ end
+ end
+
+ def serialized_global_id?(hash)
+ hash.size == 1 && hash.include?(GLOBALID_KEY)
+ end
+
+ def deserialize_global_id(hash)
+ GlobalID::Locator.locate hash[GLOBALID_KEY]
+ end
+
+ def serialize_argument(argument)
+ case argument
+ when GlobalID::Identification
+ convert_to_global_id_hash(argument)
+ when Array
+ argument.map { |arg| serialize_argument(arg) }
+ when Hash
+ argument.each_with_object({}) do |(key, value), hash|
+ hash[key] = serialize_argument(value)
+ end
+ else
+ argument
+ end
+ end
  end
  end
  end
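Usage sketch (not part of this diff): what the GlobalID helpers above do with job arguments. Product and RepriceWorker are placeholders; any object that responds to GlobalID::Identification (for example an ActiveRecord model) is stored under the "_sc_globalid" key and located again by deserialize_argument when the job is enqueued.

# Sketch only: Product stands in for any GlobalID::Identification object.
product = Product.find(42)

Sidekiq::Cron::Job.create(
  name: 'Reprice product 42',
  cron: '0 6 * * *',
  class: 'RepriceWorker',  # placeholder worker
  args: [product]          # stored as { "_sc_globalid" => product.to_global_id.to_s }
)

# At enqueue time, enqueue_args runs deserialize_argument, which calls
# GlobalID::Locator.locate and hands the located object to perform_later /
# perform_async in place of the serialized hash.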