sidekiq 6.0.0 → 6.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sidekiq might be problematic. See the changelog and file-by-file diff below for more details.

Files changed (57)
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +0 -1
  3. data/6.0-Upgrade.md +3 -1
  4. data/Changes.md +122 -1
  5. data/Ent-Changes.md +7 -1
  6. data/Gemfile +1 -1
  7. data/Gemfile.lock +106 -94
  8. data/Pro-Changes.md +16 -2
  9. data/README.md +3 -1
  10. data/bin/sidekiqload +8 -4
  11. data/bin/sidekiqmon +4 -5
  12. data/lib/generators/sidekiq/worker_generator.rb +11 -1
  13. data/lib/sidekiq/api.rb +125 -92
  14. data/lib/sidekiq/cli.rb +32 -19
  15. data/lib/sidekiq/client.rb +20 -4
  16. data/lib/sidekiq/fetch.rb +7 -7
  17. data/lib/sidekiq/job_logger.rb +12 -4
  18. data/lib/sidekiq/job_retry.rb +23 -10
  19. data/lib/sidekiq/launcher.rb +23 -7
  20. data/lib/sidekiq/logger.rb +108 -12
  21. data/lib/sidekiq/middleware/chain.rb +11 -2
  22. data/lib/sidekiq/monitor.rb +3 -18
  23. data/lib/sidekiq/paginator.rb +7 -2
  24. data/lib/sidekiq/processor.rb +18 -20
  25. data/lib/sidekiq/redis_connection.rb +3 -0
  26. data/lib/sidekiq/scheduled.rb +13 -12
  27. data/lib/sidekiq/sd_notify.rb +149 -0
  28. data/lib/sidekiq/systemd.rb +38 -0
  29. data/lib/sidekiq/testing.rb +12 -0
  30. data/lib/sidekiq/util.rb +0 -2
  31. data/lib/sidekiq/version.rb +1 -1
  32. data/lib/sidekiq/web/application.rb +22 -21
  33. data/lib/sidekiq/web/helpers.rb +23 -11
  34. data/lib/sidekiq/web/router.rb +1 -3
  35. data/lib/sidekiq/web.rb +1 -1
  36. data/lib/sidekiq/worker.rb +6 -6
  37. data/lib/sidekiq.rb +17 -5
  38. data/sidekiq.gemspec +2 -2
  39. data/web/assets/javascripts/application.js +22 -19
  40. data/web/assets/javascripts/dashboard.js +2 -2
  41. data/web/assets/stylesheets/application-dark.css +122 -0
  42. data/web/assets/stylesheets/application.css +9 -0
  43. data/web/locales/de.yml +14 -2
  44. data/web/locales/en.yml +2 -0
  45. data/web/locales/ja.yml +2 -0
  46. data/web/locales/lt.yml +83 -0
  47. data/web/views/_job_info.erb +2 -1
  48. data/web/views/busy.erb +4 -1
  49. data/web/views/dead.erb +2 -2
  50. data/web/views/layout.erb +1 -0
  51. data/web/views/morgue.erb +4 -1
  52. data/web/views/queue.erb +10 -1
  53. data/web/views/queues.erb +8 -0
  54. data/web/views/retries.erb +4 -1
  55. data/web/views/retry.erb +2 -2
  56. data/web/views/scheduled.erb +4 -1
  57. metadata +12 -8
data/lib/sidekiq/api.rb CHANGED
@@ -2,23 +2,11 @@
2
2
 
3
3
  require "sidekiq"
4
4
 
5
- module Sidekiq
6
- module RedisScanner
7
- def sscan(conn, key)
8
- cursor = "0"
9
- result = []
10
- loop do
11
- cursor, values = conn.sscan(key, cursor)
12
- result.push(*values)
13
- break if cursor == "0"
14
- end
15
- result
16
- end
17
- end
5
+ require "zlib"
6
+ require "base64"
18
7
 
8
+ module Sidekiq
19
9
  class Stats
20
- include RedisScanner
21
-
22
10
  def initialize
23
11
  fetch_stats!
24
12
  end
@@ -77,11 +65,11 @@ module Sidekiq
77
65
  }
78
66
 
79
67
  processes = Sidekiq.redis { |conn|
80
- sscan(conn, "processes")
68
+ conn.sscan_each("processes").to_a
81
69
  }
82
70
 
83
71
  queues = Sidekiq.redis { |conn|
84
- sscan(conn, "queues")
72
+ conn.sscan_each("queues").to_a
85
73
  }
86
74
 
87
75
  pipe2_res = Sidekiq.redis { |conn|
@@ -92,8 +80,8 @@ module Sidekiq
92
80
  }
93
81
 
94
82
  s = processes.size
95
- workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
96
- enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
83
+ workers_size = pipe2_res[0...s].sum(&:to_i)
84
+ enqueued = pipe2_res[s..-1].sum(&:to_i)
97
85
 
98
86
  default_queue_latency = if (entry = pipe1_res[6].first)
99
87
  job = begin
@@ -117,7 +105,7 @@ module Sidekiq
117
105
 
118
106
  default_queue_latency: default_queue_latency,
119
107
  workers_size: workers_size,
120
- enqueued: enqueued,
108
+ enqueued: enqueued
121
109
  }
122
110
  end
123
111
 
@@ -142,11 +130,9 @@ module Sidekiq
142
130
  end
143
131
 
144
132
  class Queues
145
- include RedisScanner
146
-
147
133
  def lengths
148
134
  Sidekiq.redis do |conn|
149
- queues = sscan(conn, "queues")
135
+ queues = conn.sscan_each("queues").to_a
150
136
 
151
137
  lengths = conn.pipelined {
152
138
  queues.each do |queue|
@@ -154,13 +140,8 @@ module Sidekiq
154
140
  end
155
141
  }
156
142
 
157
- i = 0
158
- array_of_arrays = queues.each_with_object({}) { |queue, memo|
159
- memo[queue] = lengths[i]
160
- i += 1
161
- }.sort_by { |_, size| size }
162
-
163
- Hash[array_of_arrays.reverse]
143
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
144
+ Hash[array_of_arrays]
164
145
  end
165
146
  end
166
147
  end
@@ -182,18 +163,12 @@ module Sidekiq
182
163
  private
183
164
 
184
165
  def date_stat_hash(stat)
185
- i = 0
186
166
  stat_hash = {}
187
- keys = []
188
- dates = []
189
-
190
- while i < @days_previous
191
- date = @start_date - i
192
- datestr = date.strftime("%Y-%m-%d")
193
- keys << "stat:#{stat}:#{datestr}"
194
- dates << datestr
195
- i += 1
196
- end
167
+ dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
168
+ date.strftime("%Y-%m-%d")
169
+ }
170
+
171
+ keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
197
172
 
198
173
  begin
199
174
  Sidekiq.redis do |conn|
@@ -225,13 +200,12 @@ module Sidekiq
225
200
  #
226
201
  class Queue
227
202
  include Enumerable
228
- extend RedisScanner
229
203
 
230
204
  ##
231
205
  # Return all known queues within Redis.
232
206
  #
233
207
  def self.all
234
- Sidekiq.redis { |c| sscan(c, "queues") }.sort.map { |q| Sidekiq::Queue.new(q) }
208
+ Sidekiq.redis { |c| c.sscan_each("queues").to_a }.sort.map { |q| Sidekiq::Queue.new(q) }
235
209
  end
236
210
 
237
211
  attr_reader :name
@@ -299,7 +273,7 @@ module Sidekiq
299
273
  def clear
300
274
  Sidekiq.redis do |conn|
301
275
  conn.multi do
302
- conn.del(@rname)
276
+ conn.unlink(@rname)
303
277
  conn.srem("queues", name)
304
278
  end
305
279
  end
@@ -349,7 +323,7 @@ module Sidekiq
349
323
  end
350
324
  when "ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper"
351
325
  job_class = @item["wrapped"] || args[0]
352
- if job_class == "ActionMailer::DeliveryJob"
326
+ if job_class == "ActionMailer::DeliveryJob" || job_class == "ActionMailer::MailDeliveryJob"
353
327
  # MailerClass#mailer_method
354
328
  args[0]["arguments"][0..1].join("#")
355
329
  else
@@ -372,6 +346,9 @@ module Sidekiq
372
346
  if (self["wrapped"] || args[0]) == "ActionMailer::DeliveryJob"
373
347
  # remove MailerClass, mailer_method and 'deliver_now'
374
348
  job_args.drop(3)
349
+ elsif (self["wrapped"] || args[0]) == "ActionMailer::MailDeliveryJob"
350
+ # remove MailerClass, mailer_method and 'deliver_now'
351
+ job_args.drop(3).first["args"]
375
352
  else
376
353
  job_args
377
354
  end
@@ -400,6 +377,20 @@ module Sidekiq
400
377
  Time.at(self["created_at"] || self["enqueued_at"] || 0).utc
401
378
  end
402
379
 
380
+ def tags
381
+ self["tags"] || []
382
+ end
383
+
384
+ def error_backtrace
385
+ # Cache nil values
386
+ if defined?(@error_backtrace)
387
+ @error_backtrace
388
+ else
389
+ value = self["error_backtrace"]
390
+ @error_backtrace = value && uncompress_backtrace(value)
391
+ end
392
+ end
393
+
403
394
  attr_reader :queue
404
395
 
405
396
  def latency
@@ -433,6 +424,23 @@ module Sidekiq
433
424
  Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
434
425
  default
435
426
  end
427
+
428
+ def uncompress_backtrace(backtrace)
429
+ if backtrace.is_a?(Array)
430
+ # Handle old jobs with raw Array backtrace format
431
+ backtrace
432
+ else
433
+ decoded = Base64.decode64(backtrace)
434
+ uncompressed = Zlib::Inflate.inflate(decoded)
435
+ begin
436
+ Sidekiq.load_json(uncompressed)
437
+ rescue
438
+ # Handle old jobs with marshalled backtrace format
439
+ # TODO Remove in 7.x
440
+ Marshal.load(uncompressed)
441
+ end
442
+ end
443
+ end
436
444
  end
437
445
 
438
446
  class SortedEntry < Job
@@ -458,8 +466,9 @@ module Sidekiq
458
466
  end
459
467
 
460
468
  def reschedule(at)
461
- delete
462
- @parent.schedule(at, item)
469
+ Sidekiq.redis do |conn|
470
+ conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
471
+ end
463
472
  end
464
473
 
465
474
  def add_to_queue
@@ -503,7 +512,7 @@ module Sidekiq
503
512
  else
504
513
  # multiple jobs with the same score
505
514
  # find the one with the right JID and push it
506
- hash = results.group_by { |message|
515
+ matched, nonmatched = results.partition { |message|
507
516
  if message.index(jid)
508
517
  msg = Sidekiq.load_json(message)
509
518
  msg["jid"] == jid
@@ -512,12 +521,12 @@ module Sidekiq
512
521
  end
513
522
  }
514
523
 
515
- msg = hash.fetch(true, []).first
524
+ msg = matched.first
516
525
  yield msg if msg
517
526
 
518
527
  # push the rest back onto the sorted set
519
528
  conn.multi do
520
- hash.fetch(false, []).each do |message|
529
+ nonmatched.each do |message|
521
530
  conn.zadd(parent.name, score.to_f.to_s, message)
522
531
  end
523
532
  end
@@ -540,9 +549,20 @@ module Sidekiq
540
549
  Sidekiq.redis { |c| c.zcard(name) }
541
550
  end
542
551
 
552
+ def scan(match, count = 100)
553
+ return to_enum(:scan, match, count) unless block_given?
554
+
555
+ match = "*#{match}*" unless match.include?("*")
556
+ Sidekiq.redis do |conn|
557
+ conn.zscan_each(name, match: match, count: count) do |entry, score|
558
+ yield SortedEntry.new(self, score, entry)
559
+ end
560
+ end
561
+ end
562
+
543
563
  def clear
544
564
  Sidekiq.redis do |conn|
545
- conn.del(name)
565
+ conn.unlink(name)
546
566
  end
547
567
  end
548
568
  alias_method :💣, :clear
@@ -576,28 +596,40 @@ module Sidekiq
576
596
  end
577
597
  end
578
598
 
599
+ ##
600
+ # Fetch jobs that match a given time or Range. Job ID is an
601
+ # optional second argument.
579
602
  def fetch(score, jid = nil)
603
+ begin_score, end_score =
604
+ if score.is_a?(Range)
605
+ [score.first, score.last]
606
+ else
607
+ [score, score]
608
+ end
609
+
580
610
  elements = Sidekiq.redis { |conn|
581
- conn.zrangebyscore(name, score, score)
611
+ conn.zrangebyscore(name, begin_score, end_score, with_scores: true)
582
612
  }
583
613
 
584
614
  elements.each_with_object([]) do |element, result|
585
- entry = SortedEntry.new(self, score, element)
586
- if jid
587
- result << entry if entry.jid == jid
588
- else
589
- result << entry
590
- end
615
+ data, job_score = element
616
+ entry = SortedEntry.new(self, job_score, data)
617
+ result << entry if jid.nil? || entry.jid == jid
591
618
  end
592
619
  end
593
620
 
594
621
  ##
595
622
  # Find the job with the given JID within this sorted set.
596
- #
597
- # This is a slow, inefficient operation. Do not use under
598
- # normal conditions. Sidekiq Pro contains a faster version.
623
+ # This is a slower O(n) operation. Do not use for app logic.
599
624
  def find_job(jid)
600
- detect { |j| j.jid == jid }
625
+ Sidekiq.redis do |conn|
626
+ conn.zscan_each(name, match: "*#{jid}*", count: 100) do |entry, score|
627
+ job = JSON.parse(entry)
628
+ matched = job["jid"] == jid
629
+ return SortedEntry.new(self, score, entry) if matched
630
+ end
631
+ end
632
+ nil
601
633
  end
602
634
 
603
635
  def delete_by_value(name, value)
@@ -612,11 +644,13 @@ module Sidekiq
612
644
  Sidekiq.redis do |conn|
613
645
  elements = conn.zrangebyscore(name, score, score)
614
646
  elements.each do |element|
615
- message = Sidekiq.load_json(element)
616
- if message["jid"] == jid
617
- ret = conn.zrem(name, element)
618
- @_size -= 1 if ret
619
- break ret
647
+ if element.index(jid)
648
+ message = Sidekiq.load_json(element)
649
+ if message["jid"] == jid
650
+ ret = conn.zrem(name, element)
651
+ @_size -= 1 if ret
652
+ break ret
653
+ end
620
654
  end
621
655
  end
622
656
  end
@@ -720,7 +754,6 @@ module Sidekiq
720
754
  #
721
755
  class ProcessSet
722
756
  include Enumerable
723
- include RedisScanner
724
757
 
725
758
  def initialize(clean_plz = true)
726
759
  cleanup if clean_plz
@@ -731,7 +764,7 @@ module Sidekiq
731
764
  def cleanup
732
765
  count = 0
733
766
  Sidekiq.redis do |conn|
734
- procs = sscan(conn, "processes").sort
767
+ procs = conn.sscan_each("processes").to_a.sort
735
768
  heartbeats = conn.pipelined {
736
769
  procs.each do |key|
737
770
  conn.hget(key, "info")
@@ -741,40 +774,37 @@ module Sidekiq
741
774
  # the hash named key has an expiry of 60 seconds.
742
775
  # if it's not found, that means the process has not reported
743
776
  # in to Redis and probably died.
744
- to_prune = []
745
- heartbeats.each_with_index do |beat, i|
746
- to_prune << procs[i] if beat.nil?
747
- end
777
+ to_prune = procs.select.with_index { |proc, i|
778
+ heartbeats[i].nil?
779
+ }
748
780
  count = conn.srem("processes", to_prune) unless to_prune.empty?
749
781
  end
750
782
  count
751
783
  end
752
784
 
753
785
  def each
754
- procs = Sidekiq.redis { |conn| sscan(conn, "processes") }.sort
786
+ result = Sidekiq.redis { |conn|
787
+ procs = conn.sscan_each("processes").to_a.sort
755
788
 
756
- Sidekiq.redis do |conn|
757
789
  # We're making a tradeoff here between consuming more memory instead of
758
790
  # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
759
791
  # you'll be happier this way
760
- result = conn.pipelined {
792
+ conn.pipelined do
761
793
  procs.each do |key|
762
794
  conn.hmget(key, "info", "busy", "beat", "quiet")
763
795
  end
764
- }
796
+ end
797
+ }
765
798
 
766
- result.each do |info, busy, at_s, quiet|
767
- # If a process is stopped between when we query Redis for `procs` and
768
- # when we query for `result`, we will have an item in `result` that is
769
- # composed of `nil` values.
770
- next if info.nil?
799
+ result.each do |info, busy, at_s, quiet|
800
+ # If a process is stopped between when we query Redis for `procs` and
801
+ # when we query for `result`, we will have an item in `result` that is
802
+ # composed of `nil` values.
803
+ next if info.nil?
771
804
 
772
- hash = Sidekiq.load_json(info)
773
- yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
774
- end
805
+ hash = Sidekiq.load_json(info)
806
+ yield Process.new(hash.merge("busy" => busy.to_i, "beat" => at_s.to_f, "quiet" => quiet))
775
807
  end
776
-
777
- nil
778
808
  end
779
809
 
780
810
  # This method is not guaranteed accurate since it does not prune the set
@@ -885,11 +915,10 @@ module Sidekiq
885
915
  #
886
916
  class Workers
887
917
  include Enumerable
888
- include RedisScanner
889
918
 
890
919
  def each
891
920
  Sidekiq.redis do |conn|
892
- procs = sscan(conn, "processes")
921
+ procs = conn.sscan_each("processes").to_a
893
922
  procs.sort.each do |key|
894
923
  valid, workers = conn.pipelined {
895
924
  conn.exists(key)
@@ -897,7 +926,11 @@ module Sidekiq
897
926
  }
898
927
  next unless valid
899
928
  workers.each_pair do |tid, json|
900
- yield key, tid, Sidekiq.load_json(json)
929
+ hsh = Sidekiq.load_json(json)
930
+ p = hsh["payload"]
931
+ # avoid breaking API, this is a side effect of the JSON optimization in #4316
932
+ hsh["payload"] = Sidekiq.load_json(p) if p.is_a?(String)
933
+ yield key, tid, hsh
901
934
  end
902
935
  end
903
936
  end
@@ -911,7 +944,7 @@ module Sidekiq
911
944
  # which can easily get out of sync with crashy processes.
912
945
  def size
913
946
  Sidekiq.redis do |conn|
914
- procs = sscan(conn, "processes")
947
+ procs = conn.sscan_each("processes").to_a
915
948
  if procs.empty?
916
949
  0
917
950
  else
@@ -919,7 +952,7 @@ module Sidekiq
919
952
  procs.each do |key|
920
953
  conn.hget(key, "busy")
921
954
  end
922
- }.map(&:to_i).inject(:+)
955
+ }.sum(&:to_i)
923
956
  end
924
957
  end
925
958
  end
data/lib/sidekiq/cli.rb CHANGED
@@ -38,12 +38,15 @@ module Sidekiq
38
38
  if environment == "development" && $stdout.tty? && Sidekiq.log_formatter.is_a?(Sidekiq::Logger::Formatters::Pretty)
39
39
  print_banner
40
40
  end
41
+ logger.info "Booted Rails #{::Rails.version} application in #{environment} environment" if rails_app?
41
42
 
42
43
  self_read, self_write = IO.pipe
43
44
  sigs = %w[INT TERM TTIN TSTP]
45
+ # USR1 and USR2 don't work on the JVM
46
+ sigs << "USR2" unless jruby?
44
47
  sigs.each do |sig|
45
48
  trap sig do
46
- self_write.write("#{sig}\n")
49
+ self_write.puts(sig)
47
50
  end
48
51
  rescue ArgumentError
49
52
  puts "Signal #{sig} not supported"
@@ -56,7 +59,7 @@ module Sidekiq
56
59
  # touch the connection pool so it is created before we
57
60
  # fire startup and start multithreading.
58
61
  ver = Sidekiq.redis_info["redis_version"]
59
- raise "You are using Redis v#{ver}, Sidekiq requires Redis v4.0.0 or greater" if ver < "4"
62
+ raise "You are connecting to Redis v#{ver}, Sidekiq requires Redis v4.0.0 or greater" if ver < "4"
60
63
 
61
64
  # Since the user can pass us a connection pool explicitly in the initializer, we
62
65
  # need to verify the size is large enough or else Sidekiq's performance is dramatically slowed.
@@ -160,17 +163,14 @@ module Sidekiq
160
163
  Sidekiq.logger.warn "<no backtrace available>"
161
164
  end
162
165
  end
163
- },
166
+ }
164
167
  }
168
+ UNHANDLED_SIGNAL_HANDLER = ->(cli) { Sidekiq.logger.info "No signal handler registered, ignoring" }
169
+ SIGNAL_HANDLERS.default = UNHANDLED_SIGNAL_HANDLER
165
170
 
166
171
  def handle_signal(sig)
167
172
  Sidekiq.logger.debug "Got #{sig} signal"
168
- handy = SIGNAL_HANDLERS[sig]
169
- if handy
170
- handy.call(self)
171
- else
172
- Sidekiq.logger.info { "No signal handler for #{sig}" }
173
- end
173
+ SIGNAL_HANDLERS[sig].call(self)
174
174
  end
175
175
 
176
176
  private
@@ -182,7 +182,11 @@ module Sidekiq
182
182
  end
183
183
 
184
184
  def set_environment(cli_env)
185
- @environment = cli_env || ENV["RAILS_ENV"] || ENV["RACK_ENV"] || "development"
185
+ # See #984 for discussion.
186
+ # APP_ENV is now the preferred ENV term since it is not tech-specific.
187
+ # Both Sinatra 2.0+ and Sidekiq support this term.
188
+ # RAILS_ENV and RACK_ENV are there for legacy support.
189
+ @environment = cli_env || ENV["APP_ENV"] || ENV["RAILS_ENV"] || ENV["RACK_ENV"] || "development"
186
190
  end
187
191
 
188
192
  def symbolize_keys_deep!(hash)
@@ -204,7 +208,7 @@ module Sidekiq
204
208
 
205
209
  # check config file presence
206
210
  if opts[:config_file]
207
- if opts[:config_file] && !File.exist?(opts[:config_file])
211
+ unless File.exist?(opts[:config_file])
208
212
  raise ArgumentError, "No such file #{opts[:config_file]}"
209
213
  end
210
214
  else
@@ -224,7 +228,7 @@ module Sidekiq
224
228
  opts = parse_config(opts[:config_file]).merge(opts) if opts[:config_file]
225
229
 
226
230
  # set defaults
227
- opts[:queues] = Array(opts[:queues]) << "default" if opts[:queues].nil? || opts[:queues].empty?
231
+ opts[:queues] = ["default"] if opts[:queues].nil? || opts[:queues].empty?
228
232
  opts[:strict] = true if opts[:strict].nil?
229
233
  opts[:concurrency] = Integer(ENV["RAILS_MAX_THREADS"]) if opts[:concurrency].nil? && ENV["RAILS_MAX_THREADS"]
230
234
 
@@ -283,8 +287,13 @@ module Sidekiq
283
287
 
284
288
  def parse_options(argv)
285
289
  opts = {}
290
+ @parser = option_parser(opts)
291
+ @parser.parse!(argv)
292
+ opts
293
+ end
286
294
 
287
- @parser = OptionParser.new { |o|
295
+ def option_parser(opts)
296
+ parser = OptionParser.new { |o|
288
297
  o.on "-c", "--concurrency INT", "processor threads to use" do |arg|
289
298
  opts[:concurrency] = Integer(arg)
290
299
  end
@@ -336,15 +345,13 @@ module Sidekiq
336
345
  end
337
346
  }
338
347
 
339
- @parser.banner = "sidekiq [options]"
340
- @parser.on_tail "-h", "--help", "Show help" do
341
- logger.info @parser
348
+ parser.banner = "sidekiq [options]"
349
+ parser.on_tail "-h", "--help", "Show help" do
350
+ logger.info parser
342
351
  die 1
343
352
  end
344
353
 
345
- @parser.parse!(argv)
346
-
347
- opts
354
+ parser
348
355
  end
349
356
 
350
357
  def initialize_logger
@@ -376,5 +383,11 @@ module Sidekiq
376
383
  [weight.to_i, 1].max.times { opts[:queues] << queue }
377
384
  opts[:strict] = false if weight.to_i > 0
378
385
  end
386
+
387
+ def rails_app?
388
+ defined?(::Rails) && ::Rails.respond_to?(:application)
389
+ end
379
390
  end
380
391
  end
392
+
393
+ require "sidekiq/systemd"
@@ -94,9 +94,14 @@ module Sidekiq
94
94
  return [] unless arg # no jobs to push
95
95
  raise ArgumentError, "Bulk arguments must be an Array of Arrays: [[1], [2]]" unless arg.is_a?(Array)
96
96
 
97
+ at = items.delete("at")
98
+ raise ArgumentError, "Job 'at' must be a Numeric or an Array of Numeric timestamps" if at && (Array(at).empty? || !Array(at).all?(Numeric))
99
+
97
100
  normed = normalize_item(items)
98
- payloads = items["args"].map { |args|
101
+ payloads = items["args"].map.with_index { |args, index|
99
102
  copy = normed.merge("args" => args, "jid" => SecureRandom.hex(12), "enqueued_at" => Time.now.to_f)
103
+ copy["at"] = (at.is_a?(Array) ? at[index] : at) if at
104
+
100
105
  result = process_single(items["class"], copy)
101
106
  result || nil
102
107
  }.compact
@@ -188,7 +193,7 @@ module Sidekiq
188
193
  end
189
194
 
190
195
  def atomic_push(conn, payloads)
191
- if payloads.first["at"]
196
+ if payloads.first.key?("at")
192
197
  conn.zadd("schedule", payloads.map { |hash|
193
198
  at = hash.delete("at").to_s
194
199
  [at, Sidekiq.dump_json(hash)]
@@ -214,19 +219,30 @@ module Sidekiq
214
219
  end
215
220
 
216
221
  def normalize_item(item)
222
+ # 6.0.0 push_bulk bug, #4321
223
+ # TODO Remove after a while...
224
+ item.delete("at") if item.key?("at") && item["at"].nil?
225
+
217
226
  raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.key?("class") && item.key?("args")
218
227
  raise(ArgumentError, "Job args must be an Array") unless item["args"].is_a?(Array)
219
228
  raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item["class"].is_a?(Class) || item["class"].is_a?(String)
220
229
  raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.key?("at") && !item["at"].is_a?(Numeric)
230
+ raise(ArgumentError, "Job tags must be an Array") if item["tags"] && !item["tags"].is_a?(Array)
221
231
  # raise(ArgumentError, "Arguments must be native JSON types, see https://github.com/mperham/sidekiq/wiki/Best-Practices") unless JSON.load(JSON.dump(item['args'])) == item['args']
222
232
 
223
- normalized_hash(item["class"])
224
- .each { |key, value| item[key] = value if item[key].nil? }
233
+ # merge in the default sidekiq_options for the item's class and/or wrapped element
234
+ # this allows ActiveJobs to control sidekiq_options too.
235
+ defaults = normalized_hash(item["class"])
236
+ defaults = defaults.merge(item["wrapped"].get_sidekiq_options) if item["wrapped"].respond_to?("get_sidekiq_options")
237
+ item = defaults.merge(item)
238
+
239
+ raise(ArgumentError, "Job must include a valid queue name") if item["queue"].nil? || item["queue"] == ""
225
240
 
226
241
  item["class"] = item["class"].to_s
227
242
  item["queue"] = item["queue"].to_s
228
243
  item["jid"] ||= SecureRandom.hex(12)
229
244
  item["created_at"] ||= Time.now.to_f
245
+
230
246
  item
231
247
  end
232
248
 
data/lib/sidekiq/fetch.rb CHANGED
@@ -14,12 +14,12 @@ module Sidekiq
14
14
  end
15
15
 
16
16
  def queue_name
17
- queue.sub(/.*queue:/, "")
17
+ queue.delete_prefix("queue:")
18
18
  end
19
19
 
20
20
  def requeue
21
21
  Sidekiq.redis do |conn|
22
- conn.rpush("queue:#{queue_name}", job)
22
+ conn.rpush(queue, job)
23
23
  end
24
24
  end
25
25
  }
@@ -28,7 +28,7 @@ module Sidekiq
28
28
  @strictly_ordered_queues = !!options[:strict]
29
29
  @queues = options[:queues].map { |q| "queue:#{q}" }
30
30
  if @strictly_ordered_queues
31
- @queues = @queues.uniq
31
+ @queues.uniq!
32
32
  @queues << TIMEOUT
33
33
  end
34
34
  end
@@ -47,7 +47,7 @@ module Sidekiq
47
47
  if @strictly_ordered_queues
48
48
  @queues
49
49
  else
50
- queues = @queues.shuffle.uniq
50
+ queues = @queues.shuffle!.uniq
51
51
  queues << TIMEOUT
52
52
  queues
53
53
  end
@@ -61,14 +61,14 @@ module Sidekiq
61
61
  Sidekiq.logger.debug { "Re-queueing terminated jobs" }
62
62
  jobs_to_requeue = {}
63
63
  inprogress.each do |unit_of_work|
64
- jobs_to_requeue[unit_of_work.queue_name] ||= []
65
- jobs_to_requeue[unit_of_work.queue_name] << unit_of_work.job
64
+ jobs_to_requeue[unit_of_work.queue] ||= []
65
+ jobs_to_requeue[unit_of_work.queue] << unit_of_work.job
66
66
  end
67
67
 
68
68
  Sidekiq.redis do |conn|
69
69
  conn.pipelined do
70
70
  jobs_to_requeue.each do |queue, jobs|
71
- conn.rpush("queue:#{queue}", jobs)
71
+ conn.rpush(queue, jobs)
72
72
  end
73
73
  end
74
74
  end