sidekiq 5.1.1 → 5.2.9


Potentially problematic release.



Files changed (56)
  1. checksums.yaml +5 -5
  2. data/.circleci/config.yml +61 -0
  3. data/.gitignore +2 -0
  4. data/.travis.yml +5 -8
  5. data/COMM-LICENSE +11 -9
  6. data/Changes.md +89 -0
  7. data/Ent-Changes.md +28 -0
  8. data/Gemfile +20 -5
  9. data/LICENSE +1 -1
  10. data/Pro-Changes.md +43 -0
  11. data/README.md +1 -1
  12. data/Rakefile +2 -1
  13. data/bin/sidekiqctl +13 -92
  14. data/bin/sidekiqload +1 -1
  15. data/lib/sidekiq/api.rb +59 -27
  16. data/lib/sidekiq/cli.rb +68 -61
  17. data/lib/sidekiq/client.rb +31 -30
  18. data/lib/sidekiq/ctl.rb +221 -0
  19. data/lib/sidekiq/delay.rb +1 -0
  20. data/lib/sidekiq/fetch.rb +1 -1
  21. data/lib/sidekiq/job_logger.rb +4 -3
  22. data/lib/sidekiq/job_retry.rb +40 -14
  23. data/lib/sidekiq/launcher.rb +18 -12
  24. data/lib/sidekiq/logging.rb +3 -3
  25. data/lib/sidekiq/manager.rb +3 -4
  26. data/lib/sidekiq/middleware/server/active_record.rb +2 -1
  27. data/lib/sidekiq/processor.rb +79 -28
  28. data/lib/sidekiq/rails.rb +4 -8
  29. data/lib/sidekiq/redis_connection.rb +29 -2
  30. data/lib/sidekiq/scheduled.rb +33 -4
  31. data/lib/sidekiq/testing.rb +4 -4
  32. data/lib/sidekiq/util.rb +1 -1
  33. data/lib/sidekiq/version.rb +1 -1
  34. data/lib/sidekiq/web/action.rb +1 -1
  35. data/lib/sidekiq/web/application.rb +24 -2
  36. data/lib/sidekiq/web/helpers.rb +14 -7
  37. data/lib/sidekiq/web/router.rb +10 -10
  38. data/lib/sidekiq/web.rb +4 -4
  39. data/lib/sidekiq/worker.rb +31 -15
  40. data/lib/sidekiq.rb +8 -7
  41. data/sidekiq.gemspec +5 -12
  42. data/web/assets/javascripts/application.js +0 -0
  43. data/web/assets/javascripts/dashboard.js +15 -5
  44. data/web/assets/stylesheets/application.css +35 -2
  45. data/web/assets/stylesheets/bootstrap.css +2 -2
  46. data/web/locales/ar.yml +1 -0
  47. data/web/locales/en.yml +1 -0
  48. data/web/locales/es.yml +3 -3
  49. data/web/views/_footer.erb +3 -0
  50. data/web/views/_nav.erb +3 -17
  51. data/web/views/layout.erb +1 -1
  52. data/web/views/queue.erb +1 -0
  53. data/web/views/queues.erb +1 -1
  54. data/web/views/retries.erb +4 -0
  55. metadata +12 -80
  56. data/lib/sidekiq/middleware/server/active_record_cache.rb +0 -11
data/lib/sidekiq/api.rb CHANGED
@@ -1,9 +1,24 @@
-# encoding: utf-8
 # frozen_string_literal: true
 require 'sidekiq'
 
 module Sidekiq
+
+  module RedisScanner
+    def sscan(conn, key)
+      cursor = '0'
+      result = []
+      loop do
+        cursor, values = conn.sscan(key, cursor)
+        result.push(*values)
+        break if cursor == '0'
+      end
+      result
+    end
+  end
+
   class Stats
+    include RedisScanner
+
     def initialize
       fetch_stats!
     end
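Note: the RedisScanner#sscan helper introduced above walks a Redis set with cursor-based SSCAN instead of a single SMEMBERS call, so very large 'queues'/'processes' sets no longer have to be returned in one blocking command. A minimal standalone sketch of the same pattern, assuming a redis-rb connection (the variable names are illustrative):

    require 'redis'

    conn = Redis.new
    cursor = '0'
    members = []
    loop do
      # SSCAN returns [next_cursor, batch]; iteration is done when the cursor wraps back to '0'
      cursor, batch = conn.sscan('queues', cursor)
      members.push(*batch)
      break if cursor == '0'
    end
    puts members.inspect
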
@@ -51,33 +66,39 @@ module Sidekiq
     def fetch_stats!
       pipe1_res = Sidekiq.redis do |conn|
         conn.pipelined do
-          conn.get('stat:processed'.freeze)
-          conn.get('stat:failed'.freeze)
-          conn.zcard('schedule'.freeze)
-          conn.zcard('retry'.freeze)
-          conn.zcard('dead'.freeze)
-          conn.scard('processes'.freeze)
-          conn.lrange('queue:default'.freeze, -1, -1)
-          conn.smembers('processes'.freeze)
-          conn.smembers('queues'.freeze)
+          conn.get('stat:processed')
+          conn.get('stat:failed')
+          conn.zcard('schedule')
+          conn.zcard('retry')
+          conn.zcard('dead')
+          conn.scard('processes')
+          conn.lrange('queue:default', -1, -1)
         end
       end
 
+      processes = Sidekiq.redis do |conn|
+        sscan(conn, 'processes')
+      end
+
+      queues = Sidekiq.redis do |conn|
+        sscan(conn, 'queues')
+      end
+
       pipe2_res = Sidekiq.redis do |conn|
         conn.pipelined do
-          pipe1_res[7].each {|key| conn.hget(key, 'busy'.freeze) }
-          pipe1_res[8].each {|queue| conn.llen("queue:#{queue}") }
+          processes.each {|key| conn.hget(key, 'busy') }
+          queues.each {|queue| conn.llen("queue:#{queue}") }
         end
       end
 
-      s = pipe1_res[7].size
+      s = processes.size
       workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
       enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
         job = Sidekiq.load_json(entry) rescue {}
         now = Time.now.to_f
-        thence = job['enqueued_at'.freeze] || now
+        thence = job['enqueued_at'] || now
         now - thence
       else
         0
@@ -117,9 +138,11 @@ module Sidekiq
   end
 
   class Queues
+    include RedisScanner
+
     def lengths
       Sidekiq.redis do |conn|
-        queues = conn.smembers('queues'.freeze)
+        queues = sscan(conn, 'queues')
 
         lengths = conn.pipelined do
           queues.each do |queue|
@@ -163,7 +186,7 @@ module Sidekiq
 
       while i < @days_previous
         date = @start_date - i
-        datestr = date.strftime("%Y-%m-%d".freeze)
+        datestr = date.strftime("%Y-%m-%d")
         keys << "stat:#{stat}:#{datestr}"
         dates << datestr
         i += 1
@@ -199,18 +222,19 @@ module Sidekiq
   #
   class Queue
     include Enumerable
+    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| c.smembers('queues'.freeze) }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
     def initialize(name="default")
-      @name = name
+      @name = name.to_s
       @rname = "queue:#{name}"
     end
 
@@ -273,7 +297,7 @@ module Sidekiq
       Sidekiq.redis do |conn|
         conn.multi do
           conn.del(@rname)
-          conn.srem("queues".freeze, name)
+          conn.srem("queues", name)
         end
       end
     end
@@ -349,9 +373,9 @@ module Sidekiq
           job_args
         end
       else
-        if self['encrypt'.freeze]
+        if self['encrypt']
           # no point in showing 150+ bytes of random garbage
-          args[-1] = '[encrypted data]'.freeze
+          args[-1] = '[encrypted data]'
         end
         args
       end
@@ -646,6 +670,12 @@ module Sidekiq
         each(&:retry)
       end
     end
+
+    def kill_all
+      while size > 0
+        each(&:kill)
+      end
+    end
   end
 
   ##
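Note: kill_all above complements retry_all; assuming it lands on the retry set API as it does in 5.2, it moves every pending retry to the Dead set instead of re-running it. A hedged usage sketch:

    require 'sidekiq/api'

    retries = Sidekiq::RetrySet.new
    retries.retry_all   # re-enqueue every retry immediately
    # ...or give up on all of them instead:
    retries.kill_all    # move every retry to the Dead set
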
@@ -701,17 +731,18 @@ module Sidekiq
   #
   class ProcessSet
     include Enumerable
+    include RedisScanner
 
     def initialize(clean_plz=true)
-      self.class.cleanup if clean_plz
+      cleanup if clean_plz
     end
 
     # Cleans up dead processes recorded in Redis.
     # Returns the number of processes cleaned.
-    def self.cleanup
+    def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes').sort
+        procs = sscan(conn, 'processes').sort
         heartbeats = conn.pipelined do
           procs.each do |key|
             conn.hget(key, 'info')
@@ -731,7 +762,7 @@ module Sidekiq
     end
 
     def each
-      procs = Sidekiq.redis { |conn| conn.smembers('processes') }.sort
+      procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
 
       Sidekiq.redis do |conn|
         # We're making a tradeoff here between consuming more memory instead of
@@ -866,10 +897,11 @@ module Sidekiq
   #
   class Workers
     include Enumerable
+    include RedisScanner
 
     def each
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes')
+        procs = sscan(conn, 'processes')
         procs.sort.each do |key|
           valid, workers = conn.pipelined do
             conn.exists(key)
@@ -891,7 +923,7 @@
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes')
+        procs = sscan(conn, 'processes')
         if procs.empty?
           0
         else
data/lib/sidekiq/cli.rb CHANGED
@@ -1,4 +1,3 @@
-# encoding: utf-8
 # frozen_string_literal: true
 $stdout.sync = true
 
@@ -10,6 +9,7 @@ require 'fileutils'
 
 require 'sidekiq'
 require 'sidekiq/util'
+require 'sidekiq/launcher'
 
 module Sidekiq
   class CLI
@@ -17,30 +17,20 @@ module Sidekiq
     include Singleton unless $TESTING
 
     PROCTITLES = [
-      proc { 'sidekiq'.freeze },
+      proc { 'sidekiq' },
       proc { Sidekiq::VERSION },
       proc { |me, data| data['tag'] },
       proc { |me, data| "[#{Processor::WORKER_STATE.size} of #{data['concurrency']} busy]" },
       proc { |me, data| "stopping" if me.stopping? },
     ]
 
-    # Used for CLI testing
-    attr_accessor :code
     attr_accessor :launcher
     attr_accessor :environment
 
-    def initialize
-      @code = nil
-    end
-
-    def parse(args=ARGV)
-      @code = nil
-
+    def parse(args = ARGV)
       setup_options(args)
       initialize_logger
       validate!
-      daemonize
-      write_pid
     end
 
     def jruby?
@@ -51,8 +41,10 @@ module Sidekiq
     # global process state irreversibly. PRs which improve the
     # test coverage of Sidekiq::CLI are welcomed.
     def run
+      daemonize if options[:daemon]
+      write_pid
       boot_system
-      print_banner
+      print_banner if environment == 'development' && $stdout.tty?
 
       self_read, self_write = IO.pipe
       sigs = %w(INT TERM TTIN TSTP)
@@ -65,7 +57,7 @@ module Sidekiq
       sigs.each do |sig|
         begin
           trap sig do
-            self_write.puts(sig)
+            self_write.write("#{sig}\n")
           end
         rescue ArgumentError
           puts "Signal #{sig} not supported"
@@ -80,6 +72,13 @@ module Sidekiq
       # fire startup and start multithreading.
       ver = Sidekiq.redis_info['redis_version']
       raise "You are using Redis v#{ver}, Sidekiq requires Redis v2.8.0 or greater" if ver < '2.8'
+      logger.warn "Sidekiq 6.0 will require Redis 4.0+, you are using Redis v#{ver}" if ver < '4'
+
+      # Since the user can pass us a connection pool explicitly in the initializer, we
+      # need to verify the size is large enough or else Sidekiq's performance is dramatically slowed.
+      cursize = Sidekiq.redis_pool.size
+      needed = Sidekiq.options[:concurrency] + 2
+      raise "Your pool of #{cursize} Redis connections is too small, please increase the size to at least #{needed}" if cursize < needed
 
       # cache process identity
       Sidekiq.options[:identity] = identity
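Note: with the new pool-size check above, a connection pool passed in by the application must hold at least concurrency + 2 connections or the process aborts at boot. A rough initializer sketch under that assumption (the file name, URL, and hard-coded concurrency are illustrative):

    # config/initializers/sidekiq.rb
    require 'sidekiq'
    require 'connection_pool'
    require 'redis'

    concurrency = 25 # keep in sync with your -c / :concurrency setting

    Sidekiq.configure_server do |config|
      # size must be >= concurrency + 2, matching the check in CLI#run
      config.redis = ConnectionPool.new(size: concurrency + 2, timeout: 5) do
        Redis.new(url: ENV.fetch('REDIS_URL', 'redis://localhost:6379/0'))
      end
    end
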
@@ -94,11 +93,14 @@ module Sidekiq
       logger.debug { "Client Middleware: #{Sidekiq.client_middleware.map(&:klass).join(', ')}" }
       logger.debug { "Server Middleware: #{Sidekiq.server_middleware.map(&:klass).join(', ')}" }
 
+      launch(self_read)
+    end
+
+    def launch(self_read)
       if !options[:daemon]
         logger.info 'Starting processing, hit Ctrl-C to stop'
       end
 
-      require 'sidekiq/launcher'
       @launcher = Sidekiq::Launcher.new(options)
 
       begin
@@ -180,23 +182,15 @@ module Sidekiq
     private
 
     def print_banner
-      # Print logo and banner for development
-      if environment == 'development' && $stdout.tty?
-        puts "\e[#{31}m"
-        puts Sidekiq::CLI.banner
-        puts "\e[0m"
-      end
+      puts "\e[#{31}m"
+      puts Sidekiq::CLI.banner
+      puts "\e[0m"
     end
 
     def daemonize
-      return unless options[:daemon]
-
       raise ArgumentError, "You really should set a logfile if you're going to daemonize" unless options[:logfile]
-      files_to_reopen = []
-      ObjectSpace.each_object(File) do |file|
-        files_to_reopen << file unless file.closed?
-      end
 
+      files_to_reopen = ObjectSpace.each_object(File).reject { |f| f.closed? }
       ::Process.daemon(true, true)
 
       files_to_reopen.each do |file|
@@ -234,15 +228,38 @@
     alias_method :☠, :exit
 
     def setup_options(args)
+      # parse CLI options
       opts = parse_options(args)
+
       set_environment opts[:environment]
 
-      cfile = opts[:config_file]
-      opts = parse_config(cfile).merge(opts) if cfile
+      # check config file presence
+      if opts[:config_file]
+        if opts[:config_file] && !File.exist?(opts[:config_file])
+          raise ArgumentError, "No such file #{opts[:config_file]}"
+        end
+      else
+        config_dir = if File.directory?(opts[:require].to_s)
+          File.join(opts[:require], 'config')
+        else
+          File.join(options[:require], 'config')
+        end
 
+        %w[sidekiq.yml sidekiq.yml.erb].each do |config_file|
+          path = File.join(config_dir, config_file)
+          opts[:config_file] ||= path if File.exist?(path)
+        end
+      end
+
+      # parse config file options
+      opts = parse_config(opts[:config_file]).merge(opts) if opts[:config_file]
+
+      # set defaults
+      opts[:queues] = Array(opts[:queues]) << 'default' if opts[:queues].nil? || opts[:queues].empty?
       opts[:strict] = true if opts[:strict].nil?
-      opts[:concurrency] = Integer(ENV["RAILS_MAX_THREADS"]) if !opts[:concurrency] && ENV["RAILS_MAX_THREADS"]
+      opts[:concurrency] = Integer(ENV["RAILS_MAX_THREADS"]) if opts[:concurrency].nil? && ENV["RAILS_MAX_THREADS"]
 
+      # merge with defaults
       options.merge!(opts)
     end
 
@@ -253,8 +270,6 @@ module Sidekiq
     def boot_system
       ENV['RACK_ENV'] = ENV['RAILS_ENV'] = environment
 
-      raise ArgumentError, "#{options[:require]} does not exist" unless File.exist?(options[:require])
-
       if File.directory?(options[:require])
         require 'rails'
         if ::Rails::VERSION::MAJOR < 4
@@ -274,10 +289,7 @@ module Sidekiq
         end
         options[:tag] ||= default_tag
       else
-        not_required_message = "#{options[:require]} was not required, you should use an explicit path: " +
-          "./#{options[:require]} or /path/to/#{options[:require]}"
-
-        require(options[:require]) || raise(ArgumentError, not_required_message)
+        require options[:require]
       end
     end
 
@@ -293,8 +305,6 @@ module Sidekiq
     end
 
     def validate!
-      options[:queues] << 'default' if options[:queues].empty?
-
       if !File.exist?(options[:require]) ||
          (File.directory?(options[:require]) && !File.exist?("#{options[:require]}/config/application.rb"))
         logger.info "=================================================================="
@@ -320,6 +330,7 @@ module Sidekiq
 
       o.on '-d', '--daemon', "Daemonize process" do |arg|
         opts[:daemon] = arg
+        puts "WARNING: Daemonization mode will be removed in Sidekiq 6.0, see #4045. Please use a proper process supervisor to start and manage your services"
       end
 
       o.on '-e', '--environment ENV', "Application environment" do |arg|
@@ -330,6 +341,8 @@ module Sidekiq
         opts[:tag] = arg
       end
 
+      # this index remains here for backwards compatibility but none of the Sidekiq
+      # family use this value anymore. it was used by Pro's original reliable_fetch.
      o.on '-i', '--index INT', "unique process index on this machine" do |arg|
         opts[:index] = Integer(arg.match(/\d+/)[0])
       end
@@ -357,10 +370,12 @@ module Sidekiq
 
       o.on '-L', '--logfile PATH', "path to writable logfile" do |arg|
         opts[:logfile] = arg
+        puts "WARNING: Logfile redirection will be removed in Sidekiq 6.0, see #4045. Sidekiq will only log to STDOUT"
       end
 
       o.on '-P', '--pidfile PATH', "path to pidfile" do |arg|
         opts[:pidfile] = arg
+        puts "WARNING: PID file creation will be removed in Sidekiq 6.0, see #4045. Please use a proper process supervisor to start and manage your services"
       end
 
       o.on '-V', '--version', "Print version and exit" do |arg|
@@ -374,11 +389,8 @@ module Sidekiq
         logger.info @parser
         die 1
       end
-      @parser.parse!(argv)
 
-      %w[config/sidekiq.yml config/sidekiq.yml.erb].each do |filename|
-        opts[:config_file] ||= filename if File.exist?(filename)
-      end
+      @parser.parse!(argv)
 
       opts
     end
@@ -398,23 +410,18 @@ module Sidekiq
       end
     end
 
-    def parse_config(cfile)
-      opts = {}
-      if File.exist?(cfile)
-        opts = YAML.load(ERB.new(IO.read(cfile)).result) || opts
-
-        if opts.respond_to? :deep_symbolize_keys!
-          opts.deep_symbolize_keys!
-        else
-          symbolize_keys_deep!(opts)
-        end
+    def parse_config(path)
+      opts = YAML.load(ERB.new(File.read(path)).result) || {}
 
-        opts = opts.merge(opts.delete(environment.to_sym) || {})
-        parse_queues(opts, opts.delete(:queues) || [])
+      if opts.respond_to? :deep_symbolize_keys!
+        opts.deep_symbolize_keys!
       else
-        # allow a non-existent config file so Sidekiq
-        # can be deployed by cap with just the defaults.
+        symbolize_keys_deep!(opts)
       end
+
+      opts = opts.merge(opts.delete(environment.to_sym) || {})
+      parse_queues(opts, opts.delete(:queues) || [])
+
       ns = opts.delete(:namespace)
       if ns
         # logger hasn't been initialized yet, puts is all we have.
@@ -428,10 +435,10 @@ module Sidekiq
       queues_and_weights.each { |queue_and_weight| parse_queue(opts, *queue_and_weight) }
     end
 
-    def parse_queue(opts, q, weight=nil)
-      [weight.to_i, 1].max.times do
-        (opts[:queues] ||= []) << q
-      end
+    def parse_queue(opts, queue, weight = nil)
+      opts[:queues] ||= []
+      raise ArgumentError, "queues: #{queue} cannot be defined twice" if opts[:queues].include?(queue)
+      [weight.to_i, 1].max.times { opts[:queues] << queue }
       opts[:strict] = false if weight.to_i > 0
     end
   end
data/lib/sidekiq/client.rb CHANGED
@@ -68,18 +68,19 @@ module Sidekiq
     #
     def push(item)
       normed = normalize_item(item)
-      payload = process_single(item['class'.freeze], normed)
+      payload = process_single(item['class'], normed)
 
       if payload
         raw_push([payload])
-        payload['jid'.freeze]
+        payload['jid']
       end
     end
 
     ##
-    # Push a large number of jobs to Redis. In practice this method is only
-    # useful if you are pushing thousands of jobs or more. This method
-    # cuts out the redis network round trip latency.
+    # Push a large number of jobs to Redis. This method cuts out the redis
+    # network round trip latency. I wouldn't recommend pushing more than
+    # 1000 per call but YMMV based on network quality, size of job args, etc.
+    # A large number of jobs can cause a bit of Redis command processing latency.
     #
     # Takes the same arguments as #push except that args is expected to be
     # an Array of Arrays. All other keys are duplicated for each job. Each job
@@ -89,19 +90,19 @@ module Sidekiq
     # Returns an array of the of pushed jobs' jids. The number of jobs pushed can be less
     # than the number given if the middleware stopped processing for one or more jobs.
     def push_bulk(items)
-      arg = items['args'.freeze].first
+      arg = items['args'].first
       return [] unless arg # no jobs to push
       raise ArgumentError, "Bulk arguments must be an Array of Arrays: [[1], [2]]" if !arg.is_a?(Array)
 
       normed = normalize_item(items)
-      payloads = items['args'.freeze].map do |args|
-        copy = normed.merge('args'.freeze => args, 'jid'.freeze => SecureRandom.hex(12), 'enqueued_at'.freeze => Time.now.to_f)
-        result = process_single(items['class'.freeze], copy)
+      payloads = items['args'].map do |args|
+        copy = normed.merge('args' => args, 'jid' => SecureRandom.hex(12), 'enqueued_at' => Time.now.to_f)
+        result = process_single(items['class'], copy)
         result ? result : nil
       end.compact
 
       raw_push(payloads) if !payloads.empty?
-      payloads.collect { |payload| payload['jid'.freeze] }
+      payloads.collect { |payload| payload['jid'] }
     end
 
     # Allows sharding of jobs across any number of Redis instances. All jobs
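Note: per the revised comment above, push_bulk trades one round trip for a larger single command, so batches of roughly 1,000 jobs per call are a sensible ceiling. A hedged usage sketch (HardWorker stands in for any Sidekiq::Worker class):

    # one Redis round trip enqueues three HardWorker jobs
    jids = Sidekiq::Client.push_bulk(
      'class' => HardWorker,
      'args'  => [[1], [2], [3]]
    )
    puts "enqueued #{jids.size} jobs"
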
@@ -144,14 +145,14 @@ module Sidekiq
     # Messages are enqueued to the 'default' queue.
     #
     def enqueue(klass, *args)
-      klass.client_push('class'.freeze => klass, 'args'.freeze => args)
+      klass.client_push('class' => klass, 'args' => args)
     end
 
     # Example usage:
     #   Sidekiq::Client.enqueue_to(:queue_name, MyWorker, 'foo', 1, :bat => 'bar')
     #
     def enqueue_to(queue, klass, *args)
-      klass.client_push('queue'.freeze => queue, 'class'.freeze => klass, 'args'.freeze => args)
+      klass.client_push('queue' => queue, 'class' => klass, 'args' => args)
     end
 
     # Example usage:
@@ -162,8 +163,8 @@ module Sidekiq
       now = Time.now.to_f
       ts = (int < 1_000_000_000 ? now + int : int)
 
-      item = { 'class'.freeze => klass, 'args'.freeze => args, 'at'.freeze => ts, 'queue'.freeze => queue }
-      item.delete('at'.freeze) if ts <= now
+      item = { 'class' => klass, 'args' => args, 'at' => ts, 'queue' => queue }
+      item.delete('at') if ts <= now
 
       klass.client_push(item)
     end
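Note: the ts calculation above treats values below 1_000_000_000 as a relative interval in seconds and anything larger as an absolute epoch timestamp; a timestamp at or before now drops the 'at' key so the job enqueues immediately. A brief sketch (MyWorker is a placeholder worker class):

    Sidekiq::Client.enqueue_to_in(:critical, 300, MyWorker, 'foo')                  # run ~300 seconds from now
    Sidekiq::Client.enqueue_to_in(:critical, Time.now.to_f + 3600, MyWorker, 'foo') # run at an absolute epoch time
    Sidekiq::Client.enqueue_to_in(:critical, -5, MyWorker, 'foo')                   # past time: enqueued immediately
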
@@ -188,25 +189,25 @@ module Sidekiq
     end
 
     def atomic_push(conn, payloads)
-      if payloads.first['at'.freeze]
-        conn.zadd('schedule'.freeze, payloads.map do |hash|
-          at = hash.delete('at'.freeze).to_s
+      if payloads.first['at']
+        conn.zadd('schedule', payloads.map do |hash|
+          at = hash.delete('at').to_s
           [at, Sidekiq.dump_json(hash)]
         end)
       else
-        q = payloads.first['queue'.freeze]
+        q = payloads.first['queue']
         now = Time.now.to_f
         to_push = payloads.map do |entry|
-          entry['enqueued_at'.freeze] = now
+          entry['enqueued_at'] = now
           Sidekiq.dump_json(entry)
         end
-        conn.sadd('queues'.freeze, q)
+        conn.sadd('queues', q)
         conn.lpush("queue:#{q}", to_push)
       end
     end
 
     def process_single(worker_class, item)
-      queue = item['queue'.freeze]
+      queue = item['queue']
 
       middleware.invoke(worker_class, item, queue, @redis_pool) do
         item
@@ -214,25 +215,25 @@ module Sidekiq
     end
 
     def normalize_item(item)
-      raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.has_key?('class'.freeze) && item.has_key?('args'.freeze)
+      raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.has_key?('class') && item.has_key?('args')
       raise(ArgumentError, "Job args must be an Array") unless item['args'].is_a?(Array)
-      raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item['class'.freeze].is_a?(Class) || item['class'.freeze].is_a?(String)
-      raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.has_key?('at'.freeze) && !item['at'].is_a?(Numeric)
+      raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item['class'].is_a?(Class) || item['class'].is_a?(String)
+      raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.has_key?('at') && !item['at'].is_a?(Numeric)
       #raise(ArgumentError, "Arguments must be native JSON types, see https://github.com/mperham/sidekiq/wiki/Best-Practices") unless JSON.load(JSON.dump(item['args'])) == item['args']
 
-      normalized_hash(item['class'.freeze])
+      normalized_hash(item['class'])
       .each{ |key, value| item[key] = value if item[key].nil? }
 
-      item['class'.freeze] = item['class'.freeze].to_s
-      item['queue'.freeze] = item['queue'.freeze].to_s
-      item['jid'.freeze] ||= SecureRandom.hex(12)
-      item['created_at'.freeze] ||= Time.now.to_f
+      item['class'] = item['class'].to_s
+      item['queue'] = item['queue'].to_s
+      item['jid'] ||= SecureRandom.hex(12)
+      item['created_at'] ||= Time.now.to_f
       item
     end
 
     def normalized_hash(item_class)
       if item_class.is_a?(Class)
-        raise(ArgumentError, "Message must include a Sidekiq::Worker class, not class name: #{item_class.ancestors.inspect}") if !item_class.respond_to?('get_sidekiq_options'.freeze)
+        raise(ArgumentError, "Message must include a Sidekiq::Worker class, not class name: #{item_class.ancestors.inspect}") if !item_class.respond_to?('get_sidekiq_options')
         item_class.get_sidekiq_options
       else
         Sidekiq.default_worker_options
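Note: normalize_item above is the contract every push ultimately passes through: the payload must be a Hash with 'class' and 'args' (an Array), 'at' must be Numeric if present, and missing keys are filled from the worker's sidekiq_options before 'jid' and 'created_at' are stamped in. A hedged example of a payload that satisfies it (HardWorker is a placeholder worker class):

    Sidekiq::Client.push(
      'class' => HardWorker,          # a Sidekiq::Worker class or its name as a String
      'args'  => ['bob', 5],          # must be an Array of JSON-native values
      'queue' => 'critical',          # optional, otherwise taken from sidekiq_options
      'at'    => Time.now.to_f + 60   # optional Numeric timestamp for scheduled jobs
    )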