sidekiq 5.1.1 → 5.2.10

Files changed (56)
  1. checksums.yaml +5 -5
  2. data/.circleci/config.yml +61 -0
  3. data/.gitignore +2 -0
  4. data/.travis.yml +5 -8
  5. data/COMM-LICENSE +11 -9
  6. data/Changes.md +96 -0
  7. data/Ent-Changes.md +28 -0
  8. data/Gemfile +16 -5
  9. data/LICENSE +1 -1
  10. data/Pro-Changes.md +43 -0
  11. data/README.md +1 -1
  12. data/Rakefile +2 -1
  13. data/bin/sidekiqctl +13 -92
  14. data/bin/sidekiqload +1 -1
  15. data/lib/sidekiq/api.rb +62 -28
  16. data/lib/sidekiq/cli.rb +68 -61
  17. data/lib/sidekiq/client.rb +31 -30
  18. data/lib/sidekiq/ctl.rb +221 -0
  19. data/lib/sidekiq/delay.rb +1 -0
  20. data/lib/sidekiq/fetch.rb +1 -1
  21. data/lib/sidekiq/job_logger.rb +4 -3
  22. data/lib/sidekiq/job_retry.rb +40 -14
  23. data/lib/sidekiq/launcher.rb +19 -13
  24. data/lib/sidekiq/logging.rb +3 -3
  25. data/lib/sidekiq/manager.rb +3 -4
  26. data/lib/sidekiq/middleware/server/active_record.rb +2 -1
  27. data/lib/sidekiq/processor.rb +79 -28
  28. data/lib/sidekiq/rails.rb +4 -8
  29. data/lib/sidekiq/redis_connection.rb +29 -2
  30. data/lib/sidekiq/scheduled.rb +33 -4
  31. data/lib/sidekiq/testing.rb +4 -4
  32. data/lib/sidekiq/util.rb +1 -1
  33. data/lib/sidekiq/version.rb +1 -1
  34. data/lib/sidekiq/web/action.rb +2 -2
  35. data/lib/sidekiq/web/application.rb +28 -3
  36. data/lib/sidekiq/web/helpers.rb +14 -7
  37. data/lib/sidekiq/web/router.rb +10 -10
  38. data/lib/sidekiq/web.rb +4 -4
  39. data/lib/sidekiq/worker.rb +31 -15
  40. data/lib/sidekiq.rb +8 -7
  41. data/sidekiq.gemspec +5 -12
  42. data/web/assets/javascripts/application.js +0 -0
  43. data/web/assets/javascripts/dashboard.js +15 -5
  44. data/web/assets/stylesheets/application.css +35 -2
  45. data/web/assets/stylesheets/bootstrap.css +2 -2
  46. data/web/locales/ar.yml +1 -0
  47. data/web/locales/en.yml +1 -0
  48. data/web/locales/es.yml +3 -3
  49. data/web/views/_footer.erb +3 -0
  50. data/web/views/_nav.erb +3 -17
  51. data/web/views/layout.erb +1 -1
  52. data/web/views/queue.erb +1 -0
  53. data/web/views/queues.erb +1 -1
  54. data/web/views/retries.erb +4 -0
  55. metadata +19 -87
  56. data/lib/sidekiq/middleware/server/active_record_cache.rb +0 -11
data/lib/sidekiq/api.rb CHANGED
@@ -1,9 +1,24 @@
-# encoding: utf-8
 # frozen_string_literal: true
 require 'sidekiq'
 
 module Sidekiq
+
+  module RedisScanner
+    def sscan(conn, key)
+      cursor = '0'
+      result = []
+      loop do
+        cursor, values = conn.sscan(key, cursor)
+        result.push(*values)
+        break if cursor == '0'
+      end
+      result
+    end
+  end
+
   class Stats
+    include RedisScanner
+
     def initialize
       fetch_stats!
     end
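Note: the new RedisScanner#sscan helper replaces one-shot SMEMBERS reads with cursor-based SSCAN so large sets are fetched in chunks. A minimal sketch of the same cursor pattern against a bare redis-rb connection (the `redis` gem, a local Redis server, and the 'queues' key are assumptions for illustration):

    require 'redis'

    redis = Redis.new   # assumes redis://localhost:6379

    cursor = '0'
    members = []
    loop do
      # SSCAN returns [next_cursor, batch_of_members]
      cursor, values = redis.sscan('queues', cursor)
      members.push(*values)
      break if cursor == '0'   # a returned cursor of "0" means the scan is complete
    end
    puts members.inspect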
@@ -51,33 +66,39 @@ module Sidekiq
     def fetch_stats!
       pipe1_res = Sidekiq.redis do |conn|
         conn.pipelined do
-          conn.get('stat:processed'.freeze)
-          conn.get('stat:failed'.freeze)
-          conn.zcard('schedule'.freeze)
-          conn.zcard('retry'.freeze)
-          conn.zcard('dead'.freeze)
-          conn.scard('processes'.freeze)
-          conn.lrange('queue:default'.freeze, -1, -1)
-          conn.smembers('processes'.freeze)
-          conn.smembers('queues'.freeze)
+          conn.get('stat:processed')
+          conn.get('stat:failed')
+          conn.zcard('schedule')
+          conn.zcard('retry')
+          conn.zcard('dead')
+          conn.scard('processes')
+          conn.lrange('queue:default', -1, -1)
         end
       end
 
+      processes = Sidekiq.redis do |conn|
+        sscan(conn, 'processes')
+      end
+
+      queues = Sidekiq.redis do |conn|
+        sscan(conn, 'queues')
+      end
+
       pipe2_res = Sidekiq.redis do |conn|
         conn.pipelined do
-          pipe1_res[7].each {|key| conn.hget(key, 'busy'.freeze) }
-          pipe1_res[8].each {|queue| conn.llen("queue:#{queue}") }
+          processes.each {|key| conn.hget(key, 'busy') }
+          queues.each {|queue| conn.llen("queue:#{queue}") }
         end
       end
 
-      s = pipe1_res[7].size
+      s = processes.size
       workers_size = pipe2_res[0...s].map(&:to_i).inject(0, &:+)
       enqueued = pipe2_res[s..-1].map(&:to_i).inject(0, &:+)
 
       default_queue_latency = if (entry = pipe1_res[6].first)
                                 job = Sidekiq.load_json(entry) rescue {}
                                 now = Time.now.to_f
-                                thence = job['enqueued_at'.freeze] || now
+                                thence = job['enqueued_at'] || now
                                 now - thence
                               else
                                 0
@@ -117,9 +138,11 @@ module Sidekiq
   end
 
   class Queues
+    include RedisScanner
+
     def lengths
       Sidekiq.redis do |conn|
-        queues = conn.smembers('queues'.freeze)
+        queues = sscan(conn, 'queues')
 
         lengths = conn.pipelined do
           queues.each do |queue|
@@ -141,6 +164,8 @@ module Sidekiq
 
   class History
     def initialize(days_previous, start_date = nil)
+      #we only store five years of data in Redis
+      raise ArgumentError if days_previous < 1 || days_previous > (5 * 365)
       @days_previous = days_previous
       @start_date = start_date || Time.now.utc.to_date
     end
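Note: Stats::History now rejects ranges outside 1..1825 days, since only five years of daily stats are kept in Redis. A hedged usage sketch against the public API:

    require 'sidekiq/api'

    history = Sidekiq::Stats::History.new(30)            # last 30 days
    history.processed.each { |date, count| puts "#{date}: #{count}" }

    begin
      Sidekiq::Stats::History.new(5 * 365 + 1)           # out of range
    rescue ArgumentError
      puts "more than five years of history is not stored"
    end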
@@ -163,7 +188,7 @@ module Sidekiq
 
       while i < @days_previous
         date = @start_date - i
-        datestr = date.strftime("%Y-%m-%d".freeze)
+        datestr = date.strftime("%Y-%m-%d")
         keys << "stat:#{stat}:#{datestr}"
         dates << datestr
         i += 1
@@ -199,18 +224,19 @@ module Sidekiq
   #
   class Queue
     include Enumerable
+    extend RedisScanner
 
     ##
     # Return all known queues within Redis.
     #
     def self.all
-      Sidekiq.redis { |c| c.smembers('queues'.freeze) }.sort.map { |q| Sidekiq::Queue.new(q) }
+      Sidekiq.redis { |c| sscan(c, 'queues') }.sort.map { |q| Sidekiq::Queue.new(q) }
     end
 
     attr_reader :name
 
     def initialize(name="default")
-      @name = name
+      @name = name.to_s
       @rname = "queue:#{name}"
     end
 
@@ -273,7 +299,7 @@ module Sidekiq
       Sidekiq.redis do |conn|
         conn.multi do
           conn.del(@rname)
-          conn.srem("queues".freeze, name)
+          conn.srem("queues", name)
         end
       end
     end
@@ -349,9 +375,9 @@ module Sidekiq
           job_args
         end
       else
-        if self['encrypt'.freeze]
+        if self['encrypt']
           # no point in showing 150+ bytes of random garbage
-          args[-1] = '[encrypted data]'.freeze
+          args[-1] = '[encrypted data]'
         end
         args
       end
@@ -646,6 +672,12 @@ module Sidekiq
         each(&:retry)
       end
     end
+
+    def kill_all
+      while size > 0
+        each(&:kill)
+      end
+    end
   end
 
   ##
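Note: the new kill_all loops until the retry set is empty, moving every entry to the dead set instead of re-enqueuing it. A hedged usage sketch (assumes a running Redis with pending retries):

    require 'sidekiq/api'

    retries = Sidekiq::RetrySet.new
    puts "pending retries: #{retries.size}"
    retries.kill_all                           # drain retries into the dead set
    puts "dead jobs: #{Sidekiq::DeadSet.new.size}"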
@@ -701,17 +733,18 @@ module Sidekiq
   #
   class ProcessSet
     include Enumerable
+    include RedisScanner
 
     def initialize(clean_plz=true)
-      self.class.cleanup if clean_plz
+      cleanup if clean_plz
     end
 
     # Cleans up dead processes recorded in Redis.
     # Returns the number of processes cleaned.
-    def self.cleanup
+    def cleanup
       count = 0
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes').sort
+        procs = sscan(conn, 'processes').sort
         heartbeats = conn.pipelined do
           procs.each do |key|
             conn.hget(key, 'info')
@@ -731,7 +764,7 @@ module Sidekiq
     end
 
     def each
-      procs = Sidekiq.redis { |conn| conn.smembers('processes') }.sort
+      procs = Sidekiq.redis { |conn| sscan(conn, 'processes') }.sort
 
       Sidekiq.redis do |conn|
         # We're making a tradeoff here between consuming more memory instead of
@@ -866,13 +899,14 @@ module Sidekiq
   #
   class Workers
     include Enumerable
+    include RedisScanner
 
     def each
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes')
+        procs = sscan(conn, 'processes')
         procs.sort.each do |key|
           valid, workers = conn.pipelined do
-            conn.exists(key)
+            conn.exists?(key)
             conn.hgetall("#{key}:workers")
           end
           next unless valid
@@ -891,7 +925,7 @@ module Sidekiq
     # which can easily get out of sync with crashy processes.
     def size
       Sidekiq.redis do |conn|
-        procs = conn.smembers('processes')
+        procs = sscan(conn, 'processes')
         if procs.empty?
           0
         else
data/lib/sidekiq/cli.rb CHANGED
@@ -1,4 +1,3 @@
-# encoding: utf-8
 # frozen_string_literal: true
 $stdout.sync = true
 
@@ -10,6 +9,7 @@ require 'fileutils'
 
 require 'sidekiq'
 require 'sidekiq/util'
+require 'sidekiq/launcher'
 
 module Sidekiq
   class CLI
@@ -17,30 +17,20 @@ module Sidekiq
     include Singleton unless $TESTING
 
     PROCTITLES = [
-      proc { 'sidekiq'.freeze },
+      proc { 'sidekiq' },
       proc { Sidekiq::VERSION },
      proc { |me, data| data['tag'] },
      proc { |me, data| "[#{Processor::WORKER_STATE.size} of #{data['concurrency']} busy]" },
      proc { |me, data| "stopping" if me.stopping? },
     ]
 
-    # Used for CLI testing
-    attr_accessor :code
     attr_accessor :launcher
     attr_accessor :environment
 
-    def initialize
-      @code = nil
-    end
-
-    def parse(args=ARGV)
-      @code = nil
-
+    def parse(args = ARGV)
       setup_options(args)
       initialize_logger
       validate!
-      daemonize
-      write_pid
     end
 
     def jruby?
@@ -51,8 +41,10 @@ module Sidekiq
     # global process state irreversibly. PRs which improve the
     # test coverage of Sidekiq::CLI are welcomed.
     def run
+      daemonize if options[:daemon]
+      write_pid
       boot_system
-      print_banner
+      print_banner if environment == 'development' && $stdout.tty?
 
       self_read, self_write = IO.pipe
       sigs = %w(INT TERM TTIN TSTP)
@@ -65,7 +57,7 @@ module Sidekiq
       sigs.each do |sig|
         begin
           trap sig do
-            self_write.puts(sig)
+            self_write.write("#{sig}\n")
           end
         rescue ArgumentError
           puts "Signal #{sig} not supported"
@@ -80,6 +72,13 @@ module Sidekiq
       # fire startup and start multithreading.
       ver = Sidekiq.redis_info['redis_version']
       raise "You are using Redis v#{ver}, Sidekiq requires Redis v2.8.0 or greater" if ver < '2.8'
+      logger.warn "Sidekiq 6.0 will require Redis 4.0+, you are using Redis v#{ver}" if ver < '4'
+
+      # Since the user can pass us a connection pool explicitly in the initializer, we
+      # need to verify the size is large enough or else Sidekiq's performance is dramatically slowed.
+      cursize = Sidekiq.redis_pool.size
+      needed = Sidekiq.options[:concurrency] + 2
+      raise "Your pool of #{cursize} Redis connections is too small, please increase the size to at least #{needed}" if cursize < needed
 
       # cache process identity
       Sidekiq.options[:identity] = identity
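Note: the startup check above requires any user-supplied Redis pool to hold at least concurrency + 2 connections. A hedged initializer sketch showing one way to satisfy it (the path, URL, and sizes are illustrative; with the default concurrency of 25, the server needs at least 27 connections):

    # config/initializers/sidekiq.rb (hypothetical path)
    Sidekiq.configure_server do |config|
      # server side: leave headroom above concurrency + 2
      config.redis = { url: ENV.fetch('REDIS_URL', 'redis://localhost:6379/0'), size: 30 }
    end

    Sidekiq.configure_client do |config|
      # client side (web/console processes) needs far fewer connections
      config.redis = { url: ENV.fetch('REDIS_URL', 'redis://localhost:6379/0'), size: 5 }
    end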
@@ -94,11 +93,14 @@ module Sidekiq
       logger.debug { "Client Middleware: #{Sidekiq.client_middleware.map(&:klass).join(', ')}" }
       logger.debug { "Server Middleware: #{Sidekiq.server_middleware.map(&:klass).join(', ')}" }
 
+      launch(self_read)
+    end
+
+    def launch(self_read)
       if !options[:daemon]
         logger.info 'Starting processing, hit Ctrl-C to stop'
       end
 
-      require 'sidekiq/launcher'
       @launcher = Sidekiq::Launcher.new(options)
 
       begin
@@ -180,23 +182,15 @@ module Sidekiq
     private
 
     def print_banner
-      # Print logo and banner for development
-      if environment == 'development' && $stdout.tty?
-        puts "\e[#{31}m"
-        puts Sidekiq::CLI.banner
-        puts "\e[0m"
-      end
+      puts "\e[#{31}m"
+      puts Sidekiq::CLI.banner
+      puts "\e[0m"
     end
 
     def daemonize
-      return unless options[:daemon]
-
       raise ArgumentError, "You really should set a logfile if you're going to daemonize" unless options[:logfile]
-      files_to_reopen = []
-      ObjectSpace.each_object(File) do |file|
-        files_to_reopen << file unless file.closed?
-      end
 
+      files_to_reopen = ObjectSpace.each_object(File).reject { |f| f.closed? }
       ::Process.daemon(true, true)
 
       files_to_reopen.each do |file|
@@ -234,15 +228,38 @@ module Sidekiq
     alias_method :☠, :exit
 
     def setup_options(args)
+      # parse CLI options
       opts = parse_options(args)
+
       set_environment opts[:environment]
 
-      cfile = opts[:config_file]
-      opts = parse_config(cfile).merge(opts) if cfile
+      # check config file presence
+      if opts[:config_file]
+        if opts[:config_file] && !File.exist?(opts[:config_file])
+          raise ArgumentError, "No such file #{opts[:config_file]}"
+        end
+      else
+        config_dir = if File.directory?(opts[:require].to_s)
+          File.join(opts[:require], 'config')
+        else
+          File.join(options[:require], 'config')
+        end
 
+        %w[sidekiq.yml sidekiq.yml.erb].each do |config_file|
+          path = File.join(config_dir, config_file)
+          opts[:config_file] ||= path if File.exist?(path)
+        end
+      end
+
+      # parse config file options
+      opts = parse_config(opts[:config_file]).merge(opts) if opts[:config_file]
+
+      # set defaults
+      opts[:queues] = Array(opts[:queues]) << 'default' if opts[:queues].nil? || opts[:queues].empty?
       opts[:strict] = true if opts[:strict].nil?
-      opts[:concurrency] = Integer(ENV["RAILS_MAX_THREADS"]) if !opts[:concurrency] && ENV["RAILS_MAX_THREADS"]
+      opts[:concurrency] = Integer(ENV["RAILS_MAX_THREADS"]) if opts[:concurrency].nil? && ENV["RAILS_MAX_THREADS"]
 
+      # merge with defaults
       options.merge!(opts)
     end
 
@@ -253,8 +270,6 @@ module Sidekiq
     def boot_system
       ENV['RACK_ENV'] = ENV['RAILS_ENV'] = environment
 
-      raise ArgumentError, "#{options[:require]} does not exist" unless File.exist?(options[:require])
-
       if File.directory?(options[:require])
         require 'rails'
         if ::Rails::VERSION::MAJOR < 4
@@ -274,10 +289,7 @@ module Sidekiq
         end
         options[:tag] ||= default_tag
       else
-        not_required_message = "#{options[:require]} was not required, you should use an explicit path: " +
-                               "./#{options[:require]} or /path/to/#{options[:require]}"
-
-        require(options[:require]) || raise(ArgumentError, not_required_message)
+        require options[:require]
       end
     end
 
@@ -293,8 +305,6 @@ module Sidekiq
     end
 
     def validate!
-      options[:queues] << 'default' if options[:queues].empty?
-
       if !File.exist?(options[:require]) ||
          (File.directory?(options[:require]) && !File.exist?("#{options[:require]}/config/application.rb"))
         logger.info "=================================================================="
@@ -320,6 +330,7 @@
 
         o.on '-d', '--daemon', "Daemonize process" do |arg|
           opts[:daemon] = arg
+          puts "WARNING: Daemonization mode will be removed in Sidekiq 6.0, see #4045. Please use a proper process supervisor to start and manage your services"
         end
 
         o.on '-e', '--environment ENV', "Application environment" do |arg|
@@ -330,6 +341,8 @@
           opts[:tag] = arg
         end
 
+        # this index remains here for backwards compatibility but none of the Sidekiq
+        # family use this value anymore. it was used by Pro's original reliable_fetch.
         o.on '-i', '--index INT', "unique process index on this machine" do |arg|
           opts[:index] = Integer(arg.match(/\d+/)[0])
         end
@@ -357,10 +370,12 @@
 
         o.on '-L', '--logfile PATH', "path to writable logfile" do |arg|
           opts[:logfile] = arg
+          puts "WARNING: Logfile redirection will be removed in Sidekiq 6.0, see #4045. Sidekiq will only log to STDOUT"
         end
 
         o.on '-P', '--pidfile PATH', "path to pidfile" do |arg|
           opts[:pidfile] = arg
+          puts "WARNING: PID file creation will be removed in Sidekiq 6.0, see #4045. Please use a proper process supervisor to start and manage your services"
         end
 
         o.on '-V', '--version', "Print version and exit" do |arg|
@@ -374,11 +389,8 @@
         logger.info @parser
         die 1
       end
-      @parser.parse!(argv)
 
-      %w[config/sidekiq.yml config/sidekiq.yml.erb].each do |filename|
-        opts[:config_file] ||= filename if File.exist?(filename)
-      end
+      @parser.parse!(argv)
 
       opts
     end
@@ -398,23 +410,18 @@
       end
     end
 
-    def parse_config(cfile)
-      opts = {}
-      if File.exist?(cfile)
-        opts = YAML.load(ERB.new(IO.read(cfile)).result) || opts
-
-        if opts.respond_to? :deep_symbolize_keys!
-          opts.deep_symbolize_keys!
-        else
-          symbolize_keys_deep!(opts)
-        end
+    def parse_config(path)
+      opts = YAML.load(ERB.new(File.read(path)).result) || {}
 
-        opts = opts.merge(opts.delete(environment.to_sym) || {})
-        parse_queues(opts, opts.delete(:queues) || [])
+      if opts.respond_to? :deep_symbolize_keys!
+        opts.deep_symbolize_keys!
       else
-        # allow a non-existent config file so Sidekiq
-        # can be deployed by cap with just the defaults.
+        symbolize_keys_deep!(opts)
       end
+
+      opts = opts.merge(opts.delete(environment.to_sym) || {})
+      parse_queues(opts, opts.delete(:queues) || [])
+
       ns = opts.delete(:namespace)
       if ns
         # logger hasn't been initialized yet, puts is all we have.
@@ -428,10 +435,10 @@
       queues_and_weights.each { |queue_and_weight| parse_queue(opts, *queue_and_weight) }
     end
 
-    def parse_queue(opts, q, weight=nil)
-      [weight.to_i, 1].max.times do
-        (opts[:queues] ||= []) << q
-      end
+    def parse_queue(opts, queue, weight = nil)
+      opts[:queues] ||= []
+      raise ArgumentError, "queues: #{queue} cannot be defined twice" if opts[:queues].include?(queue)
+      [weight.to_i, 1].max.times { opts[:queues] << queue }
       opts[:strict] = false if weight.to_i > 0
     end
   end
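Note: with the option handling above, Sidekiq now looks for <require>/config/sidekiq.yml (or sidekiq.yml.erb) automatically, merges CLI flags over the file, applies environment-specific overrides, and rejects a queue listed twice. A hedged YAML sketch of a config file this resolver would pick up in a Rails app (queue names and numbers are illustrative):

    # config/sidekiq.yml
    :concurrency: 10
    :queues:
      - [critical, 3]
      - [default, 2]
      - [low, 1]
    production:
      :concurrency: 25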
data/lib/sidekiq/client.rb CHANGED
@@ -68,18 +68,19 @@ module Sidekiq
     #
     def push(item)
       normed = normalize_item(item)
-      payload = process_single(item['class'.freeze], normed)
+      payload = process_single(item['class'], normed)
 
       if payload
         raw_push([payload])
-        payload['jid'.freeze]
+        payload['jid']
       end
     end
 
     ##
-    # Push a large number of jobs to Redis. In practice this method is only
-    # useful if you are pushing thousands of jobs or more. This method
-    # cuts out the redis network round trip latency.
+    # Push a large number of jobs to Redis. This method cuts out the redis
+    # network round trip latency. I wouldn't recommend pushing more than
+    # 1000 per call but YMMV based on network quality, size of job args, etc.
+    # A large number of jobs can cause a bit of Redis command processing latency.
     #
     # Takes the same arguments as #push except that args is expected to be
     # an Array of Arrays. All other keys are duplicated for each job. Each job
@@ -89,19 +90,19 @@ module Sidekiq
     # Returns an array of the of pushed jobs' jids. The number of jobs pushed can be less
     # than the number given if the middleware stopped processing for one or more jobs.
     def push_bulk(items)
-      arg = items['args'.freeze].first
+      arg = items['args'].first
       return [] unless arg # no jobs to push
       raise ArgumentError, "Bulk arguments must be an Array of Arrays: [[1], [2]]" if !arg.is_a?(Array)
 
       normed = normalize_item(items)
-      payloads = items['args'.freeze].map do |args|
-        copy = normed.merge('args'.freeze => args, 'jid'.freeze => SecureRandom.hex(12), 'enqueued_at'.freeze => Time.now.to_f)
-        result = process_single(items['class'.freeze], copy)
+      payloads = items['args'].map do |args|
+        copy = normed.merge('args' => args, 'jid' => SecureRandom.hex(12), 'enqueued_at' => Time.now.to_f)
+        result = process_single(items['class'], copy)
         result ? result : nil
       end.compact
 
       raw_push(payloads) if !payloads.empty?
-      payloads.collect { |payload| payload['jid'.freeze] }
+      payloads.collect { |payload| payload['jid'] }
     end
 
     # Allows sharding of jobs across any number of Redis instances. All jobs
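Note: the revised comment recommends keeping each push_bulk call to roughly 1,000 jobs. A hedged sketch of batching a large enqueue accordingly (HardWorker is a placeholder worker with a one-argument perform):

    # class HardWorker
    #   include Sidekiq::Worker
    #   def perform(user_id); end
    # end

    all_args = (1..50_000).map { |user_id| [user_id] }   # one args array per job

    all_args.each_slice(1_000) do |batch|
      Sidekiq::Client.push_bulk('class' => 'HardWorker', 'args' => batch)
    end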
@@ -144,14 +145,14 @@ module Sidekiq
       # Messages are enqueued to the 'default' queue.
       #
       def enqueue(klass, *args)
-        klass.client_push('class'.freeze => klass, 'args'.freeze => args)
+        klass.client_push('class' => klass, 'args' => args)
       end
 
       # Example usage:
       #   Sidekiq::Client.enqueue_to(:queue_name, MyWorker, 'foo', 1, :bat => 'bar')
       #
       def enqueue_to(queue, klass, *args)
-        klass.client_push('queue'.freeze => queue, 'class'.freeze => klass, 'args'.freeze => args)
+        klass.client_push('queue' => queue, 'class' => klass, 'args' => args)
       end
 
       # Example usage:
@@ -162,8 +163,8 @@ module Sidekiq
         now = Time.now.to_f
         ts = (int < 1_000_000_000 ? now + int : int)
 
-        item = { 'class'.freeze => klass, 'args'.freeze => args, 'at'.freeze => ts, 'queue'.freeze => queue }
-        item.delete('at'.freeze) if ts <= now
+        item = { 'class' => klass, 'args' => args, 'at' => ts, 'queue' => queue }
+        item.delete('at') if ts <= now
 
         klass.client_push(item)
       end
@@ -188,25 +189,25 @@ module Sidekiq
     end
 
     def atomic_push(conn, payloads)
-      if payloads.first['at'.freeze]
-        conn.zadd('schedule'.freeze, payloads.map do |hash|
-          at = hash.delete('at'.freeze).to_s
+      if payloads.first['at']
+        conn.zadd('schedule', payloads.map do |hash|
+          at = hash.delete('at').to_s
           [at, Sidekiq.dump_json(hash)]
         end)
       else
-        q = payloads.first['queue'.freeze]
+        q = payloads.first['queue']
         now = Time.now.to_f
         to_push = payloads.map do |entry|
-          entry['enqueued_at'.freeze] = now
+          entry['enqueued_at'] = now
           Sidekiq.dump_json(entry)
         end
-        conn.sadd('queues'.freeze, q)
+        conn.sadd('queues', q)
         conn.lpush("queue:#{q}", to_push)
       end
     end
 
     def process_single(worker_class, item)
-      queue = item['queue'.freeze]
+      queue = item['queue']
 
       middleware.invoke(worker_class, item, queue, @redis_pool) do
         item
@@ -214,25 +215,25 @@ module Sidekiq
     end
 
     def normalize_item(item)
-      raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.has_key?('class'.freeze) && item.has_key?('args'.freeze)
+      raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.has_key?('class') && item.has_key?('args')
       raise(ArgumentError, "Job args must be an Array") unless item['args'].is_a?(Array)
-      raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item['class'.freeze].is_a?(Class) || item['class'.freeze].is_a?(String)
-      raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.has_key?('at'.freeze) && !item['at'].is_a?(Numeric)
+      raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item['class'].is_a?(Class) || item['class'].is_a?(String)
+      raise(ArgumentError, "Job 'at' must be a Numeric timestamp") if item.has_key?('at') && !item['at'].is_a?(Numeric)
       #raise(ArgumentError, "Arguments must be native JSON types, see https://github.com/mperham/sidekiq/wiki/Best-Practices") unless JSON.load(JSON.dump(item['args'])) == item['args']
 
-      normalized_hash(item['class'.freeze])
+      normalized_hash(item['class'])
         .each{ |key, value| item[key] = value if item[key].nil? }
 
-      item['class'.freeze] = item['class'.freeze].to_s
-      item['queue'.freeze] = item['queue'.freeze].to_s
-      item['jid'.freeze] ||= SecureRandom.hex(12)
-      item['created_at'.freeze] ||= Time.now.to_f
+      item['class'] = item['class'].to_s
+      item['queue'] = item['queue'].to_s
+      item['jid'] ||= SecureRandom.hex(12)
+      item['created_at'] ||= Time.now.to_f
       item
     end
 
     def normalized_hash(item_class)
       if item_class.is_a?(Class)
-        raise(ArgumentError, "Message must include a Sidekiq::Worker class, not class name: #{item_class.ancestors.inspect}") if !item_class.respond_to?('get_sidekiq_options'.freeze)
+        raise(ArgumentError, "Message must include a Sidekiq::Worker class, not class name: #{item_class.ancestors.inspect}") if !item_class.respond_to?('get_sidekiq_options')
         item_class.get_sidekiq_options
       else
         Sidekiq.default_worker_options