sidekiq 6.4.1 → 6.4.2


@@ -44,10 +44,10 @@ module Sidekiq
   # This is an example of a minimal server middleware:
   #
   # class MyServerHook
-  #   def call(worker_instance, msg, queue)
-  #     puts "Before work"
+  #   def call(job_instance, msg, queue)
+  #     puts "Before job"
   #     yield
-  #     puts "After work"
+  #     puts "After job"
   #   end
   # end
   #
@@ -56,7 +56,7 @@ module Sidekiq
   # to Redis:
   #
   # class MyClientHook
-  #   def call(worker_class, msg, queue, redis_pool)
+  #   def call(job_class, msg, queue, redis_pool)
   #     puts "Before push"
   #     result = yield
   #     puts "After push"
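The middleware contract itself is unchanged here; only the documented parameter names move from "worker" to "job" terminology. A minimal sketch of a server hook registered under the new naming, reusing the MyServerHook class from the comment above:

    # Same three-argument server middleware contract as before; only the
    # suggested parameter name changed (worker_instance -> job_instance).
    class MyServerHook
      def call(job_instance, msg, queue)
        puts "Before job"
        yield
        puts "After job"
      end
    end

    Sidekiq.configure_server do |config|
      config.server_middleware do |chain|
        chain.add MyServerHook
      end
    end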
@@ -10,16 +10,16 @@ module Sidekiq::Middleware::I18n
   # Get the current locale and store it in the message
   # to be sent to Sidekiq.
   class Client
-    def call(_worker, msg, _queue, _redis)
-      msg["locale"] ||= I18n.locale
+    def call(_jobclass, job, _queue, _redis)
+      job["locale"] ||= I18n.locale
       yield
     end
   end

   # Pull the msg locale out and set the current thread to use it.
   class Server
-    def call(_worker, msg, _queue, &block)
-      I18n.with_locale(msg.fetch("locale", I18n.default_locale), &block)
+    def call(_jobclass, job, _queue, &block)
+      I18n.with_locale(job.fetch("locale", I18n.default_locale), &block)
     end
   end
 end
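The locale propagation behavior is unchanged: the Client middleware stamps the enqueuing thread's locale into the payload, and the Server middleware wraps execution in I18n.with_locale. A hedged usage sketch (GreetingJob and the translation key are hypothetical):

    require "sidekiq/middleware/i18n" # if the middleware isn't already loaded by your setup

    class GreetingJob
      include Sidekiq::Job

      def perform(name)
        # Runs inside I18n.with_locale(job["locale"]) via the Server middleware above.
        puts I18n.t("greeting", name: name)
      end
    end

    I18n.locale = :fr
    GreetingJob.perform_async("Marie") # the pushed payload carries the current locale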
@@ -17,7 +17,7 @@ class Sidekiq::Monitor
     end
     send(section)
   rescue => e
-    puts "Couldn't get status: #{e}"
+    abort "Couldn't get status: #{e}"
   end

   def all
@@ -11,7 +11,7 @@ module Sidekiq
   #
   # 1. fetches a job from Redis
   # 2. executes the job
-  #    a. instantiate the Worker
+  #    a. instantiate the job class
   #    b. run the middleware chain
   #    c. call #perform
   #
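For context, the "job class" the processor instantiates is any class that includes Sidekiq::Job (the newer name for Sidekiq::Worker); a minimal hypothetical example:

    class ThumbnailJob
      include Sidekiq::Job # Sidekiq::Worker still works; Job is the preferred name
      sidekiq_options queue: "default", retry: 3

      # Step 2c above: the processor calls #perform with the deserialized args.
      def perform(image_id)
        puts "resizing image #{image_id}"
      end
    end

    ThumbnailJob.perform_async(42)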
@@ -80,12 +80,12 @@ module Sidekiq
     end

     def get_one
-      work = @strategy.retrieve_work
+      uow = @strategy.retrieve_work
       if @down
         logger.info { "Redis is online, #{::Process.clock_gettime(::Process::CLOCK_MONOTONIC) - @down} sec downtime" }
         @down = nil
       end
-      work
+      uow
     rescue Sidekiq::Shutdown
     rescue => ex
       handle_fetch_exception(ex)
@@ -130,10 +130,10 @@ module Sidekiq
       # Effectively this block denotes a "unit of work" to Rails.
       @reloader.call do
         klass = constantize(job_hash["class"])
-        worker = klass.new
-        worker.jid = job_hash["jid"]
-        @retrier.local(worker, jobstr, queue) do
-          yield worker
+        inst = klass.new
+        inst.jid = job_hash["jid"]
+        @retrier.local(inst, jobstr, queue) do
+          yield inst
         end
       end
     end
@@ -142,9 +142,9 @@ module Sidekiq
       end
     end

-    def process(work)
-      jobstr = work.job
-      queue = work.queue_name
+    def process(uow)
+      jobstr = uow.job
+      queue = uow.queue_name

       # Treat malformed JSON as a special case: job goes straight to the morgue.
       job_hash = nil
@@ -154,14 +154,14 @@ module Sidekiq
         handle_exception(ex, {context: "Invalid JSON for job", jobstr: jobstr})
         # we can't notify because the job isn't a valid hash payload.
         DeadSet.new.kill(jobstr, notify_failure: false)
-        return work.acknowledge
+        return uow.acknowledge
       end

       ack = false
       begin
-        dispatch(job_hash, queue, jobstr) do |worker|
-          Sidekiq.server_middleware.invoke(worker, job_hash, queue) do
-            execute_job(worker, job_hash["args"])
+        dispatch(job_hash, queue, jobstr) do |inst|
+          Sidekiq.server_middleware.invoke(inst, job_hash, queue) do
+            execute_job(inst, job_hash["args"])
           end
         end
         ack = true
@@ -186,14 +186,14 @@ module Sidekiq
        if ack
          # We don't want a shutdown signal to interrupt job acknowledgment.
          Thread.handle_interrupt(Sidekiq::Shutdown => :never) do
-           work.acknowledge
+           uow.acknowledge
          end
        end
      end
    end

-   def execute_job(worker, cloned_args)
-     worker.perform(*cloned_args)
+   def execute_job(inst, cloned_args)
+     inst.perform(*cloned_args)
    end

    # Ruby doesn't provide atomic counters out of the box so we'll
@@ -219,39 +219,39 @@ module Sidekiq
     end

     # jruby's Hash implementation is not threadsafe, so we wrap it in a mutex here
-    class SharedWorkerState
+    class SharedWorkState
       def initialize
-        @worker_state = {}
+        @work_state = {}
         @lock = Mutex.new
       end

       def set(tid, hash)
-        @lock.synchronize { @worker_state[tid] = hash }
+        @lock.synchronize { @work_state[tid] = hash }
       end

       def delete(tid)
-        @lock.synchronize { @worker_state.delete(tid) }
+        @lock.synchronize { @work_state.delete(tid) }
       end

       def dup
-        @lock.synchronize { @worker_state.dup }
+        @lock.synchronize { @work_state.dup }
       end

       def size
-        @lock.synchronize { @worker_state.size }
+        @lock.synchronize { @work_state.size }
       end

       def clear
-        @lock.synchronize { @worker_state.clear }
+        @lock.synchronize { @work_state.clear }
       end
     end

     PROCESSED = Counter.new
     FAILURE = Counter.new
-    WORKER_STATE = SharedWorkerState.new
+    WORK_STATE = SharedWorkState.new

     def stats(jobstr, queue)
-      WORKER_STATE.set(tid, {queue: queue, payload: jobstr, run_at: Time.now.to_i})
+      WORK_STATE.set(tid, {queue: queue, payload: jobstr, run_at: Time.now.to_i})

       begin
         yield
@@ -259,7 +259,7 @@ module Sidekiq
         FAILURE.incr
         raise
       ensure
-        WORKER_STATE.delete(tid)
+        WORK_STATE.delete(tid)
         PROCESSED.incr
       end
     end
data/lib/sidekiq/rails.rb CHANGED
@@ -1,6 +1,6 @@
 # frozen_string_literal: true

-require "sidekiq/worker"
+require "sidekiq/job"

 module Sidekiq
   class Rails < ::Rails::Engine
@@ -33,13 +33,13 @@ module Sidekiq
     #    end
     initializer "sidekiq.active_job_integration" do
       ActiveSupport.on_load(:active_job) do
-        include ::Sidekiq::Worker::Options unless respond_to?(:sidekiq_options)
+        include ::Sidekiq::Job::Options unless respond_to?(:sidekiq_options)
       end
     end

     initializer "sidekiq.rails_logger" do
       Sidekiq.configure_server do |_|
-        # This is the integration code necessary so that if code uses `Rails.logger.info "Hello"`,
+        # This is the integration code necessary so that if a job uses `Rails.logger.info "Hello"`,
         # it will appear in the Sidekiq console with all of the job context. See #5021 and
         # https://github.com/rails/rails/blob/b5f2b550f69a99336482739000c58e4e04e033aa/railties/lib/rails/commands/server/server_command.rb#L82-L84
         unless ::Rails.logger == ::Sidekiq.logger || ::ActiveSupport::Logger.logger_outputs_to?(::Rails.logger, $stdout)
@@ -48,6 +48,13 @@ module Sidekiq
       end
     end

+    config.before_configuration do
+      dep = ActiveSupport::Deprecation.new("7.0", "Sidekiq")
+      dep.deprecate_methods(Sidekiq.singleton_class,
+        default_worker_options: :default_job_options,
+        "default_worker_options=": :default_job_options=)
+    end
+
     # This hook happens after all initializers are run, just before returning
     # from config/environment.rb back to sidekiq/cli.rb.
     #
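The practical effect for Rails apps: Sidekiq.default_worker_options keeps working but now emits an ActiveSupport deprecation warning pointing at Sidekiq.default_job_options. A sketch of the preferred call in an initializer (option values are illustrative):

    # config/initializers/sidekiq.rb
    Sidekiq.default_job_options = { "backtrace" => true, "retry" => 3 }

    # Deprecated spelling; still functional, but warns via the hook added above:
    # Sidekiq.default_worker_options = { "backtrace" => true, "retry" => 3 }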
@@ -38,7 +38,7 @@ module Sidekiq

     private

-    # Sidekiq needs a lot of concurrent Redis connections.
+    # Sidekiq needs many concurrent Redis connections.
     #
     # We need a connection for each Processor.
     # We need a connection for Pro's real-time change listener
@@ -47,7 +47,7 @@ module Sidekiq
     # - enterprise's leader election
     # - enterprise's cron support
     def verify_sizing(size, concurrency)
-      raise ArgumentError, "Your Redis connection pool is too small for Sidekiq to work. Your pool has #{size} connections but must have at least #{concurrency + 2}" if size < (concurrency + 2)
+      raise ArgumentError, "Your Redis connection pool is too small for Sidekiq. Your pool has #{size} connections but must have at least #{concurrency + 2}" if size < (concurrency + 2)
     end

     def build_client(options)
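The sizing rule is unchanged: the server-side pool needs at least concurrency + 2 connections. If you size the pool by hand, something like the following satisfies the check for the default concurrency of 10 (the URL and size are illustrative):

    Sidekiq.configure_server do |config|
      # concurrency 10 => the pool must hold >= 12 connections, or verify_sizing raises
      config.redis = { url: "redis://localhost:6379/0", size: 12 }
    end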
@@ -4,7 +4,7 @@ require "sidekiq/testing"

 ##
 # The Sidekiq inline infrastructure overrides perform_async so that it
-# actually calls perform instead. This allows workers to be run inline in a
+# actually calls perform instead. This allows jobs to be run inline in a
 # testing environment.
 #
 # This is similar to `Resque.inline = true` functionality.
@@ -15,8 +15,8 @@ require "sidekiq/testing"
 #
 #   $external_variable = 0
 #
-#   class ExternalWorker
-#     include Sidekiq::Worker
+#   class ExternalJob
+#     include Sidekiq::Job
 #
 #     def perform
 #       $external_variable = 1
@@ -24,7 +24,7 @@ require "sidekiq/testing"
 #   end
 #
 #   assert_equal 0, $external_variable
-#   ExternalWorker.perform_async
+#   ExternalJob.perform_async
 #   assert_equal 1, $external_variable
 #
 Sidekiq::Testing.inline!
@@ -101,20 +101,20 @@ module Sidekiq
   ##
   # The Queues class is only for testing the fake queue implementation.
   # There are 2 data structures involved in tandem. This is due to the
-  # Rspec syntax of change(QueueWorker.jobs, :size). It keeps a reference
+  # Rspec syntax of change(HardJob.jobs, :size). It keeps a reference
   # to the array. Because the array was dervied from a filter of the total
   # jobs enqueued, it appeared as though the array didn't change.
   #
   # To solve this, we'll keep 2 hashes containing the jobs. One with keys based
-  # on the queue, and another with keys of the worker names, so the array for
-  # QueueWorker.jobs is a straight reference to a real array.
+  # on the queue, and another with keys of the job type, so the array for
+  # HardJob.jobs is a straight reference to a real array.
   #
   # Queue-based hash:
   #
   #   {
   #     "default"=>[
   #       {
-  #         "class"=>"TestTesting::QueueWorker",
+  #         "class"=>"TestTesting::HardJob",
   #         "args"=>[1, 2],
   #         "retry"=>true,
   #         "queue"=>"default",
@@ -124,12 +124,12 @@ module Sidekiq
   #     ]
   #   }
   #
-  # Worker-based hash:
+  # Job-based hash:
   #
   #   {
-  #     "TestTesting::QueueWorker"=>[
+  #     "TestTesting::HardJob"=>[
   #       {
-  #         "class"=>"TestTesting::QueueWorker",
+  #         "class"=>"TestTesting::HardJob",
   #         "args"=>[1, 2],
   #         "retry"=>true,
   #         "queue"=>"default",
@@ -144,14 +144,14 @@ module Sidekiq
   #   require 'sidekiq/testing'
   #
   #   assert_equal 0, Sidekiq::Queues["default"].size
-  #   HardWorker.perform_async(:something)
+  #   HardJob.perform_async(:something)
   #   assert_equal 1, Sidekiq::Queues["default"].size
   #   assert_equal :something, Sidekiq::Queues["default"].first['args'][0]
   #
-  # You can also clear all workers' jobs:
+  # You can also clear all jobs:
   #
   #   assert_equal 0, Sidekiq::Queues["default"].size
-  #   HardWorker.perform_async(:something)
+  #   HardJob.perform_async(:something)
   #   Sidekiq::Queues.clear_all
   #   assert_equal 0, Sidekiq::Queues["default"].size
   #
@@ -170,35 +170,36 @@ module Sidekiq

     def push(queue, klass, job)
       jobs_by_queue[queue] << job
-      jobs_by_worker[klass] << job
+      jobs_by_class[klass] << job
     end

     def jobs_by_queue
       @jobs_by_queue ||= Hash.new { |hash, key| hash[key] = [] }
     end

-    def jobs_by_worker
-      @jobs_by_worker ||= Hash.new { |hash, key| hash[key] = [] }
+    def jobs_by_class
+      @jobs_by_class ||= Hash.new { |hash, key| hash[key] = [] }
     end
+    alias_method :jobs_by_worker, :jobs_by_class

     def delete_for(jid, queue, klass)
       jobs_by_queue[queue.to_s].delete_if { |job| job["jid"] == jid }
-      jobs_by_worker[klass].delete_if { |job| job["jid"] == jid }
+      jobs_by_class[klass].delete_if { |job| job["jid"] == jid }
     end

     def clear_for(queue, klass)
       jobs_by_queue[queue].clear
-      jobs_by_worker[klass].clear
+      jobs_by_class[klass].clear
     end

     def clear_all
       jobs_by_queue.clear
-      jobs_by_worker.clear
+      jobs_by_class.clear
     end
   end
 end

-  module Worker
+  module Job
     ##
     # The Sidekiq testing infrastructure overrides perform_async
     # so that it does not actually touch the network. Instead it
@@ -212,16 +213,16 @@ module Sidekiq
     #
     #   require 'sidekiq/testing'
     #
-    #   assert_equal 0, HardWorker.jobs.size
-    #   HardWorker.perform_async(:something)
-    #   assert_equal 1, HardWorker.jobs.size
-    #   assert_equal :something, HardWorker.jobs[0]['args'][0]
+    #   assert_equal 0, HardJob.jobs.size
+    #   HardJob.perform_async(:something)
+    #   assert_equal 1, HardJob.jobs.size
+    #   assert_equal :something, HardJob.jobs[0]['args'][0]
     #
     #   assert_equal 0, Sidekiq::Extensions::DelayedMailer.jobs.size
     #   MyMailer.delay.send_welcome_email('foo@example.com')
     #   assert_equal 1, Sidekiq::Extensions::DelayedMailer.jobs.size
     #
-    # You can also clear and drain all workers' jobs:
+    # You can also clear and drain all job types:
     #
     #   assert_equal 0, Sidekiq::Extensions::DelayedMailer.jobs.size
     #   assert_equal 0, Sidekiq::Extensions::DelayedModel.jobs.size
@@ -241,14 +242,14 @@ module Sidekiq
     #
     #   RSpec.configure do |config|
     #     config.before(:each) do
-    #       Sidekiq::Worker.clear_all
+    #       Sidekiq::Job.clear_all
     #     end
     #   end
     #
     # or for acceptance testing, i.e. with cucumber:
     #
     #   AfterStep do
-    #     Sidekiq::Worker.drain_all
+    #     Sidekiq::Job.drain_all
     #   end
     #
     #   When I sign up as "foo@example.com"
@@ -262,7 +263,7 @@ module Sidekiq

     # Jobs queued for this worker
     def jobs
-      Queues.jobs_by_worker[to_s]
+      Queues.jobs_by_class[to_s]
     end

     # Clear all jobs for this worker
@@ -288,11 +289,11 @@ module Sidekiq
     end

     def process_job(job)
-      worker = new
-      worker.jid = job["jid"]
-      worker.bid = job["bid"] if worker.respond_to?(:bid=)
-      Sidekiq::Testing.server_middleware.invoke(worker, job, job["queue"]) do
-        execute_job(worker, job["args"])
+      inst = new
+      inst.jid = job["jid"]
+      inst.bid = job["bid"] if inst.respond_to?(:bid=)
+      Sidekiq::Testing.server_middleware.invoke(inst, job, job["queue"]) do
+        execute_job(inst, job["args"])
       end
     end

@@ -306,18 +307,18 @@ module Sidekiq
       Queues.jobs_by_queue.values.flatten
     end

-    # Clear all queued jobs across all workers
+    # Clear all queued jobs
     def clear_all
       Queues.clear_all
     end

-    # Drain all queued jobs across all workers
+    # Drain (execute) all queued jobs
     def drain_all
       while jobs.any?
-        worker_classes = jobs.map { |job| job["class"] }.uniq
+        job_classes = jobs.map { |job| job["class"] }.uniq

-        worker_classes.each do |worker_class|
-          Sidekiq::Testing.constantize(worker_class).drain
+        job_classes.each do |job_class|
+          Sidekiq::Testing.constantize(job_class).drain
         end
       end
     end
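Putting the renamed testing API together, a hedged minitest-style sketch; HardJob is the same hypothetical class the comments above use:

    require "sidekiq/testing" # defaults to fake! mode

    class HardJob
      include Sidekiq::Job
      def perform(n)
        n * 2
      end
    end

    HardJob.perform_async(1)
    HardJob.jobs.size                               # => 1, backed by Queues.jobs_by_class
    Sidekiq::Queues.jobs_by_worker["HardJob"].size  # => 1, still works via the alias_method
    Sidekiq::Job.drain_all                          # execute everything that was enqueued
    Sidekiq::Job.clear_all                          # or discard queued jobs instead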
@@ -1,5 +1,5 @@
 # frozen_string_literal: true

 module Sidekiq
-  VERSION = "6.4.1"
+  VERSION = "6.4.2"
 end
@@ -143,7 +143,7 @@ module Sidekiq
       one_time_pad = SecureRandom.random_bytes(token.length)
       encrypted_token = xor_byte_strings(one_time_pad, token)
       masked_token = one_time_pad + encrypted_token
-      Base64.strict_encode64(masked_token)
+      Base64.urlsafe_encode64(masked_token)
     end

     # Essentially the inverse of +mask_token+.
@@ -169,7 +169,7 @@ module Sidekiq
     end

     def decode_token(token)
-      Base64.strict_decode64(token)
+      Base64.urlsafe_decode64(token)
     end

     def xor_byte_strings(s1, s2)
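The move to URL-safe Base64 matters because the masked CSRF token travels in HTML forms and request parameters: strict_encode64 can emit "+" and "/", which get mangled by URL encoding, while urlsafe_encode64 sticks to "-" and "_". A quick plain-Ruby illustration:

    require "base64"
    require "securerandom"

    raw = SecureRandom.random_bytes(32)
    Base64.strict_encode64(raw)  # may contain "+" and "/"
    Base64.urlsafe_encode64(raw) # uses "-" and "_" instead, safe to embed in URLs and forms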
@@ -140,8 +140,8 @@ module Sidekiq
       params[:direction] == "asc" ? "&uarr;" : "&darr;"
     end

-    def workers
-      @workers ||= Sidekiq::Workers.new
+    def workset
+      @work ||= Sidekiq::WorkSet.new
     end

     def processes
@@ -175,7 +175,7 @@ module Sidekiq
     end

     def current_status
-      workers.size == 0 ? "idle" : "active"
+      workset.size == 0 ? "idle" : "active"
     end

     def relative_time(time)
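Sidekiq::WorkSet is the name this version's Web UI now uses for the API that enumerates in-progress work (one entry per busy processor thread); Sidekiq::Workers is the older name for the same structure. A hedged sketch of reading it outside the Web UI, assuming the hash layout set by the processor's stats call above:

    # Each entry is (process identity, thread id, work hash); the hash holds the
    # queue name, the raw job payload and the run_at timestamp.
    Sidekiq::Workers.new.each do |process_id, thread_id, work|
      puts "#{process_id} #{thread_id} is running #{work["payload"]} from #{work["queue"]}"
    end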
@@ -82,7 +82,7 @@ module Sidekiq
     end

     def get_sidekiq_options # :nodoc:
-      self.sidekiq_options_hash ||= Sidekiq.default_worker_options
+      self.sidekiq_options_hash ||= Sidekiq.default_job_options
     end

     def sidekiq_class_attribute(*attrs)
@@ -175,16 +175,18 @@ module Sidekiq

     def initialize(klass, opts)
       @klass = klass
-      @opts = opts
+      # NB: the internal hash always has stringified keys
+      @opts = opts.transform_keys(&:to_s)

       # ActiveJob compatibility
-      interval = @opts.delete(:wait_until) || @opts.delete(:wait)
+      interval = @opts.delete("wait_until") || @opts.delete("wait")
       at(interval) if interval
     end

     def set(options)
-      interval = options.delete(:wait_until) || options.delete(:wait)
-      @opts.merge!(options)
+      hash = options.transform_keys(&:to_s)
+      interval = hash.delete("wait_until") || @opts.delete("wait")
+      @opts.merge!(hash)
       at(interval) if interval
       self
     end
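With keys normalized up front, Setter options may be given with symbol or string keys interchangeably; everything is stringified before it reaches the payload. A small sketch (SomeJob is hypothetical):

    SomeJob.set(queue: "critical", wait: 60).perform_async(123)
    # ...is now equivalent to the string-keyed form:
    SomeJob.set("queue" => "critical", "wait" => 60).perform_async(123)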
@@ -200,7 +202,7 @@ module Sidekiq
     # Explicit inline execution of a job. Returns nil if the job did not
     # execute, true otherwise.
     def perform_inline(*args)
-      raw = @opts.merge("args" => args, "class" => @klass).transform_keys(&:to_s)
+      raw = @opts.merge("args" => args, "class" => @klass)

       # validate and normalize payload
       item = normalize_item(raw)
@@ -235,11 +237,9 @@ module Sidekiq
     alias_method :perform_sync, :perform_inline

     def perform_bulk(args, batch_size: 1_000)
-      hash = @opts.transform_keys(&:to_s)
-      pool = Thread.current[:sidekiq_via_pool] || @klass.get_sidekiq_options["pool"] || Sidekiq.redis_pool
-      client = Sidekiq::Client.new(pool)
+      client = @klass.build_client
       result = args.each_slice(batch_size).flat_map do |slice|
-        client.push_bulk(hash.merge("class" => @klass, "args" => slice))
+        client.push_bulk(@opts.merge("class" => @klass, "args" => slice))
       end

       result.is_a?(Enumerator::Lazy) ? result.force : result
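From the caller's side perform_bulk behaves as before: it slices the argument list and pushes each slice in one Redis round trip, now through the shared build_client helper. A usage sketch (SomeJob is hypothetical):

    # Three jobs, pushed in a single 1,000-job slice.
    SomeJob.perform_bulk([[1], [2], [3]])

    # Setter options such as the queue are merged into every payload.
    SomeJob.set(queue: "bulk").perform_bulk((1..5_000).map { |i| [i] }, batch_size: 500)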
@@ -293,6 +293,7 @@ module Sidekiq
     def perform_inline(*args)
       Setter.new(self, {}).perform_inline(*args)
     end
+    alias_method :perform_sync, :perform_inline

     ##
     # Push a large number of jobs to Redis, while limiting the batch of
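This alias exposes perform_sync directly on the job class, mirroring the Setter alias added earlier; it executes the job immediately in the calling process rather than enqueuing it. For example (SomeJob is hypothetical):

    SomeJob.perform_sync(123)                         # same as SomeJob.perform_inline(123)
    SomeJob.set(queue: "critical").perform_sync(123)  # Setter options still apply to the built payload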
@@ -352,10 +353,13 @@ module Sidekiq
     end

     def client_push(item) # :nodoc:
-      pool = Thread.current[:sidekiq_via_pool] || get_sidekiq_options["pool"] || Sidekiq.redis_pool
-      stringified_item = item.transform_keys(&:to_s)
+      raise ArgumentError, "Job payloads should contain no Symbols: #{item}" if item.any? { |k, v| k.is_a?(::Symbol) }
+      build_client.push(item)
+    end

-      Sidekiq::Client.new(pool).push(stringified_item)
+    def build_client # :nodoc:
+      pool = Thread.current[:sidekiq_via_pool] || get_sidekiq_options["pool"] || Sidekiq.redis_pool
+      Sidekiq::Client.new(pool)
     end
   end
 end