sidekiq 6.0.2 → 6.0.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 9dd138f78183ff31972192acbcee3ea7aad67403dfdd478500f3bbbfebf14698
- data.tar.gz: ea4ab3b7c40bf358a80df9043013a63ee7c0562322ce5f212f938cecdc93c62f
+ metadata.gz: 289abeb95ea2defd87dce17b31747cc56193a3e606e7f1f87d9ab237706526cc
+ data.tar.gz: a9ee206b90eb90737d587da3033559f96150aaceda8b0d319fcca29a08507c56
  SHA512:
- metadata.gz: c34d01bffdf5af462b98afa03e4b71356b8afffd873533eb953305314e1c0fd0ea4a4c0eea53c7169b61aca51420a50d5eda9127ad815c46c4053954677be24d
- data.tar.gz: fdfe2b1704bc7d3a071756ca983c9b81bf4534ceb5f532bec59ec87fe4c3094c95120a7b1471163327f55e132c9773a3fd05d9a6211c6c1a1342c58872e8d980
+ metadata.gz: b9feed82f74bad9ae58674c373286522df5c2198ec4f685a777fc8ad2dd89f4f985dcfc81a1f915e77ae806e3fab060f5e662fd8378a4efc951da6b230ae3cd4
+ data.tar.gz: 1928083cc83adc9a248f6b9f9a5cd95c91b6a4a5c324cb38d4082309cf801031ac2f644a9f1aef3d7c49424dd4525da870af72e892f983c35cb07e4a214aa311
data/Changes.md CHANGED
@@ -2,6 +2,12 @@
 
  [Sidekiq Changes](https://github.com/mperham/sidekiq/blob/master/Changes.md) | [Sidekiq Pro Changes](https://github.com/mperham/sidekiq/blob/master/Pro-Changes.md) | [Sidekiq Enterprise Changes](https://github.com/mperham/sidekiq/blob/master/Ent-Changes.md)
 
+ 6.0.3
+ ---------
+
+ - Fix `Sidekiq::Client.push_bulk` API which was erroneously putting
+ invalid `at` values in the job payloads [#4321]
+
  6.0.2
  ---------
 
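For context on the #4321 fix above, here is a minimal, hypothetical sketch of the two `push_bulk` call shapes it concerns (the `HardWorker` class and its arguments are invented for illustration); judging from the client diff further below, 6.0.0–6.0.2 wrote an `"at" => nil` entry into every payload even for the first, unscheduled form:

```ruby
require "sidekiq"

# Hypothetical worker, for illustration only.
class HardWorker
  include Sidekiq::Worker
  def perform(user_id); end
end

# Plain bulk enqueue: no "at" key should end up in the job payloads.
Sidekiq::Client.push_bulk("class" => HardWorker, "args" => [[1], [2], [3]])

# Scheduled bulk enqueue: "at" may be a single epoch timestamp or one per job.
later = Time.now.to_f + 3600
Sidekiq::Client.push_bulk("class" => HardWorker, "args" => [[1], [2]], "at" => [later, later + 60])
```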
@@ -4,11 +4,11 @@
 
  Please see [http://sidekiq.org/](http://sidekiq.org/) for more details and how to buy.
 
- HEAD
+ 2.0.1
  -------------
 
- - Periodic job registration API adjusted to avoid loading classes in
- initializer [#4271]
+ - Periodic job registration API adjusted to avoid loading classes in initializer [#4271]
+ - Remove support for deprecated ENV variables (COUNT, MAXMEM\_MB, INDEX) in swarm code
 
  2.0.0
  -------------
@@ -140,13 +140,8 @@ module Sidekiq
  end
  }
 
- i = 0
- array_of_arrays = queues.each_with_object({}) { |queue, memo|
- memo[queue] = lengths[i]
- i += 1
- }.sort_by { |_, size| size }
-
- Hash[array_of_arrays.reverse]
+ array_of_arrays = queues.zip(lengths).sort_by { |_, size| -size }
+ Hash[array_of_arrays]
  end
  end
  end
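The replacement above leans on `Array#zip` and a descending `sort_by` instead of a manual counter. A standalone sketch with made-up queue data shows the resulting hash ordered largest-first:

```ruby
queues  = ["default", "critical", "low"]   # example data, not from the gem
lengths = [12, 40, 3]

# Pair each queue with its length, sort by size descending, build the hash.
pairs = queues.zip(lengths).sort_by { |_, size| -size }
Hash[pairs]  # => {"critical"=>40, "default"=>12, "low"=>3}
```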
@@ -168,18 +163,12 @@ module Sidekiq
  private
 
  def date_stat_hash(stat)
- i = 0
  stat_hash = {}
- keys = []
- dates = []
-
- while i < @days_previous
- date = @start_date - i
- datestr = date.strftime("%Y-%m-%d")
- keys << "stat:#{stat}:#{datestr}"
- dates << datestr
- i += 1
- end
+ dates = @start_date.downto(@start_date - @days_previous + 1).map { |date|
+ date.strftime("%Y-%m-%d")
+ }
+
+ keys = dates.map { |datestr| "stat:#{stat}:#{datestr}" }
 
  begin
  Sidekiq.redis do |conn|
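The rewritten `date_stat_hash` derives the date strings with `Date#downto` rather than a `while` loop. A self-contained sketch with example values (the real `@start_date` and `@days_previous` come from the surrounding class):

```ruby
require "date"

start_date    = Date.new(2019, 10, 25)  # example values only
days_previous = 3

dates = start_date.downto(start_date - days_previous + 1).map { |date|
  date.strftime("%Y-%m-%d")
}
# => ["2019-10-25", "2019-10-24", "2019-10-23"]

keys = dates.map { |datestr| "stat:processed:#{datestr}" }
# => ["stat:processed:2019-10-25", "stat:processed:2019-10-24", "stat:processed:2019-10-23"]
```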
@@ -478,7 +467,7 @@ module Sidekiq
 
  def reschedule(at)
  Sidekiq.redis do |conn|
- conn.zincrby(@parent.name, at - @score, Sidekiq.dump_json(@item))
+ conn.zincrby(@parent.name, at.to_f - @score, Sidekiq.dump_json(@item))
  end
  end
 
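The added `.to_f` normalizes whatever the caller passes as `at`: both a `Time` and a numeric epoch respond to `to_f`, so the `zincrby` delta is always a plain Float. A small sketch with invented timestamps:

```ruby
score = 1_571_961_600.0               # existing sorted-set score, in epoch seconds

at_as_time  = Time.at(1_571_965_200)  # caller passes a Time...
at_as_float = 1_571_965_200.0         # ...or a Float epoch

at_as_time.to_f - score   # => 3600.0
at_as_float.to_f - score  # => 3600.0
```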
@@ -523,7 +512,7 @@ module Sidekiq
  else
  # multiple jobs with the same score
  # find the one with the right JID and push it
- hash = results.group_by { |message|
+ matched, nonmatched = results.partition { |message|
  if message.index(jid)
  msg = Sidekiq.load_json(message)
  msg["jid"] == jid
@@ -532,12 +521,12 @@ module Sidekiq
  end
  }
 
- msg = hash.fetch(true, []).first
+ msg = matched.first
  yield msg if msg
 
  # push the rest back onto the sorted set
  conn.multi do
- hash.fetch(false, []).each do |message|
+ nonmatched.each do |message|
  conn.zadd(parent.name, score.to_f.to_s, message)
  end
  end
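`Enumerable#partition` returns the matching and non-matching groups directly, which is what replaces the `group_by` plus `hash.fetch(true/false, [])` lookups above. A standalone sketch with dummy payload strings and an invented JID:

```ruby
require "json"

jid = "abc123"
results = [
  '{"jid":"abc123","args":[1]}',
  '{"jid":"def456","args":[2]}',
]

matched, nonmatched = results.partition { |message| JSON.parse(message)["jid"] == jid }
matched     # => ["{\"jid\":\"abc123\",\"args\":[1]}"]
nonmatched  # => ["{\"jid\":\"def456\",\"args\":[2]}"]
```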
@@ -785,10 +774,9 @@ module Sidekiq
  # the hash named key has an expiry of 60 seconds.
  # if it's not found, that means the process has not reported
  # in to Redis and probably died.
- to_prune = []
- heartbeats.each_with_index do |beat, i|
- to_prune << procs[i] if beat.nil?
- end
+ to_prune = procs.select.with_index { |proc, i|
+ heartbeats[i].nil?
+ }
  count = conn.srem("processes", to_prune) unless to_prune.empty?
  end
  count
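`select.with_index` keeps exactly the processes whose heartbeat lookup returned nil, replacing the manual accumulator loop above. A sketch with invented process identities:

```ruby
procs      = ["host1:1:abc", "host2:7:def", "host3:9:ghi"]  # invented identities
heartbeats = ["{...}", nil, "{...}"]                        # nil => heartbeat hash expired

to_prune = procs.select.with_index { |_proc, i| heartbeats[i].nil? }
# => ["host2:7:def"]
```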
@@ -58,7 +58,7 @@ module Sidekiq
  # touch the connection pool so it is created before we
  # fire startup and start multithreading.
  ver = Sidekiq.redis_info["redis_version"]
- raise "You are using Redis v#{ver}, Sidekiq requires Redis v4.0.0 or greater" if ver < "4"
+ raise "You are connecting to Redis v#{ver}, Sidekiq requires Redis v4.0.0 or greater" if ver < "4"
 
  # Since the user can pass us a connection pool explicitly in the initializer, we
  # need to verify the size is large enough or else Sidekiq's performance is dramatically slowed.
@@ -99,8 +99,8 @@ module Sidekiq
 
  normed = normalize_item(items)
  payloads = items["args"].map.with_index { |args, index|
- single_at = at.is_a?(Array) ? at[index] : at
- copy = normed.merge("args" => args, "jid" => SecureRandom.hex(12), "at" => single_at, "enqueued_at" => Time.now.to_f)
+ copy = normed.merge("args" => args, "jid" => SecureRandom.hex(12), "enqueued_at" => Time.now.to_f)
+ copy["at"] = (at.is_a?(Array) ? at[index] : at) if at
 
  result = process_single(items["class"], copy)
  result || nil
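This is the core of the #4321 fix: the `"at"` key is now only added when the caller actually supplied a schedule time, instead of being merged in unconditionally (and ending up as nil). A trimmed-down sketch of the resulting payload shape, with invented field values:

```ruby
require "securerandom"

normed = { "class" => "HardWorker", "queue" => "default" }  # abbreviated payload
at     = nil                                                # caller did not schedule the jobs

copy = normed.merge("args" => [1], "jid" => SecureRandom.hex(12), "enqueued_at" => Time.now.to_f)
copy["at"] = at if at   # skipped entirely when `at` is nil

copy.key?("at")  # => false, so atomic_push takes the plain enqueue branch
```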
@@ -193,7 +193,7 @@ module Sidekiq
  end
 
  def atomic_push(conn, payloads)
- if payloads.first["at"]
+ if payloads.first.key?("at")
  conn.zadd("schedule", payloads.map { |hash|
  at = hash.delete("at").to_s
  [at, Sidekiq.dump_json(hash)]
@@ -219,6 +219,10 @@ module Sidekiq
  end
 
  def normalize_item(item)
+ # 6.0.0 push_bulk bug, #4321
+ # TODO Remove after a while...
+ item.delete("at") if item.key?("at") && item["at"].nil?
+
  raise(ArgumentError, "Job must be a Hash with 'class' and 'args' keys: { 'class' => SomeWorker, 'args' => ['bob', 1, :foo => 'bar'] }") unless item.is_a?(Hash) && item.key?("class") && item.key?("args")
  raise(ArgumentError, "Job args must be an Array") unless item["args"].is_a?(Array)
  raise(ArgumentError, "Job class must be either a Class or String representation of the class name") unless item["class"].is_a?(Class) || item["class"].is_a?(String)
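The new guard at the top of `normalize_item` (marked as temporary in its own comment) strips a stale `"at" => nil` left in payloads written by the buggy 6.0.0–6.0.2 `push_bulk`; judging from the new `key?("at")` check in `atomic_push`, such a payload would otherwise be treated as scheduled. A sketch of the effect, with an invented payload:

```ruby
# Payload shape as written by the buggy versions.
item = { "class" => "HardWorker", "args" => [1], "at" => nil }

item.delete("at") if item.key?("at") && item["at"].nil?
item  # => {"class"=>"HardWorker", "args"=>[1]}
```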
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module Sidekiq
- VERSION = "6.0.2"
+ VERSION = "6.0.3"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: sidekiq
  version: !ruby/object:Gem::Version
- version: 6.0.2
+ version: 6.0.3
  platform: ruby
  authors:
  - Mike Perham
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-10-12 00:00:00.000000000 Z
+ date: 2019-10-25 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: redis