sidekiq 6.5.4 → 6.5.6

Potentially problematic release.

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5a8c3c194984f6f100e1b71b92d200fa90660d07f0ad4c670285116ddba8bbfc
- data.tar.gz: 97dd46ecc5728504e80ada9b15a3f26eab75cf18f1022630db29d19c9109219f
+ metadata.gz: 7e12ddc46413e3e476d620e04428a86476f4826d7802575110b5c3697a096137
+ data.tar.gz: 802bdbc7c0d45a9b793222baca250e292ce852d67902446bc75a0961a36f5677
  SHA512:
- metadata.gz: 8e5a7a1cf338064e2b900a3083cd124ee98cb0b8cc059e5aab07a572b2714ed1ce46b3149bbdd8808574daf41718d7f98f65825c98c447f194b4a64266991996
- data.tar.gz: 67933e92a6d81c4b11f99b510b5b00b45224bef8babafa8918ab87108f194abc6e586b0e14239ccfa415ce22cae0cd6e94764cea630aa830acc21b50af862931
+ metadata.gz: 6180da0e7aefa1573d6b283c56ad365063b290afb5c10a15dc83fb367cb981391d8304acc9ebb1219279a749da71e0af8b37979baea770b0b1efeb04627e0e4c
+ data.tar.gz: 35c51c58b73450c6bbd3479cc55d873ed54a6a1ce6a5b7414ca78fc807984acea11d4697ce8c05bdb2e9eb799c30dc7b377872b4c48a3de60bee5d6d71e07b3a
data/Changes.md CHANGED
@@ -2,6 +2,18 @@
 
  [Sidekiq Changes](https://github.com/mperham/sidekiq/blob/main/Changes.md) | [Sidekiq Pro Changes](https://github.com/mperham/sidekiq/blob/main/Pro-Changes.md) | [Sidekiq Enterprise Changes](https://github.com/mperham/sidekiq/blob/main/Ent-Changes.md)
 
+ 6.5.6
+ ----------
+
+ - Fix deprecation warnings with redis-rb 4.8.0 [#5484]
+ - Lock redis-rb to < 5.0 as we are moving to redis-client in Sidekiq 7.0
+
+ 6.5.5
+ ----------
+
+ - Fix require issue with job_retry.rb [#5462]
+ - Improve Sidekiq::Web compatibility with Rack 3.x
+
  6.5.4
  ----------
 
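
Note on the "Fix deprecation warnings with redis-rb 4.8.0" entry: redis-rb 4.8 warns when set commands like sadd/srem receive a single member, because the Boolean return value becomes an Integer in redis-rb 5.0; the array form has always returned an Integer and produces no warning. A minimal sketch, assuming redis-rb ~> 4.8 and a local Redis server:

    require "redis"

    redis = Redis.new

    # Single-member form: redis-rb 4.8 emits a deprecation warning because the
    # Boolean return changes to an Integer in 5.0 (sadd? keeps the Boolean).
    redis.sadd("queues", "default")    # => true, plus a warning on stderr

    # Array form: returns the number of members added/removed, no warning.
    # The srem/sadd hunks below switch to this form.
    redis.sadd("queues", ["default"])  # => 1
    redis.srem("queues", ["default"])  # => 1
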
data/lib/sidekiq/api.rb CHANGED
@@ -316,7 +316,7 @@ module Sidekiq
  Sidekiq.redis do |conn|
  conn.multi do |transaction|
  transaction.unlink(@rname)
- transaction.srem("queues", name)
+ transaction.srem("queues", [name])
  end
  end
  true
@@ -486,7 +486,7 @@ module Sidekiq
  # #1761 in dev mode, it's possible to have jobs enqueued which haven't been loaded into
  # memory yet so the YAML can't be loaded.
  # TODO is this still necessary? Zeitwerk reloader should handle?
- Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.config[:environment] == "development"
+ Sidekiq.logger.warn "Unable to load YAML: #{ex.message}" unless Sidekiq.options[:environment] == "development"
  default
  end
 
@@ -231,7 +231,7 @@ module Sidekiq
  entry["enqueued_at"] = now
  Sidekiq.dump_json(entry)
  }
- conn.sadd("queues", queue)
+ conn.sadd("queues", [queue])
  conn.lpush("queue:#{queue}", to_push)
  end
  end
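
The second hunk above also swaps Sidekiq.config for Sidekiq.options. In the 6.5 series the global options live behind Sidekiq.options and Sidekiq[:key]; a Config object only appears with Sidekiq 7.0, so the old guard would presumably raise NoMethodError whenever this YAML rescue path was hit. A small sketch of the 6.5-era lookup:

    # Reading the environment option in Sidekiq 6.5.x:
    Sidekiq.options[:environment]  # => "development", "production", ...
    Sidekiq[:environment]          # hash-style access, equivalent in 6.5
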
data/lib/sidekiq/fetch.rb CHANGED
@@ -33,7 +33,7 @@ module Sidekiq # :nodoc:
  @queues = @config[:queues].map { |q| "queue:#{q}" }
  if @strictly_ordered_queues
  @queues.uniq!
- @queues << TIMEOUT
+ @queues << {timeout: TIMEOUT}
  end
  end
 
@@ -83,7 +83,7 @@ module Sidekiq # :nodoc:
  else
  permute = @queues.shuffle
  permute.uniq!
- permute << TIMEOUT
+ permute << {timeout: TIMEOUT}
  permute
  end
  end
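
The TIMEOUT change relates to how BasicFetch hands its queue list to BRPOP: the list is splatted into the call, so the trailing element reaches redis-rb last. redis-rb 4.8 deprecates passing the blocking timeout as a bare trailing integer in favor of the timeout: option, hence the hash. A rough sketch, assuming redis-rb ~> 4.8:

    queues = ["queue:critical", "queue:default", {timeout: 2}]

    Sidekiq.redis do |conn|
      # Effectively conn.brpop("queue:critical", "queue:default", timeout: 2);
      # a bare trailing 2 would trigger the positional-timeout deprecation warning.
      queue, job = conn.brpop(*queues)
    end
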
@@ -2,6 +2,7 @@
 
  require "zlib"
  require "base64"
+ require "sidekiq/component"
 
  module Sidekiq
  ##
@@ -86,7 +86,7 @@ module Sidekiq
  # doesn't actually exit, it'll reappear in the Web UI.
  redis do |conn|
  conn.pipelined do |pipeline|
- pipeline.srem("processes", identity)
+ pipeline.srem("processes", [identity])
  pipeline.unlink("#{identity}:work")
  end
  end
@@ -165,7 +165,7 @@ module Sidekiq
 
  _, exists, _, _, msg = redis { |conn|
  conn.multi { |transaction|
- transaction.sadd("processes", key)
+ transaction.sadd("processes", [key])
  transaction.exists?(key)
  transaction.hmset(key, "info", to_json,
  "busy", curstate.size,
@@ -1,5 +1,5 @@
  require "sidekiq"
- require "date"
+ require "time"
 
  # This file is designed to be required within the user's
  # deployment script; it should need a bare minimum of dependencies.
@@ -32,7 +32,7 @@ module Sidekiq
  key = "#{datecode}-marks"
  @pool.with do |c|
  c.pipelined do |pipe|
- pipe.hsetnx(key, floor.rfc3339, label)
+ pipe.hsetnx(key, floor.iso8601, label)
  pipe.expire(key, MARK_TTL)
  end
  end
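
The rfc3339-to-iso8601 switch (and the require change from "date" to "time") is presumably because the value being formatted is a Time: the "time" stdlib adds Time#iso8601, whereas #rfc3339 is a Date/DateTime method. The resulting key format is the same UTC timestamp:

    require "time"

    # Time#iso8601 (an alias of #xmlschema) comes from the "time" stdlib:
    Time.utc(2022, 7, 22, 22, 3, 0).iso8601  # => "2022-07-22T22:03:00Z"
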
@@ -13,111 +13,140 @@ module Sidekiq
  # NB: all metrics and times/dates are UTC only. We specifically do not
  # support timezones.
  class Query
- # :hour, :day, :month
- attr_accessor :period
-
- # a specific job class, e.g. "App::OrderJob"
- attr_accessor :klass
-
- # the date specific to the period
- # for :day or :hour, something like Date.today or Date.new(2022, 7, 13)
- # for :month, Date.new(2022, 7, 1)
- attr_accessor :date
-
- # for period = :hour, the specific hour, integer e.g. 1 or 18
- # note that hours and minutes do not have a leading zero so minute-specific
- # keys will look like "j|20220718|7:3" for data at 07:03.
- attr_accessor :hour
-
  def initialize(pool: Sidekiq.redis_pool, now: Time.now)
  @time = now.utc
  @pool = pool
  @klass = nil
  end
 
- # Get metric data from the last hour and roll it up
- # into top processed count and execution time based on class.
- def top_jobs
- resultset = {}
- resultset[:date] = @time.to_date
- resultset[:period] = :hour
- resultset[:ends_at] = @time
- time = @time
+ # Get metric data for all jobs from the last hour
+ def top_jobs(minutes: 60)
+ result = Result.new
 
- results = @pool.with do |conn|
+ time = @time
+ redis_results = @pool.with do |conn|
  conn.pipelined do |pipe|
- resultset[:size] = 60
- 60.times do |idx|
+ minutes.times do |idx|
  key = "j|#{time.strftime("%Y%m%d")}|#{time.hour}:#{time.min}"
  pipe.hgetall key
+ result.prepend_bucket time
  time -= 60
  end
- resultset[:starts_at] = time
  end
  end
 
- t = Hash.new(0)
- klsset = Set.new
- # merge the per-minute data into a totals hash for the hour
- results.each do |hash|
- hash.each { |k, v| t[k] = t[k] + v.to_i }
- klsset.merge(hash.keys.map { |k| k.split("|")[0] })
- end
- resultset[:job_classes] = klsset.delete_if { |item| item.size < 3 }
- resultset[:totals] = t
- top = t.each_with_object({}) do |(k, v), memo|
- (kls, metric) = k.split("|")
- memo[metric] ||= Hash.new(0)
- memo[metric][kls] = v
+ time = @time
+ redis_results.each do |hash|
+ hash.each do |k, v|
+ kls, metric = k.split("|")
+ result.job_results[kls].add_metric metric, time, v.to_i
+ end
+ time -= 60
  end
 
- sorted = {}
- top.each_pair do |metric, hash|
- sorted[metric] = hash.sort_by { |k, v| v }.reverse.to_h
- end
- resultset[:top_classes] = sorted
- resultset
+ result.marks = fetch_marks(result.starts_at..result.ends_at)
+
+ result
  end
 
- def for_job(klass)
- resultset = {}
- resultset[:date] = @time.to_date
- resultset[:period] = :hour
- resultset[:ends_at] = @time
- marks = @pool.with { |c| c.hgetall("#{@time.strftime("%Y%m%d")}-marks") }
+ def for_job(klass, minutes: 60)
+ result = Result.new
 
  time = @time
- initial = @pool.with do |conn|
+ redis_results = @pool.with do |conn|
  conn.pipelined do |pipe|
- resultset[:size] = 60
- 60.times do |idx|
- key = "j|#{time.strftime("%Y%m%d|%-H:%-M")}"
+ minutes.times do |idx|
+ key = "j|#{time.strftime("%Y%m%d")}|#{time.hour}:#{time.min}"
  pipe.hmget key, "#{klass}|ms", "#{klass}|p", "#{klass}|f"
+ result.prepend_bucket time
  time -= 60
  end
  end
  end
 
  time = @time
- hist = Histogram.new(klass)
- results = @pool.with do |conn|
- initial.map do |(ms, p, f)|
- tm = Time.utc(time.year, time.month, time.mday, time.hour, time.min, 0)
- {
- time: tm.iso8601,
- epoch: tm.to_i,
- ms: ms.to_i, p: p.to_i, f: f.to_i, hist: hist.fetch(conn, time)
- }.tap { |x|
- x[:mark] = marks[x[:time]] if marks[x[:time]]
- time -= 60
- }
+ @pool.with do |conn|
+ redis_results.each do |(ms, p, f)|
+ result.job_results[klass].add_metric "ms", time, ms.to_i if ms
+ result.job_results[klass].add_metric "p", time, p.to_i if p
+ result.job_results[klass].add_metric "f", time, f.to_i if f
+ result.job_results[klass].add_hist time, Histogram.new(klass).fetch(conn, time)
+ time -= 60
+ end
+ end
+
+ result.marks = fetch_marks(result.starts_at..result.ends_at)
+
+ result
+ end
+
+ class Result < Struct.new(:starts_at, :ends_at, :size, :buckets, :job_results, :marks)
+ def initialize
+ super
+ self.buckets = []
+ self.marks = []
+ self.job_results = Hash.new { |h, k| h[k] = JobResult.new }
+ end
+
+ def prepend_bucket(time)
+ buckets.unshift time.strftime("%H:%M")
+ self.ends_at ||= time
+ self.starts_at = time
+ end
+ end
+
+ class JobResult < Struct.new(:series, :hist, :totals)
+ def initialize
+ super
+ self.series = Hash.new { |h, k| h[k] = Hash.new(0) }
+ self.hist = Hash.new { |h, k| h[k] = [] }
+ self.totals = Hash.new(0)
+ end
+
+ def add_metric(metric, time, value)
+ totals[metric] += value
+ series[metric][time.strftime("%H:%M")] += value
+
+ # Include timing measurements in seconds for convenience
+ add_metric("s", time, value / 1000.0) if metric == "ms"
+ end
+
+ def add_hist(time, hist_result)
+ hist[time.strftime("%H:%M")] = hist_result
+ end
+
+ def total_avg(metric = "ms")
+ completed = totals["p"] - totals["f"]
+ totals[metric].to_f / completed
+ end
+
+ def series_avg(metric = "ms")
+ series[metric].each_with_object(Hash.new(0)) do |(bucket, value), result|
+ completed = series.dig("p", bucket) - series.dig("f", bucket)
+ result[bucket] = completed == 0 ? 0 : value.to_f / completed
  end
  end
+ end
+
+ class MarkResult < Struct.new(:time, :label)
+ def bucket
+ time.strftime("%H:%M")
+ end
+ end
 
- resultset[:marks] = marks
- resultset[:starts_at] = time
- resultset[:data] = results
- resultset
+ private
+
+ def fetch_marks(time_range)
+ [].tap do |result|
+ marks = @pool.with { |c| c.hgetall("#{@time.strftime("%Y%m%d")}-marks") }
+
+ marks.each do |timestamp, label|
+ time = Time.parse(timestamp)
+ if time_range.cover? time
+ result << MarkResult.new(time, label)
+ end
+ end
+ end
  end
  end
  end
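
The rewrite above replaces the ad-hoc resultset hashes with small structs (Result, JobResult, MarkResult). A rough consumption sketch based only on the code in this hunk; this is internal, still-evolving API rather than a documented interface:

    q = Sidekiq::Metrics::Query.new
    result = q.top_jobs(minutes: 60)

    result.buckets        # => ["13:05", "13:06", ...] oldest-first "HH:MM" labels
    result.marks          # => deploy marks (MarkResult) within starts_at..ends_at

    result.job_results.each do |klass, jr|
      processed = jr.totals["p"]
      failed = jr.totals["f"]
      puts "#{klass}: #{processed} processed, #{failed} failed, " \
           "avg #{jr.total_avg("ms").round(1)} ms"
    end
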
@@ -46,8 +46,6 @@ module Sidekiq
  opts.delete(:network_timeout)
  end
 
- opts[:driver] ||= Redis::Connection.drivers.last || "ruby"
-
  # Issue #3303, redis-rb will silently retry an operation.
  # This can lead to duplicate jobs if Sidekiq::Client's LPUSH
  # is performed twice but I believe this is much, much rarer
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module Sidekiq
- VERSION = "6.5.4"
+ VERSION = "6.5.6"
  end
@@ -15,11 +15,11 @@ module Sidekiq
  end
 
  def halt(res)
- throw :halt, [res, {"Content-Type" => "text/plain"}, [res.to_s]]
+ throw :halt, [res, {"content-type" => "text/plain"}, [res.to_s]]
  end
 
  def redirect(location)
- throw :halt, [302, {"Location" => "#{request.base_url}#{location}"}, []]
+ throw :halt, [302, {"location" => "#{request.base_url}#{location}"}, []]
  end
 
  def params
@@ -68,7 +68,7 @@ module Sidekiq
  end
 
  def json(payload)
- [200, {"Content-Type" => "application/json", "Cache-Control" => "private, no-store"}, [Sidekiq.dump_json(payload)]]
+ [200, {"content-type" => "application/json", "cache-control" => "private, no-store"}, [Sidekiq.dump_json(payload)]]
  end
 
  def initialize(env, block)
@@ -62,14 +62,14 @@ module Sidekiq
 
  get "/metrics" do
  q = Sidekiq::Metrics::Query.new
- @resultset = q.top_jobs
+ @query_result = q.top_jobs
  erb(:metrics)
  end
 
  get "/metrics/:name" do
  @name = route_params[:name]
  q = Sidekiq::Metrics::Query.new
- @resultset = q.for_job(@name)
+ @query_result = q.for_job(@name)
  erb(:metrics_for_job)
  end
 
@@ -312,7 +312,7 @@ module Sidekiq
 
  def call(env)
  action = self.class.match(env)
- return [404, {"Content-Type" => "text/plain", "X-Cascade" => "pass"}, ["Not Found"]] unless action
+ return [404, {"content-type" => "text/plain", "x-cascade" => "pass"}, ["Not Found"]] unless action
 
  app = @klass
  resp = catch(:halt) do
@@ -329,10 +329,10 @@ module Sidekiq
  else
  # rendered content goes here
  headers = {
- "Content-Type" => "text/html",
- "Cache-Control" => "private, no-store",
- "Content-Language" => action.locale,
- "Content-Security-Policy" => CSP_HEADER
+ "content-type" => "text/html",
+ "cache-control" => "private, no-store",
+ "content-language" => action.locale,
+ "content-security-policy" => CSP_HEADER
  }
  # we'll let Rack calculate Content-Length for us.
  [200, headers, [resp]]
data/lib/sidekiq/web.rb CHANGED
@@ -148,7 +148,7 @@ module Sidekiq
  m = middlewares
 
  rules = []
- rules = [[:all, {"Cache-Control" => "public, max-age=86400"}]] unless ENV["SIDEKIQ_WEB_TESTING"]
+ rules = [[:all, {"cache-control" => "public, max-age=86400"}]] unless ENV["SIDEKIQ_WEB_TESTING"]
 
  ::Rack::Builder.new do
  use Rack::Static, urls: ["/stylesheets", "/images", "/javascripts"],
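
The header-key changes in the hunks above back the "Improve Sidekiq::Web compatibility with Rack 3.x" changelog entry: Rack 3's SPEC requires response header names to be lowercase, while Rack 2 accepts either casing, so emitting lowercase keys works under both. A minimal response triple in that style (hypothetical endpoint, not Sidekiq code):

    run ->(env) {
      [200, {"content-type" => "text/plain", "cache-control" => "private, no-store"}, ["ok"]]
    }
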
data/sidekiq.gemspec CHANGED
@@ -22,7 +22,7 @@ Gem::Specification.new do |gem|
  "source_code_uri" => "https://github.com/mperham/sidekiq"
  }
 
- gem.add_dependency "redis", ">= 4.5.0"
- gem.add_dependency "connection_pool", ">= 2.2.2"
+ gem.add_dependency "redis", "<5", ">= 4.5.0"
+ gem.add_dependency "connection_pool", ">= 2.2.5"
  gem.add_dependency "rack", "~> 2.0"
  end
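
The gemspec now caps redis below 5.0 (matching the changelog note that Sidekiq 7 moves to redis-client) and raises the connection_pool floor to 2.2.5, so a bundle containing sidekiq 6.5.6 resolves redis to the 4.x line. A hypothetical Gemfile showing the effective constraint:

    gem "sidekiq", "~> 6.5.6"
    # Redundant with sidekiq's own dependency; shown only to make the cap explicit:
    gem "redis", ">= 4.5.0", "< 5"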