sidekiq 6.5.4 → 6.5.5


Potentially problematic release: this version of sidekiq has been flagged as potentially problematic.

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5a8c3c194984f6f100e1b71b92d200fa90660d07f0ad4c670285116ddba8bbfc
- data.tar.gz: 97dd46ecc5728504e80ada9b15a3f26eab75cf18f1022630db29d19c9109219f
+ metadata.gz: 56d317d1dc6e27057146cee5e5951030fb907489025a0d14f1e11c7ae952befe
+ data.tar.gz: 7626890a494eb9f55374a24fa2dc0cf69193ddf31c00ef9a96c15e3a6aa88450
  SHA512:
- metadata.gz: 8e5a7a1cf338064e2b900a3083cd124ee98cb0b8cc059e5aab07a572b2714ed1ce46b3149bbdd8808574daf41718d7f98f65825c98c447f194b4a64266991996
- data.tar.gz: 67933e92a6d81c4b11f99b510b5b00b45224bef8babafa8918ab87108f194abc6e586b0e14239ccfa415ce22cae0cd6e94764cea630aa830acc21b50af862931
+ metadata.gz: 11fa0967bb8dcd19aa7c5893d151500ac2b8f06fb364fd957d5a01a8fe6842247e6da3b45d7103e95026ae7741265ef84881278bc55d6c1830480ee9e83cadd6
+ data.tar.gz: d625912d1f148c56b82da498cac73109532050d4e36c51ac9c3b229207a0e32ecbd34d3bc53823703951fa0ead9304113bdee654b8898d4cc2e4973621b8a268
data/Changes.md CHANGED
@@ -2,6 +2,12 @@
 
  [Sidekiq Changes](https://github.com/mperham/sidekiq/blob/main/Changes.md) | [Sidekiq Pro Changes](https://github.com/mperham/sidekiq/blob/main/Pro-Changes.md) | [Sidekiq Enterprise Changes](https://github.com/mperham/sidekiq/blob/main/Ent-Changes.md)
 
+ 6.5.5
+ ----------
+
+ - Fix require issue with job_retry.rb [#5462]
+ - Improve Sidekiq::Web compatibility with Rack 3.x
+
  6.5.4
  ----------
 
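Most of the "Rack 3.x compatibility" work referenced in the 6.5.5 entry above is the header-casing change in the web hunks further down: Rack 3 requires response header names to be lowercase, so keys like "Content-Type" become "content-type". A minimal sketch of the convention, as a throwaway Rack app (the endpoint and body are illustrative, not Sidekiq code):

    # config.ru (illustrative): Rack 3 / Rack::Lint rejects mixed-case response header
    # names, so handlers return lowercase keys such as "content-type".
    app = lambda do |env|
      [200, {"content-type" => "text/plain", "cache-control" => "private, no-store"}, ["ok"]]
    end

    run app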
@@ -2,6 +2,7 @@
 
  require "zlib"
  require "base64"
+ require "sidekiq/component"
 
  module Sidekiq
  ##
@@ -1,5 +1,5 @@
  require "sidekiq"
- require "date"
+ require "time"
 
  # This file is designed to be required within the user's
  # deployment script; it should need a bare minimum of dependencies.
@@ -32,7 +32,7 @@ module Sidekiq
  key = "#{datecode}-marks"
  @pool.with do |c|
  c.pipelined do |pipe|
- pipe.hsetnx(key, floor.rfc3339, label)
+ pipe.hsetnx(key, floor.iso8601, label)
  pipe.expire(key, MARK_TTL)
  end
  end
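A hedged reading of the two small hunks above: deploy marks are now written with Time#iso8601 and later parsed back with Time.parse (see fetch_marks in the next hunk), and both methods come from the "time" stdlib rather than "date", hence the swapped require. A quick sketch of the round trip:

    require "time"

    floor = Time.now.utc
    stamp = floor.iso8601   # e.g. "2022-08-06T14:03:27Z", stored as the mark's hash field
    Time.parse(stamp)       # => a Time again, usable for range checks like time_range.cover?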
@@ -13,111 +13,140 @@ module Sidekiq
  # NB: all metrics and times/dates are UTC only. We specifically do not
  # support timezones.
  class Query
- # :hour, :day, :month
- attr_accessor :period
-
- # a specific job class, e.g. "App::OrderJob"
- attr_accessor :klass
-
- # the date specific to the period
- # for :day or :hour, something like Date.today or Date.new(2022, 7, 13)
- # for :month, Date.new(2022, 7, 1)
- attr_accessor :date
-
- # for period = :hour, the specific hour, integer e.g. 1 or 18
- # note that hours and minutes do not have a leading zero so minute-specific
- # keys will look like "j|20220718|7:3" for data at 07:03.
- attr_accessor :hour
-
  def initialize(pool: Sidekiq.redis_pool, now: Time.now)
  @time = now.utc
  @pool = pool
  @klass = nil
  end
 
- # Get metric data from the last hour and roll it up
- # into top processed count and execution time based on class.
- def top_jobs
- resultset = {}
- resultset[:date] = @time.to_date
- resultset[:period] = :hour
- resultset[:ends_at] = @time
- time = @time
+ # Get metric data for all jobs from the last hour
+ def top_jobs(minutes: 60)
+ result = Result.new
 
- results = @pool.with do |conn|
+ time = @time
+ redis_results = @pool.with do |conn|
  conn.pipelined do |pipe|
- resultset[:size] = 60
- 60.times do |idx|
+ minutes.times do |idx|
  key = "j|#{time.strftime("%Y%m%d")}|#{time.hour}:#{time.min}"
  pipe.hgetall key
+ result.prepend_bucket time
  time -= 60
  end
- resultset[:starts_at] = time
  end
  end
 
- t = Hash.new(0)
- klsset = Set.new
- # merge the per-minute data into a totals hash for the hour
- results.each do |hash|
- hash.each { |k, v| t[k] = t[k] + v.to_i }
- klsset.merge(hash.keys.map { |k| k.split("|")[0] })
- end
- resultset[:job_classes] = klsset.delete_if { |item| item.size < 3 }
- resultset[:totals] = t
- top = t.each_with_object({}) do |(k, v), memo|
- (kls, metric) = k.split("|")
- memo[metric] ||= Hash.new(0)
- memo[metric][kls] = v
+ time = @time
+ redis_results.each do |hash|
+ hash.each do |k, v|
+ kls, metric = k.split("|")
+ result.job_results[kls].add_metric metric, time, v.to_i
+ end
+ time -= 60
  end
 
- sorted = {}
- top.each_pair do |metric, hash|
- sorted[metric] = hash.sort_by { |k, v| v }.reverse.to_h
- end
- resultset[:top_classes] = sorted
- resultset
+ result.marks = fetch_marks(result.starts_at..result.ends_at)
+
+ result
  end
 
- def for_job(klass)
- resultset = {}
- resultset[:date] = @time.to_date
- resultset[:period] = :hour
- resultset[:ends_at] = @time
- marks = @pool.with { |c| c.hgetall("#{@time.strftime("%Y%m%d")}-marks") }
+ def for_job(klass, minutes: 60)
+ result = Result.new
 
  time = @time
- initial = @pool.with do |conn|
+ redis_results = @pool.with do |conn|
  conn.pipelined do |pipe|
- resultset[:size] = 60
- 60.times do |idx|
- key = "j|#{time.strftime("%Y%m%d|%-H:%-M")}"
+ minutes.times do |idx|
+ key = "j|#{time.strftime("%Y%m%d")}|#{time.hour}:#{time.min}"
  pipe.hmget key, "#{klass}|ms", "#{klass}|p", "#{klass}|f"
+ result.prepend_bucket time
  time -= 60
  end
  end
  end
 
  time = @time
- hist = Histogram.new(klass)
- results = @pool.with do |conn|
- initial.map do |(ms, p, f)|
- tm = Time.utc(time.year, time.month, time.mday, time.hour, time.min, 0)
- {
- time: tm.iso8601,
- epoch: tm.to_i,
- ms: ms.to_i, p: p.to_i, f: f.to_i, hist: hist.fetch(conn, time)
- }.tap { |x|
- x[:mark] = marks[x[:time]] if marks[x[:time]]
- time -= 60
- }
+ @pool.with do |conn|
+ redis_results.each do |(ms, p, f)|
+ result.job_results[klass].add_metric "ms", time, ms.to_i if ms
+ result.job_results[klass].add_metric "p", time, p.to_i if p
+ result.job_results[klass].add_metric "f", time, f.to_i if f
+ result.job_results[klass].add_hist time, Histogram.new(klass).fetch(conn, time)
+ time -= 60
+ end
+ end
+
+ result.marks = fetch_marks(result.starts_at..result.ends_at)
+
+ result
+ end
+
+ class Result < Struct.new(:starts_at, :ends_at, :size, :buckets, :job_results, :marks)
+ def initialize
+ super
+ self.buckets = []
+ self.marks = []
+ self.job_results = Hash.new { |h, k| h[k] = JobResult.new }
+ end
+
+ def prepend_bucket(time)
+ buckets.unshift time.strftime("%H:%M")
+ self.ends_at ||= time
+ self.starts_at = time
+ end
+ end
+
+ class JobResult < Struct.new(:series, :hist, :totals)
+ def initialize
+ super
+ self.series = Hash.new { |h, k| h[k] = Hash.new(0) }
+ self.hist = Hash.new { |h, k| h[k] = [] }
+ self.totals = Hash.new(0)
+ end
+
+ def add_metric(metric, time, value)
+ totals[metric] += value
+ series[metric][time.strftime("%H:%M")] += value
+
+ # Include timing measurements in seconds for convenience
+ add_metric("s", time, value / 1000.0) if metric == "ms"
+ end
+
+ def add_hist(time, hist_result)
+ hist[time.strftime("%H:%M")] = hist_result
+ end
+
+ def total_avg(metric = "ms")
+ completed = totals["p"] - totals["f"]
+ totals[metric].to_f / completed
+ end
+
+ def series_avg(metric = "ms")
+ series[metric].each_with_object(Hash.new(0)) do |(bucket, value), result|
+ completed = series.dig("p", bucket) - series.dig("f", bucket)
+ result[bucket] = completed == 0 ? 0 : value.to_f / completed
  end
  end
+ end
+
+ class MarkResult < Struct.new(:time, :label)
+ def bucket
+ time.strftime("%H:%M")
+ end
+ end
 
- resultset[:marks] = marks
- resultset[:starts_at] = time
- resultset[:data] = results
- resultset
+ private
+
+ def fetch_marks(time_range)
+ [].tap do |result|
+ marks = @pool.with { |c| c.hgetall("#{@time.strftime("%Y%m%d")}-marks") }
+
+ marks.each do |timestamp, label|
+ time = Time.parse(timestamp)
+ if time_range.cover? time
+ result << MarkResult.new(time, label)
+ end
+ end
+ end
  end
  end
  end
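The rewrite above replaces the loose resultset hashes with structured Result, JobResult, and MarkResult objects. A hedged usage sketch of the new shape, based only on what this diff shows (the job class name is hypothetical, and the exact require path for the metrics code is not visible here):

    require "sidekiq"
    # The Query class above lives in Sidekiq's metrics code; its require path is not shown in this diff.

    q = Sidekiq::Metrics::Query.new           # defaults: Sidekiq.redis_pool, Time.now
    result = q.top_jobs(minutes: 60)          # a Result struct instead of a raw hash

    result.buckets                            # oldest-first "%H:%M" labels, one per minute
    result.starts_at                          # Time of the oldest bucket
    result.marks                              # [MarkResult(time, label), ...] deploy marks in range

    job = result.job_results["App::OrderJob"] # JobResult for a hypothetical job class
    job.totals["p"]                           # processed count over the window
    job.total_avg("ms")                       # average runtime per completed job, in milliseconds
    job.series_avg("s")                       # per-bucket averages, converted to seconds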
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module Sidekiq
- VERSION = "6.5.4"
+ VERSION = "6.5.5"
  end
@@ -15,11 +15,11 @@ module Sidekiq
  end
 
  def halt(res)
- throw :halt, [res, {"Content-Type" => "text/plain"}, [res.to_s]]
+ throw :halt, [res, {"content-type" => "text/plain"}, [res.to_s]]
  end
 
  def redirect(location)
- throw :halt, [302, {"Location" => "#{request.base_url}#{location}"}, []]
+ throw :halt, [302, {"location" => "#{request.base_url}#{location}"}, []]
  end
 
  def params
@@ -68,7 +68,7 @@ module Sidekiq
  end
 
  def json(payload)
- [200, {"Content-Type" => "application/json", "Cache-Control" => "private, no-store"}, [Sidekiq.dump_json(payload)]]
+ [200, {"content-type" => "application/json", "cache-control" => "private, no-store"}, [Sidekiq.dump_json(payload)]]
  end
 
  def initialize(env, block)
@@ -62,14 +62,14 @@ module Sidekiq
 
  get "/metrics" do
  q = Sidekiq::Metrics::Query.new
- @resultset = q.top_jobs
+ @query_result = q.top_jobs
  erb(:metrics)
  end
 
  get "/metrics/:name" do
  @name = route_params[:name]
  q = Sidekiq::Metrics::Query.new
- @resultset = q.for_job(@name)
+ @query_result = q.for_job(@name)
  erb(:metrics_for_job)
  end
 
@@ -312,7 +312,7 @@ module Sidekiq
 
  def call(env)
  action = self.class.match(env)
- return [404, {"Content-Type" => "text/plain", "X-Cascade" => "pass"}, ["Not Found"]] unless action
+ return [404, {"content-type" => "text/plain", "x-cascade" => "pass"}, ["Not Found"]] unless action
 
  app = @klass
  resp = catch(:halt) do
@@ -329,10 +329,10 @@ module Sidekiq
  else
  # rendered content goes here
  headers = {
- "Content-Type" => "text/html",
- "Cache-Control" => "private, no-store",
- "Content-Language" => action.locale,
- "Content-Security-Policy" => CSP_HEADER
+ "content-type" => "text/html",
+ "cache-control" => "private, no-store",
+ "content-language" => action.locale,
+ "content-security-policy" => CSP_HEADER
  }
  # we'll let Rack calculate Content-Length for us.
  [200, headers, [resp]]
data/lib/sidekiq/web.rb CHANGED
@@ -148,7 +148,7 @@ module Sidekiq
  m = middlewares
 
  rules = []
- rules = [[:all, {"Cache-Control" => "public, max-age=86400"}]] unless ENV["SIDEKIQ_WEB_TESTING"]
+ rules = [[:all, {"cache-control" => "public, max-age=86400"}]] unless ENV["SIDEKIQ_WEB_TESTING"]
 
  ::Rack::Builder.new do
  use Rack::Static, urls: ["/stylesheets", "/images", "/javascripts"],
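Together with the lowercase-header hunks above, this lets the dashboard's pages and static assets pass Rack 3's header checks. For context, a standard mounting sketch for the web UI (common Sidekiq usage, not taken from this diff; the session secret is illustrative):

    # config.ru (illustrative)
    require "securerandom"
    require "sidekiq/web"

    # Sidekiq::Web needs a session for CSRF protection.
    use Rack::Session::Cookie, secret: SecureRandom.hex(32), same_site: true, max_age: 86400

    run Sidekiq::Web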