sidekiq 7.3.9 → 8.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. checksums.yaml +4 -4
  2. data/Changes.md +116 -0
  3. data/README.md +16 -13
  4. data/bin/sidekiqload +10 -10
  5. data/bin/webload +69 -0
  6. data/lib/active_job/queue_adapters/sidekiq_adapter.rb +104 -58
  7. data/lib/sidekiq/api.rb +124 -39
  8. data/lib/sidekiq/capsule.rb +6 -6
  9. data/lib/sidekiq/cli.rb +15 -19
  10. data/lib/sidekiq/client.rb +28 -17
  11. data/lib/sidekiq/component.rb +42 -3
  12. data/lib/sidekiq/config.rb +23 -20
  13. data/lib/sidekiq/embedded.rb +2 -1
  14. data/lib/sidekiq/iterable_job.rb +1 -0
  15. data/lib/sidekiq/job/iterable.rb +44 -16
  16. data/lib/sidekiq/job.rb +2 -2
  17. data/lib/sidekiq/job_logger.rb +4 -4
  18. data/lib/sidekiq/job_retry.rb +33 -10
  19. data/lib/sidekiq/job_util.rb +5 -1
  20. data/lib/sidekiq/launcher.rb +2 -1
  21. data/lib/sidekiq/loader.rb +57 -0
  22. data/lib/sidekiq/logger.rb +25 -69
  23. data/lib/sidekiq/manager.rb +0 -1
  24. data/lib/sidekiq/metrics/query.rb +71 -45
  25. data/lib/sidekiq/metrics/shared.rb +8 -5
  26. data/lib/sidekiq/metrics/tracking.rb +12 -7
  27. data/lib/sidekiq/middleware/current_attributes.rb +11 -19
  28. data/lib/sidekiq/paginator.rb +8 -1
  29. data/lib/sidekiq/processor.rb +21 -14
  30. data/lib/sidekiq/profiler.rb +72 -0
  31. data/lib/sidekiq/rails.rb +46 -67
  32. data/lib/sidekiq/redis_client_adapter.rb +0 -1
  33. data/lib/sidekiq/redis_connection.rb +14 -3
  34. data/lib/sidekiq/testing.rb +3 -3
  35. data/lib/sidekiq/transaction_aware_client.rb +13 -5
  36. data/lib/sidekiq/version.rb +2 -2
  37. data/lib/sidekiq/web/action.rb +146 -83
  38. data/lib/sidekiq/web/application.rb +353 -332
  39. data/lib/sidekiq/web/config.rb +120 -0
  40. data/lib/sidekiq/web/helpers.rb +57 -27
  41. data/lib/sidekiq/web/router.rb +60 -76
  42. data/lib/sidekiq/web.rb +51 -156
  43. data/lib/sidekiq.rb +6 -1
  44. data/sidekiq.gemspec +6 -6
  45. data/web/assets/images/logo.png +0 -0
  46. data/web/assets/images/status.png +0 -0
  47. data/web/assets/javascripts/application.js +26 -26
  48. data/web/assets/javascripts/base-charts.js +30 -16
  49. data/web/assets/javascripts/chartjs-adapter-date-fns.min.js +7 -0
  50. data/web/assets/javascripts/dashboard.js +1 -1
  51. data/web/assets/javascripts/metrics.js +16 -34
  52. data/web/assets/stylesheets/style.css +759 -0
  53. data/web/locales/ar.yml +1 -0
  54. data/web/locales/cs.yml +1 -0
  55. data/web/locales/da.yml +1 -0
  56. data/web/locales/de.yml +1 -0
  57. data/web/locales/el.yml +1 -0
  58. data/web/locales/en.yml +6 -0
  59. data/web/locales/es.yml +24 -2
  60. data/web/locales/fa.yml +1 -0
  61. data/web/locales/fr.yml +1 -0
  62. data/web/locales/gd.yml +1 -0
  63. data/web/locales/he.yml +1 -0
  64. data/web/locales/hi.yml +1 -0
  65. data/web/locales/it.yml +8 -0
  66. data/web/locales/ja.yml +1 -0
  67. data/web/locales/ko.yml +1 -0
  68. data/web/locales/lt.yml +1 -0
  69. data/web/locales/nb.yml +1 -0
  70. data/web/locales/nl.yml +1 -0
  71. data/web/locales/pl.yml +1 -0
  72. data/web/locales/{pt-br.yml → pt-BR.yml} +2 -1
  73. data/web/locales/pt.yml +1 -0
  74. data/web/locales/ru.yml +1 -0
  75. data/web/locales/sv.yml +1 -0
  76. data/web/locales/ta.yml +1 -0
  77. data/web/locales/tr.yml +1 -0
  78. data/web/locales/uk.yml +6 -5
  79. data/web/locales/ur.yml +1 -0
  80. data/web/locales/vi.yml +1 -0
  81. data/web/locales/{zh-cn.yml → zh-CN.yml} +85 -73
  82. data/web/locales/{zh-tw.yml → zh-TW.yml} +2 -1
  83. data/web/views/_footer.erb +31 -33
  84. data/web/views/_job_info.erb +91 -89
  85. data/web/views/_metrics_period_select.erb +13 -10
  86. data/web/views/_nav.erb +14 -21
  87. data/web/views/_paging.erb +23 -21
  88. data/web/views/_poll_link.erb +2 -2
  89. data/web/views/_summary.erb +16 -16
  90. data/web/views/busy.erb +124 -122
  91. data/web/views/dashboard.erb +62 -66
  92. data/web/views/dead.erb +31 -27
  93. data/web/views/filtering.erb +3 -3
  94. data/web/views/layout.erb +13 -29
  95. data/web/views/metrics.erb +75 -81
  96. data/web/views/metrics_for_job.erb +45 -46
  97. data/web/views/morgue.erb +61 -70
  98. data/web/views/profiles.erb +43 -0
  99. data/web/views/queue.erb +54 -52
  100. data/web/views/queues.erb +43 -41
  101. data/web/views/retries.erb +66 -75
  102. data/web/views/retry.erb +32 -27
  103. data/web/views/scheduled.erb +59 -55
  104. data/web/views/scheduled_job_info.erb +1 -1
  105. metadata +26 -25
  106. data/web/assets/stylesheets/application-dark.css +0 -147
  107. data/web/assets/stylesheets/application-rtl.css +0 -163
  108. data/web/assets/stylesheets/application.css +0 -759
  109. data/web/assets/stylesheets/bootstrap-rtl.min.css +0 -9
  110. data/web/assets/stylesheets/bootstrap.css +0 -5
  111. data/web/views/_status.erb +0 -4
data/lib/sidekiq/metrics/query.rb

@@ -1,9 +1,7 @@
 # frozen_string_literal: true

-require "sidekiq"
 require "date"
-require "set"
-
+require "sidekiq"
 require "sidekiq/metrics/shared"

 module Sidekiq
@@ -12,7 +10,7 @@ module Sidekiq
     # Caller sets a set of attributes to act as filters. {#fetch} will call
     # Redis and return a Hash of results.
     #
-    # NB: all metrics and times/dates are UTC only. We specifically do not
+    # NB: all metrics and times/dates are UTC only. We explicitly do not
     # support timezones.
     class Query
       def initialize(pool: nil, now: Time.now)
@@ -21,23 +19,46 @@ module Sidekiq
        @klass = nil
      end

+      ROLLUPS = {
+        # minutely aggregates per minute
+        minutely: [60, ->(time) { time.strftime("j|%y%m%d|%-H:%M") }],
+        # hourly aggregates every 10 minutes so we'll have six data points per hour
+        hourly: [600, ->(time) {
+          m = time.min
+          mins = (m < 10) ? "0" : m.to_s[0]
+          time.strftime("j|%y%m%d|%-H:#{mins}")
+        }]
+      }
+
      # Get metric data for all jobs from the last hour
      # +class_filter+: return only results for classes matching filter
-      def top_jobs(class_filter: nil, minutes: 60)
-        result = Result.new
-
+      # +minutes+: the number of fine-grained minute buckets to retrieve
+      # +hours+: the number of coarser-grained 10-minute buckets to retrieve, in hours
+      def top_jobs(class_filter: nil, minutes: nil, hours: nil)
        time = @time
+        minutes = 60 unless minutes || hours
+
+        # DoS protection, sanity check
+        minutes = 60 if minutes && minutes > 480
+        hours = 72 if hours && hours > 72
+
+        granularity = hours ? :hourly : :minutely
+        result = Result.new(granularity)
+        result.ends_at = time
+        count = hours ? hours * 6 : minutes
+        stride, keyproc = ROLLUPS[granularity]
+
        redis_results = @pool.with do |conn|
          conn.pipelined do |pipe|
-            minutes.times do |idx|
-              key = "j|#{time.strftime("%Y%m%d")}|#{time.hour}:#{time.min}"
+            count.times do |idx|
+              key = keyproc.call(time)
              pipe.hgetall key
-              result.prepend_bucket time
-              time -= 60
+              time -= stride
            end
          end
        end

+        result.starts_at = time
        time = @time
        redis_results.each do |hash|
          hash.each do |k, v|
@@ -45,63 +66,66 @@ module Sidekiq
            next if class_filter && !class_filter.match?(kls)
            result.job_results[kls].add_metric metric, time, v.to_i
          end
-          time -= 60
+          time -= stride
        end

-        result.marks = fetch_marks(result.starts_at..result.ends_at)
-
+        result.marks = fetch_marks(result.starts_at..result.ends_at, granularity)
        result
      end

-      def for_job(klass, minutes: 60)
-        result = Result.new
-
+      def for_job(klass, minutes: nil, hours: nil)
        time = @time
+        minutes = 60 unless minutes || hours
+
+        # DoS protection, sanity check
+        minutes = 60 if minutes && minutes > 480
+        hours = 72 if hours && hours > 72
+
+        granularity = hours ? :hourly : :minutely
+        result = Result.new(granularity)
+        result.ends_at = time
+        count = hours ? hours * 6 : minutes
+        stride, keyproc = ROLLUPS[granularity]
+
        redis_results = @pool.with do |conn|
          conn.pipelined do |pipe|
-            minutes.times do |idx|
-              key = "j|#{time.strftime("%Y%m%d")}|#{time.hour}:#{time.min}"
+            count.times do |idx|
+              key = keyproc.call(time)
              pipe.hmget key, "#{klass}|ms", "#{klass}|p", "#{klass}|f"
-              result.prepend_bucket time
-              time -= 60
+              time -= stride
            end
          end
        end

+        result.starts_at = time
        time = @time
        @pool.with do |conn|
          redis_results.each do |(ms, p, f)|
            result.job_results[klass].add_metric "ms", time, ms.to_i if ms
            result.job_results[klass].add_metric "p", time, p.to_i if p
            result.job_results[klass].add_metric "f", time, f.to_i if f
-            result.job_results[klass].add_hist time, Histogram.new(klass).fetch(conn, time).reverse
-            time -= 60
+            result.job_results[klass].add_hist time, Histogram.new(klass).fetch(conn, time).reverse if minutes
+            time -= stride
          end
        end

-        result.marks = fetch_marks(result.starts_at..result.ends_at)
-
+        result.marks = fetch_marks(result.starts_at..result.ends_at, granularity)
        result
      end

-      class Result < Struct.new(:starts_at, :ends_at, :size, :buckets, :job_results, :marks)
-        def initialize
+      class Result < Struct.new(:granularity, :starts_at, :ends_at, :size, :job_results, :marks)
+        def initialize(granularity = :minutely)
          super
-          self.buckets = []
+          self.granularity = granularity
          self.marks = []
-          self.job_results = Hash.new { |h, k| h[k] = JobResult.new }
-        end
-
-        def prepend_bucket(time)
-          buckets.unshift time.strftime("%H:%M")
-          self.ends_at ||= time
-          self.starts_at = time
+          self.job_results = Hash.new { |h, k| h[k] = JobResult.new(granularity) }
        end
      end

-      class JobResult < Struct.new(:series, :hist, :totals)
-        def initialize
+      class JobResult < Struct.new(:granularity, :series, :hist, :totals)
+        def initialize(granularity = :minutely)
          super
+          self.granularity = granularity
          self.series = Hash.new { |h, k| h[k] = Hash.new(0) }
          self.hist = Hash.new { |h, k| h[k] = [] }
          self.totals = Hash.new(0)
@@ -109,14 +133,14 @@ module Sidekiq

        def add_metric(metric, time, value)
          totals[metric] += value
-          series[metric][time.strftime("%H:%M")] += value
+          series[metric][Query.bkt_time_s(time, granularity)] += value

          # Include timing measurements in seconds for convenience
          add_metric("s", time, value / 1000.0) if metric == "ms"
        end

        def add_hist(time, hist_result)
-          hist[time.strftime("%H:%M")] = hist_result
+          hist[Query.bkt_time_s(time, granularity)] = hist_result
        end

        def total_avg(metric = "ms")
@@ -133,22 +157,24 @@ module Sidekiq
        end
      end

-      class MarkResult < Struct.new(:time, :label)
-        def bucket
-          time.strftime("%H:%M")
-        end
+      MarkResult = Struct.new(:time, :label, :bucket)
+
+      def self.bkt_time_s(time, granularity)
+        # truncate time to ten minutes ("8:40", not "8:43") or one minute
+        truncation = (granularity == :hourly) ? 600 : 60
+        Time.at(time.to_i - time.to_i % truncation).utc.iso8601
      end

      private

-      def fetch_marks(time_range)
+      def fetch_marks(time_range, granularity)
        [].tap do |result|
          marks = @pool.with { |c| c.hgetall("#{@time.strftime("%Y%m%d")}-marks") }

          marks.each do |timestamp, label|
            time = Time.parse(timestamp)
            if time_range.cover? time
-              result << MarkResult.new(time, label)
+              result << MarkResult.new(time, label, Query.bkt_time_s(time, granularity))
            end
          end
        end
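
The rollup change above is easiest to see from the caller's side. A minimal usage sketch, assuming a booted Sidekiq 8 process with a reachable Redis; the explicit pool argument and the printed report are illustrative, not part of the diff:

    require "sidekiq/metrics/query"

    q = Sidekiq::Metrics::Query.new(pool: Sidekiq.default_configuration.redis_pool)
    day = q.top_jobs(hours: 24)    # 144 ten-minute buckets from the :hourly rollup
    hour = q.top_jobs(minutes: 60) # 60 one-minute buckets, the pre-8.0 behavior
    day.job_results.each do |klass, jr|
      # series keys are now ISO8601 bucket times (see Query.bkt_time_s) rather
      # than the old "%H:%M" strings, and Result no longer carries :buckets
      puts "#{klass}: #{jr.totals["p"]} processed, avg #{jr.total_avg} ms"
    end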
data/lib/sidekiq/metrics/shared.rb

@@ -25,7 +25,10 @@ module Sidekiq
    #
    # To store this data, we use Redis' BITFIELD command to store unsigned 16-bit counters
    # per bucket per klass per minute. It's unlikely that most people will be executing more
-    # than 1000 job/sec for a full minute of a specific type.
+    # than 1000 job/sec for a full minute of a specific type (i.e. overflow 65,536).
+    #
+    # Histograms are only stored at the fine-grained level, they are not rolled up
+    # for longer-term buckets.
    class Histogram
      include Enumerable

@@ -82,15 +85,15 @@ module Sidekiq
      end

      def fetch(conn, now = Time.now)
-        window = now.utc.strftime("%d-%H:%-M")
-        key = "#{@klass}-#{window}"
+        window = now.utc.strftime("%-d-%-H:%-M")
+        key = "h|#{@klass}-#{window}"
        conn.bitfield_ro(key, *FETCH)
      end

      def persist(conn, now = Time.now)
        buckets, @buckets = @buckets, []
-        window = now.utc.strftime("%d-%H:%-M")
-        key = "#{@klass}-#{window}"
+        window = now.utc.strftime("%-d-%-H:%-M")
+        key = "h|#{@klass}-#{window}"
        cmd = [key, "OVERFLOW", "SAT"]
        buckets.each_with_index do |counter, idx|
          val = counter.value
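
The Histogram comment above compresses a lot; the Redis-level traffic it implies looks roughly like this. A sketch only, driving redis-client directly with hypothetical bucket values; Sidekiq's real FETCH constant and bit offsets live elsewhere in this file:

    require "redis-client"

    redis = RedisClient.new
    key = "h|SomeJob-14-8:43"  # note the new "h|" prefix introduced by this diff

    # u16 counters packed side by side in one value; OVERFLOW SAT clamps at
    # 65,535 instead of wrapping, matching the "1000 job/sec" comment above
    redis.call("BITFIELD", key, "OVERFLOW", "SAT",
      "INCRBY", "u16", 0, 1,    # first latency bucket, bits 0-15
      "INCRBY", "u16", 16, 3)   # second latency bucket, bits 16-31
    redis.call("BITFIELD_RO", key, "GET", "u16", 0, "GET", "u16", 16)
    # => [1, 3]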
data/lib/sidekiq/metrics/tracking.rb

@@ -19,13 +19,13 @@ module Sidekiq
      end

      def track(queue, klass)
-        start = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC, :millisecond)
+        start = mono_ms
        time_ms = 0
        begin
          begin
            yield
          ensure
-            finish = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC, :millisecond)
+            finish = mono_ms
            time_ms = finish - start
          end
          # We don't track time for failed jobs as they can have very unpredictable
@@ -51,7 +51,7 @@ module Sidekiq
      end

      # LONG_TERM = 90 * 24 * 60 * 60
-      # MID_TERM = 7 * 24 * 60 * 60
+      MID_TERM = 3 * 24 * 60 * 60
      SHORT_TERM = 8 * 60 * 60

      def flush(time = Time.now)
@@ -62,8 +62,10 @@ module Sidekiq

        now = time.utc
        # nowdate = now.strftime("%Y%m%d")
-        # nowhour = now.strftime("%Y%m%d|%-H")
-        nowmin = now.strftime("%Y%m%d|%-H:%-M")
+        # "250214|8:4" is the 10 minute bucket for Feb 14 2025, 08:43
+        nowmid = now.strftime("%y%m%d|%-H:%M")[0..-2]
+        # "250214|8:43" is the 1 minute bucket for Feb 14 2025, 08:43
+        nowshort = now.strftime("%y%m%d|%-H:%M")
        count = 0

        redis do |conn|
@@ -81,8 +83,8 @@ module Sidekiq
          # daily or hourly rollups.
          [
            # ["j", jobs, nowdate, LONG_TERM],
-            # ["j", jobs, nowhour, MID_TERM],
-            ["j", jobs, nowmin, SHORT_TERM]
+            ["j", jobs, nowmid, MID_TERM],
+            ["j", jobs, nowshort, SHORT_TERM]
          ].each do |prefix, data, bucket, ttl|
            conn.pipelined do |xa|
              stats = "#{prefix}|#{bucket}"
@@ -145,4 +147,7 @@ Sidekiq.configure_server do |config|
  config.on(:beat) do
    exec.flush
  end
+  config.on(:exit) do
+    exec.flush
+  end
end
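
The two bucket strings differ only in the trailing minute digit; dropping it with [0..-2] collapses one-minute keys into ten-minute keys. A worked example for the timestamp used in the comments above:

    now = Time.utc(2025, 2, 14, 8, 43)
    now.strftime("%y%m%d|%-H:%M")         # => "250214|8:43"  1-minute bucket, kept SHORT_TERM (8h)
    now.strftime("%y%m%d|%-H:%M")[0..-2]  # => "250214|8:4"   10-minute bucket, kept MID_TERM (3d)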
data/lib/sidekiq/middleware/current_attributes.rb

@@ -1,5 +1,6 @@
 # frozen_string_literal: true

+require "active_job/arguments"
 require "active_support/current_attributes"

 module Sidekiq
@@ -20,6 +21,8 @@ module Sidekiq
  #   Sidekiq::CurrentAttributes.persist(["Myapp::Current", "Myapp::OtherCurrent"])
  #
  module CurrentAttributes
+    Serializer = ::ActiveJob::Arguments
+
    class Save
      include Sidekiq::ClientMiddleware

@@ -33,26 +36,11 @@ module Sidekiq
            attrs = strklass.constantize.attributes
            # Retries can push the job N times, we don't
            # want retries to reset cattr. #5692, #5090
-            if attrs.any?
-              # Older rails has a bug that `CurrentAttributes#attributes` always returns
-              # the same hash instance. We need to dup it to avoid being accidentally mutated.
-              job[key] = if returns_same_object?
-                attrs.dup
-              else
-                attrs
-              end
-            end
+            job[key] = Serializer.serialize(attrs) if attrs.any?
          end
        end
        yield
      end
-
-      private
-
-      def returns_same_object?
-        ActiveSupport::VERSION::MAJOR < 8 ||
-          (ActiveSupport::VERSION::MAJOR == 8 && ActiveSupport::VERSION::MINOR == 0)
-      end
    end

    class Load
@@ -62,13 +50,13 @@ module Sidekiq
        @cattrs = cattrs
      end

-      def call(_, job, _, &block)
+      def call(_, job, *, &block)
        klass_attrs = {}

        @cattrs.each do |(key, strklass)|
          next unless job.has_key?(key)

-          klass_attrs[strklass.constantize] = job[key]
+          klass_attrs[strklass.constantize] = Serializer.deserialize(job[key]).to_h
        end

        wrap(klass_attrs.to_a, &block)
@@ -83,11 +71,14 @@ module Sidekiq
        retried = false

        begin
+          set_succeeded = false
          klass.set(attrs) do
+            set_succeeded = true
            wrap(klass_attrs, &block)
          end
        rescue NoMethodError
-          raise if retried
+          # Don't retry if the no method error didn't come from current attributes
+          raise if retried || set_succeeded

          # It is possible that the `CurrentAttributes` definition
          # was changed before the job started processing.
@@ -102,6 +93,7 @@ module Sidekiq
    def persist(klass_or_array, config = Sidekiq.default_configuration)
      cattrs = build_cattrs_hash(klass_or_array)

+      config.client_middleware.prepend Load, cattrs
      config.client_middleware.add Save, cattrs
      config.server_middleware.prepend Load, cattrs
    end
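
The switch to ActiveJob::Arguments explains the new .to_h in Load: serializing a Hash yields a JSON-safe array of pairs, so deserializing returns pairs that must be rebuilt into a Hash. A round-trip sketch, assuming activejob is available:

    require "active_job"
    require "active_job/arguments"

    attrs = {"user_id" => 123, "tenant" => "acme"}
    payload = ActiveJob::Arguments.serialize(attrs)
    # => [["user_id", 123], ["tenant", "acme"]]
    ActiveJob::Arguments.deserialize(payload).to_h
    # => {"user_id" => 123, "tenant" => "acme"}

Unlike the raw Hash that 7.x stored, this round-trips richer attribute values (Symbols, Times, GlobalID records) through the JSON payload.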
data/lib/sidekiq/paginator.rb

@@ -17,7 +17,14 @@ module Sidekiq
      ending = starting + page_size - 1

      Sidekiq.redis do |conn|
-        type = conn.type(key)
+        # horrible, think you can make this cleaner?
+        type = TYPE_CACHE[key]
+        if type
+        elsif key.start_with?("queue:")
+          type = TYPE_CACHE[key] = "list"
+        else
+          type = TYPE_CACHE[key] = conn.type(key)
+        end
        rev = opts && opts[:reverse]

        case type
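
The in-line comment invites a cleaner version. One possible equivalent, assuming TYPE_CACHE is a plain Hash defined elsewhere in this file (its definition is not shown in this diff) and relying on conn.type never returning nil:

    type = TYPE_CACHE[key] ||=
      key.start_with?("queue:") ? "list" : conn.type(key)

Either way, the point of the cache is that queue keys are always Redis lists, so paging a queue no longer spends a round trip on TYPE.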
data/lib/sidekiq/processor.rb

@@ -3,6 +3,7 @@
 require "sidekiq/fetch"
 require "sidekiq/job_logger"
 require "sidekiq/job_retry"
+require "sidekiq/profiler"

 module Sidekiq
   ##
@@ -66,7 +67,7 @@ module Sidekiq
      @thread ||= safe_thread("#{config.name}/processor", &method(:run))
    end

-    private unless $TESTING
+    private

    def run
      # By setting this thread-local, Sidekiq.redis will access +Sidekiq::Capsule#redis_pool+
@@ -112,13 +113,17 @@ module Sidekiq
    def handle_fetch_exception(ex)
      unless @down
        @down = ::Process.clock_gettime(::Process::CLOCK_MONOTONIC)
-        logger.error("Error fetching job: #{ex}")
        handle_exception(ex)
      end
      sleep(1)
      nil
    end

+    def profile(job, &block)
+      return yield unless job["profile"]
+      Sidekiq::Profiler.new(config).call(job, &block)
+    end
+
    def dispatch(job_hash, queue, jobstr)
      # since middleware can mutate the job hash
      # we need to clone it to report the original
@@ -132,17 +137,19 @@ module Sidekiq
      @retrier.global(jobstr, queue) do
        @job_logger.call(job_hash, queue) do
          stats(jobstr, queue) do
-            # Rails 5 requires a Reloader to wrap code execution. In order to
-            # constantize the worker and instantiate an instance, we have to call
-            # the Reloader. It handles code loading, db connection management, etc.
-            # Effectively this block denotes a "unit of work" to Rails.
-            @reloader.call do
-              klass = Object.const_get(job_hash["class"])
-              instance = klass.new
-              instance.jid = job_hash["jid"]
-              instance._context = self
-              @retrier.local(instance, jobstr, queue) do
-                yield instance
+            profile(job_hash) do
+              # Rails 5 requires a Reloader to wrap code execution. In order to
+              # constantize the worker and instantiate an instance, we have to call
+              # the Reloader. It handles code loading, db connection management, etc.
+              # Effectively this block denotes a "unit of work" to Rails.
+              @reloader.call do
+                klass = Object.const_get(job_hash["class"])
+                instance = klass.new
+                instance.jid = job_hash["jid"]
+                instance._context = self
+                @retrier.local(instance, jobstr, queue) do
+                  yield instance
+                end
              end
            end
          end
@@ -165,7 +172,6 @@ module Sidekiq
      begin
        job_hash = Sidekiq.load_json(jobstr)
      rescue => ex
-        handle_exception(ex, {context: "Invalid JSON for job", jobstr: jobstr})
        now = Time.now.to_f
        redis do |conn|
          conn.multi do |xa|
@@ -174,6 +180,7 @@ module Sidekiq
            xa.zremrangebyrank("dead", 0, - @capsule.config[:dead_max_jobs])
          end
        end
+        handle_exception(ex, {context: "Invalid JSON for job", jobstr: jobstr})
        return uow.acknowledge
      end

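The profile wrapper is pay-for-use: dispatch behaves exactly as before unless the payload carries a "profile" token. A sketch of a job hash that would trigger it; the field names come from this diff, the values are arbitrary:

    job_hash = {
      "class" => "HardJob",
      "jid" => "abc123def456",
      "profile" => "checkout-debug",            # opts this run into Vernier profiling
      "profiler_options" => {"mode" => "cpu"}   # optional; DEFAULT_OPTIONS uses :wall
    }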
data/lib/sidekiq/profiler.rb (new file)

@@ -0,0 +1,72 @@
+require "fileutils"
+require "sidekiq/component"
+
+module Sidekiq
+  # Allows the user to profile jobs running in production.
+  # See details in the Profiling wiki page.
+  class Profiler
+    EXPIRY = 86400 # 1 day
+    DEFAULT_OPTIONS = {
+      mode: :wall
+    }
+
+    include Sidekiq::Component
+    def initialize(config)
+      @config = config
+      @vernier_output_dir = ENV.fetch("VERNIER_OUTPUT_DIR") { Dir.tmpdir }
+    end
+
+    def call(job, &block)
+      return yield unless job["profile"]
+
+      token = job["profile"]
+      type = job["class"]
+      jid = job["jid"]
+      started_at = Time.now
+
+      rundata = {
+        started_at: started_at.to_i,
+        token: token,
+        type: type,
+        jid: jid,
+        # .gz extension tells Vernier to compress the data
+        filename: File.join(
+          @vernier_output_dir,
+          "#{token}-#{type}-#{jid}-#{started_at.strftime("%Y%m%d-%H%M%S")}.json.gz"
+        )
+      }
+      profiler_options = profiler_options(job, rundata)
+
+      require "vernier"
+      begin
+        a = Time.now
+        rc = Vernier.profile(**profiler_options, &block)
+        b = Time.now
+
+        # Failed jobs will raise an exception on previous line and skip this
+        # block. Only successful jobs will persist profile data to Redis.
+        key = "#{token}-#{jid}"
+        data = File.read(rundata[:filename])
+        redis do |conn|
+          conn.multi do |m|
+            m.zadd("profiles", Time.now.to_f + EXPIRY, key)
+            m.hset(key, rundata.merge(elapsed: (b - a), data: data, size: data.bytesize))
+            m.expire(key, EXPIRY)
+          end
+        end
+        rc
+      ensure
+        FileUtils.rm_f(rundata[:filename])
+      end
+    end
+
+    private
+
+    def profiler_options(job, rundata)
+      profiler_options = (job["profiler_options"] || {}).transform_keys(&:to_sym)
+      profiler_options[:mode] = profiler_options[:mode].to_sym if profiler_options[:mode]
+
+      DEFAULT_OPTIONS.merge(profiler_options, {out: rundata[:filename]})
+    end
+  end
+end
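
A hedged client-side sketch of getting that "profile" token onto a job: Sidekiq's Job.set passes extra keys through into the payload, so something like this should work (the token is arbitrary, and the vernier gem must be installed server-side; see the Profiling wiki page):

    HardJob.set("profile" => "checkout-debug").perform_async(42)

Successful runs are then indexed in the "profiles" zset, with the gzipped Vernier JSON kept in Redis for EXPIRY (one day) under "#{token}-#{jid}".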