misha-resque-cleaner 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +76 -0
- data/LICENSE +20 -0
- data/README.md +330 -0
- data/Rakefile +38 -0
- data/lib/resque-cleaner.rb +1 -0
- data/lib/resque_cleaner.rb +307 -0
- data/lib/resque_cleaner/server.rb +266 -0
- data/lib/resque_cleaner/server/public/cleaner.css +64 -0
- data/lib/resque_cleaner/server/views/_limiter.erb +13 -0
- data/lib/resque_cleaner/server/views/_paginate.erb +54 -0
- data/lib/resque_cleaner/server/views/_stats.erb +44 -0
- data/lib/resque_cleaner/server/views/cleaner.erb +25 -0
- data/lib/resque_cleaner/server/views/cleaner_exec.erb +8 -0
- data/lib/resque_cleaner/server/views/cleaner_list.erb +179 -0
- data/test/redis-test.conf +115 -0
- data/test/resque_cleaner_test.rb +206 -0
- data/test/resque_web_test.rb +66 -0
- data/test/test_helper.rb +131 -0
- metadata +109 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
require 'resque_cleaner'
|
|
@@ -0,0 +1,307 @@
|
|
|
1
|
+
require 'time'
|
|
2
|
+
require 'resque'
|
|
3
|
+
require 'resque/server'
|
|
4
|
+
|
|
5
|
+
module Resque
|
|
6
|
+
module Plugins
|
|
7
|
+
# ResqueCleaner class provides useful functionalities to retry or clean
|
|
8
|
+
# failed jobs. Let's clean up your failed list!
|
|
9
|
+
class ResqueCleaner
|
|
10
|
+
# ResqueCleaner fetches all elements from Redis and checks them
|
|
11
|
+
# by linear when filtering them. Since there is a performance concern,
|
|
12
|
+
# ResqueCleaner handles only the latest x(default 1000) jobs.
|
|
13
|
+
#
|
|
14
|
+
# You can change the value through limiter attribute.
|
|
15
|
+
# e.g. cleaner.limiter.maximum = 5000
|
|
16
|
+
attr_reader :limiter
|
|
17
|
+
|
|
18
|
+
# Set false if you don't show any message.
|
|
19
|
+
attr_accessor :print_message
|
|
20
|
+
|
|
21
|
+
# Initializes instance
|
|
22
|
+
# Builds a cleaner bound to Resque's currently configured failure backend.
# Console output is enabled by default; toggle via #print_message.
def initialize
  @print_message = true
  @failure       = Resque::Failure.backend
  @limiter       = Limiter.new(self)
end
|
|
27
|
+
|
|
28
|
+
# Returns redis instance.
|
|
29
|
+
# The Redis connection shared with Resque itself.
def redis
  Resque.redis
end
|
|
32
|
+
|
|
33
|
+
# Returns failure backend. Only supports redis backend.
|
|
34
|
+
# The failure backend captured at construction time.
# Only the Redis-backed backend is supported.
def failure
  @failure
end
|
|
37
|
+
|
|
38
|
+
# Stats by date.
|
|
39
|
+
# Tallies the (optionally filtered) failed jobs per calendar day, keyed by
# the "YYYY/MM/DD" prefix of each job's failed_at timestamp.
# Prints the table when print? is on; returns the tally Hash.
def stats_by_date(&block)
  stats = {}
  select(&block).each do |job|
    day = job["failed_at"][0, 10]
    stats[day] = stats.fetch(day, 0) + 1
  end

  print_stats(stats) if print?
  stats
end
|
|
50
|
+
|
|
51
|
+
# Stats by class.
|
|
52
|
+
# Tallies the (optionally filtered) failed jobs per job class name.
# Jobs with no payload class are grouped under "UNKNOWN".
def stats_by_class(&block)
  stats = {}
  select(&block).each do |job|
    payload = job["payload"]
    klass = payload && payload["class"] ? payload["class"] : "UNKNOWN"
    stats[klass] = stats.fetch(klass, 0) + 1
  end

  print_stats(stats) if print?
  stats
end
|
|
63
|
+
|
|
64
|
+
# Stats by exception.
|
|
65
|
+
# Tallies the (optionally filtered) failed jobs per recorded exception name.
def stats_by_exception(&block)
  stats = {}
  select(&block).each do |job|
    name = job["exception"]
    stats[name] = stats.fetch(name, 0) + 1
  end

  print_stats(stats) if print?
  stats
end
|
|
76
|
+
|
|
77
|
+
# Print stats
|
|
78
|
+
# Renders a right-aligned "key: count" table via #log, sorted by key,
# with a trailing "total" row. Warns first when the limiter window is on.
def print_stats(stats)
  log too_many_message if @limiter.on?
  stats.keys.sort.each do |key|
    log format("%15s: %4d", key, stats[key])
  end
  log format("%15s: %4d", "total", @limiter.count)
end
|
|
85
|
+
|
|
86
|
+
# Returns every jobs for which block evaluates to true.
|
|
87
|
+
# Returns the failed jobs within the limiter window; when a block is
# given, only the jobs for which the block returns true.
#
# BUG FIX: the original fetched @limiter.jobs a second time for the
# filtered branch, discarding the already-fetched +jobs+ local and paying
# an extra round-trip to Redis. Reuse the cached list instead.
def select(&block)
  jobs = @limiter.jobs
  block_given? ? jobs.select(&block) : jobs
end
alias :failure_jobs :select
|
|
92
|
+
|
|
93
|
+
# Jobs whose string representation matches +regex+.
def select_by_regex(regex)
  select { |job| job.to_s =~ regex }
end
|
|
98
|
+
|
|
99
|
+
# Clears every jobs for which block evaluates to true.
|
|
100
|
+
# Removes every job for which the block returns true (all jobs in the
# limiter window when no block is given) from the :failed list.
# Returns the number of removed jobs.
def clear(&block)
  cleared = 0
  @limiter.lock do
    @limiter.jobs.each_with_index do |job, i|
      next unless !block_given? || block.call(job)
      # Re-read the raw payload before removing: encode(decode(x)) is not
      # guaranteed to round-trip, and lrem must match the stored bytes.
      index = @limiter.start_index + i - cleared
      raw = redis.lindex(:failed, index)
      redis.lrem(:failed, 1, raw)
      cleared += 1
    end
  end
  cleared
end
|
|
116
|
+
|
|
117
|
+
# Retries every jobs for which block evaluates to true.
|
|
118
|
+
# Requeues every job for which the block returns true (all jobs in the
# limiter window when no block is given). Returns the number requeued.
#
# clear_after_requeue - when true, each requeued job is also removed from
#                       the :failed list; otherwise it is stamped with a
#                       retried_at timestamp in place.
# options             - "queue"/:queue overrides the destination queue.
def requeue(clear_after_requeue = false, options = {}, &block)
  requeued = 0
  queue = options["queue"] || options[:queue]
  @limiter.lock do
    @limiter.jobs.each_with_index do |job, i|
      next unless !block_given? || block.call(job)
      # Re-read the raw payload so lrem matches the stored bytes exactly.
      index = @limiter.start_index + i - requeued
      raw = redis.lindex(:failed, index)
      redis.multi do
        Job.create(queue || job['queue'], job['payload']['class'], *job['payload']['args'])

        if clear_after_requeue
          # remove the job entirely
          # TODO (from upstream): ltrim may be more appropriate than lrem.
          redis.lrem(:failed, 1, raw)
        else
          # mark the job as retried, in place
          # NOTE(review): lset intentionally(?) uses start_index+i rather
          # than the requeue-adjusted index above — preserved as-is.
          job['retried_at'] = Time.now.strftime("%Y/%m/%d %H:%M:%S")
          redis.lset(:failed, @limiter.start_index + i, Resque.encode(job))
        end
      end

      requeued += 1
    end
  end
  requeued
end
|
|
147
|
+
|
|
148
|
+
# Clears all jobs except the last X jobs
|
|
149
|
+
# Trims the :failed list down to its newest +maximum+ entries.
# Returns 0 (and does nothing) when the list already fits the window;
# otherwise returns the retained count.
def clear_stale
  return 0 unless @limiter.on?
  keep = @limiter.maximum
  redis.ltrim(:failed, -keep, -1)
  keep
end
|
|
155
|
+
|
|
156
|
+
# Exntends job(Hash instance) with some helper methods.
|
|
157
|
+
# Mixed into each failed-job Hash to add predicate helpers used by the
# filtering UI and the cleaner's select blocks.
module FailedJobEx
  # Whether this job has already been retried (has a retried_at stamp).
  def retried?
    !self['retried_at'].nil?
  end
  alias :requeued? :retried?

  # True when the job failed strictly before +time+.
  # Accepts a Time or a Time.parse-able String.
  def before?(time)
    time = Time.parse(time) if time.is_a?(String)
    Time.parse(self['failed_at']) < time
  end

  # True when the job failed at or after +time+.
  # Accepts a Time or a Time.parse-able String.
  def after?(time)
    time = Time.parse(time) if time.is_a?(String)
    Time.parse(self['failed_at']) >= time
  end

  # True when the job's payload class name equals +klass_or_name+.
  # Jobs without a payload class only match the literal "UNKNOWN".
  def klass?(klass_or_name)
    payload = self["payload"]
    if payload && payload["class"]
      payload["class"] == klass_or_name.to_s
    else
      klass_or_name == "UNKNOWN"
    end
  end

  # True when the recorded exception name equals +exception+.
  def exception?(exception)
    self["exception"] == exception.to_s
  end

  # True when the job originated from +queue+.
  def queue?(queue)
    self["queue"] == queue.to_s
  end
end
|
|
200
|
+
|
|
201
|
+
# Through the Limiter class, you accesses only the last x(default 1000)
|
|
202
|
+
# jobs.
|
|
203
|
+
# Restricts the cleaner's view to the newest +maximum+ (default 1000)
# failed jobs, since the cleaner filters by scanning every element it
# fetches from Redis.
class Limiter
  @@default_maximum ||= 1000

  class << self
    # Window size applied to newly built limiters.
    def default_maximum
      @@default_maximum
    end

    def default_maximum=(v)
      @@default_maximum = v
    end
  end

  attr_accessor :maximum

  def initialize(cleaner)
    @cleaner = cleaner
    @maximum = @@default_maximum
    @locked  = false
  end

  # True when the failure list holds more jobs than the window can show.
  def on?
    @cleaner.failure.count > @maximum
  end

  # Number of jobs visible through the window (frozen snapshot size while
  # locked).
  def count
    return @jobs.size if @locked
    on? ? @maximum : @cleaner.failure.count
  end

  # The visible jobs: at most +maximum+ of the newest entries, or the
  # locked snapshot while inside #lock.
  def jobs
    @locked ? @jobs : all(-count, count)
  end

  # Wraps the backend's +all+ so the result is always an Array whose
  # elements are extended with FailedJobEx.
  def all(index = 0, count = 1)
    jobs = @cleaner.failure.all(index, count) || []
    jobs = [jobs] unless jobs.is_a?(Array)
    jobs.each { |j| j.extend(FailedJobEx) }
  end

  # Absolute index (within the :failed list) of the first visible job.
  def start_index
    return @start_index if @locked
    on? ? @cleaner.failure.count - @maximum : 0
  end

  # Snapshots the window for the duration of the block so that failures
  # pushed concurrently don't shift the indexes being operated on.
  # Nested calls reuse the outer snapshot.
  def lock
    was_locked = @locked

    unless @locked
      total = @cleaner.failure.count
      if total > @maximum
        @start_index = total - @maximum
        @jobs = all(@start_index, @maximum)
      else
        @start_index = 0
        @jobs = all(0, total)
      end
    end

    @locked = true
    yield
  ensure
    @locked = was_locked
  end
end
|
|
288
|
+
|
|
289
|
+
# Outputs message. Overrides this method when you want to change a output
|
|
290
|
+
# stream.
|
|
291
|
+
# Writes +msg+ to stdout unless messages are suppressed.
# Override this method to redirect output to another stream.
def log(msg)
  puts msg if print?
end
|
|
294
|
+
|
|
295
|
+
# Whether informational messages should be printed (see #print_message).
def print?
  @print_message
end
|
|
298
|
+
|
|
299
|
+
# Warning shown when the limiter window hides part of the failure list.
def too_many_message
  "There are too many failed jobs(count=#{@failure.count}). This only looks at last #{@limiter.maximum} jobs."
end
|
|
302
|
+
end
|
|
303
|
+
end
|
|
304
|
+
end
|
|
305
|
+
|
|
306
|
+
require 'resque_cleaner/server'
|
|
307
|
+
|
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
require 'uri'
require 'yaml'
|
|
2
|
+
|
|
3
|
+
# Extends Resque Web Based UI.
|
|
4
|
+
# Structure has been borrowed from ResqueScheduler.
|
|
5
|
+
module ResqueCleaner
|
|
6
|
+
module Server
|
|
7
|
+
|
|
8
|
+
# Absolute path to an ERB view template bundled with this plugin.
def self.erb_path(filename)
  File.join(File.dirname(__FILE__), 'server', 'views', filename)
end
|
|
11
|
+
# Absolute path to a static asset bundled with this plugin.
def self.public_path(filename)
  File.join(File.dirname(__FILE__), 'server', 'public', filename)
end
|
|
14
|
+
|
|
15
|
+
# Pagination helper for list page.
|
|
16
|
+
# Pagination helper for the failed-job list page.
class Paginate
  attr_accessor :page_size, :page, :jobs, :url

  # jobs      - the full (already filtered) job list being paged.
  # url       - base URL that page links append their p= parameter to.
  # page      - 1-based current page; nil or values < 1 become 1.
  # page_size - jobs per page.
  def initialize(jobs, url, page = 1, page_size = 20)
    @jobs = jobs
    @url = url
    @page = (!page || page < 1) ? 1 : page
    # BUG FIX: was hard-coded to 20, silently ignoring the page_size
    # argument; respect the caller's value.
    @page_size = page_size
  end

  # Index (into jobs) of the first entry on the current page.
  def first_index
    @page_size * (@page - 1)
  end

  # Index of the last entry on the current page, clamped to the list end.
  def last_index
    last = first_index + @page_size - 1
    last > @jobs.size - 1 ? @jobs.size - 1 : last
  end

  # The slice of jobs belonging to the current page.
  def paginated_jobs
    @jobs[first_index, @page_size]
  end

  def first_page?
    @page <= 1
  end

  def last_page?
    @page >= max_page
  end

  # URL for the given page number, or :prev/:next relative to the current
  # page. Appends p=N using "?" or "&" depending on the base URL.
  def page_url(page)
    u = @url
    u += @url.include?("?") ? "&" : "?"
    if page.is_a?(Symbol)
      page = @page - 1 if page == :prev
      page = @page + 1 if page == :next
    end
    u += "p=#{page}"
  end

  # Total number of jobs across all pages.
  def total_size
    @jobs.size
  end

  # Highest page number (0 when the job list is empty).
  def max_page
    ((total_size - 1) / @page_size) + 1
  end
end
|
|
64
|
+
|
|
65
|
+
# Installs the Cleaner tab's helpers and Sinatra routes into the host app
# (Resque::Server) when this module is included.
def self.included(base)
  base.class_eval do
    helpers do
      # <select> of relative-time choices (hours/days ago) for filtering.
      def time_filter(id, name, value)
        html = "<select id=\"#{id}\" name=\"#{name}\">"
        html << "<option value=\"\">-</option>"
        [1, 3, 6, 12, 24].each do |hours|
          selected = hours.to_s == value ? 'selected="selected"' : ''
          html << "<option #{selected} value=\"#{hours}\">#{hours} #{hours == 1 ? "hour" : "hours"} ago</option>"
        end
        [3, 7, 14, 28].each do |days|
          selected = (days * 24).to_s == value ? 'selected="selected"' : ''
          html << "<option #{selected} value=\"#{days * 24}\">#{days} days ago</option>"
        end
        html << "</select>"
      end

      # <select> over the known job class names.
      def class_filter(id, name, klasses, value)
        html = "<select id=\"#{id}\" name=\"#{name}\">"
        html << "<option value=\"\">-</option>"
        klasses.each do |k|
          selected = k == value ? 'selected="selected"' : ''
          html << "<option #{selected} value=\"#{k}\">#{k}</option>"
        end
        html << "</select>"
      end

      # <select> over the known exception names.
      def exception_filter(id, name, exceptions, value)
        html = "<select id=\"#{id}\" name=\"#{name}\">"
        html << "<option value=\"\">-</option>"
        exceptions.each do |ex|
          selected = ex == value ? 'selected="selected"' : ''
          html << "<option #{selected} value=\"#{ex}\">#{ex}</option>"
        end
        html << "</select>"
      end

      # One inspected argument per line, for display in the job list.
      def show_job_args(args)
        Array(args).map { |a| a.inspect }.join("\n")
      end

      # Free-text (regex) filter input.
      def text_filter(id, name, value)
        html = "<input id=\"#{id}\" type=\"text\" name=\"#{name}\" value=\"#{value}\">"
        html << "</input>"
      end
    end

    mime_type :json, 'application/json'

    # Dashboard: per-class / per-exception failure counts bucketed by age.
    get "/cleaner" do
      load_library
      load_cleaner_filter

      @jobs = cleaner.select
      @stats = { :klass => {}, :exception => {} }
      @total = Hash.new(0)
      @jobs.each do |job|
        # NOTE(review): unlike stats_by_class, this assumes job["payload"]
        # is always present — a payload-less job would raise here.
        klass = job["payload"]["class"] || 'UNKNOWN'
        exception = job["exception"] || 'UNKNOWN'
        failed_at = Time.parse job["failed_at"]
        @stats[:klass][klass] ||= Hash.new(0)
        @stats[:exception][exception] ||= Hash.new(0)

        [
          @stats[:klass][klass],
          @stats[:exception][exception],
          @total
        ].each do |stat|
          stat[:total] += 1
          stat[:h1] += 1 if failed_at >= hours_ago(1)
          stat[:h3] += 1 if failed_at >= hours_ago(3)
          stat[:d1] += 1 if failed_at >= hours_ago(24)
          stat[:d3] += 1 if failed_at >= hours_ago(24 * 3)
          stat[:d7] += 1 if failed_at >= hours_ago(24 * 7)
        end
      end

      erb File.read(ResqueCleaner::Server.erb_path('cleaner.erb'))
    end

    # Paginated, filterable list of failed jobs (newest first).
    get "/cleaner_list" do
      load_library
      load_cleaner_filter
      build_urls

      block = filter_block

      @failed = cleaner.select(&block).reverse

      @paginate = Paginate.new(@failed, @list_url, params[:p].to_i)

      @klasses = cleaner.stats_by_class.keys
      @exceptions = cleaner.stats_by_exception.keys
      @count = cleaner.select(&block).size

      erb File.read(ResqueCleaner::Server.erb_path('cleaner_list.erb'))
    end

    # Runs clear / retry / retry_and_clear against the filtered selection.
    post "/cleaner_exec" do
      load_library
      load_cleaner_filter
      build_urls

      # Unless "all pages" was ticked, restrict to the SHA1s checked on
      # the current page.
      if params[:select_all_pages] != "1"
        @sha1 = {}
        params[:sha1].split(",").each { |s| @sha1[s] = true }
      end

      block = filter_block

      @count =
        case params[:action]
        when "clear" then cleaner.clear(&block)
        when "retry_and_clear" then cleaner.requeue(true, &block)
        when "retry" then cleaner.requeue(false, {}, &block)
        end

      erb File.read(ResqueCleaner::Server.erb_path('cleaner_exec.erb'))
    end

    # JSON dump of the filtered selection.
    get "/cleaner_dump" do
      load_library
      load_cleaner_filter

      block = filter_block

      content_type :json
      JSON.pretty_generate(cleaner.select(&block))
    end

    # Trims the failure list to the limiter window, then returns to the
    # dashboard.
    post "/cleaner_stale" do
      load_library
      cleaner.clear_stale
      redirect url_path(:cleaner)
    end

    # Serves this plugin's bundled static assets (e.g. cleaner.css).
    get /cleaner\/public\/([a-z]+\.[a-z]+)/ do
      send_file ResqueCleaner::Server.public_path(params[:captures].first)
    end
  end
end
|
|
206
|
+
|
|
207
|
+
end
|
|
208
|
+
|
|
209
|
+
# Memoized ResqueCleaner instance with console output silenced (the web
# UI renders its own output).
def cleaner
  (@cleaner ||= Resque::Plugins::ResqueCleaner.new).tap do |c|
    c.print_message = false
  end
end
|
|
214
|
+
|
|
215
|
+
# Lazily loads the libraries the cleaner routes need: SHA1 digests, and a
# JSON implementation (yajl's JSON-compatible shim when available, the
# standard library otherwise).
#
# FIX: the original used a bare `rescue Exception`, which also swallows
# SignalException/SystemExit. `require` signals failure with LoadError
# (which is NOT a StandardError, hence the explicit class).
def load_library
  require 'digest/sha1'
  begin
    require 'yajl/json_gem' unless [].respond_to?(:to_json)
  rescue LoadError
    # yajl isn't installed; the stdlib implementation is fine.
    require 'json'
  end
end
|
|
223
|
+
|
|
224
|
+
# Pulls the filter values out of the query string into instance variables;
# empty strings are normalized to nil (meaning "filter not set").
def load_cleaner_filter
  presence = ->(v) { v == "" ? nil : v }
  @from      = presence[params[:f]]
  @to        = presence[params[:t]]
  @klass     = presence[params[:c]]
  @exception = presence[params[:ex]]
  @regex     = presence[params[:regex]]
end
|
|
231
|
+
|
|
232
|
+
# Builds @list_url / @dump_url from the current filter instance variables.
#
# FIX: URI.encode (a.k.a. URI.escape) was deprecated and removed entirely
# in Ruby 3.0; URI.encode_www_form_component is the supported way to
# escape a query-string value (spaces become "+").
def build_urls
  query = {
    c: @klass,
    ex: @exception,
    f: @from,
    t: @to,
    regex: @regex
  }.map { |key, value| "#{key}=#{URI.encode_www_form_component(value.to_s)}" }.join("&")

  @list_url = "cleaner_list?#{query}"
  @dump_url = "cleaner_dump?#{query}"
end
|
|
244
|
+
|
|
245
|
+
# Lambda combining every active filter: a job passes only when each
# configured criterion (time range, class, exception, selected SHA1 set,
# regex) matches; unset criteria (nil ivars) are skipped.
def filter_block
  lambda do |j|
    (!@from || j.after?(hours_ago(@from))) &&
    (!@to || j.before?(hours_ago(@to))) &&
    (!@klass || j.klass?(@klass)) &&
    (!@exception || j.exception?(@exception)) &&
    (!@sha1 || @sha1[Digest::SHA1.hexdigest(j.to_json)]) &&
    (!@regex || j.to_s =~ /#{@regex}/)
  end
end
|
|
255
|
+
|
|
256
|
+
# Time instance +h+ hours before now (h is coerced with to_i, so query
# string values work directly).
def hours_ago(h)
  Time.now - h.to_i * 60 * 60
end
|
|
259
|
+
# Register the Cleaner tab in Resque's web UI at load time.
Resque::Server.tabs << 'Cleaner'
|
|
260
|
+
end
|
|
261
|
+
end
|
|
262
|
+
|
|
263
|
+
# Mix this module's routes and helpers into Resque's Sinatra front-end.
Resque::Server.class_eval do
  include ResqueCleaner::Server
end
|
|
266
|
+
|