canvas_sync 0.17.1 → 0.17.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +58 -0
- data/lib/canvas_sync/job_batches/batch.rb +101 -115
- data/lib/canvas_sync/job_batches/callback.rb +29 -34
- data/lib/canvas_sync/job_batches/context_hash.rb +13 -5
- data/lib/canvas_sync/job_batches/hincr_max.lua +5 -0
- data/lib/canvas_sync/job_batches/jobs/managed_batch_job.rb +99 -0
- data/lib/canvas_sync/job_batches/jobs/serial_batch_job.rb +6 -65
- data/lib/canvas_sync/job_batches/pool.rb +213 -0
- data/lib/canvas_sync/job_batches/redis_model.rb +69 -0
- data/lib/canvas_sync/job_batches/redis_script.rb +163 -0
- data/lib/canvas_sync/job_batches/sidekiq.rb +24 -1
- data/lib/canvas_sync/job_batches/sidekiq/web.rb +114 -0
- data/lib/canvas_sync/job_batches/sidekiq/web/helpers.rb +41 -0
- data/lib/canvas_sync/job_batches/sidekiq/web/views/_batches_table.erb +42 -0
- data/lib/canvas_sync/job_batches/sidekiq/web/views/_pagination.erb +26 -0
- data/lib/canvas_sync/job_batches/sidekiq/web/views/batch.erb +138 -0
- data/lib/canvas_sync/job_batches/sidekiq/web/views/batches.erb +23 -0
- data/lib/canvas_sync/job_batches/sidekiq/web/views/pool.erb +85 -0
- data/lib/canvas_sync/job_batches/sidekiq/web/views/pools.erb +47 -0
- data/lib/canvas_sync/job_batches/status.rb +9 -5
- data/lib/canvas_sync/jobs/begin_sync_chain_job.rb +3 -1
- data/lib/canvas_sync/version.rb +1 -1
- data/spec/dummy/log/test.log +140455 -0
- data/spec/job_batching/batch_aware_job_spec.rb +1 -0
- data/spec/job_batching/batch_spec.rb +72 -16
- data/spec/job_batching/callback_spec.rb +1 -1
- data/spec/job_batching/context_hash_spec.rb +54 -0
- data/spec/job_batching/flow_spec.rb +5 -11
- data/spec/job_batching/integration/fail_then_succeed.rb +42 -0
- data/spec/job_batching/integration_helper.rb +6 -4
- data/spec/job_batching/sidekiq_spec.rb +1 -0
- data/spec/job_batching/status_spec.rb +4 -20
- data/spec/spec_helper.rb +3 -7
- metadata +19 -16
data/lib/canvas_sync/job_batches/redis_model.rb
@@ -0,0 +1,69 @@
+module CanvasSync
+  module JobBatches
+    module RedisModel
+      extend ActiveSupport::Concern
+
+      class_methods do
+        def redis_attr(key, type = :string, read_only: true)
+          class_eval <<-RUBY, __FILE__, __LINE__ + 1
+            def #{key}=(value)
+              raise "#{key} is read-only once the batch has been started" if #{read_only.to_s} && (@initialized || @existing)
+              @#{key} = value
+              if :#{type} == :json
+                value = JSON.unparse(value)
+              end
+              persist_bid_attr('#{key}', value)
+            end
+
+            def #{key}
+              return @#{key} if defined?(@#{key})
+              if (@initialized || @existing)
+                value = read_bid_attr('#{key}')
+                if :#{type} == :bool
+                  value = value == 'true'
+                elsif :#{type} == :int
+                  value = value.to_i
+                elsif :#{type} == :float
+                  value = value.to_f
+                elsif :#{type} == :json
+                  value = JSON.parse(value)
+                elsif :#{type} == :symbol
+                  value = value&.to_sym
+                end
+                @#{key} = value
+              end
+            end
+          RUBY
+        end
+      end
+
+      def persist_bid_attr(attribute, value)
+        if @initialized || @existing
+          redis do |r|
+            r.multi do
+              r.hset(redis_key, attribute, value)
+              r.expire(redis_key, Batch::BID_EXPIRE_TTL)
+            end
+          end
+        else
+          @pending_attrs ||= {}
+          @pending_attrs[attribute] = value
+        end
+      end
+
+      def read_bid_attr(attribute)
+        redis do |r|
+          r.hget(redis_key, attribute)
+        end
+      end
+
+      def flush_pending_attrs
+        redis do |r|
+          r.mapped_hmset(redis_key, @pending_attrs)
+        end
+        @initialized = true
+        @pending_attrs = {}
+      end
+    end
+  end
+end
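For orientation, here is a minimal, hypothetical sketch (not part of the gem) of how a class can opt into `RedisModel`, assuming the gem and Sidekiq are loaded: the including class supplies `redis` and `redis_key`, and `redis_attr` generates typed accessors backed by a Redis hash. Writes are queued in `@pending_attrs` until `flush_pending_attrs` runs, after which they persist immediately (and read-only attributes start raising on assignment).

```ruby
# Hypothetical model, for illustration only.
class ExampleRecord
  include CanvasSync::JobBatches::RedisModel

  redis_attr :description                      # string, read-only once initialized
  redis_attr :attempts, :int, read_only: false # coerced with to_i on read
  redis_attr :options, :json, read_only: false # serialized/parsed as JSON

  def initialize(key)
    @redis_key = key
  end

  attr_reader :redis_key

  # RedisModel expects a `redis` helper that yields a connection.
  def redis(&blk)
    ::Sidekiq.redis(&blk)
  end
end

record = ExampleRecord.new("example:123")
record.description = "nightly sync"  # queued in @pending_attrs until flush
record.flush_pending_attrs           # writes the hash and marks the record initialized
record.attempts = 2                  # persisted immediately now that @initialized is set
```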
data/lib/canvas_sync/job_batches/redis_script.rb
@@ -0,0 +1,163 @@
+require 'pathname'
+require 'digest/sha1'
+require 'erb'
+
+# Modified from https://github.com/Shopify/wolverine/blob/master/lib/wolverine/script.rb
+
+module CanvasSync
+  module JobBatches
+    # {RedisScript} represents a lua script in the filesystem. It loads the script
+    # from disk and handles talking to redis to execute it. Error handling
+    # is handled by {LuaError}.
+    class RedisScript
+
+      # Loads the script file from disk and calculates its +SHA1+ sum.
+      #
+      # @param file [Pathname] the full path to the indicated file
+      def initialize(file)
+        @file = Pathname.new(file)
+      end
+
+      # Passes the script and supplied arguments to redis for evaulation.
+      # It first attempts to use a script redis has already cached by using
+      # the +EVALSHA+ command, but falls back to providing the full script
+      # text via +EVAL+ if redis has not seen this script before. Future
+      # invocations will then use +EVALSHA+ without erroring.
+      #
+      # @param redis [Redis] the redis connection to run against
+      # @param args [*Objects] the arguments to the script
+      # @return [Object] the value passed back by redis after script execution
+      # @raise [LuaError] if the script failed to compile of encountered a
+      #   runtime error
+      def call(redis, *args)
+        t = Time.now
+        begin
+          redis.evalsha(digest, *args)
+        rescue => e
+          e.message =~ /NOSCRIPT/ ? redis.eval(content, *args) : raise
+        end
+      rescue => e
+        if LuaError.intercepts?(e)
+          raise LuaError.new(e, @file, content)
+        else
+          raise
+        end
+      end
+
+      def content
+        @content ||= load_lua(@file)
+      end
+
+      def digest
+        @digest ||= Digest::SHA1.hexdigest content
+      end
+
+      private
+
+      def script_path
+        Rails.root + 'app/redis_lua'
+      end
+
+      def relative_path
+        @path ||= @file.relative_path_from(script_path)
+      end
+
+      def load_lua(file)
+        TemplateContext.new(script_path).template(script_path + file)
+      end
+
+      class TemplateContext
+        def initialize(script_path)
+          @script_path = script_path
+        end
+
+        def template(pathname)
+          @partial_templates ||= {}
+          ERB.new(File.read(pathname)).result binding
+        end
+
+        # helper method to include a lua partial within another lua script
+        #
+        # @param relative_path [String] the relative path to the script from
+        #   `script_path`
+        def include_partial(relative_path)
+          unless @partial_templates.has_key? relative_path
+            @partial_templates[relative_path] = nil
+            template( Pathname.new("#{@script_path}/#{relative_path}") )
+          end
+        end
+      end
+
+      # Reformats errors raised by redis representing failures while executing
+      # a lua script. The default errors have confusing messages and backtraces,
+      # and a type of +RuntimeError+. This class improves the message and
+      # modifies the backtrace to include the lua script itself in a reasonable
+      # way.
+      class LuaError < StandardError
+        PATTERN = /ERR Error (compiling|running) script \(.*?\): .*?:(\d+): (.*)/
+        WOLVERINE_LIB_PATH = File.expand_path('../../', __FILE__)
+        CONTEXT_LINE_NUMBER = 2
+
+        attr_reader :error, :file, :content
+
+        # Is this error one that should be reformatted?
+        #
+        # @param error [StandardError] the original error raised by redis
+        # @return [Boolean] is this an error that should be reformatted?
+        def self.intercepts? error
+          error.message =~ PATTERN
+        end
+
+        # Initialize a new {LuaError} from an existing redis error, adjusting
+        # the message and backtrace in the process.
+        #
+        # @param error [StandardError] the original error raised by redis
+        # @param file [Pathname] full path to the lua file the error ocurred in
+        # @param content [String] lua file content the error ocurred in
+        def initialize error, file, content
+          @error = error
+          @file = file
+          @content = content
+
+          @error.message =~ PATTERN
+          _stage, line_number, message = $1, $2, $3
+          error_context = generate_error_context(content, line_number.to_i)
+
+          super "#{message}\n\n#{error_context}\n\n"
+          set_backtrace generate_backtrace file, line_number
+        end
+
+        private
+
+        def generate_error_context(content, line_number)
+          lines = content.lines.to_a
+          beginning_line_number = [1, line_number - CONTEXT_LINE_NUMBER].max
+          ending_line_number = [lines.count, line_number + CONTEXT_LINE_NUMBER].min
+          line_number_width = ending_line_number.to_s.length
+
+          (beginning_line_number..ending_line_number).map do |number|
+            indicator = number == line_number ? '=>' : '  '
+            formatted_number = "%#{line_number_width}d" % number
+            " #{indicator} #{formatted_number}: #{lines[number - 1]}"
+          end.join.chomp
+        end
+
+        def generate_backtrace(file, line_number)
+          pre_wolverine = backtrace_before_entering_wolverine(@error.backtrace)
+          index_of_first_wolverine_line = (@error.backtrace.size - pre_wolverine.size - 1)
+          pre_wolverine.unshift(@error.backtrace[index_of_first_wolverine_line])
+          pre_wolverine.unshift("#{file}:#{line_number}")
+          pre_wolverine
+        end
+
+        def backtrace_before_entering_wolverine(backtrace)
+          backtrace.reverse.take_while { |line| ! line_from_wolverine(line) }.reverse
+        end
+
+        def line_from_wolverine(line)
+          line.split(':').first.include?(WOLVERINE_LIB_PATH)
+        end
+      end
+    end
+  end
+end
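As a usage note: `RedisScript` hashes the rendered Lua source and tries `EVALSHA` first, falling back to `EVAL` on a `NOSCRIPT` error so subsequent calls hit Redis's script cache. Below is a rough sketch of calling it against the `hincr_max.lua` file added in this release; the file location and the keys/argv values are hypothetical, the `keys:`/`argv:` keywords follow redis-rb's EVAL interface, and the private `script_path` helper references `Rails.root`, so rendering the ERB-templated source assumes a Rails process.

```ruby
# Sketch only: wrap the hincr_max.lua script shipped with this release.
script = CanvasSync::JobBatches::RedisScript.new(
  Pathname.new(File.expand_path("hincr_max.lua", __dir__)) # hypothetical location of the .lua file
)

Sidekiq.redis do |r|
  # keys:/argv: are passed through to EVALSHA/EVAL; the script sees them as KEYS/ARGV.
  script.call(r, keys: ["BID-example-hash"], argv: ["some_field", 5])
end
```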
data/lib/canvas_sync/job_batches/sidekiq.rb
@@ -15,11 +15,31 @@ module CanvasSync
           Thread.current[:batch]
         end
 
+        def batch_context
+          batch&.context || {}
+        end
+
         def valid_within_batch?
           batch.valid?
         end
       end
 
+      class SidekiqCallbackWorker
+        include ::Sidekiq::Worker
+        include WorkerExtension
+        include Batch::Callback::CallbackWorkerCommon
+
+        def self.enqueue_all(args, queue)
+          return if args.empty?
+
+          ::Sidekiq::Client.push_bulk(
+            'class' => self,
+            'args' => args,
+            'queue' => queue
+          )
+        end
+      end
+
       class ClientMiddleware
         def call(_worker, msg, _queue, _redis_pool = nil)
           if (batch = Thread.current[:batch]) && should_handle_batch?(msg)
@@ -29,7 +49,7 @@ module CanvasSync
         end
 
         def should_handle_batch?(msg)
-          return false if msg['class'] == 'ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper' && msg['wrapped'].constantize
+          return false if msg['class'] == 'ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper' && msg['wrapped'].constantize < BatchAwareJob
           true
         end
       end
@@ -92,7 +112,10 @@ module CanvasSync
         # This alias helps apartment-sidekiq set itself up correctly
         ::Sidekiq::Batch.const_set(:Server, CanvasSync::JobBatches::Sidekiq::ServerMiddleware)
         ::Sidekiq::Worker.send(:include, JobBatches::Sidekiq::WorkerExtension)
+        Batch::Callback.worker_class = SidekiqCallbackWorker
       end
     end
   end
 end
+
+require_relative 'sidekiq/web'
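Because `WorkerExtension` is mixed into `::Sidekiq::Worker`, the new `batch_context` helper is available in any worker and returns the enclosing batch's context, or `{}` when the job runs outside a batch. The new `SidekiqCallbackWorker.enqueue_all` is a thin wrapper over `Sidekiq::Client.push_bulk` and is now wired in as `Batch::Callback.worker_class`. A minimal, hypothetical worker sketch of the context helper:

```ruby
# Illustration only; the context key looked up here is hypothetical.
class ExampleReportWorker
  include Sidekiq::Worker

  def perform(report_id)
    api_url = batch_context['canvas_api_url'] # {} outside a batch, so this is simply nil there
    # ... fetch and process the report using report_id and api_url ...
  end
end
```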
data/lib/canvas_sync/job_batches/sidekiq/web.rb
@@ -0,0 +1,114 @@
+
+begin
+  require "sidekiq/web"
+rescue LoadError
+  # client-only usage
+end
+
+require_relative "web/helpers"
+
+module CanvasSync::JobBatches::Sidekiq
+  module Web
+    def self.registered(app) # rubocop:disable Metrics/MethodLength, Metrics/AbcSize
+      app.helpers do
+        include Web::Helpers
+      end
+
+      app.get "/batches" do
+        @count = (params['count'] || 25).to_i
+        @current_page, @total_size, @batches = page('batches', params['page'], @count)
+        @batches = @batches.map {|b, score| CanvasSync::JobBatches::Batch.new(b) }
+
+        erb(get_template(:batches))
+      end
+
+      app.get "/batches/:bid" do
+        @bid = params[:bid]
+        @batch = CanvasSync::JobBatches::Batch.new(@bid)
+
+        @count = (params['count'] || 25).to_i
+        @current_batches_page, @total_batches_size, @sub_batches = page("BID-#{@batch.bid}-bids", params['batch_page'], @count)
+        @sub_batches = @sub_batches.map {|b, score| CanvasSync::JobBatches::Batch.new(b) }
+
+        @current_jobs_page, @total_jobs_size, @jobs = page("BID-#{@batch.bid}-jids", params['job_page'], @count)
+        @jobs = @jobs.map {|jid, score| jid }
+
+        erb(get_template(:batch))
+      end
+
+      app.post "/batches/all" do
+        if params['delete']
+          drain_zset('batches') do |batches|
+            batches.each do |bid|
+              CanvasSync::JobBatches::Batch.cleanup_redis(bid)
+            end
+          end
+        end
+
+        redirect "#{root_path}batches"
+      end
+
+      app.post "/batches/:bid" do
+        @bid = params[:bid]
+        @batch = CanvasSync::JobBatches::Batch.new(@bid)
+
+        if params['delete']
+          CanvasSync::JobBatches::Batch.delete_prematurely!(@bid)
+        end
+
+        redirect_with_query("#{root_path}batches")
+      end
+
+      # =============== POOLS =============== #
+
+      app.get "/pools" do
+        @count = (params['count'] || 25).to_i
+        @current_page, @total_size, @pools = page('pools', params['page'], @count)
+        @pools = @pools.map {|b, score| CanvasSync::JobBatches::Pool.new(b) }
+
+        erb(get_template(:pools))
+      end
+
+      app.get "/pools/:pid" do
+        @pid = params[:pid]
+        @pool = CanvasSync::JobBatches::Pool.new(@pid)
+
+        @count = (params['count'] || 25).to_i
+        @current_jobs_page, @total_jobs_size, @jobs = page("POOLID-#{@pool.pid}-jobs", params['job_page'], @count)
+        @jobs = @jobs.map {|desc, score=nil| JSON.parse(desc)[0] }
+
+        erb(get_template(:pool))
+      end
+
+      app.post "/pools/all" do
+        if params['delete']
+          drain_zset('pools') do |pools|
+            pools.each do |pid|
+              CanvasSync::JobBatches::Pool.from_pid(pid).cleanup_redis
+            end
+          end
+        end
+
+        redirect "#{root_path}pools"
+      end
+
+      app.post "/pools/:pid" do
+        @pid = params[:pid]
+        @pool = CanvasSync::JobBatches::Pool.from_pid(@pid)
+
+        if params['delete']
+          @pool.cleanup_redis
+        end
+
+        redirect_with_query("#{root_path}pools")
+      end
+    end
+  end
+end
+
+if defined?(::Sidekiq::Web)
+  ::Sidekiq::Web.register CanvasSync::JobBatches::Sidekiq::Web
+  ::Sidekiq::Web.tabs["Batches"] = "batches"
+  ::Sidekiq::Web.tabs["Pools"] = "pools"
+  ::Sidekiq::Web.settings.locales << File.join(File.dirname(__FILE__), "locales")
+end
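The web extension only registers itself when `::Sidekiq::Web` is already defined, so `sidekiq/web` has to be loadable in the process that renders the UI. A minimal sketch of exposing the new Batches and Pools tabs in a Rails app (the routes file and require path are assumptions based on this diff's file layout, not taken from the gem's docs):

```ruby
# config/routes.rb -- sketch, assuming the canvas_sync gem is on the load path
require "sidekiq/web"                          # makes ::Sidekiq::Web available
require "canvas_sync/job_batches/sidekiq/web"  # self-registers the Batches and Pools tabs (see guard above)

Rails.application.routes.draw do
  mount Sidekiq::Web => "/sidekiq"
end
```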
data/lib/canvas_sync/job_batches/sidekiq/web/helpers.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module CanvasSync::JobBatches::Sidekiq
+  module Web
+    module Helpers
+      VIEW_PATH = File.expand_path("../web/views", __dir__)
+
+      module_function
+
+      def get_template(name)
+        File.open(File.join(VIEW_PATH, "#{name}.erb")).read
+      end
+
+      def drain_zset(key)
+        items, _ = Sidekiq.redis do |r|
+          r.multi do
+            r.zrange(key, 0, -1)
+            r.zremrangebyrank(key, 0, -1)
+          end
+        end
+        yield items
+      end
+
+      def safe_relative_time(time)
+        time = parse_time(time)
+        relative_time(time)
+      end
+
+      def parse_time(time)
+        case time
+        when Time
+          time
+        when Integer, Float
+          Time.at(time)
+        else
+          Time.parse(time.to_s)
+        end
+      end
+    end
+  end
+end
data/lib/canvas_sync/job_batches/sidekiq/web/views/_batches_table.erb
@@ -0,0 +1,42 @@
+<table class="table table-striped table-bordered table-hover">
+  <thead>
+    <tr>
+      <th rowspan="2"><%= t('Started') %></th>
+      <th rowspan="2"><%= t('BID') %></th>
+      <th rowspan="2"><%= t('Description') %></th>
+
+      <th colspan="4"><%= t('Jobs') %></th>
+      <th colspan="4"><%= t('Sub-Batches') %></th>
+    </tr>
+    <tr>
+      <th><%= t('Pending') %></th>
+      <th><%= t('Failed') %></th>
+      <th><%= t('Complete') %></th>
+      <th><%= t('Total') %></th>
+
+      <th><%= t('Pending') %></th>
+      <th><%= t('Failed') %></th>
+      <th><%= t('Success') %></th>
+      <th><%= t('Total') %></th>
+    </tr>
+  </thead>
+
+  <% batches.each do |batch| %>
+    <% status = CanvasSync::JobBatches::Batch::Status.new(batch) %>
+    <tr>
+      <td><%= safe_relative_time(batch.created_at.to_f) %></th>
+      <td><a href="<%= root_path %>batches/<%= batch.bid %>"><%= batch.bid %></a></td>
+      <td><%= batch.description %></th>
+
+      <td><%= status.pending %></th>
+      <td><%= status.failures %></th>
+      <td><%= status.completed_count %></th>
+      <td><%= status.job_count %></th>
+
+      <td><%= status.child_count - status.successful_children_count %></th>
+      <td><%= status.failed_children_count %></th>
+      <td><%= status.successful_children_count %></th>
+      <td><%= status.child_count %></th>
+    </tr>
+  <% end %>
+</table>