canvas_sync 0.16.2 → 0.17.0.beta3
This diff reflects the content of publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +49 -137
- data/app/models/canvas_sync/sync_batch.rb +5 -0
- data/db/migrate/20170915210836_create_canvas_sync_job_log.rb +12 -31
- data/db/migrate/20180725155729_add_job_id_to_canvas_sync_job_logs.rb +4 -13
- data/db/migrate/20190916154829_add_fork_count_to_canvas_sync_job_logs.rb +3 -11
- data/db/migrate/20201018210836_create_canvas_sync_sync_batches.rb +11 -0
- data/lib/canvas_sync.rb +36 -118
- data/lib/canvas_sync/concerns/api_syncable.rb +27 -0
- data/lib/canvas_sync/job.rb +5 -5
- data/lib/canvas_sync/job_batches/batch.rb +399 -0
- data/lib/canvas_sync/job_batches/batch_aware_job.rb +62 -0
- data/lib/canvas_sync/job_batches/callback.rb +153 -0
- data/lib/canvas_sync/job_batches/chain_builder.rb +210 -0
- data/lib/canvas_sync/job_batches/context_hash.rb +147 -0
- data/lib/canvas_sync/job_batches/jobs/base_job.rb +7 -0
- data/lib/canvas_sync/job_batches/jobs/concurrent_batch_job.rb +18 -0
- data/lib/canvas_sync/job_batches/jobs/serial_batch_job.rb +73 -0
- data/lib/canvas_sync/job_batches/sidekiq.rb +93 -0
- data/lib/canvas_sync/job_batches/status.rb +63 -0
- data/lib/canvas_sync/jobs/begin_sync_chain_job.rb +34 -0
- data/lib/canvas_sync/jobs/report_checker.rb +3 -6
- data/lib/canvas_sync/jobs/report_processor_job.rb +2 -5
- data/lib/canvas_sync/jobs/report_starter.rb +27 -19
- data/lib/canvas_sync/jobs/sync_accounts_job.rb +3 -5
- data/lib/canvas_sync/jobs/sync_admins_job.rb +2 -4
- data/lib/canvas_sync/jobs/sync_assignment_groups_job.rb +2 -4
- data/lib/canvas_sync/jobs/sync_assignments_job.rb +2 -4
- data/lib/canvas_sync/jobs/sync_context_module_items_job.rb +2 -4
- data/lib/canvas_sync/jobs/sync_context_modules_job.rb +2 -4
- data/lib/canvas_sync/jobs/sync_provisioning_report_job.rb +5 -35
- data/lib/canvas_sync/jobs/sync_roles_job.rb +2 -5
- data/lib/canvas_sync/jobs/sync_simple_table_job.rb +11 -32
- data/lib/canvas_sync/jobs/sync_submissions_job.rb +2 -4
- data/lib/canvas_sync/jobs/sync_terms_job.rb +25 -8
- data/lib/canvas_sync/misc_helper.rb +15 -0
- data/lib/canvas_sync/version.rb +1 -1
- data/spec/canvas_sync/canvas_sync_spec.rb +136 -153
- data/spec/canvas_sync/jobs/job_spec.rb +9 -17
- data/spec/canvas_sync/jobs/report_checker_spec.rb +1 -3
- data/spec/canvas_sync/jobs/report_processor_job_spec.rb +0 -3
- data/spec/canvas_sync/jobs/report_starter_spec.rb +19 -28
- data/spec/canvas_sync/jobs/sync_admins_job_spec.rb +1 -4
- data/spec/canvas_sync/jobs/sync_assignment_groups_job_spec.rb +2 -1
- data/spec/canvas_sync/jobs/sync_assignments_job_spec.rb +3 -2
- data/spec/canvas_sync/jobs/sync_context_module_items_job_spec.rb +3 -2
- data/spec/canvas_sync/jobs/sync_context_modules_job_spec.rb +3 -2
- data/spec/canvas_sync/jobs/sync_provisioning_report_job_spec.rb +3 -35
- data/spec/canvas_sync/jobs/sync_roles_job_spec.rb +1 -4
- data/spec/canvas_sync/jobs/sync_simple_table_job_spec.rb +5 -12
- data/spec/canvas_sync/jobs/sync_submissions_job_spec.rb +2 -1
- data/spec/canvas_sync/jobs/sync_terms_job_spec.rb +1 -4
- data/spec/dummy/app/models/account.rb +3 -0
- data/spec/dummy/app/models/pseudonym.rb +14 -0
- data/spec/dummy/app/models/submission.rb +1 -0
- data/spec/dummy/app/models/user.rb +1 -0
- data/spec/dummy/config/environments/test.rb +2 -0
- data/spec/dummy/db/migrate/20201016181346_create_pseudonyms.rb +24 -0
- data/spec/dummy/db/schema.rb +24 -4
- data/spec/job_batching/batch_aware_job_spec.rb +100 -0
- data/spec/job_batching/batch_spec.rb +363 -0
- data/spec/job_batching/callback_spec.rb +38 -0
- data/spec/job_batching/flow_spec.rb +91 -0
- data/spec/job_batching/integration/integration.rb +57 -0
- data/spec/job_batching/integration/nested.rb +88 -0
- data/spec/job_batching/integration/simple.rb +47 -0
- data/spec/job_batching/integration/workflow.rb +134 -0
- data/spec/job_batching/integration_helper.rb +48 -0
- data/spec/job_batching/sidekiq_spec.rb +124 -0
- data/spec/job_batching/status_spec.rb +92 -0
- data/spec/job_batching/support/base_job.rb +14 -0
- data/spec/job_batching/support/sample_callback.rb +2 -0
- data/spec/spec_helper.rb +17 -0
- metadata +90 -8
- data/lib/canvas_sync/job_chain.rb +0 -57
- data/lib/canvas_sync/jobs/fork_gather.rb +0 -59
- data/spec/canvas_sync/jobs/fork_gather_spec.rb +0 -73
data/lib/canvas_sync/concerns/api_syncable.rb CHANGED
@@ -29,6 +29,33 @@ module CanvasSync::Concerns
      end
    end

+    def bulk_sync_from_api_result(api_array, conflict_target: :canvas_id, import_args: {}, all_pages: true, batch_size: 1000)
+      columns = api_sync_options.keys
+
+      update_conditions = {
+        condition: Importers::BulkImporter.condition_sql(self, columns),
+        columns: columns,
+      }
+      update_conditions[:conflict_target] = conflict_target if conflict_target.present?
+      options = { validate: false, on_duplicate_key_update: update_conditions }.merge(import_args)
+
+      if all_pages
+        batcher = BatchProcessor.new(of: batch_size) do |batch|
+          import(columns, batch, options)
+        end
+        api_array.all_pages_each do |api_item|
+          item = new.assign_from_api_params(api_items)
+          batcher << item
+        end
+        batcher.flush
+      else
+        items = api_array.map do |api_item|
+          new.assign_from_api_params(api_items)
+        end
+        import(columns, batch, options)
+      end
+    end
+
     def api_sync_options=(opts)
       @api_sync_options = opts
     end
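In context, `bulk_sync_from_api_result` is a class-level helper for models that include the concern: it maps each API item onto a new record via `assign_from_api_params` and bulk-upserts the rows through activerecord-import, streaming every page through a `BatchProcessor` when `all_pages` is true. A minimal usage sketch; the `Pseudonym` model and the `api_array` variable (any paginated result responding to `all_pages_each`) are illustrative, not part of this diff:

# Hypothetical caller: upsert Canvas login records keyed on canvas_id.
# api_array is a paginated API result that responds to all_pages_each / map.
Pseudonym.bulk_sync_from_api_result(
  api_array,
  conflict_target: :canvas_id,  # upsert key passed to on_duplicate_key_update
  all_pages: true,              # stream every page through the BatchProcessor
  batch_size: 1000              # rows buffered per import call
)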
data/lib/canvas_sync/job.rb CHANGED
@@ -3,6 +3,8 @@ require "active_job"
 module CanvasSync
   # Inherit from this class to build a Job that will log to the canvas_sync_job_logs table
   class Job < ActiveJob::Base
+    attr_reader :job_log
+
     before_enqueue do |job|
       create_job_log(job)
     end
@@ -13,8 +15,6 @@ module CanvasSync
       @job_log.started_at = Time.now
       @job_log.save

-      @job_chain = job.arguments[0] if job.arguments[0].is_a?(Hash) && job.arguments[0].include?(:jobs)
-
       begin
         block.call
         @job_log.status = JobLog::SUCCESS_STATUS
@@ -22,11 +22,11 @@ module CanvasSync
         @job_log.exception = "#{e.class}: #{e.message}"
         @job_log.backtrace = e.backtrace.join('\n')
         @job_log.status = JobLog::ERROR_STATUS
-        if
+        if batch_context&.[](:on_failure)&.present?
           begin
-            class_name, method =
+            class_name, method = batch_context[:on_failure].split('.')
             klass = class_name.constantize
-            klass.send(method.to_sym, e,
+            klass.send(method.to_sym, e, batch_context: batch_context, job_log: @job_log)
           rescue => e2
             @job_log.backtrace += "\n\nError Occurred while handling an Error: #{e2.class}: #{e2.message}"
             @job_log.backtrace += "\n" + e2.backtrace.join('\n')
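The failure hook now comes from the batch context rather than the removed `job_chain` hash, and the referenced handler is called with the error plus `batch_context:` and `job_log:` keyword arguments. A hedged sketch of such a handler; the `SyncFailureHandler` name is illustrative, only the call signature is taken from the diff:

# Referenced from the batch context, e.g. on_failure: 'SyncFailureHandler.handle'
class SyncFailureHandler
  # Invoked as klass.send(method.to_sym, error, batch_context: ..., job_log: ...)
  def self.handle(error, batch_context:, job_log:)
    Rails.logger.error("canvas_sync job #{job_log.job_id} failed: #{error.message}")
    # notify an error tracker, flag the sync batch, re-enqueue, etc.
  end
end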
data/lib/canvas_sync/job_batches/batch.rb ADDED
@@ -0,0 +1,399 @@
+
+begin
+  require 'sidekiq'
+rescue LoadError
+end
+
+require_relative './batch_aware_job'
+require_relative "./callback"
+require_relative "./context_hash"
+require_relative "./status"
+Dir[File.dirname(__FILE__) + "/jobs/*.rb"].each { |file| require file }
+require_relative "./chain_builder"
+
+# Implement Job Batching similar to Sidekiq::Batch. Supports ActiveJob and Sidekiq, or a mix thereof.
+# Much of this code is modifed/extended from https://github.com/breamware/sidekiq-batch
+
+module CanvasSync
+  module JobBatches
+    class Batch
+      class NoBlockGivenError < StandardError; end
+
+      def self.batch_attr(key, read_only: true)
+        class_eval <<-RUBY, __FILE__, __LINE__ + 1
+          def #{key}=(value)
+            raise "#{key} is read-only once the batch has been started" if #{read_only.to_s} && (@initialized || @existing)
+            @#{key} = value
+            persist_bid_attr('#{key}', value)
+          end
+
+          def #{key}
+            return @#{key} if defined?(@#{key})
+            if (@initialized || @existing)
+              @#{key} = read_bid_attr('#{key}')
+            end
+          end
+        RUBY
+      end
+
+      delegate :redis, to: :class
+
+      BID_EXPIRE_TTL = 2_592_000
+
+      attr_reader :bid
+
+      def initialize(existing_bid = nil)
+        @bid = existing_bid || SecureRandom.urlsafe_base64(10)
+        @existing = !(!existing_bid || existing_bid.empty?) # Basically existing_bid.present?
+        @initialized = false
+        @bidkey = "BID-" + @bid.to_s
+        @pending_attrs = {}
+        @ready_to_queue = []
+        self.created_at = Time.now.utc.to_f unless @existing
+      end
+
+      batch_attr :description
+      batch_attr :created_at
+      batch_attr :callback_queue, read_only: false
+      batch_attr :callback_batch, read_only: false
+      batch_attr :allow_context_changes
+
+      def context
+        return @context if defined?(@context)
+
+        if (@initialized || @existing)
+          @context = ContextHash.new(bid)
+        else
+          @context = ContextHash.new(bid, {})
+        end
+      end
+
+      def context=(value)
+        raise "context is read-only once the batch has been started" if (@initialized || @existing) # && !allow_context_changes
+        raise "context must be a Hash" unless value.is_a?(Hash) || value.nil?
+        return nil if value.nil? && @context.nil?
+
+        value = {} if value.nil?
+        value = value.local if value.is_a?(ContextHash)
+
+        @context ||= ContextHash.new(bid, {})
+        @context.set_local(value)
+        # persist_bid_attr('context', JSON.unparse(@context.local))
+      end
+
+      def save_context_changes
+        @context&.save!
+      end
+
+      def on(event, callback, options = {})
+        return unless Callback::VALID_CALLBACKS.include?(event.to_s)
+        callback_key = "#{@bidkey}-callbacks-#{event}"
+        redis do |r|
+          r.multi do
+            r.sadd(callback_key, JSON.unparse({
+              callback: callback,
+              opts: options
+            }))
+            r.expire(callback_key, BID_EXPIRE_TTL)
+          end
+        end
+      end
+
+      def jobs
+        raise NoBlockGivenError unless block_given?
+
+        if !@existing && !@initialized
+          parent_bid = Thread.current[:batch]&.bid
+
+          redis do |r|
+            r.multi do
+              r.hset(@bidkey, "parent_bid", parent_bid.to_s) if parent_bid
+              r.expire(@bidkey, BID_EXPIRE_TTL)
+            end
+          end
+
+          flush_pending_attrs
+          @context&.save!
+
+          @initialized = true
+        end
+
+        @ready_to_queue = []
+
+        begin
+          parent = Thread.current[:batch]
+          Thread.current[:batch] = self
+          yield
+        ensure
+          Thread.current[:batch] = parent
+        end
+
+        redis do |r|
+          r.multi do
+            if parent_bid
+              r.hincrby("BID-#{parent_bid}", "children", 1)
+              r.hincrby("BID-#{parent_bid}", "total", @ready_to_queue.size)
+              r.expire("BID-#{parent_bid}", BID_EXPIRE_TTL)
+            end
+
+            r.hincrby(@bidkey, "pending", @ready_to_queue.size)
+            r.hincrby(@bidkey, "total", @ready_to_queue.size)
+            r.expire(@bidkey, BID_EXPIRE_TTL)
+
+            if @ready_to_queue.size > 0
+              r.sadd(@bidkey + "-jids", @ready_to_queue)
+              r.expire(@bidkey + "-jids", BID_EXPIRE_TTL)
+            end
+          end
+        end
+
+        @ready_to_queue
+      end
+
+      def increment_job_queue(jid)
+        @ready_to_queue << jid
+      end
+
+      def invalidate_all
+        redis do |r|
+          r.setex("invalidated-bid-#{bid}", BID_EXPIRE_TTL, 1)
+        end
+      end
+
+      def parent_bid
+        redis do |r|
+          r.hget(@bidkey, "parent_bid")
+        end
+      end
+
+      def parent
+        if parent_bid
+          Batch.new(parent_bid)
+        end
+      end
+
+      def valid?(batch = self)
+        valid = !redis { |r| r.exists?("invalidated-bid-#{batch.bid}") }
+        batch.parent ? valid && valid?(batch.parent) : valid
+      end
+
+      # Any Batches or Jobs created in the given block won't be assocaiated to the current batch
+      def self.without_batch
+        parent = Thread.current[:batch]
+        Thread.current[:batch] = nil
+        yield
+      ensure
+        Thread.current[:batch] = parent
+      end
+
+      private
+
+      def persist_bid_attr(attribute, value)
+        if @initialized || @existing
+          redis do |r|
+            r.multi do
+              r.hset(@bidkey, attribute, value)
+              r.expire(@bidkey, BID_EXPIRE_TTL)
+            end
+          end
+        else
+          @pending_attrs[attribute] = value
+        end
+      end
+
+      def read_bid_attr(attribute)
+        redis do |r|
+          r.hget(@bidkey, attribute)
+        end
+      end
+
+      def flush_pending_attrs
+        redis do |r|
+          r.mapped_hmset(@bidkey, @pending_attrs)
+        end
+        @pending_attrs = {}
+      end
+
+      class << self
+        def process_failed_job(bid, jid)
+          _, pending, failed, children, complete, parent_bid = redis do |r|
+            r.multi do
+              r.sadd("BID-#{bid}-failed", jid)
+
+              r.hincrby("BID-#{bid}", "pending", 0)
+              r.scard("BID-#{bid}-failed")
+              r.hincrby("BID-#{bid}", "children", 0)
+              r.scard("BID-#{bid}-complete")
+              r.hget("BID-#{bid}", "parent_bid")
+
+              r.expire("BID-#{bid}-failed", BID_EXPIRE_TTL)
+            end
+          end
+
+          # if the batch failed, and has a parent, update the parent to show one pending and failed job
+          if parent_bid
+            redis do |r|
+              r.multi do
+                r.hincrby("BID-#{parent_bid}", "pending", 1)
+                r.sadd("BID-#{parent_bid}-failed", jid)
+                r.expire("BID-#{parent_bid}-failed", BID_EXPIRE_TTL)
+              end
+            end
+          end
+
+          if pending.to_i == failed.to_i && children == complete
+            enqueue_callbacks(:complete, bid)
+          end
+        end
+
+        def process_dead_job(bid, jid)
+          _, failed, children, complete, parent_bid = redis do |r|
+            r.multi do
+              r.sadd("BID-#{bid}-dead", jid)
+
+              r.scard("BID-#{bid}-dead")
+              r.hincrby("BID-#{bid}", "children", 0)
+              r.scard("BID-#{bid}-complete")
+              r.hget("BID-#{bid}", "parent_bid")
+
+              r.expire("BID-#{bid}-dead", BID_EXPIRE_TTL)
+            end
+          end
+
+          if parent_bid
+            redis do |r|
+              r.multi do
+                r.sadd("BID-#{parent_bid}-dead", jid)
+                r.expire("BID-#{parent_bid}-dead", BID_EXPIRE_TTL)
+              end
+            end
+          end
+
+          enqueue_callbacks(:dead, bid)
+        end
+
+        def process_successful_job(bid, jid)
+          failed, pending, children, complete, success, total, parent_bid = redis do |r|
+            r.multi do
+              r.scard("BID-#{bid}-failed")
+              r.hincrby("BID-#{bid}", "pending", -1)
+              r.hincrby("BID-#{bid}", "children", 0)
+              r.scard("BID-#{bid}-complete")
+              r.scard("BID-#{bid}-success")
+              r.hget("BID-#{bid}", "total")
+              r.hget("BID-#{bid}", "parent_bid")
+
+              r.srem("BID-#{bid}-failed", jid)
+              r.srem("BID-#{bid}-jids", jid)
+              r.expire("BID-#{bid}", BID_EXPIRE_TTL)
+            end
+          end
+
+          all_success = pending.to_i.zero? && children == success
+          # if complete or successfull call complete callback (the complete callback may then call successful)
+          if (pending.to_i == failed.to_i && children == complete) || all_success
+            enqueue_callbacks(:complete, bid)
+            enqueue_callbacks(:success, bid) if all_success
+          end
+        end
+
+        def enqueue_callbacks(event, bid)
+          batch_key = "BID-#{bid}"
+          callback_key = "#{batch_key}-callbacks-#{event}"
+          already_processed, _, callbacks, queue, parent_bid, callback_batch = redis do |r|
+            r.multi do
+              r.hget(batch_key, event)
+              r.hset(batch_key, event, true)
+              r.smembers(callback_key)
+              r.hget(batch_key, "callback_queue")
+              r.hget(batch_key, "parent_bid")
+              r.hget(batch_key, "callback_batch")
+            end
+          end
+
+          return if already_processed == 'true'
+
+          queue ||= "default"
+          parent_bid = !parent_bid || parent_bid.empty? ? nil : parent_bid # Basically parent_bid.blank?
+          callback_args = callbacks.reduce([]) do |memo, jcb|
+            cb = JSON.load(jcb)
+            memo << [cb['callback'], event.to_s, cb['opts'], bid, parent_bid]
+          end
+
+          opts = {"bid" => bid, "event" => event}
+
+          # Run callback batch finalize synchronously
+          if callback_batch
+            # Extract opts from cb_args or use current
+            # Pass in stored event as callback finalize is processed on complete event
+            cb_opts = callback_args.first&.at(2) || opts
+
+            logger.debug {"Run callback batch bid: #{bid} event: #{event} args: #{callback_args.inspect}"}
+            # Finalize now
+            finalizer = Batch::Callback::Finalize.new
+            status = Status.new bid
+            finalizer.dispatch(status, cb_opts)
+
+            return
+          end
+
+          logger.debug {"Enqueue callback bid: #{bid} event: #{event} args: #{callback_args.inspect}"}
+
+          if callback_args.empty?
+            # Finalize now
+            finalizer = Batch::Callback::Finalize.new
+            status = Status.new bid
+            finalizer.dispatch(status, opts)
+          else
+            # Otherwise finalize in sub batch complete callback
+            cb_batch = self.new
+            cb_batch.callback_batch = true
+            logger.debug {"Adding callback batch: #{cb_batch.bid} for batch: #{bid}"}
+            cb_batch.on(:complete, "#{Batch::Callback::Finalize.to_s}#dispatch", opts)
+            cb_batch.jobs do
+              push_callbacks callback_args, queue
+            end
+          end
+        end
+
+        def cleanup_redis(bid)
+          logger.debug {"Cleaning redis of batch #{bid}"}
+          redis do |r|
+            r.del(
+              "BID-#{bid}",
+              "BID-#{bid}-callbacks-complete",
+              "BID-#{bid}-callbacks-success",
+              "BID-#{bid}-failed",
+
+              "BID-#{bid}-success",
+              "BID-#{bid}-complete",
+              "BID-#{bid}-jids",
+            )
+          end
+        end
+
+        def redis(*args, &blk)
+          defined?(::Sidekiq) ? ::Sidekiq.redis(*args, &blk) : nil # TODO
+        end
+
+        def logger
+          defined?(::Sidekiq) ? ::Sidekiq.logger : Rails.logger
+        end
+
+        private
+
+        def push_callbacks(args, queue)
+          Batch::Callback::Worker.enqueue_all(args, queue)
+        end
+      end
+    end
+
+    ActiveJob::Base.include BatchAwareJob
+  end
+end
+
+# Automatically integrate with Sidekiq if it is present.
+if defined?(::Sidekiq)
+  require_relative './sidekiq'
+  CanvasSync::JobBatches::Sidekiq.configure
+end
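Taken together, `Batch` mirrors the sidekiq-batch workflow on top of ActiveJob and/or Sidekiq: jobs enqueued inside a `jobs` block are tracked in Redis under the batch's BID, and callbacks registered with `on` fire as the batch completes or succeeds. A minimal usage sketch; the job and callback class names are hypothetical, only `Batch` and its methods come from this diff:

# Hypothetical caller wiring two jobs into one tracked batch.
batch = CanvasSync::JobBatches::Batch.new
batch.description = "Nightly Canvas sync"
batch.context = { account_id: 1 }                   # persisted via ContextHash under the batch's BID
batch.on(:complete, "SyncCallbacks#on_complete")     # "Class#method" form, as used internally for Finalize
batch.on(:success, "SyncCallbacks#on_success", notify: true)

batch.jobs do
  # Jobs enqueued here are associated with the batch via BatchAwareJob
  NightlyTermSyncJob.perform_later
  NightlyUserSyncJob.perform_later
end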