canvas_sync 0.18.10 → 0.19.0.beta1

Files changed (30)
  1. checksums.yaml +4 -4
  2. data/README.md +12 -9
  3. data/lib/canvas_sync/concerns/ability_helper.rb +19 -11
  4. data/lib/canvas_sync/job_batches/batch.rb +47 -2
  5. data/lib/canvas_sync/job_batches/callback.rb +7 -4
  6. data/lib/canvas_sync/job_batches/chain_builder.rb +38 -74
  7. data/lib/canvas_sync/job_batches/jobs/concurrent_batch_job.rb +5 -5
  8. data/lib/canvas_sync/job_batches/jobs/managed_batch_job.rb +54 -22
  9. data/lib/canvas_sync/job_batches/jobs/serial_batch_job.rb +5 -5
  10. data/lib/canvas_sync/job_batches/pool.rb +83 -54
  11. data/lib/canvas_sync/job_batches/pool_refill.lua +40 -0
  12. data/lib/canvas_sync/job_batches/sidekiq/web/batches_assets/css/styles.less +6 -2
  13. data/lib/canvas_sync/job_batches/sidekiq/web/batches_assets/js/batch_tree.js +3 -1
  14. data/lib/canvas_sync/job_batches/sidekiq/web/views/_batches_table.erb +3 -1
  15. data/lib/canvas_sync/job_batches/sidekiq/web/views/_jobs_table.erb +2 -0
  16. data/lib/canvas_sync/job_batches/sidekiq/web/views/pool.erb +10 -1
  17. data/lib/canvas_sync/job_batches/sidekiq/web.rb +5 -1
  18. data/lib/canvas_sync/job_batches/status.rb +4 -0
  19. data/lib/canvas_sync/jobs/canvas_process_waiter.rb +3 -35
  20. data/lib/canvas_sync/misc_helper.rb +48 -0
  21. data/lib/canvas_sync/version.rb +1 -1
  22. data/lib/canvas_sync.rb +3 -10
  23. data/spec/canvas_sync/canvas_sync_spec.rb +201 -115
  24. data/spec/canvas_sync/jobs/canvas_process_waiter_spec.rb +0 -48
  25. data/spec/canvas_sync/misc_helper_spec.rb +58 -0
  26. data/spec/dummy/log/test.log +69092 -0
  27. data/spec/job_batching/pool_spec.rb +161 -0
  28. data/spec/job_batching/support/base_job.rb +1 -1
  29. metadata +9 -5
  30. data/lib/canvas_sync/job_batches/hincr_max.lua +0 -5
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 911b4fffbc2c21c25f32b45c0c90419dddcef40497b63c5d5757ec0e12544e36
-  data.tar.gz: 90377a8facd25b77726ab82754a50c7aa3a9851c1012801496e19d6258ea7275
+  metadata.gz: b2a5e403bcaaaaf0fcbab739a2b9b6657ed42a5a5692de0b9d6a84e429d2a232
+  data.tar.gz: acef06fb5cae9aa20d70e308972d9416fd8280ddb3c944718ef937679ac468b5
 SHA512:
-  metadata.gz: 5186a70509b324acdd5e8a8e023fdfb9503b7e899894938944632759eeb0bf362cd467f30700f3d8ba02a0ee06476c7646b83acf34ea3eeae5d0375a518d1096
-  data.tar.gz: 11812e324c87f8055baaade4a36a822b090b66c95691521a32eb89855c51ceddeaee417f87aae9f5b5c5401c1fe3a6433d102b4a5d9948f5278835ce71f8530d
+  metadata.gz: 91ed4f8c3312c398ee9bb19cad4333ddda8f738330470d4582c1c92ff00abeef12e52ff344a94edc50a726616c2533126c99b211da1a348cd18d5062a6939e40
+  data.tar.gz: a95413c17cc1029fd5c8adb6fbd19add7a0d18fda2c8c790f71bbc57df8b51223c1a554120dd93bbedc61223c60a7f549849d1686358fe9e357af5bf20467eb1
data/README.md CHANGED
@@ -128,13 +128,12 @@ chain = CanvasSync.default_provisioning_report_chain(
 )
 
 # Add a custom job to the end of the chain.
-chain << { job: CanvasSyncCompleteWorker, parameters: [{ job_id: job.id }] }
-chain << { job: CanvasSyncCompleteWorker, options: { job_id: job.id } } # If an options key is provided, it will be automatically appended to the end of the :parameters array
+chain << { job: CanvasSyncCompleteWorker, args: [job.id], kwargs: { job_id: job.id } }
 
 chain.process!
 
 # The chain object provides a fairly extensive API:
-chain.insert({ job: SomeOtherJob }) # Adds the job to the end of the chain
+chain.insert({ job: SomeOtherJob, args: [], kwargs: {} }) # Adds the job to the end of the chain
 chain.insert_at(0, { job: SomeOtherJob }) # Adds the job to the beginning of the chain
 chain.insert({ job: SomeOtherJob }, after: 'CanvasSync::Jobs::SyncTermsJob') # Adds the job right after the SyncTermsJob
 chain.insert({ job: SomeOtherJob }, before: 'CanvasSync::Jobs::SyncTermsJob') # Adds the job right before the SyncTermsJob
@@ -163,7 +162,7 @@ end
 
 You must implement a job that will enqueue a report starter for your report. (TODO: would be nice to make some sort of builder for this, so you just define the report and its params and then the gem runs it in a pre-defined job.)
 
-Let's say we have a custom Canvas report called "my_really_cool_report_csv". First, we would need to create a job class that will enqueue a report starter. To work with the `CanvasSync` interface, your class must accept 2 parameters: `job_chain`, and `options`.
+Let's say we have a custom Canvas report called "my_really_cool_report_csv". First, we would need to create a job class that will enqueue a report starter.
 
 ```ruby
 class MyReallyCoolReportJob < CanvasSync::Jobs::ReportStarter
@@ -309,7 +308,8 @@ pool_id = pool.pid
 # Add a job to the pool
 pool << {
   job: SomeJob, # The Class of a ActiveJob Job or Sidekiq Worker
-  parameters: [1, 2, 3], # Array of params to pass th e Job
+  args: [1, 2, 3], # Array of params to pass the Job
+  kwargs: {},
   priority: 100, # Only effective if order=:priority, higher is higher
 }
 
@@ -317,7 +317,8 @@ pool << {
 pool.add_jobs([
   {
     job: SomeJob, # The Class of a ActiveJob Job or Sidekiq Worker
-    parameters: [1, 2, 3], # Array of params to pass th e Job
+    args: [1, 2, 3], # Array of params to pass the Job
+    kwargs: {},
     priority: 100, # Only effective if order=:priority, higher is higher
   },
   # ...
@@ -367,7 +368,7 @@ If you want your own jobs to also log to the table all you have to do is have yo
 
 If you want to be able to utilize the `CanvasSync::JobLog` without `ActiveJob` (so you can get access to `Sidekiq` features that `ActiveJob` doesn't support), then add the following to an initializer in your Rails app:
 
-```
+```ruby
 Sidekiq.configure_server do |config|
   config.server_middleware do |chain|
     chain.add CanvasSync::Sidekiq::Middleware
@@ -397,7 +398,7 @@ chain.process!
 
 You can configure CanvasSync settings by doing the following:
 
-```
+```ruby
 CanvasSync.configure do |config|
   config.classes_to_only_log_errors_on << "ClassToOnlyLogErrorsOn"
 end
@@ -412,7 +413,7 @@ You can pass in global_options to a job chain. Global options are added to the
 various internal processes.
 
 Pass global options into a job chain, using the options param nested in a :global key.
-options: { global: {...} }
+`options: { global: {...} }`
 
 report_timeout (integer): Number of days until a Canvas report should timeout. Default is 1.
 report_compilation_timeout (integer): Number of days until a Canvas report should timeout. Default is 1 hour.
@@ -421,12 +422,14 @@ report_max_tries (integer): The number of times to attempt a report be
   if it has an 'error' status in Canvas or is deleted.
 
 This is an example job chain with global options:
+```ruby
 job_chain = CanvasSync.default_provisioning_report_chain(
   MODELS_TO_SYNC,
   term_scope: :active,
   full_sync_every: 'sunday',
   options: { global: { report_timeout: 2 } }
 )
+```
 
 ## Handling Job errors
 
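The README hunks above capture the move from a single `parameters`/`options` pair to explicit `args`/`kwargs` on each job definition. A minimal sketch of the new insertion style, assuming a hypothetical `CanvasSyncCompleteWorker` like the one the README references (positional entries map to the job's positional parameters, `kwargs` to its keyword parameters):

```ruby
# Hypothetical worker, used only for illustration.
class CanvasSyncCompleteWorker < ApplicationJob
  def perform(sync_job_id, notify: false)
    # ... mark the sync run identified by sync_job_id as complete ...
  end
end

sync_job_id = 42 # stand-in for the README's `job.id`
chain = CanvasSync.default_provisioning_report_chain(%w[terms courses users])

# 0.18.x style (legacy hashes with :parameters/:options are still coerced):
#   chain << { job: CanvasSyncCompleteWorker, parameters: [sync_job_id], options: { notify: true } }
# 0.19.x style:
chain << { job: CanvasSyncCompleteWorker, args: [sync_job_id], kwargs: { notify: true } }

chain.process!
```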
data/lib/canvas_sync/concerns/ability_helper.rb CHANGED
@@ -35,6 +35,23 @@ module CanvasSync::Concerns
     labels.is_a?(String) ? labels.split(',') : []
   end
 
+  def canvas_account_role_labels(account = 'self')
+    account = 'self' if account.to_s == "root"
+    account = account.canvas_id if account.respond_to?(:canvas_id)
+
+    if "::Admin".safe_constantize && ::Admin < ::ActiveRecord::Base
+      account = current_organization.canvas_account_id if account == 'self'
+      adm_query = ::Admin.where(canvas_account_id: account, workflow_state: "active")
+      adm_query.pluck(:role_name)
+    else
+      Rails.cache.fetch([self.class.name, "AccountAdminLinks", account, canvas_user_id], expires_in: 1.hour) do
+        admin_entries = canvas_sync_client.account_admins(account, user_id: [canvas_user_id])
+        admin_entries = admin_entries.select{|ent| ent[:workflow_state] == 'active' }
+        admin_entries.map{|ent| ent[:role] }
+      end
+    end
+  end
+
   def lti_roles
     @lti_roles ||= RoleStore.new(launch_params["https://purl.imsglobal.org/spec/lti/claim/roles"] || launch_params['ext_roles'] || '')
   end
@@ -148,16 +165,7 @@ module CanvasSync::Concerns
   end
 
   def canvas_root_account_roles
-    role_labels = if "::Admin".safe_constantize && ::Admin < ::ActiveRecord::Base
-      adm_query = ::Admin.where(canvas_account_id: current_organization.canvas_account_id, workflow_state: "active")
-      adm_query.pluck(:role_name)
-    else
-      Rails.cache.fetch([self.class.name, "RootAccountAdminLinks", canvas_user_id], expires_in: 1.hour) do
-        admin_entries = canvas_sync_client.account_admins('self', user_id: [canvas_user_id])
-        admin_entries = admin_entries.select{|ent| ent[:workflow_state] == 'active' }
-        admin_entries.map{|ent| ent[:role] }
-      end
-    end
+    role_labels = canvas_account_role_labels('self')
     ::Role.for_labels(role_labels, ::Account.find_by(canvas_parent_account_id: nil))
   end
 
@@ -171,7 +179,7 @@ module CanvasSync::Concerns
 
   def canvas_super_user?
     cache_on_session(:canvas_super_user?) do
-      canvas_site_admin? || canvas_root_account_roles.map(&:label).include?("Account Admin")
+      canvas_site_admin? || canvas_account_role_labels(:root).include?("Account Admin")
    end
  end
 
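For context on the refactor above: `canvas_root_account_roles` and `canvas_super_user?` now delegate to the new `canvas_account_role_labels` helper, which accepts `'self'`, `:root`, a Canvas account id, or anything responding to `canvas_id`. A rough usage sketch; the controller, the `SomeAccount` model, and the exact concern name are illustrative assumptions, not part of the diff:

```ruby
class AccountsController < ApplicationController
  # Concern name inferred from the file path; verify against the gem.
  include CanvasSync::Concerns::AbilityHelper

  def show
    account = SomeAccount.find(params[:id]) # assumed model exposing #canvas_id

    # Role labels for the current user on that account, pulled from the local
    # Admin table when present, otherwise from the Canvas API (cached 1 hour):
    @role_labels = canvas_account_role_labels(account)

    # :root and 'self' both resolve to the root account:
    @is_root_admin = canvas_account_role_labels(:root).include?("Account Admin")
  end
end
```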
data/lib/canvas_sync/job_batches/batch.rb CHANGED
@@ -28,6 +28,7 @@ module CanvasSync
     delegate :redis, to: :class
 
     BID_EXPIRE_TTL = 90.days.to_i
+    INDEX_ALL_BATCHES = false
     SCHEDULE_CALLBACK = RedisScript.new(Pathname.new(__FILE__) + "../schedule_callback.lua")
     BID_HIERARCHY = RedisScript.new(Pathname.new(__FILE__) + "../hier_batch_ids.lua")
 
@@ -202,7 +203,7 @@
 
     def flush_pending_attrs
       super
-      redis.zadd("batches", created_at, bid)
+      redis.zadd("batches", created_at, bid) if INDEX_ALL_BATCHES
     end
 
     private
@@ -323,8 +324,13 @@
         end
 
         queue ||= "default"
-        parent_bid = !parent_bid || parent_bid.empty? ? nil : parent_bid # Basically parent_bid.blank?
+        parent_bid = !parent_bid || parent_bid.empty? ? nil : parent_bid # Basically parent_bid.blank?
+
+        # Internal callback params. If this is present, we're trying to enqueue callbacks for a callback, which is a special case that
+        # indicates that the callback completed and we need to close the triggering batch (which is in a done-but-not-cleaned state)
        callback_params = JSON.parse(callback_params) if callback_params.present?
+
+        # User-configured parameters/arguments to pass to the callback
        callback_args = callbacks.reduce([]) do |memo, jcb|
          cb = JSON.load(jcb)
          memo << [cb['callback'], event.to_s, cb['opts'], bid, parent_bid]
@@ -341,6 +347,8 @@
        if should_schedule_batch
          logger.debug {"Enqueue callback bid: #{bid} event: #{event} args: #{callback_args.inspect}"}
 
+          # Create a new Batch to handle the callbacks and add it to the _parent_ batch
+          # (this ensures that the parent's lifecycle status can't change until the child's callbacks are done)
          with_batch(parent_bid) do
            cb_batch = self.new
            cb_batch.callback_params = {
@@ -357,9 +365,12 @@
        end
 
        if callback_params.present?
+          # This is a callback for a callback. Passing `origin` to the Finalizer allows it to also cleanup the original/callback-triggering batch
          opts['origin'] = callback_params
        end
 
+        # The Finalizer marks this batch as complete, bumps any necessary counters, cleans up this Batch _if_ no callbacks were scheduled,
+        # and enqueues parent-Batch callbacks if needed.
        logger.debug {"Run batch finalizer bid: #{bid} event: #{event} args: #{callback_args.inspect}"}
        finalizer = Batch::Callback::Finalize.new
        status = Status.new bid
@@ -376,6 +387,7 @@
          "BID-#{bid}-callbacks-complete",
          "BID-#{bid}-callbacks-success",
          "BID-#{bid}-failed",
+          "BID-#{bid}-dead",
 
          "BID-#{bid}-batches-success",
          "BID-#{bid}-batches-complete",
@@ -397,6 +409,39 @@
        cleanup_redis(bid)
      end
 
+      # Internal method to cleanup a Redis Hash and related keys
+      def cleanup_redis_index_for(key, suffixes = [""])
+        if r.hget(key, "created_at").present?
+          r.multi do |r|
+            suffixes.each do |suffix|
+              r.expire(key + suffix, BID_EXPIRE_TTL)
+            end
+          end
+          false
+        else
+          r.multi do |r|
+            suffixes.each do |suffix|
+              r.unlink(key + suffix)
+            end
+          end
+          true
+        end
+      end
+
+      # Administrative/console method to cleanup expired batches from the WebUI
+      def cleanup_redis_index!
+        suffixes = ["", "-callbacks-complete", "-callbacks-success", "-failed", "-dead", "-batches-success", "-batches-complete", "-batches-failed", "-bids", "-jids", "-pending_callbacks"]
+
+        cleanup_index = ->(index) {
+          r.zrangebyscore(index, "0", BID_EXPIRE_TTL.seconds.ago.to_i).each do |bid|
+            r.zrem(index, bid) if cleanup_redis_index_for("BID-#{bid}", suffixes)
+          end
+        }
+
+        cleanup_index.call("BID-ROOT-bids")
+        cleanup_index.call("batches")
+      end
+
      def redis(&blk)
        return RedisProxy.new unless block_given?
 
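Operationally, this hunk gates the global `batches` index behind the new `INDEX_ALL_BATCHES` flag and adds `cleanup_redis_index!` for pruning expired entries. A console-style sketch, assuming the class methods are callable as shown in the diff:

```ruby
# Maintenance task / Rails console sketch.

# Prune index entries whose batches are older than BID_EXPIRE_TTL (90 days):
CanvasSync::JobBatches::Batch.cleanup_redis_index!

# Individual batches can still be cleaned up by bid, as before:
some_bid = "..." # a batch id taken from the Sidekiq Batches web UI
CanvasSync::JobBatches::Batch.cleanup_redis(some_bid)
```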
data/lib/canvas_sync/job_batches/callback.rb CHANGED
@@ -112,10 +112,13 @@ module CanvasSync
       end
 
       if parent_bid && !(pending.to_i.zero? && children == success)
-        # if batch was not successfull check and see if its parent is complete
-        # if the parent is complete we trigger the complete callback
-        # We don't want to run this if the batch was successfull because the success
-        # callback may add more jobs to the parent batch
+        # If the batch was not successful, check and see if its parent is complete;
+        # if the parent is complete we trigger its complete callback.
+        #
+        # Otherwise, we don't want to trigger the parent's :complete here (and
+        # instead opt to have success trigger the parent's :complete) - this
+        # allows the success callback to add additional jobs to the parent batch
+        # before triggering :complete.
 
         Batch.logger.debug {"Finalize parent complete bid: #{parent_bid}"}
         _, _, complete, pending, children, failure = Batch.redis do |r|
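The reworded comment pins down the callback ordering contract: when a child batch succeeds, the parent's `:complete` is deferred so the `:success` callback still has a window to append jobs to the parent. A rough sketch of the pattern this protects; the callback class, job classes, and the splat signature are placeholders rather than the gem's documented API:

```ruby
# Illustrative only - ReportCallbacks, SomeSyncJob, and FollowUpJob are application code.
class ReportCallbacks
  # Exact callback arguments vary; a splat keeps this sketch agnostic.
  def self.on_success(*_args)
    # A :success callback may enqueue follow-up work; the deferred parent
    # :complete described in the comment above is what makes this safe.
    FollowUpJob.perform_later
  end
end

batch = CanvasSync::JobBatches::Batch.new
batch.description = "Nightly provisioning sync"
batch.on(:success, "ReportCallbacks.on_success")
batch.jobs { SomeSyncJob.perform_later }
```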
data/lib/canvas_sync/job_batches/chain_builder.rb CHANGED
@@ -8,12 +8,13 @@ module CanvasSync
       def initialize(base_type = SerialBatchJob)
         if base_type.is_a?(Hash)
           @base_job = base_type
+          @base_job[:args] ||= @base_job[:parameters] || []
+          @base_job[:kwargs] ||= {}
         else
-          @base_job = {
-            job: base_type,
-            parameters: [],
-          }
+          @base_job = build_job_hash(base_type)
         end
+
+        self.class.get_chain_parameter(base_job)
       end
 
       def process!
@@ -25,23 +26,25 @@
         if key.is_a?(Class)
           get_sub_chain(key)
         else
+          # Legacy Support
+          key = :args if key == :parameters
+
           @base_job[key]
         end
       end
 
-      def params
-        ParamsMapper.new(self[:parameters])
-      end
+      def args; return self[:args]; end
+      def kwargs; return self[:kwargs]; end
 
       def <<(new_job)
         insert_at(-1, new_job)
       end
 
-      def insert_at(position, new_jobs, *args, &blk)
+      def insert_at(position, new_jobs, *args, **kwargs, &blk)
         chain = self.class.get_chain_parameter(base_job)
         if new_jobs.is_a?(Class) || new_jobs.is_a?(String)
-          new_jobs = build_job_hash(new_jobs, *args, &blk)
-        elsif args.length > 0
+          new_jobs = build_job_hash(new_jobs, args: args, kwargs: kwargs, &blk)
+        elsif args.count > 0 || kwargs.count > 0
           raise "Unexpected number of arguments"
         end
         new_jobs = [new_jobs] unless new_jobs.is_a?(Array)
@@ -51,8 +54,7 @@
       def insert(new_jobs, *args, **kwargs, &blk)
         if new_jobs.is_a?(Class) || new_jobs.is_a?(String)
           job_kwargs = kwargs.except(*VALID_PLACEMENT_PARAMETERS)
-          args << job_kwargs if job_kwargs.present?
-          new_jobs = build_job_hash(new_jobs, *args, &blk)
+          new_jobs = build_job_hash(new_jobs, args: args, kwargs: job_kwargs, &blk)
           kwargs = kwargs.slice(*VALID_PLACEMENT_PARAMETERS)
         else
           invalid_params = kwargs.keys - VALID_PLACEMENT_PARAMETERS
@@ -124,14 +126,6 @@
         end
       end
 
-      # Legacy Support
-      def merge_options(job, options)
-        find_matching_jobs(job).each do |j, parent, index|
-          j[:options] ||= {}
-          j[:options].deep_merge!(options)
-        end
-      end
-
       def apply_block(&blk)
         return unless blk.present?
         instance_exec(&blk)
@@ -139,10 +133,11 @@
 
       private
 
-      def build_job_hash(job, *params, &blk)
+      def build_job_hash(job, args: [], kwargs: {}, &blk)
         hsh = {
           job: job,
-          parameters: params,
+          args: args,
+          kwargs: kwargs,
         }
         self.class.new(hsh).apply_block(&blk) if blk.present?
         hsh
@@ -180,9 +175,17 @@
       end
 
       class << self
-        def build(job, *args, &blk)
+        # Support builder syntax/DSL
+        #   Chain.build(ConcurrentBatchJob) do
+        #     insert(SomeJob, arg1, kwarg: 1)
+        #     insert(SerialBatchJob) do
+        #       insert(SomeJob, arg1, kwarg: 1)
+        #     end
+        #   end
+        def build(job, *args, **kwargs, &blk)
           new(job).tap do |ch|
-            ch[:parameters] = args
+            ch.base_job[:args] = args
+            ch.base_job[:kwargs] = kwargs
             ch.apply_block(&blk)
           end
         end
@@ -205,8 +208,11 @@
         end
 
         key = _job_type_definitions[job_def[:job].to_s][:chain_parameter]
-        mapper = ParamsMapper.new(job_def[:parameters])
-        mapper[key] ||= []
+        if key.is_a?(Numeric)
+          job_def[:args][key] ||= []
+        else
+          job_def[:kwargs][key] ||= []
+        end
       end
 
       # TODO: Add a Chain progress web View
@@ -219,18 +225,19 @@
       # > [DONE] Use a Lua script to find child batch IDs. Support max_depth, items_per_depth, top_depth_slice parameters
       def enqueue_job(job_def)
         job_class = job_def[:job].constantize
-        job_options = job_def[:parameters] || []
+        job_args = job_def[:args] || job_def[:parameters] || []
+        job_kwargs = job_def[:kwargs] || {}
 
         # Legacy Support
         if job_def[:options]
-          job_options << {} unless job_options[-1].is_a?(Hash)
-          job_options[-1].merge!(job_def[:options])
+          job_args << {} unless job_args[-1].is_a?(Hash)
+          job_args[-1].merge!(job_def[:options])
         end
 
         if job_class.respond_to? :perform_async
-          job_class.perform_async(*job_options)
+          job_class.perform_async(*job_args, **job_kwargs)
         else
-          job_class.perform_later(*job_options)
+          job_class.perform_later(*job_args, **job_kwargs)
         end
       end
     end
@@ -238,48 +245,5 @@
 
     ChainBuilder.register_chain_job(ConcurrentBatchJob, 0)
     ChainBuilder.register_chain_job(SerialBatchJob, 0)
-
-    class ParamsMapper
-      def initialize(backend)
-        @backend = backend
-      end
-
-      def [](key)
-        get_parameter(key)
-      end
-
-      def []=(key, value)
-        set_parameter(key, value)
-      end
-
-      def to_a
-        @backend
-      end
-
-      private
-
-      def get_parameter(key)
-        if key.is_a?(Numeric)
-          @backend[key]
-        else
-          kwargs = @backend.last
-          return nil unless kwargs.is_a?(Hash)
-          kwargs[key]
-        end
-      end
-
-      def set_parameter(key, value)
-        if key.is_a?(Numeric)
-          @backend[key] = value
-        else
-          kwargs = @backend.last
-          unless kwargs.is_a?(Hash)
-            kwargs = {}
-            @backend.push(kwargs)
-          end
-          kwargs[key] = value
-        end
-      end
-    end
   end
 end
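The comment added to `build` documents the DSL: positional arguments become `args` and keyword arguments become `kwargs` on each job definition, replacing the removed `ParamsMapper`. Roughly, with placeholder job classes (the comment abbreviates `ChainBuilder` as `Chain`):

```ruby
# SomeJob and AnotherJob are stand-ins for application jobs.
chain = CanvasSync::JobBatches::ChainBuilder.build(CanvasSync::JobBatches::ConcurrentBatchJob) do
  insert(SomeJob, 42, flag: true) # -> { job: SomeJob, args: [42], kwargs: { flag: true } }
  insert(CanvasSync::JobBatches::SerialBatchJob) do
    insert(AnotherJob, "reports/terms.csv")
  end
end

chain.process!
```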
data/lib/canvas_sync/job_batches/jobs/concurrent_batch_job.rb CHANGED
@@ -3,19 +3,19 @@ require_relative './base_job'
 module CanvasSync
   module JobBatches
     class ConcurrentBatchJob < BaseJob
-      def self.make_batch(sub_jobs, context: nil, &blk)
+      def self.make_batch(sub_jobs, **kwargs, &blk)
         ManagedBatchJob.make_batch(
           sub_jobs,
+          **kwargs,
           ordered: false,
           concurrency: true,
-          context: context,
-          desc_prefix: 'ConcurrentBatchJob',
+          desc_prefix: 'ConcurrentBatchJob: ',
           &blk
         )
       end
 
-      def perform(sub_jobs, context: nil)
-        self.class.make_batch(sub_jobs, context: context)
+      def perform(sub_jobs, **kwargs)
+        self.class.make_batch(sub_jobs, **kwargs)
       end
     end
   end
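With `make_batch` now forwarding `**kwargs` to `ManagedBatchJob`, options such as `context` (and the new `preflight_check`) pass straight through. A rough sketch; the job names are placeholders and the sub-job hashes follow the args/kwargs shape used elsewhere in this release:

```ruby
sub_jobs = [
  { job: "SomeJob",  args: [1], kwargs: {} },              # job given as a class name string
  { job: "OtherJob", args: [],  kwargs: { dry_run: true } },
]

# Enqueue the concurrent batch; kwargs beyond ordered/concurrency are forwarded as-is.
CanvasSync::JobBatches::ConcurrentBatchJob.perform_later(
  sub_jobs,
  context: { account_id: 1 }
)
```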
data/lib/canvas_sync/job_batches/jobs/managed_batch_job.rb CHANGED
@@ -3,7 +3,7 @@ require_relative './base_job'
 module CanvasSync
   module JobBatches
     class ManagedBatchJob < BaseJob
-      def self.make_batch(sub_jobs, ordered: true, concurrency: nil, context: nil, desc_prefix: nil, &blk)
+      def self.make_batch(sub_jobs, ordered: true, concurrency: nil, context: nil, preflight_check: nil, desc_prefix: nil, &blk)
         desc_prefix ||= ''
 
         if concurrency == 0 || concurrency == nil || concurrency == true
@@ -13,6 +13,7 @@
         end
 
         root_batch = Batch.new
+        man_batch_id = nil
 
         if concurrency < sub_jobs.count
           man_batch_id = SecureRandom.urlsafe_base64(10)
@@ -22,6 +23,7 @@
             r.hset("MNGBID-#{man_batch_id}", "root_bid", root_batch.bid)
             r.hset("MNGBID-#{man_batch_id}", "ordered", ordered ? 1 : 0)
             r.hset("MNGBID-#{man_batch_id}", "concurrency", concurrency)
+            r.hset("MNGBID-#{man_batch_id}", "preflight_check", preflight_check)
             r.expire("MNGBID-#{man_batch_id}", Batch::BID_EXPIRE_TTL)
 
           mapped_sub_jobs = sub_jobs.each_with_index.map do |j, i|
@@ -48,12 +50,14 @@
 
         blk.call(ManagedBatchProxy.new(root_batch)) if blk.present?
 
-        root_batch.description = "#{desc_prefix}: #{root_batch.description || 'Root'}"
+        root_batch.description = "#{desc_prefix}#{root_batch.description || 'Root'}"
+
+        root_batch.context["managed_batch_bid"] = man_batch_id if man_batch_id
 
         if concurrency < sub_jobs.count
           root_batch.jobs {}
           concurrency.times do
-            perform_next_sequence_job(man_batch_id)
+            perform_next_sequence_job(man_batch_id, skip_preflight: true)
           end
         else
           root_batch.jobs do
@@ -62,10 +66,12 @@
             end
           end
         end
+
+        root_batch
       end
 
-      def perform(sub_jobs, context: nil, ordered: true, concurrency: nil)
-        self.class.make_batch(sub_jobs, ordered: ordered, concurrency: concurrency, context: context)
+      def perform(sub_jobs, **kwargs)
+        self.class.make_batch(sub_jobs, **kwargs)
       end
 
       def self.cleanup_redis(status, options)
@@ -85,37 +91,63 @@
 
       protected
 
-      def self.perform_next_sequence_job(man_batch_id)
-        root_bid, ordered = Batch.redis do |r|
+      def self.perform_next_sequence_job(man_batch_id, skip_preflight: false)
+        root_bid, ordered, preflight_check = Batch.redis do |r|
           r.multi do |r|
             r.hget("MNGBID-#{man_batch_id}", "root_bid")
             r.hget("MNGBID-#{man_batch_id}", "ordered")
+            r.hget("MNGBID-#{man_batch_id}", "preflight_check")
           end
         end
 
-        ordered = CanvasSync::MiscHelper.to_boolean(ordered)
-
-        next_job_json = Batch.redis do |r|
-          if ordered
-            r.lpop("MNGBID-#{man_batch_id}-jobs")
+        if !skip_preflight && preflight_check.present?
+          if preflight_check.include?(".")
+            clazz, method_name = preflight_check.split('.')
+            clazz = clazz.constantize
           else
-            r.spop("MNGBID-#{man_batch_id}-jobs")
+            clazz = Object
+            method_name = preflight_check
          end
+          preflight_check = ->(*args) { clazz.send(method_name, *args) }
+        else
+          preflight_check = ->(*args) { true }
        end
 
-        return unless next_job_json.present?
+        ordered = CanvasSync::MiscHelper.to_boolean(ordered)
+
+        loop do
+          next_job_json = Batch.redis do |r|
+            if ordered
+              r.lpop("MNGBID-#{man_batch_id}-jobs")
+            else
+              r.spop("MNGBID-#{man_batch_id}-jobs")
+            end
+          end
 
-        next_job = JSON.parse(next_job_json)
-        next_job = ::ActiveJob::Arguments.deserialize(next_job)[0]
+          break unless next_job_json.present?
 
-        Batch.new(root_bid).jobs do
-          Batch.new.tap do |batch|
-            batch.description = "Managed Batch Fiber (#{man_batch_id})"
-            batch.on(:success, "#{self.to_s}.job_succeeded_callback", managed_batch_id: man_batch_id)
-            batch.jobs do
-              ChainBuilder.enqueue_job(next_job)
+          next_job = JSON.parse(next_job_json)
+          next_job = ::ActiveJob::Arguments.deserialize(next_job)[0]
+
+          preflight_result = preflight_check.call(next_job)
+          if preflight_result == :abort
+            cleanup_redis(nil, { "managed_batch_id" => man_batch_id })
+            break
+          elsif !preflight_result
+            next
+          end
+
+          Batch.new(root_bid).jobs do
+            Batch.new.tap do |batch|
+              batch.description = "Managed Batch Fiber (#{man_batch_id})"
+              batch.on(:success, "#{self.to_s}.job_succeeded_callback", managed_batch_id: man_batch_id)
+              batch.jobs do
+                ChainBuilder.enqueue_job(next_job)
+              end
            end
          end
+
+          break
        end
      end
 
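The new `preflight_check` option is stored as a `"Class.method"` (or bare method) string; before each queued sub-job is started, the referenced method is called with the deserialized job definition and can return `:abort` to stop the whole sequence, a falsy value to skip just that job, or anything truthy to proceed. A hedged sketch of wiring one up; the gatekeeper class, the `Maintenance.window_open?` helper, and `SkippableJob` are illustrative assumptions:

```ruby
class SyncGatekeeper
  # Receives the job definition hash for the next sub-job
  # (key types depend on how the job was serialized).
  def self.allow_job?(job_def)
    return :abort if Maintenance.window_open?        # assumed helper: stop the whole sequence
    return false  if job_def["job"] == "SkippableJob" # skip only this job
    true
  end
end

sub_jobs = [
  { job: "SkippableJob", args: [], kwargs: {} },
  { job: "ImportantJob", args: [], kwargs: {} },
]

CanvasSync::JobBatches::ManagedBatchJob.make_batch(
  sub_jobs,
  ordered: true,
  concurrency: 2,
  preflight_check: "SyncGatekeeper.allow_job?"
)
```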
data/lib/canvas_sync/job_batches/jobs/serial_batch_job.rb CHANGED
@@ -3,19 +3,19 @@ require_relative './base_job'
 module CanvasSync
   module JobBatches
     class SerialBatchJob < BaseJob
-      def self.make_batch(sub_jobs, context: nil, &blk)
+      def self.make_batch(sub_jobs, **kwargs, &blk)
         ManagedBatchJob.make_batch(
           sub_jobs,
+          **kwargs,
           ordered: true,
           concurrency: false,
-          context: context,
-          desc_prefix: 'SerialBatchJob',
+          desc_prefix: 'SerialBatchJob: ',
           &blk
         )
       end
 
-      def perform(sub_jobs, context: nil)
-        self.class.make_batch(sub_jobs, context: context)
+      def perform(sub_jobs, **kwargs)
+        self.class.make_batch(sub_jobs, **kwargs)
       end
     end
   end