canvas_sync 0.16.5 → 0.17.0.beta1

This diff shows the differences between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (80)
  1. checksums.yaml +5 -5
  2. data/README.md +49 -137
  3. data/app/models/canvas_sync/sync_batch.rb +5 -0
  4. data/db/migrate/20201018210836_create_canvas_sync_sync_batches.rb +11 -0
  5. data/lib/canvas_sync/importers/bulk_importer.rb +4 -7
  6. data/lib/canvas_sync/job.rb +4 -10
  7. data/lib/canvas_sync/job_batches/batch.rb +399 -0
  8. data/lib/canvas_sync/job_batches/batch_aware_job.rb +62 -0
  9. data/lib/canvas_sync/job_batches/callback.rb +153 -0
  10. data/lib/canvas_sync/job_batches/chain_builder.rb +203 -0
  11. data/lib/canvas_sync/job_batches/context_hash.rb +147 -0
  12. data/lib/canvas_sync/job_batches/jobs/base_job.rb +7 -0
  13. data/lib/canvas_sync/job_batches/jobs/concurrent_batch_job.rb +18 -0
  14. data/lib/canvas_sync/job_batches/jobs/serial_batch_job.rb +73 -0
  15. data/lib/canvas_sync/job_batches/sidekiq.rb +91 -0
  16. data/lib/canvas_sync/job_batches/status.rb +63 -0
  17. data/lib/canvas_sync/jobs/begin_sync_chain_job.rb +34 -0
  18. data/lib/canvas_sync/jobs/report_checker.rb +3 -6
  19. data/lib/canvas_sync/jobs/report_processor_job.rb +2 -5
  20. data/lib/canvas_sync/jobs/report_starter.rb +28 -20
  21. data/lib/canvas_sync/jobs/sync_accounts_job.rb +3 -5
  22. data/lib/canvas_sync/jobs/sync_admins_job.rb +2 -4
  23. data/lib/canvas_sync/jobs/sync_assignment_groups_job.rb +2 -4
  24. data/lib/canvas_sync/jobs/sync_assignments_job.rb +2 -4
  25. data/lib/canvas_sync/jobs/sync_context_module_items_job.rb +2 -4
  26. data/lib/canvas_sync/jobs/sync_context_modules_job.rb +2 -4
  27. data/lib/canvas_sync/jobs/sync_provisioning_report_job.rb +4 -31
  28. data/lib/canvas_sync/jobs/sync_roles_job.rb +2 -5
  29. data/lib/canvas_sync/jobs/sync_simple_table_job.rb +11 -32
  30. data/lib/canvas_sync/jobs/sync_submissions_job.rb +2 -4
  31. data/lib/canvas_sync/jobs/sync_terms_job.rb +22 -7
  32. data/lib/canvas_sync/processors/assignment_groups_processor.rb +2 -3
  33. data/lib/canvas_sync/processors/assignments_processor.rb +2 -3
  34. data/lib/canvas_sync/processors/context_module_items_processor.rb +2 -3
  35. data/lib/canvas_sync/processors/context_modules_processor.rb +2 -3
  36. data/lib/canvas_sync/processors/normal_processor.rb +1 -2
  37. data/lib/canvas_sync/processors/provisioning_report_processor.rb +2 -10
  38. data/lib/canvas_sync/processors/submissions_processor.rb +2 -3
  39. data/lib/canvas_sync/version.rb +1 -1
  40. data/lib/canvas_sync.rb +34 -97
  41. data/spec/canvas_sync/canvas_sync_spec.rb +126 -153
  42. data/spec/canvas_sync/jobs/job_spec.rb +9 -17
  43. data/spec/canvas_sync/jobs/report_checker_spec.rb +1 -3
  44. data/spec/canvas_sync/jobs/report_processor_job_spec.rb +0 -3
  45. data/spec/canvas_sync/jobs/report_starter_spec.rb +19 -28
  46. data/spec/canvas_sync/jobs/sync_admins_job_spec.rb +1 -4
  47. data/spec/canvas_sync/jobs/sync_assignment_groups_job_spec.rb +2 -1
  48. data/spec/canvas_sync/jobs/sync_assignments_job_spec.rb +3 -2
  49. data/spec/canvas_sync/jobs/sync_context_module_items_job_spec.rb +3 -2
  50. data/spec/canvas_sync/jobs/sync_context_modules_job_spec.rb +3 -2
  51. data/spec/canvas_sync/jobs/sync_provisioning_report_job_spec.rb +3 -35
  52. data/spec/canvas_sync/jobs/sync_roles_job_spec.rb +1 -4
  53. data/spec/canvas_sync/jobs/sync_simple_table_job_spec.rb +5 -12
  54. data/spec/canvas_sync/jobs/sync_submissions_job_spec.rb +2 -1
  55. data/spec/canvas_sync/jobs/sync_terms_job_spec.rb +1 -4
  56. data/spec/dummy/config/environments/test.rb +2 -0
  57. data/spec/dummy/db/schema.rb +9 -1
  58. data/spec/job_batching/batch_aware_job_spec.rb +100 -0
  59. data/spec/job_batching/batch_spec.rb +363 -0
  60. data/spec/job_batching/callback_spec.rb +38 -0
  61. data/spec/job_batching/flow_spec.rb +91 -0
  62. data/spec/job_batching/integration/integration.rb +57 -0
  63. data/spec/job_batching/integration/nested.rb +88 -0
  64. data/spec/job_batching/integration/simple.rb +47 -0
  65. data/spec/job_batching/integration/workflow.rb +134 -0
  66. data/spec/job_batching/integration_helper.rb +48 -0
  67. data/spec/job_batching/sidekiq_spec.rb +124 -0
  68. data/spec/job_batching/status_spec.rb +92 -0
  69. data/spec/job_batching/support/base_job.rb +14 -0
  70. data/spec/job_batching/support/sample_callback.rb +2 -0
  71. data/spec/spec_helper.rb +10 -0
  72. metadata +91 -23
  73. data/lib/canvas_sync/job_chain.rb +0 -102
  74. data/lib/canvas_sync/jobs/fork_gather.rb +0 -74
  75. data/spec/canvas_sync/jobs/fork_gather_spec.rb +0 -73
  76. data/spec/dummy/db/test.sqlite3 +0 -0
  77. data/spec/dummy/log/development.log +0 -1248
  78. data/spec/dummy/log/test.log +0 -43258
  79. data/spec/support/fixtures/reports/provisioning_csv_unzipped/courses.csv +0 -3
  80. data/spec/support/fixtures/reports/provisioning_csv_unzipped/users.csv +0 -4
data/lib/canvas_sync/job_batches/chain_builder.rb
@@ -0,0 +1,203 @@
+ module CanvasSync
+   module JobBatches
+     class ChainBuilder
+       VALID_PLACEMENT_PARAMETERS = %i[before after with].freeze
+
+       attr_reader :base_job
+
+       def initialize(base_type = SerialBatchJob)
+         if base_type.is_a?(Hash)
+           @base_job = base_type
+         else
+           @base_job = {
+             job: base_type,
+             parameters: [],
+           }
+         end
+       end
+
+       def process!
+         normalize!
+         self.class.enqueue_job(base_job)
+       end
+
+       def [](key)
+         if key.is_a?(Class)
+           get_sub_chain(key)
+         else
+           @base_job[key]
+         end
+       end
+
+       def params
+         ParamsMapper.new(self[:parameters])
+       end
+
+       def <<(new_job)
+         insert_at(-1, new_job)
+       end
+
+       def insert_at(position, new_jobs)
+         chain = self.class.get_chain_parameter(base_job)
+         new_jobs = [new_jobs] unless new_jobs.is_a?(Array)
+         chain.insert(-1, *new_jobs)
+       end
+
+       def insert(new_jobs, **kwargs)
+         invalid_params = kwargs.keys - VALID_PLACEMENT_PARAMETERS
+         raise "Invalid placement parameters: #{invalid_params.map(&:to_s).join(', ')}" if invalid_params.present?
+         raise "At most one placement parameter may be provided" if kwargs.values.compact.length > 1
+
+         new_jobs = [new_jobs] unless new_jobs.is_a?(Array)
+
+         if !kwargs.present?
+           insert_at(-1, new_jobs)
+         else
+           placement = kwargs.keys[0]
+           relative_to = kwargs.values[0]
+
+           matching_jobs = find_matching_jobs(relative_to)
+           raise "Could not find a \"#{relative_to}\" job in the chain" if matching_jobs.count == 0
+           raise "Found multiple \"#{relative_to}\" jobs in the chain" if matching_jobs.count > 1
+
+           parent_job, sub_index = matching_jobs[0]
+           chain = self.class.get_chain_parameter(parent_job)
+           needed_parent_type = placement == :with ? ConcurrentBatchJob : SerialBatchJob
+
+           if parent_job[:job] != needed_parent_type
+             old_job = chain[sub_index]
+             parent_job = chain[sub_index] = {
+               job: needed_parent_type,
+               parameters: [],
+             }
+             sub_index = 0
+             chain = self.class.get_chain_parameter(parent_job)
+             chain << old_job
+           end
+
+           if placement == :with
+             chain.insert(-1, *new_jobs)
+           else
+             sub_index += 1 if placement == :after
+             chain.insert(sub_index, *new_jobs)
+           end
+         end
+       end
+
+       def get_sub_chain(sub_type)
+         matching_jobs = find_matching_jobs(sub_type)
+         raise "Found multiple \"#{sub_type}\" jobs in the chain" if matching_jobs.count > 1
+         return nil if matching_jobs.count == 0
+
+         new(matching_jobs[0])
+       end
+
+       def normalize!(job_def = self.base_job)
+         if job_def.is_a?(ChainBuilder)
+           job_def.normalize!
+         else
+           job_def[:job] = job_def[:job].to_s
+           if (chain = self.class.get_chain_parameter(job_def, raise_error: false)).present?
+             chain.map! { |sub_job| normalize!(sub_job) }
+           end
+           job_def
+         end
+       end
+
+       private
+
+       def find_matching_jobs(search_job, parent_job = self.base_job)
+         return to_enum(:find_matching_jobs, search_job, parent_job) unless block_given?
+
+         sub_jobs = self.class.get_chain_parameter(parent_job)
+         sub_jobs.each_with_index do |sub_job, i|
+           if sub_job[:job].to_s == search_job.to_s
+             yield [parent_job, i]
+           elsif self.class._job_type_definitions[sub_job[:job]]
+             find_matching_jobs(search_job) { |item| yield item }
+           end
+         end
+       end
+
+       class << self
+         def _job_type_definitions
+           @job_type_definitions ||= {}
+         end
+
+         def register_chain_job(job_class, chain_parameter, **options)
+           _job_type_definitions[job_class.to_s] = {
+             **options,
+             chain_parameter: chain_parameter,
+           }
+         end
+
+         def get_chain_parameter(job_def, raise_error: true)
+           unless _job_type_definitions[job_def[:job].to_s].present?
+             raise "Job Type #{base_job[:job].to_s} does not accept a sub-chain" if raise_error
+             return nil
+           end
+
+           key = _job_type_definitions[job_def[:job].to_s][:chain_parameter]
+           mapper = ParamsMapper.new(job_def[:parameters])
+           mapper[key] ||= []
+         end
+
+         def enqueue_job(job_def)
+           job_class = job_def[:job].constantize
+           job_options = job_def[:parameters] || []
+           if job_class.respond_to? :perform_async
+             job_class.perform_async(*job_options)
+           else
+             job_class.perform_later(*job_options)
+           end
+         end
+       end
+     end
+
+     ChainBuilder.register_chain_job(ConcurrentBatchJob, 0)
+     ChainBuilder.register_chain_job(SerialBatchJob, 0)
+
+     class ParamsMapper
+       def initialize(backend)
+         @backend = backend
+       end
+
+       def [](key)
+         get_parameter(key)
+       end
+
+       def []=(key, value)
+         set_parameter(key, value)
+       end
+
+       def to_a
+         @backend
+       end
+
+       private
+
+       def get_parameter(key)
+         if key.is_a?(Numeric)
+           @backend[key]
+         else
+           kwargs = @backend.last
+           return nil unless kwargs.is_a?(Hash)
+           kwargs[key]
+         end
+       end
+
+       def set_parameter(key, value)
+         if key.is_a?(Numeric)
+           @backend[key] = value
+         else
+           kwargs = @backend.last
+           unless kwargs.is_a?(Hash)
+             kwargs = {}
+             @backend.push(kwargs)
+           end
+           kwargs[key] = value
+         end
+       end
+     end
+   end
+ end
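For orientation, here is a minimal sketch of how the new ChainBuilder might be driven. It is inferred from the methods shown above; the job classes and empty parameter lists are illustrative placeholders, not calls documented by this release.

```ruby
# Hypothetical usage sketch, based only on the ChainBuilder code shown above.
chain = CanvasSync::JobBatches::ChainBuilder.new(CanvasSync::JobBatches::SerialBatchJob)

# Append a job definition (a Hash of :job and :parameters) to the end of the chain.
chain << { job: CanvasSync::Jobs::SyncTermsJob, parameters: [] }

# Insert another definition relative to an existing job (:before, :after, or :with).
chain.insert(
  { job: CanvasSync::Jobs::SyncAccountsJob, parameters: [] },
  after: CanvasSync::Jobs::SyncTermsJob,
)

# Normalize the definitions and enqueue the root batch job.
chain.process!
```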
data/lib/canvas_sync/job_batches/context_hash.rb
@@ -0,0 +1,147 @@
+ module CanvasSync
+   module JobBatches
+     class ContextHash
+       delegate_missing_to :flatten
+
+       def initialize(bid, hash = nil)
+         @bid_stack = [bid]
+         @hash_map = {}
+         @dirty = false
+         @flattened = nil
+         @hash_map[bid] = hash.with_indifferent_access if hash
+       end
+
+       # Local is "the nearest batch with a context value"
+       # This allows for, for example, SerialBatchJob to have a modifiable context stored on it's main Batch
+       # that can be accessed transparently from one of it's internal, context-less Batches
+       def local_bid
+         bid = @bid_stack[-1]
+         while bid.present?
+           bhash = reolve_hash(bid)
+           return bid if bhash
+           bid = get_parent_bid(bid)
+         end
+         nil
+       end
+
+       def local
+         @hash_map[local_bid]
+       end
+
+       def set_local(new_hash)
+         @dirty = true
+         local.clear.merge!(new_hash)
+       end
+
+       def clear
+         local.clear
+         @flattened = nil
+         @dirty = true
+         self
+       end
+
+       def []=(key, value)
+         @flattened = nil
+         @dirty = true
+         local[key] = value
+       end
+
+       def [](key)
+         bid = @bid_stack[-1]
+         while bid.present?
+           bhash = reolve_hash(bid)
+           return bhash[key] if bhash&.key?(key)
+           bid = get_parent_bid(bid)
+         end
+         nil
+       end
+
+       def reload!
+         @dirty = false
+         @hash_map = {}
+         self
+       end
+
+       def save!(force: false)
+         return unless dirty? || force
+         Batch.redis do |r|
+           r.hset("BID-#{local_bid}", 'context', JSON.unparse(local))
+         end
+       end
+
+       def dirty?
+         @dirty
+       end
+
+       def is_a?(arg)
+         return true if Hash <= arg
+         super
+       end
+
+       def flatten
+         return @flattened if @flattened
+
+         load_all
+         flattened = {}
+         @bid_stack.compact.each do |bid|
+           flattened.merge!(@hash_map[bid]) if @hash_map[bid]
+         end
+         flattened.freeze
+
+         @flattened = flattened.with_indifferent_access
+       end
+
+       private
+
+       def get_parent_hash(bid)
+         reolve_hash(get_parent_bid(bid)).freeze
+       end
+
+       def get_parent_bid(bid)
+         index = @bid_stack.index(bid)
+         raise "Invalid BID #{bid}" if index.nil? # Sanity Check - this shouldn't happen
+
+         index -= 1
+         if index >= 0
+           @bid_stack[index]
+         else
+           pbid = Batch.redis { |r| r.hget("BID-#{bid}", "parent_bid") }
+           @bid_stack.unshift(pbid)
+           pbid
+         end
+       end
+
+       def reolve_hash(bid)
+         return nil unless bid.present?
+         return @hash_map[bid] if @hash_map.key?(bid)
+
+         context_json, editable = Batch.redis do |r|
+           r.multi do
+             r.hget("BID-#{bid}", "context")
+             r.hget("BID-#{bid}", "allow_context_changes")
+           end
+         end
+
+         if context_json.present?
+           context_hash = JSON.parse(context_json)
+           context_hash = context_hash.with_indifferent_access
+           context_hash.each do |k, v|
+             v.freeze
+           end
+           context_hash.freeze unless editable
+
+           @hash_map[bid] = context_hash
+         else
+           @hash_map[bid] = nil
+         end
+       end
+
+       def load_all
+         while @bid_stack[0].present?
+           get_parent_hash(@bid_stack[0])
+         end
+         @hash_map
+       end
+     end
+   end
+ end
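ContextHash backs the batch context used by the reworked jobs later in this diff: reads walk up the batch hierarchy in Redis until a key is found, while writes land on the nearest batch that owns a context. A hedged sketch of direct use (the `bid` variable is assumed to reference an existing batch; jobs would normally reach this through their batch rather than constructing it by hand):

```ruby
# Hypothetical sketch, assuming `bid` identifies an existing batch in Redis.
ctx = CanvasSync::JobBatches::ContextHash.new(bid)

ctx[:account_id]                              # looks up the key, falling back to parent batches
ctx[:updated_after] = "2020-10-18T00:00:00Z"  # writes to the nearest batch that has a context
ctx.save! if ctx.dirty?                       # persists the local context hash back to Redis
```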
data/lib/canvas_sync/job_batches/jobs/base_job.rb
@@ -0,0 +1,7 @@
+ module CanvasSync
+   module JobBatches
+     class BaseJob < ActiveJob::Base
+
+     end
+   end
+ end
data/lib/canvas_sync/job_batches/jobs/concurrent_batch_job.rb
@@ -0,0 +1,18 @@
+ require_relative './base_job'
+
+ module CanvasSync
+   module JobBatches
+     class ConcurrentBatchJob < BaseJob
+       def perform(sub_jobs, context: nil)
+         Batch.new.tap do |b|
+           b.context = context
+           b.jobs do
+             sub_jobs.each do |j|
+               ChainBuilder.enqueue_job(j)
+             end
+           end
+         end
+       end
+     end
+   end
+ end
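As a rough illustration, ConcurrentBatchJob takes an array of normalized job definitions and enqueues them all under a single batch. The job names below exist elsewhere in this gem, but the empty parameter lists and context values are placeholders:

```ruby
# Hypothetical sketch: fan out two job definitions under one batch.
CanvasSync::JobBatches::ConcurrentBatchJob.perform_later(
  [
    { job: "CanvasSync::Jobs::SyncTermsJob", parameters: [] },
    { job: "CanvasSync::Jobs::SyncAccountsJob", parameters: [] },
  ],
  context: { account_id: "self" }
)
```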
data/lib/canvas_sync/job_batches/jobs/serial_batch_job.rb
@@ -0,0 +1,73 @@
+ require_relative './base_job'
+
+ module CanvasSync
+   module JobBatches
+     class SerialBatchJob < BaseJob
+       def perform(sub_jobs, context: nil)
+         serial_id = SecureRandom.urlsafe_base64(10)
+
+         root_batch = Batch.new
+
+         Batch.redis do |r|
+           r.multi do
+             mapped_sub_jobs = sub_jobs.map do |j|
+               j = ActiveJob::Arguments.serialize([j])
+               JSON.unparse(j)
+             end
+             r.hset("SERBID-#{serial_id}", "root_bid", root_batch.bid)
+             r.expire("SERBID-#{serial_id}", Batch::BID_EXPIRE_TTL)
+             r.rpush("SERBID-#{serial_id}-jobs", mapped_sub_jobs)
+             r.expire("SERBID-#{serial_id}-jobs", Batch::BID_EXPIRE_TTL)
+           end
+         end
+
+         root_batch.allow_context_changes = true
+         root_batch.context = context
+         root_batch.on(:success, "#{self.class.to_s}.cleanup_redis", serial_batch_id: serial_id)
+         root_batch.jobs do
+           self.class.perform_next_sequence_job(serial_id)
+         end
+       end
+
+       def self.cleanup_redis(status, options)
+         serial_id = options['serial_batch_id']
+         Batch.redis do |r|
+           r.del(
+             "SERBID-#{serial_id}",
+             "SERBID-#{serial_id}-jobs",
+           )
+         end
+       end
+
+       def self.job_succeeded_callback(status, options)
+         serial_id = options['serial_batch_id']
+         perform_next_sequence_job(serial_id)
+       end
+
+       protected
+
+       def self.perform_next_sequence_job(serial_id)
+         root_bid, next_job_json = Batch.redis do |r|
+           r.multi do
+             r.hget("SERBID-#{serial_id}", "root_bid")
+             r.lpop("SERBID-#{serial_id}-jobs")
+           end
+         end
+
+         return unless next_job_json.present?
+
+         next_job = JSON.parse(next_job_json)
+         next_job = ActiveJob::Arguments.deserialize(next_job)[0]
+
+         Batch.new(root_bid).jobs do
+           Batch.new.tap do |batch|
+             batch.on(:success, "#{self.to_s}.job_succeeded_callback", serial_batch_id: serial_id)
+             batch.jobs do
+               ChainBuilder.enqueue_job(next_job)
+             end
+           end
+         end
+       end
+     end
+   end
+ end
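SerialBatchJob stores the remaining job definitions in a Redis list (the SERBID-…-jobs key above) and pops the next one each time the previous sub-batch fires its :success callback, so the sub-jobs run one at a time. A hedged usage sketch, mirroring the concurrent example (job names and parameters are again placeholders):

```ruby
# Hypothetical sketch: run two job definitions strictly one after the other.
CanvasSync::JobBatches::SerialBatchJob.perform_later(
  [
    { job: "CanvasSync::Jobs::SyncTermsJob", parameters: [] },
    { job: "CanvasSync::Jobs::SyncAccountsJob", parameters: [] },
  ],
  context: { account_id: "self" }
)
```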
data/lib/canvas_sync/job_batches/sidekiq.rb
@@ -0,0 +1,91 @@
+ begin
+   require 'sidekiq/batch'
+ rescue LoadError
+ end
+
+ module CanvasSync
+   module JobBatches
+     module Sidekiq
+       module WorkerExtension
+         def bid
+           Thread.current[:batch].bid
+         end
+
+         def batch
+           Thread.current[:batch]
+         end
+
+         def valid_within_batch?
+           batch.valid?
+         end
+       end
+
+       class ClientMiddleware
+         def call(_worker, msg, _queue, _redis_pool = nil)
+           if (batch = Thread.current[:batch])
+             batch.increment_job_queue(msg['jid']) if (msg[:bid] = batch.bid)
+           end
+           yield
+         end
+       end
+
+       class ServerMiddleware
+         def call(_worker, msg, _queue)
+           if (bid = msg['bid'])
+             begin
+               Thread.current[:batch] = Batch.new(bid)
+               yield
+               Thread.current[:batch] = nil
+               Batch.process_successful_job(bid, msg['jid'])
+             rescue
+               Batch.process_failed_job(bid, msg['jid'])
+               raise
+             ensure
+               Thread.current[:batch] = nil
+             end
+           else
+             yield
+           end
+         end
+       end
+
+       def self.configure
+         if defined?(::Sidekiq::Batch) && ::Sidekiq::Batch != JobBatches::Batch
+           print "WARNING: Detected Sidekiq Pro or sidekiq-batch. CanvasSync JobBatches may not be fully compatible!"
+         end
+
+         ::Sidekiq.configure_client do |config|
+           config.client_middleware do |chain|
+             chain.remove ::Sidekiq::Batch::Middleware::ClientMiddleware if defined?(::Sidekiq::Batch::Middleware::ClientMiddleware)
+             chain.add JobBatches::Sidekiq::ClientMiddleware
+           end
+         end
+         ::Sidekiq.configure_server do |config|
+           config.client_middleware do |chain|
+             chain.remove ::Sidekiq::Batch::Middleware::ClientMiddleware if defined?(::Sidekiq::Batch::Middleware::ClientMiddleware)
+             chain.add JobBatches::Sidekiq::ClientMiddleware
+           end
+
+           config.server_middleware do |chain|
+             chain.remove ::Sidekiq::Batch::Middleware::ServerMiddleware if defined?(::Sidekiq::Batch::Middleware::ServerMiddleware)
+             chain.add JobBatches::Sidekiq::ServerMiddleware
+           end
+
+           config.death_handlers << ->(job, ex) do
+             return unless job['bid'].present?
+
+             if defined?(::Apartment)
+               ::Apartment::Tenant.switch(job['apartment'] || 'public') do
+                 Sidekiq::Batch.process_dead_job(job['bid'], job['jid'])
+               end
+             else
+               Sidekiq::Batch.process_dead_job(job['bid'], job['jid'])
+             end
+           end
+         end
+         ::Sidekiq.const_set(:Batch, CanvasSync::JobBatches::Batch)
+         ::Sidekiq::Worker.send(:include, JobBatches::Sidekiq::WorkerExtension)
+       end
+     end
+   end
+ end
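The configure hook is opt-in: it installs the client/server middleware, replaces any sidekiq-batch constant, and mixes the batch accessors into Sidekiq::Worker. A minimal sketch of wiring it up (the initializer path and ExampleWorker are illustrative, not part of this release):

```ruby
# config/initializers/canvas_sync.rb (hypothetical location)
CanvasSync::JobBatches::Sidekiq.configure

# After configure, a plain Sidekiq worker can see the surrounding batch, if any.
class ExampleWorker
  include Sidekiq::Worker

  def perform
    logger.info "running inside batch #{batch.bid}" if batch
  end
end
```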
data/lib/canvas_sync/job_batches/status.rb
@@ -0,0 +1,63 @@
+ module CanvasSync
+   module JobBatches
+     class Batch
+       class Status
+         attr_reader :bid
+
+         def initialize(bid)
+           @bid = bid
+         end
+
+         def join
+           raise "Not supported"
+         end
+
+         def pending
+           Batch.redis { |r| r.hget("BID-#{bid}", 'pending') }.to_i
+         end
+
+         def failures
+           Batch.redis { |r| r.scard("BID-#{bid}-failed") }.to_i
+         end
+
+         def created_at
+           Batch.redis { |r| r.hget("BID-#{bid}", 'created_at') }
+         end
+
+         def total
+           Batch.redis { |r| r.hget("BID-#{bid}", 'total') }.to_i
+         end
+
+         def parent_bid
+           Batch.redis { |r| r.hget("BID-#{bid}", "parent_bid") }
+         end
+
+         def failure_info
+           Batch.redis { |r| r.smembers("BID-#{bid}-failed") } || []
+         end
+
+         def complete?
+           'true' == Batch.redis { |r| r.hget("BID-#{bid}", 'complete') }
+         end
+
+         def child_count
+           Batch.redis { |r| r.hget("BID-#{bid}", 'children') }.to_i
+         end
+
+         def data
+           {
+             bid: bid,
+             total: total,
+             failures: failures,
+             pending: pending,
+             created_at: created_at,
+             complete: complete?,
+             failure_info: failure_info,
+             parent_bid: parent_bid,
+             child_count: child_count
+           }
+         end
+       end
+     end
+   end
+ end
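Status is a thin read-only view over the Redis keys a Batch maintains. A hedged sketch, assuming `bid` refers to an existing batch:

```ruby
# Hypothetical sketch: inspect a batch by its bid.
status = CanvasSync::JobBatches::Batch::Status.new(bid)

status.pending    # jobs still outstanding, per the BID-<bid> hash
status.failures   # size of the BID-<bid>-failed set
status.complete?  # true once the batch's 'complete' flag is set
status.data       # all of the above rolled into a single Hash
```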
data/lib/canvas_sync/jobs/begin_sync_chain_job.rb
@@ -0,0 +1,34 @@
+ module CanvasSync
+   module Jobs
+     class BeginSyncChainJob < CanvasSync::Job
+       def perform(chain_definition, globals = {})
+         if !globals[:updated_after].present? || globals[:updated_after] == true
+           last_batch = SyncBatch.where(status: 'completed').last
+           globals[:updated_after] = last_batch&.started_at&.iso8601
+         end
+
+         sync_batch = SyncBatch.create!(
+           started_at: DateTime.now,
+           status: 'pending',
+         )
+
+         JobBatches::Batch.new.tap do |b|
+           b.description = "CanvasSync Root Batch"
+           b.on(:complete, "#{self.class.to_s}.batch_completed", sync_batch_id: sync_batch.id)
+           b.context = globals
+           b.jobs do
+             JobBatches::SerialBatchJob.perform_now(chain_definition)
+           end
+         end
+       end
+
+       def self.batch_completed(status, options)
+         sbatch = SyncBatch.find(options['sync_batch_id'])
+         sbatch.update!(
+           status: status.failures.positive? ? 'failed' : 'completed',
+           completed_at: DateTime.now,
+         )
+       end
+     end
+   end
+ end
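BeginSyncChainJob wraps a chain definition in a root batch, records a SyncBatch row, and resolves updated_after to the start time of the last completed sync when it is omitted or passed as true. A hedged sketch of invoking it directly (the chain contents and globals are illustrative; in practice the chain would usually be produced by ChainBuilder):

```ruby
# Hypothetical sketch: start a sync chain of two jobs with shared globals.
chain_definition = [
  { job: "CanvasSync::Jobs::SyncTermsJob", parameters: [] },
  { job: "CanvasSync::Jobs::SyncAccountsJob", parameters: [] },
]

CanvasSync::Jobs::BeginSyncChainJob.perform_later(
  chain_definition,
  { updated_after: true, account_id: "self" }
)
```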
data/lib/canvas_sync/jobs/report_checker.rb
@@ -4,21 +4,19 @@ module CanvasSync
      # Re-enqueues itself if the report is still processing on Canvas.
      # Enqueues the ReportProcessor when the report has completed.
      class ReportChecker < CanvasSync::Job
-       # @param job_chain [Hash]
        # @param report_name [Hash] e.g., 'provisioning_csv'
        # @param report_id [Integer]
        # @param processor [String] a stringified report processor class name
        # @param options [Hash] hash of options that will be passed to the job processor
        # @return [nil]
-       def perform(job_chain, report_name, report_id, processor, options) # rubocop:disable Metrics/AbcSize
-         account_id = options[:account_id] || job_chain[:global_options][:account_id] || "self"
-         report_status = CanvasSync.get_canvas_sync_client(job_chain[:global_options])
+       def perform(report_name, report_id, processor, options) # rubocop:disable Metrics/AbcSize
+         account_id = options[:account_id] || batch_context[:account_id] || "self"
+         report_status = CanvasSync.get_canvas_sync_client(batch_context)
                            .report_status(account_id, report_name, report_id)

          case report_status["status"].downcase
          when "complete"
            CanvasSync::Jobs::ReportProcessorJob.perform_later(
-             job_chain,
              report_name,
              report_status["attachment"]["url"],
              processor,
@@ -33,7 +31,6 @@ module CanvasSync
            CanvasSync::Jobs::ReportChecker
              .set(wait: report_checker_wait_time)
              .perform_later(
-               job_chain,
                report_name,
                report_id,
                processor,
data/lib/canvas_sync/jobs/report_processor_job.rb
@@ -6,22 +6,19 @@ module CanvasSync
      # download the report, and then pass the file path and options into the
      # process method on the processor.
      class ReportProcessorJob < CanvasSync::Job
-       # @param job_chain [Hash]
        # @param report_name [Hash] e.g., 'provisioning_csv'
        # @param report_url [String]
        # @param processor [String] a stringified report processor class name
        # @param options [Hash] hash of options that will be passed to the job processor
        # @return [nil]
-       def perform(job_chain, report_name, report_url, processor, options, report_id)
+       def perform(report_name, report_url, processor, options, report_id)
          @job_log.update_attributes(job_class: processor)
          download(report_name, report_url) do |file_path|
-           options = job_chain[:global_options].merge(options).merge({
+           options = batch_context.merge(options).merge({
              report_processor_job_id: @job_log.job_id
            })
            processor.constantize.process(file_path, options, report_id)
          end
-
-         CanvasSync.invoke_next(job_chain)
        end

        private
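The net effect of the two report-job changes above is that callers no longer thread a job_chain hash through every argument list; globals such as account_id now come from the surrounding batch context, and chain advancement is handled by batch callbacks rather than CanvasSync.invoke_next. A hedged sketch of the new call shape (the argument values are illustrative only):

```ruby
# Hypothetical values for illustration only.
report_name = "provisioning_csv"
report_id   = 42
processor   = "CanvasSync::Processors::ProvisioningReportProcessor"
options     = { models: ["terms"] }

# 0.16.x: CanvasSync::Jobs::ReportChecker.perform_later(job_chain, report_name, report_id, processor, options)
# 0.17.0.beta1: the job_chain argument is gone.
CanvasSync::Jobs::ReportChecker.perform_later(report_name, report_id, processor, options)
```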