canvas_sync 0.17.26.beta1 → 0.17.27.beta1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: ce544260a5cc94f9193cb99da3bc55e06ecd1078e0ef6a5afca282cc4d39169f
-  data.tar.gz: 30a5bccedebde1cb00aeb31219b1d9f605672fa8ea58cb50af483cf8f98d15fb
+  metadata.gz: 413b8669b85123f239a5917982347c9cb90bd56229c42e11a92ce8bc619171f5
+  data.tar.gz: b99de51cad35d7c894ed54ab8910cbe6d0b594cadab814683396350dbbc4b95a
 SHA512:
-  metadata.gz: e84bde52492c2276650bd662adff677c0aa6a1c511587886d45fc6d328389af242ade72fe872dda2093df9ebb151aa25a4a353559a31d74330849f0a0c93d159
-  data.tar.gz: 3c30eceedad806932b61abc66456973250c7247ba7431406f1c5440b09b05cc3b7174112e34e78392d8f3e9c89dfcd7eba43377a0f8ee30d6320a00a50303163
+  metadata.gz: e78f116e14b49c88fb04d95b33623c99a31c6a09735491361b0c287a2d48fff2bf061710f5715854b8601b7eb0c1aa3de9bfd93d87d08f71c260eac830a08fce
+  data.tar.gz: 361de97175217d570b0e60f4b3737a38d685ea4404d568ad35df281f495f08288284d734a02e2f8c5809c2a81e3863bb3030a217ce5d5bab71ca60d889eff31c
@@ -65,7 +65,7 @@ module CanvasSync
       }

       row_buffer = nil
-      if defined?(User) && klass == User && csv_column_names.include?(:user_id)
+      if defined?(User) && klass == User && csv_column_names.include?('user_id')
         row_buffer = UserRowBuffer.new(&row_buffer_out)
       else
         row_buffer = NullRowBuffer.new(&row_buffer_out)
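
Note: the old check compared against a Symbol, but this change suggests csv_column_names holds the header names as Strings, which is how Ruby's CSV library returns them. A minimal sketch of the mismatch, using plain CSV parsing rather than the gem's report pipeline:

    require 'csv'

    headers = CSV.parse("user_id,name\n1,Jane", headers: true).headers
    headers.include?(:user_id)  # => false -- a Symbol never matches String headers
    headers.include?('user_id') # => true
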
@@ -95,6 +95,10 @@ module CanvasSync
       @flattened = flattened.with_indifferent_access
     end

+    def to_h
+      flatten
+    end
+
     private

     def get_parent_hash(bid)
@@ -3,16 +3,19 @@ require_relative './base_job'
 module CanvasSync
   module JobBatches
     class ConcurrentBatchJob < BaseJob
+      def self.make_batch(sub_jobs, context: nil, &blk)
+        ManagedBatchJob.make_batch(
+          sub_jobs,
+          ordered: false,
+          concurrency: true,
+          context: context,
+          desc_prefix: 'ConcurrentBatchJob',
+          &blk
+        )
+      end
+
       def perform(sub_jobs, context: nil)
-        Batch.new.tap do |b|
-          b.description = "Concurrent Batch Root"
-          b.context = context
-          b.jobs do
-            sub_jobs.each do |j|
-              ChainBuilder.enqueue_job(j)
-            end
-          end
-        end
+        self.class.make_batch(sub_jobs, context: context)
       end
     end
   end
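
Note: ConcurrentBatchJob now delegates to ManagedBatchJob.make_batch and gains a class-level make_batch that yields the root batch, so callers can configure it before any sub-jobs are enqueued. A hypothetical caller might look like the sketch below; the sub_jobs contents and SomeCallback are illustrative, not part of this diff.

    CanvasSync::JobBatches::ConcurrentBatchJob.make_batch(
      sub_jobs,                        # an array of job hashes, as elsewhere in this diff
      context: { canvas_term_id: 42 }
    ) do |b|
      b.description = "Nightly sync fan-out"
      b.on(:success, "SomeCallback.batch_finished")
    end
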
@@ -3,8 +3,8 @@ require_relative './base_job'
 module CanvasSync
   module JobBatches
     class ManagedBatchJob < BaseJob
-      def perform(sub_jobs, context: nil, ordered: true, concurrency: nil)
-        man_batch_id = SecureRandom.urlsafe_base64(10)
+      def self.make_batch(sub_jobs, ordered: true, concurrency: nil, context: nil, desc_prefix: nil, &blk)
+        desc_prefix ||= ''

         if concurrency == 0 || concurrency == nil || concurrency == true
           concurrency = sub_jobs.count
@@ -14,38 +14,60 @@ module CanvasSync

         root_batch = Batch.new

-        Batch.redis do |r|
-          r.multi do
-            r.hset("MNGBID-#{man_batch_id}", "root_bid", root_batch.bid)
-            r.hset("MNGBID-#{man_batch_id}", "ordered", ordered)
-            r.hset("MNGBID-#{man_batch_id}", "concurrency", concurrency)
-            r.expire("MNGBID-#{man_batch_id}", Batch::BID_EXPIRE_TTL)
-
-            mapped_sub_jobs = sub_jobs.each_with_index.map do |j, i|
-              j['_mngbid_index_'] = i # This allows duplicate jobs when a Redis Set is used
-              j = ActiveJob::Arguments.serialize([j])
-              JSON.unparse(j)
-            end
-            if ordered
-              r.rpush("MNGBID-#{man_batch_id}-jobs", mapped_sub_jobs)
-            else
-              r.sadd("MNGBID-#{man_batch_id}-jobs", mapped_sub_jobs)
+        if concurrency < sub_jobs.count
+          man_batch_id = SecureRandom.urlsafe_base64(10)
+
+          Batch.redis do |r|
+            r.multi do
+              r.hset("MNGBID-#{man_batch_id}", "root_bid", root_batch.bid)
+              r.hset("MNGBID-#{man_batch_id}", "ordered", ordered)
+              r.hset("MNGBID-#{man_batch_id}", "concurrency", concurrency)
+              r.expire("MNGBID-#{man_batch_id}", Batch::BID_EXPIRE_TTL)
+
+              mapped_sub_jobs = sub_jobs.each_with_index.map do |j, i|
+                j['_mngbid_index_'] = i # This allows duplicate jobs when a Redis Set is used
+                j = ActiveJob::Arguments.serialize([j])
+                JSON.unparse(j)
+              end
+              if ordered
+                r.rpush("MNGBID-#{man_batch_id}-jobs", mapped_sub_jobs)
+              else
+                r.sadd("MNGBID-#{man_batch_id}-jobs", mapped_sub_jobs)
+              end
+              r.expire("MNGBID-#{man_batch_id}-jobs", Batch::BID_EXPIRE_TTL)
             end
-            r.expire("MNGBID-#{man_batch_id}-jobs", Batch::BID_EXPIRE_TTL)
           end
+
+          root_batch.allow_context_changes = (concurrency == 1)
+          root_batch.on(:success, "#{to_s}.cleanup_redis", managed_batch_id: man_batch_id)
+
+          desc_prefix = "MGD(#{man_batch_id}): #{desc_prefix}"
         end

-        root_batch.description = "Managed Batch Root (#{man_batch_id})"
-        root_batch.allow_context_changes = (concurrency == 1)
         root_batch.context = context
-        root_batch.on(:success, "#{self.class.to_s}.cleanup_redis", managed_batch_id: man_batch_id)
-        root_batch.jobs {}

-        concurrency.times do
-          self.class.perform_next_sequence_job(man_batch_id)
+        blk.call(ManagedBatchProxy.new(root_batch)) if blk.present?
+
+        root_batch.description = "#{desc_prefix}: #{root_batch.description || 'Root'}"
+
+        if concurrency < sub_jobs.count
+          root_batch.jobs {}
+          concurrency.times do
+            perform_next_sequence_job(man_batch_id)
+          end
+        else
+          root_batch.jobs do
+            sub_jobs.each do |j|
+              ChainBuilder.enqueue_job(j)
+            end
+          end
         end
       end

+      def perform(sub_jobs, context: nil, ordered: true, concurrency: nil)
+        self.class.make_batch(sub_jobs, ordered: ordered, concurrency: concurrency, context: context)
+      end
+
       def self.cleanup_redis(status, options)
         man_batch_id = options['managed_batch_id']
         Batch.redis do |r|
@@ -94,6 +116,21 @@ module CanvasSync
           end
         end
       end
+
+      class ManagedBatchProxy
+        def initialize(real_batch)
+          @real_batch = real_batch
+        end
+
+        delegate_missing_to :real_batch
+
+        def jobs
+          raise "Managed Batches do not support calling .jobs directly!"
+        end
+
+        private
+        attr_reader :real_batch
+      end
     end
   end
 end
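
Note: the rewritten make_batch picks one of two paths. When the requested concurrency is lower than the number of sub-jobs, it serializes the jobs into Redis under a MNGBID-* key and seeds only `concurrency` runners via perform_next_sequence_job; otherwise it enqueues everything directly inside the root batch. The block, if given, receives a ManagedBatchProxy, so callers can set the description, context, and callbacks but cannot call .jobs themselves. A sketch under assumed inputs (the sub_jobs array and context values are illustrative):

    CanvasSync::JobBatches::ManagedBatchJob.make_batch(
      sub_jobs,            # say, 100 job hashes
      ordered: false,      # stored in a Redis set; run order is not guaranteed
      concurrency: 5,      # 5 < 100, so only 5 sequence runners are started
      context: { account_id: 1 }
    ) do |b|
      b.description = "Account 1 fan-out"  # delegated to the real Batch
      # b.jobs { ... }                     # would raise: the managed batch enqueues for you
    end
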
@@ -3,14 +3,20 @@ require_relative './base_job'
 module CanvasSync
   module JobBatches
     class SerialBatchJob < BaseJob
-      def perform(sub_jobs, context: nil)
-        ManagedBatchJob.new.perform(
+      def self.make_batch(sub_jobs, context: nil, &blk)
+        ManagedBatchJob.make_batch(
           sub_jobs,
-          context: context,
           ordered: true,
           concurrency: false,
+          context: context,
+          desc_prefix: 'SerialBatchJob',
+          &blk
         )
       end
+
+      def perform(sub_jobs, context: nil)
+        self.class.make_batch(sub_jobs, context: context)
+      end
     end
   end
 end
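
Note: SerialBatchJob mirrors ConcurrentBatchJob; both are now thin wrappers over ManagedBatchJob.make_batch, differing only in the ordered/concurrency flags and the description prefix. Roughly, with argument values taken from the two wrappers above:

    # Serial: one job at a time, in enqueue order.
    SerialBatchJob.make_batch(sub_jobs, context: ctx)      # ordered: true,  concurrency: false
    # Concurrent: everything at once, order not guaranteed.
    ConcurrentBatchJob.make_batch(sub_jobs, context: ctx)  # ordered: false, concurrency: true
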
@@ -12,7 +12,7 @@ module CanvasSync::JobBatches::Sidekiq
     end

     def drain_zset(key)
-      items, _ = Sidekiq.redis do |r|
+      items, _ = Batch.redis do |r|
         r.multi do
           r.zrange(key, 0, -1)
           r.zremrangebyrank(key, 0, -1)
@@ -31,6 +31,8 @@ module CanvasSync
         status: 'processing',
       )

+      globals[:batch_genre] = genre
+      globals[:batch_start_time] = sync_batch.started_at.iso8601
       globals[:sync_batch_id] = sync_batch.id

       JobBatches::Batch.new.tap do |b|
@@ -6,15 +6,42 @@ module CanvasSync
       context = options[:context] || {}
       if options[:term_scope]
         Term.send(options[:term_scope]).find_each.map do |term|
-          local_context = context.merge(canvas_term_id: get_term_id(term))
-          JobBatches::ConcurrentBatchJob.perform_now(jobs, context: local_context)
+          term_id = get_term_id(term)
+          local_context = context.merge(canvas_term_id: term_id)
+
+          # Override the delta-syncing date if:
+          # 1. the Term hasn't been synced before or
+          # 2. the Term underwent a period of not syncing
+          term_last_sync = CanvasSync.redis.get(self.class.last_sync_key(term_id))
+          if batch_context[:updated_after]
+            if !term_last_sync.present? || batch_context[:updated_after] > term_last_sync
+              local_context[:updated_after] = term_last_sync.presence
+            end
+          end
+
+          JobBatches::ManagedBatchJob.make_batch(jobs, ordered: false, concurrency: true) do |b|
+            b.description = "TermBatchJob(#{term.canvas_id}) Root"
+            b.context = local_context
+            b.on(:success, "#{self.class.to_s}.batch_finished") unless options[:mark_synced] == false
+          end
         end
       else
-        JobBatches::ConcurrentBatchJob.perform_now(jobs, context: context)
+        JobBatches::ConcurrentBatchJob.make_batch(jobs, context: context)
       end
     end
   end

+  def self.batch_finished(status, opts)
+    ctx = JobBatches::Batch.current_context
+    term_id = ctx[:canvas_term_id]
+    CanvasSync.redis.set(last_sync_key(term_id), ctx[:batch_start_time])
+  end
+
+  def self.last_sync_key(term_id)
+    ctx = JobBatches::Batch.current_context
+    "#{CanvasSync.redis_prefix}:#{ctx[:batch_genre]}:#{term_id}:last_sync"
+  end
+
   def get_term_id(term)
     term.try(:canvas_id) || term.canvas_term_id
   end
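
Note: the batch_genre and batch_start_time globals added earlier in this diff are what batch_finished and last_sync_key consume here. On success, each term records the batch's start time under a per-genre, per-term Redis key, and the next run compares that timestamp against the chain-wide updated_after. A paraphrase of the condition with illustrative values (the key and timestamps are examples, not fixtures from the gem):

    key = "cs:provisioning:42:last_sync"        # "#{redis_prefix}:#{batch_genre}:#{term_id}:last_sync"
    term_last_sync = CanvasSync.redis.get(key)  # e.g. "2021-03-01T02:00:00Z", or nil if the term never synced

    if updated_after && (term_last_sync.nil? || updated_after > term_last_sync)
      # The term missed at least one delta window (or has no history), so don't trust the
      # chain-wide updated_after: fall back to the term's own last sync, or nil for a full sync.
      local_context[:updated_after] = term_last_sync
    end
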
@@ -1,3 +1,3 @@
 module CanvasSync
-  VERSION = "0.17.26.beta1".freeze
+  VERSION = "0.17.27.beta1".freeze
 end
data/lib/canvas_sync.rb CHANGED
@@ -320,5 +320,15 @@ module CanvasSync
       return if invalid.empty?
       raise "Invalid live event(s) specified: #{invalid.join(', ')}. Only #{SUPPORTED_LIVE_EVENTS.join(', ')} are supported."
     end
+
+    def redis(*args, &blk)
+      JobBatches::Batch.redis(*args, &blk)
+    end
+
+    def redis_prefix
+      pfx = "cs"
+      pfx = "#{Apartment::Tenant.current}:#{pfx}" if defined?(Apartment)
+      pfx
+    end
   end
 end
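
Note: CanvasSync.redis reuses the JobBatches Redis connection, and redis_prefix namespaces keys per Apartment tenant when the Apartment gem is loaded. For example (the tenant name and key are illustrative):

    CanvasSync.redis_prefix              # => "cs" when Apartment is not defined
    Apartment::Tenant.switch("acme") do
      CanvasSync.redis_prefix            # => "acme:cs"
    end
    CanvasSync.redis { |r| r.get("cs:provisioning:42:last_sync") }
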
@@ -10,6 +10,11 @@ RSpec.describe CanvasSync::Processors::ProvisioningReportProcessor do
       }.to change { User.count }.by(2)
     end

+    it 'uses a User row with a sis_id' do
+      subject.process('spec/support/fixtures/reports/users.csv', { models: ['users'] }, 1)
+      expect(User.find_by(canvas_id: 2).sis_id).to eq 'sis_id_2'
+    end
+
     it 'processes courses' do
       expect {
         subject.process('spec/support/fixtures/reports/courses.csv', { models: ['courses'] }, 1)