canvas_sync 0.17.26.beta1 → 0.17.28

Files changed (30)
  1. checksums.yaml +4 -4
  2. data/README.md +1 -0
  3. data/lib/canvas_sync/generators/templates/migrations/create_content_migrations.rb +24 -0
  4. data/lib/canvas_sync/generators/templates/models/content_migration.rb +10 -0
  5. data/lib/canvas_sync/importers/bulk_importer.rb +1 -1
  6. data/lib/canvas_sync/job_batches/context_hash.rb +4 -0
  7. data/lib/canvas_sync/job_batches/jobs/concurrent_batch_job.rb +12 -9
  8. data/lib/canvas_sync/job_batches/jobs/managed_batch_job.rb +62 -25
  9. data/lib/canvas_sync/job_batches/jobs/serial_batch_job.rb +9 -3
  10. data/lib/canvas_sync/job_batches/sidekiq/web/helpers.rb +1 -1
  11. data/lib/canvas_sync/jobs/begin_sync_chain_job.rb +2 -0
  12. data/lib/canvas_sync/jobs/sync_content_migrations_job.rb +20 -0
  13. data/lib/canvas_sync/jobs/term_batches_job.rb +30 -3
  14. data/lib/canvas_sync/processors/content_migrations_processor.rb +19 -0
  15. data/lib/canvas_sync/processors/model_mappings.yml +40 -0
  16. data/lib/canvas_sync/version.rb +1 -1
  17. data/lib/canvas_sync.rb +12 -0
  18. data/spec/canvas_sync/jobs/sync_content_migrations_job_spec.rb +30 -0
  19. data/spec/canvas_sync/processors/content_migrations_processor_spec.rb +13 -0
  20. data/spec/canvas_sync/processors/provisioning_report_processor_spec.rb +5 -0
  21. data/spec/dummy/app/models/content_migration.rb +16 -0
  22. data/spec/dummy/db/migrate/20220308072643_create_content_migrations.rb +30 -0
  23. data/spec/dummy/db/schema.rb +19 -1
  24. data/spec/support/fixtures/reports/content_migrations.csv +3 -0
  25. data/spec/support/fixtures/reports/users.csv +3 -2
  26. metadata +19 -13
  27. data/spec/dummy/log/development.log +0 -2069
  28. data/spec/dummy/log/test.log +0 -83945
  29. data/spec/support/fixtures/reports/provisioning_csv_unzipped/courses.csv +0 -3
  30. data/spec/support/fixtures/reports/provisioning_csv_unzipped/users.csv +0 -4
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: ce544260a5cc94f9193cb99da3bc55e06ecd1078e0ef6a5afca282cc4d39169f
- data.tar.gz: 30a5bccedebde1cb00aeb31219b1d9f605672fa8ea58cb50af483cf8f98d15fb
+ metadata.gz: 8359f6a5054489357a480d15019981f0ade50a145f714bcbd2274c6570689316
+ data.tar.gz: c98c3d8ecda4291e162e31d7c4dfdf9ddff65763f0bb7c345006585f53894cf4
  SHA512:
- metadata.gz: e84bde52492c2276650bd662adff677c0aa6a1c511587886d45fc6d328389af242ade72fe872dda2093df9ebb151aa25a4a353559a31d74330849f0a0c93d159
- data.tar.gz: 3c30eceedad806932b61abc66456973250c7247ba7431406f1c5440b09b05cc3b7174112e34e78392d8f3e9c89dfcd7eba43377a0f8ee30d6320a00a50303163
+ metadata.gz: 4ce90338e8e3751fd5767a09eaaf89e733532ffa59df5569bf908930929f34d5167bc01e2637a5eef6ba5ef515c2e6410cc17ce757542b4d0620bb08bd03fd38
+ data.tar.gz: e4456ad37bbf87b63a495d041f038ad6ece797b59c11d19cd7028a030aef452281105f2c53ab56bef3b4cf515f838366874b56838c87936d113c3cacf5570771
data/README.md CHANGED
@@ -34,6 +34,7 @@ The following custom reports are required for the specified models:
  - assignment_groups = "Assignment Group Export" (proserv_assignment_group_export_csv)
  - context_modules = "Professional Services Context Modules Report" (proserv_context_modules_csv)
  - context_module_items = "Professional Services Context Module Items Report" (proserv_context_module_items_csv)
+ - content_migrations = "Professional Services Content Migrations Report" (proserv_content_migrations_csv)

  ## Prerequisites

data/lib/canvas_sync/generators/templates/migrations/create_content_migrations.rb ADDED
@@ -0,0 +1,24 @@
+ # <%= autogenerated_migration_warning %>
+
+ class CreateContentMigrations < ActiveRecord::Migration[5.1]
+   def change
+     create_table :content_migrations do |t|
+       t.bigint :canvas_id
+       t.bigint :canvas_context_id
+       t.string :canvas_context_type
+       t.string :workflow_state
+       t.text :migration_settings
+       t.datetime :started_at
+       t.datetime :finished_at
+       t.float :progress
+       t.bigint :canvas_source_course_id
+       t.string :migration_type
+       t.bigint :canvas_child_subscription_id
+       t.bigint :canvas_root_account_id
+
+       t.timestamps
+     end
+
+     add_index :content_migrations, :canvas_id, unique: true
+   end
+ end
data/lib/canvas_sync/generators/templates/models/content_migration.rb ADDED
@@ -0,0 +1,10 @@
+ # <%= autogenerated_model_warning %>
+
+ class ContentMigration < ApplicationRecord
+   include CanvasSync::Record
+   include CanvasSync::Concerns::ApiSyncable
+
+   validates :canvas_id, uniqueness: true, presence: true
+   belongs_to :context, polymorphic: true, optional: true, primary_key: :canvas_id, foreign_key: :canvas_context_id, foreign_type: :canvas_context_type
+
+ end
data/lib/canvas_sync/importers/bulk_importer.rb CHANGED
@@ -65,7 +65,7 @@ module CanvasSync
  }

  row_buffer = nil
- if defined?(User) && klass == User && csv_column_names.include?(:user_id)
+ if defined?(User) && klass == User && csv_column_names.include?('user_id')
  row_buffer = UserRowBuffer.new(&row_buffer_out)
  else
  row_buffer = NullRowBuffer.new(&row_buffer_out)
data/lib/canvas_sync/job_batches/context_hash.rb CHANGED
@@ -95,6 +95,10 @@ module CanvasSync
  @flattened = flattened.with_indifferent_access
  end

+ def to_h
+ flatten
+ end
+
  private

  def get_parent_hash(bid)
data/lib/canvas_sync/job_batches/jobs/concurrent_batch_job.rb CHANGED
@@ -3,16 +3,19 @@ require_relative './base_job'
  module CanvasSync
  module JobBatches
  class ConcurrentBatchJob < BaseJob
+ def self.make_batch(sub_jobs, context: nil, &blk)
+ ManagedBatchJob.make_batch(
+ sub_jobs,
+ ordered: false,
+ concurrency: true,
+ context: context,
+ desc_prefix: 'ConcurrentBatchJob',
+ &blk
+ )
+ end
+
  def perform(sub_jobs, context: nil)
- Batch.new.tap do |b|
- b.description = "Concurrent Batch Root"
- b.context = context
- b.jobs do
- sub_jobs.each do |j|
- ChainBuilder.enqueue_job(j)
- end
- end
- end
+ self.class.make_batch(sub_jobs, context: context)
  end
  end
  end
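The new class-level make_batch lets callers build a concurrent batch inline instead of enqueuing (or perform_now-ing) the wrapper job; TermBatchesJob below uses exactly this. A minimal sketch of the call, where the job-hash format is whatever ChainBuilder.enqueue_job accepts and the context value is illustrative:

    # Builds the batch and enqueues each sub-job under a shared context,
    # without running a ConcurrentBatchJob instance itself.
    CanvasSync::JobBatches::ConcurrentBatchJob.make_batch(
      sub_jobs,                          # array of job hashes understood by ChainBuilder.enqueue_job
      context: { canvas_term_id: 42 }    # illustrative context
    )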
data/lib/canvas_sync/job_batches/jobs/managed_batch_job.rb CHANGED
@@ -3,8 +3,8 @@ require_relative './base_job'
  module CanvasSync
  module JobBatches
  class ManagedBatchJob < BaseJob
- def perform(sub_jobs, context: nil, ordered: true, concurrency: nil)
- man_batch_id = SecureRandom.urlsafe_base64(10)
+ def self.make_batch(sub_jobs, ordered: true, concurrency: nil, context: nil, desc_prefix: nil, &blk)
+ desc_prefix ||= ''

  if concurrency == 0 || concurrency == nil || concurrency == true
  concurrency = sub_jobs.count
@@ -14,38 +14,60 @@ module CanvasSync

  root_batch = Batch.new

- Batch.redis do |r|
- r.multi do
- r.hset("MNGBID-#{man_batch_id}", "root_bid", root_batch.bid)
- r.hset("MNGBID-#{man_batch_id}", "ordered", ordered)
- r.hset("MNGBID-#{man_batch_id}", "concurrency", concurrency)
- r.expire("MNGBID-#{man_batch_id}", Batch::BID_EXPIRE_TTL)
-
- mapped_sub_jobs = sub_jobs.each_with_index.map do |j, i|
- j['_mngbid_index_'] = i # This allows duplicate jobs when a Redis Set is used
- j = ActiveJob::Arguments.serialize([j])
- JSON.unparse(j)
- end
- if ordered
- r.rpush("MNGBID-#{man_batch_id}-jobs", mapped_sub_jobs)
- else
- r.sadd("MNGBID-#{man_batch_id}-jobs", mapped_sub_jobs)
+ if concurrency < sub_jobs.count
+ man_batch_id = SecureRandom.urlsafe_base64(10)
+
+ Batch.redis do |r|
+ r.multi do
+ r.hset("MNGBID-#{man_batch_id}", "root_bid", root_batch.bid)
+ r.hset("MNGBID-#{man_batch_id}", "ordered", ordered)
+ r.hset("MNGBID-#{man_batch_id}", "concurrency", concurrency)
+ r.expire("MNGBID-#{man_batch_id}", Batch::BID_EXPIRE_TTL)
+
+ mapped_sub_jobs = sub_jobs.each_with_index.map do |j, i|
+ j['_mngbid_index_'] = i # This allows duplicate jobs when a Redis Set is used
+ j = ActiveJob::Arguments.serialize([j])
+ JSON.unparse(j)
+ end
+ if ordered
+ r.rpush("MNGBID-#{man_batch_id}-jobs", mapped_sub_jobs)
+ else
+ r.sadd("MNGBID-#{man_batch_id}-jobs", mapped_sub_jobs)
+ end
+ r.expire("MNGBID-#{man_batch_id}-jobs", Batch::BID_EXPIRE_TTL)
  end
- r.expire("MNGBID-#{man_batch_id}-jobs", Batch::BID_EXPIRE_TTL)
  end
+
+ root_batch.allow_context_changes = (concurrency == 1)
+ root_batch.on(:success, "#{to_s}.cleanup_redis", managed_batch_id: man_batch_id)
+
+ desc_prefix = "MGD(#{man_batch_id}): #{desc_prefix}"
  end

- root_batch.description = "Managed Batch Root (#{man_batch_id})"
- root_batch.allow_context_changes = (concurrency == 1)
  root_batch.context = context
- root_batch.on(:success, "#{self.class.to_s}.cleanup_redis", managed_batch_id: man_batch_id)
- root_batch.jobs {}

- concurrency.times do
- self.class.perform_next_sequence_job(man_batch_id)
+ blk.call(ManagedBatchProxy.new(root_batch)) if blk.present?
+
+ root_batch.description = "#{desc_prefix}: #{root_batch.description || 'Root'}"
+
+ if concurrency < sub_jobs.count
+ root_batch.jobs {}
+ concurrency.times do
+ perform_next_sequence_job(man_batch_id)
+ end
+ else
+ root_batch.jobs do
+ sub_jobs.each do |j|
+ ChainBuilder.enqueue_job(j)
+ end
+ end
  end
  end

+ def perform(sub_jobs, context: nil, ordered: true, concurrency: nil)
+ self.class.make_batch(sub_jobs, ordered: ordered, concurrency: concurrency, context: context)
+ end
+
  def self.cleanup_redis(status, options)
  man_batch_id = options['managed_batch_id']
  Batch.redis do |r|
@@ -94,6 +116,21 @@ module CanvasSync
  end
  end
  end
+
+ class ManagedBatchProxy
+ def initialize(real_batch)
+ @real_batch = real_batch
+ end
+
+ delegate_missing_to :real_batch
+
+ def jobs
+ raise "Managed Batches do not support calling .jobs directly!"
+ end
+
+ private
+ attr_reader :real_batch
+ end
  end
  end
  end
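Note that make_batch yields a ManagedBatchProxy rather than the raw Batch, so callers can set the description, context, and callbacks before anything is enqueued, while .jobs is deliberately blocked because the managed batch enqueues the sub-jobs itself (all at once, or throttled via perform_next_sequence_job when concurrency is below the job count). A sketch of the block form, mirroring the TermBatchesJob usage later in this diff (the description, context, and callback target are illustrative):

    CanvasSync::JobBatches::ManagedBatchJob.make_batch(sub_jobs, ordered: false, concurrency: true) do |b|
      b.description = "TermBatchJob(42) Root"      # prefixed with "MGD(<id>): " only when throttled
      b.context     = { canvas_term_id: 42 }
      b.on(:success, "SomeJob.batch_finished")     # illustrative callback target
      # b.jobs { ... } would raise -- the managed batch controls job enqueueing
    end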
data/lib/canvas_sync/job_batches/jobs/serial_batch_job.rb CHANGED
@@ -3,14 +3,20 @@ require_relative './base_job'
  module CanvasSync
  module JobBatches
  class SerialBatchJob < BaseJob
- def perform(sub_jobs, context: nil)
- ManagedBatchJob.new.perform(
+ def self.make_batch(sub_jobs, context: nil, &blk)
+ ManagedBatchJob.make_batch(
  sub_jobs,
- context: context,
  ordered: true,
  concurrency: false,
+ context: context,
+ desc_prefix: 'SerialBatchJob',
+ &blk
  )
  end
+
+ def perform(sub_jobs, context: nil)
+ self.class.make_batch(sub_jobs, context: context)
+ end
  end
  end
  end
data/lib/canvas_sync/job_batches/sidekiq/web/helpers.rb CHANGED
@@ -12,7 +12,7 @@ module CanvasSync::JobBatches::Sidekiq
  end

  def drain_zset(key)
- items, _ = Sidekiq.redis do |r|
+ items, _ = Batch.redis do |r|
  r.multi do
  r.zrange(key, 0, -1)
  r.zremrangebyrank(key, 0, -1)
data/lib/canvas_sync/jobs/begin_sync_chain_job.rb CHANGED
@@ -31,6 +31,8 @@ module CanvasSync
  status: 'processing',
  )

+ globals[:batch_genre] = genre
+ globals[:batch_start_time] = sync_batch.started_at.iso8601
  globals[:sync_batch_id] = sync_batch.id

  JobBatches::Batch.new.tap do |b|
data/lib/canvas_sync/jobs/sync_content_migrations_job.rb ADDED
@@ -0,0 +1,20 @@
+ module CanvasSync
+   module Jobs
+     class SyncContentMigrationsJob < ReportStarter
+       # Syncs ContentMigrations
+       #
+       # Starts a report processor for the content migrations report
+       # (the proserv_content_migrations_csv report must be enabled)
+       #
+       # @param options [Hash]
+       def perform(options)
+         super(
+           "proserv_content_migrations_csv",
+           merge_report_params(options),
+           CanvasSync::Processors::ContentMigrationsProcessor.to_s,
+           {},
+         )
+       end
+     end
+   end
+ end
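As with the other report jobs, SyncContentMigrationsJob is wired into a sync through the model list (see the content_migrations entries added to lib/canvas_sync.rb below) and requires the proserv_content_migrations_csv custom report to be enabled on the account. A hedged sketch of opting into the new model, assuming the gem's usual default_provisioning_report_chain entry point:

    # Assumes CanvasSync.default_provisioning_report_chain accepts the list of model names;
    # 'content_migrations' maps to SyncContentMigrationsJob via the job mapping added below.
    CanvasSync.default_provisioning_report_chain(
      %w[terms courses users content_migrations]
    ).process!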
data/lib/canvas_sync/jobs/term_batches_job.rb CHANGED
@@ -6,15 +6,42 @@ module CanvasSync
  context = options[:context] || {}
  if options[:term_scope]
  Term.send(options[:term_scope]).find_each.map do |term|
- local_context = context.merge(canvas_term_id: get_term_id(term))
- JobBatches::ConcurrentBatchJob.perform_now(jobs, context: local_context)
+ term_id = get_term_id(term)
+ local_context = context.merge(canvas_term_id: term_id)
+
+ # Override the delta-syncing date if:
+ # 1. the Term hasn't been synced before or
+ # 2. the Term underwent a period of not syncing
+ term_last_sync = CanvasSync.redis.get(self.class.last_sync_key(term_id))
+ if batch_context[:updated_after]
+ if !term_last_sync.present? || batch_context[:updated_after] > term_last_sync
+ local_context[:updated_after] = term_last_sync.presence
+ end
+ end
+
+ JobBatches::ManagedBatchJob.make_batch(jobs, ordered: false, concurrency: true) do |b|
+ b.description = "TermBatchJob(#{term.canvas_id}) Root"
+ b.context = local_context
+ b.on(:success, "#{self.class.to_s}.batch_finished")
+ end
  end
  else
- JobBatches::ConcurrentBatchJob.perform_now(jobs, context: context)
+ JobBatches::ConcurrentBatchJob.make_batch(jobs, context: context)
  end
  end
  end

+ def self.batch_finished(status, opts)
+ ctx = JobBatches::Batch.current_context
+ term_id = ctx[:canvas_term_id]
+ CanvasSync.redis.set(last_sync_key(term_id), ctx[:batch_start_time])
+ end
+
+ def self.last_sync_key(term_id)
+ ctx = JobBatches::Batch.current_context
+ "#{CanvasSync.redis_prefix}:#{ctx[:batch_genre]}:#{term_id}:last_sync"
+ end
+
  def get_term_id(term)
  term.try(:canvas_id) || term.canvas_term_id
  end
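The per-term bookkeeping works off the two batch globals set by BeginSyncChainJob above (batch_genre and batch_start_time): when a term's batch succeeds, batch_finished stores the batch start time under a tenant-aware Redis key, and on the next run the term's updated_after is relaxed (or cleared) if the term has never synced or missed a sync window. Roughly, the stored key/value look like this (all values illustrative):

    # Key built by TermBatchesJob.last_sync_key: "<redis_prefix>:<batch_genre>:<term_id>:last_sync"
    key   = "myschool:cs:provisioning:3381:last_sync"   # illustrative tenant, genre, and term id
    value = "2022-03-08T07:26:43Z"                      # batch_start_time written by batch_finished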
data/lib/canvas_sync/processors/content_migrations_processor.rb ADDED
@@ -0,0 +1,19 @@
+ require_relative "./report_processor"
+
+ module CanvasSync
+   module Processors
+     # Processes a content migrations report using the bulk importer.
+     #
+     # @param report_file_path [String]
+     # @param options [Hash]
+     class ContentMigrationsProcessor < ReportProcessor
+       def self.process(report_file_path, _options, report_id)
+         new(report_file_path, _options)
+       end
+
+       def initialize(report_file_path, options)
+         do_bulk_import(report_file_path, ContentMigration, options: options)
+       end
+     end
+   end
+ end
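The report starter invokes the processor with the downloaded CSV path, the job options, and the report id; the processor simply bulk-imports the rows into ContentMigration using the content_migrations mapping added to model_mappings.yml below. The new spec exercises it the same way:

    # Matches the call in content_migrations_processor_spec.rb; inserts two
    # ContentMigration rows from the fixture CSV.
    CanvasSync::Processors::ContentMigrationsProcessor.process(
      "spec/support/fixtures/reports/content_migrations.csv", {}, 1
    )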
data/lib/canvas_sync/processors/model_mappings.yml CHANGED
@@ -469,3 +469,43 @@ grading_period_groups:
      status:
        database_column_name: workflow_state
        type: string
+
+ content_migrations:
+   conflict_target: canvas_migration_id
+   report_columns:
+     canvas_migration_id:
+       database_column_name: canvas_id
+       type: integer
+     canvas_context_id:
+       database_column_name: canvas_context_id
+       type: integer
+     canvas_context_type:
+       database_column_name: canvas_context_type
+       type: string
+     workflow_state:
+       database_column_name: workflow_state
+       type: string
+     migration_settings:
+       database_column_name: migration_settings
+       type: text
+     started_at:
+       database_column_name: started_at
+       type: datetime
+     finished_at:
+       database_column_name: finished_at
+       type: datetime
+     progress:
+       database_column_name: progress
+       type: float
+     canvas_source_course_id:
+       database_column_name: canvas_source_course_id
+       type: integer
+     migration_type:
+       database_column_name: migration_type
+       type: string
+     canvas_child_subscription_id:
+       database_column_name: canvas_child_subscription_id
+       type: integer
+     canvas_root_account_id:
+       database_column_name: canvas_root_account_id
+       type: integer
data/lib/canvas_sync/version.rb CHANGED
@@ -1,3 +1,3 @@
  module CanvasSync
- VERSION = "0.17.26.beta1".freeze
+ VERSION = "0.17.28".freeze
  end
data/lib/canvas_sync.rb CHANGED
@@ -44,6 +44,7 @@ module CanvasSync
  user_observers
  grading_periods
  grading_period_groups
+ content_migrations
  ].freeze

  SUPPORTED_TERM_SCOPE_MODELS = %w[
@@ -150,6 +151,7 @@ module CanvasSync
  assignment_groups: CanvasSync::Jobs::SyncAssignmentGroupsJob,
  context_modules: CanvasSync::Jobs::SyncContextModulesJob,
  context_module_items: CanvasSync::Jobs::SyncContextModuleItemsJob,
+ content_migrations: CanvasSync::Jobs::SyncContentMigrationsJob,
  }.with_indifferent_access

  root_chain = base_canvas_sync_chain(**kwargs, globals: options[:global] || kwargs[:globals])
@@ -320,5 +322,15 @@ module CanvasSync
  return if invalid.empty?
  raise "Invalid live event(s) specified: #{invalid.join(', ')}. Only #{SUPPORTED_LIVE_EVENTS.join(', ')} are supported."
  end
+
+ def redis(*args, &blk)
+ JobBatches::Batch.redis(*args, &blk)
+ end
+
+ def redis_prefix
+ pfx = "cs"
+ pfx = "#{Apartment::Tenant.current}:#{pfx}" if defined?(Apartment)
+ pfx
+ end
  end
  end
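CanvasSync.redis simply delegates to the job-batch Redis connection, and redis_prefix namespaces keys per Apartment tenant when Apartment is loaded; TermBatchesJob.last_sync_key above builds on both. A small usage sketch (the key name is illustrative):

    CanvasSync.redis do |r|
      # => e.g. "2022-03-08T07:26:43Z", or nil if the term has never finished a sync
      r.get("#{CanvasSync.redis_prefix}:provisioning:3381:last_sync")
    end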
data/spec/canvas_sync/jobs/sync_content_migrations_job_spec.rb ADDED
@@ -0,0 +1,30 @@
+ require "spec_helper"
+
+ RSpec.describe CanvasSync::Jobs::SyncContentMigrationsJob do
+   describe "#perform" do
+     context "no parameters is specified" do
+       it "enqueues a ReportStarter for the proserv_content_migrations_csv" do
+         expect_any_instance_of(Bearcat::Client).to receive(:start_report)
+           .with("self", "proserv_content_migrations_csv", { parameters: { } })
+           .and_return("id" => 1)
+
+         expect(CanvasSync::Jobs::ReportChecker).to receive(:set).and_call_original
+
+         CanvasSync::Jobs::SyncContentMigrationsJob.perform_now({})
+       end
+     end
+
+     context "updated_after parameters is specified" do
+       it "enqueues a ReportStarter for the proserv_content_migrations_csv and get data from given date" do
+         expect_any_instance_of(Bearcat::Client).to receive(:start_report)
+           .with("self", "proserv_content_migrations_csv", { parameters: { updated_after: 6.hours.ago.to_s } })
+           .and_return("id" => 1)
+
+         expect(CanvasSync::Jobs::ReportChecker).to receive(:set).and_call_original
+
+         set_batch_context(updated_after: 6.hours.ago.to_s)
+         CanvasSync::Jobs::SyncContentMigrationsJob.perform_now({})
+       end
+     end
+   end
+ end
data/spec/canvas_sync/processors/content_migrations_processor_spec.rb ADDED
@@ -0,0 +1,13 @@
+ require "spec_helper"
+
+ RSpec.describe CanvasSync::Processors::ContentMigrationsProcessor do
+   let(:subject) { CanvasSync::Processors::ContentMigrationsProcessor }
+
+   describe "#process" do
+     it "inserts content migrations" do
+       expect {
+         subject.process("spec/support/fixtures/reports/content_migrations.csv", {}, 1)
+       }.to change { ContentMigration.count }.by(2)
+     end
+   end
+ end
data/spec/canvas_sync/processors/provisioning_report_processor_spec.rb CHANGED
@@ -10,6 +10,11 @@ RSpec.describe CanvasSync::Processors::ProvisioningReportProcessor do
  }.to change { User.count }.by(2)
  end

+ it 'uses a User row with a sis_id' do
+ subject.process('spec/support/fixtures/reports/users.csv', { models: ['users'] }, 1)
+ expect(User.find_by(canvas_id: 2).sis_id).to eq 'sis_id_2'
+ end
+
  it 'processes courses' do
  expect {
  subject.process('spec/support/fixtures/reports/courses.csv', { models: ['courses'] }, 1)
data/spec/dummy/app/models/content_migration.rb ADDED
@@ -0,0 +1,16 @@
+ # #
+ # AUTO GENERATED MODEL
+ # This model was auto generated by the CanvasSync Gem.
+ # You can customize it as needed, but make sure you test
+ # any changes you make to the auto generated methods.
+ #
+
+
+ class ContentMigration < ApplicationRecord
+   include CanvasSync::Record
+   include CanvasSync::Concerns::ApiSyncable
+
+   validates :canvas_id, uniqueness: true, presence: true
+   belongs_to :context, polymorphic: true, optional: true, primary_key: :canvas_id, foreign_key: :canvas_context_id, foreign_type: :canvas_context_type
+
+ end
data/spec/dummy/db/migrate/20220308072643_create_content_migrations.rb ADDED
@@ -0,0 +1,30 @@
+ # #
+ # AUTO GENERATED MIGRATION
+ # This migration was auto generated by the CanvasSync Gem.
+ # You can add new columns to this table, but removing or
+ # re-naming ones created here may break Canvas Syncing.
+ #
+
+
+ class CreateContentMigrations < ActiveRecord::Migration[5.1]
+   def change
+     create_table :content_migrations do |t|
+       t.bigint :canvas_id
+       t.bigint :canvas_context_id
+       t.string :canvas_context_type
+       t.string :workflow_state
+       t.text :migration_settings
+       t.datetime :started_at
+       t.datetime :finished_at
+       t.float :progress
+       t.bigint :canvas_source_course_id
+       t.string :migration_type
+       t.bigint :canvas_child_subscription_id
+       t.bigint :canvas_root_account_id
+
+       t.timestamps
+     end
+
+     add_index :content_migrations, :canvas_id, unique: true
+   end
+ end
data/spec/dummy/db/schema.rb CHANGED
@@ -10,7 +10,7 @@
  #
  # It's strongly recommended that you check this file into your version control system.

- ActiveRecord::Schema.define(version: 2021_10_01_184920) do
+ ActiveRecord::Schema.define(version: 2022_03_08_072643) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
@@ -110,6 +110,24 @@ ActiveRecord::Schema.define(version: 2021_10_01_184920) do
  t.string "batch_bid"
  end

+ create_table "content_migrations", force: :cascade do |t|
+ t.bigint "canvas_id"
+ t.bigint "canvas_context_id"
+ t.string "canvas_context_type"
+ t.string "workflow_state"
+ t.text "migration_settings"
+ t.datetime "started_at"
+ t.datetime "finished_at"
+ t.float "progress"
+ t.bigint "canvas_source_course_id"
+ t.string "migration_type"
+ t.bigint "canvas_child_subscription_id"
+ t.bigint "canvas_root_account_id"
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["canvas_id"], name: "index_content_migrations_on_canvas_id", unique: true
+ end
+
  create_table "context_module_items", force: :cascade do |t|
  t.bigint "canvas_id"
  t.bigint "canvas_context_module_id"
data/spec/support/fixtures/reports/content_migrations.csv ADDED
@@ -0,0 +1,3 @@
+ canvas_migration_id,canvas_context_id,canvas_context_type,workflow_state,migration_settings,started_at,finished_at,progress,canvas_source_course_id,migration_type,canvas_child_subscription_id,canvas_root_account_id
+ 101,1250,Course,imported,,2022-02-18 10:10:40,2022-02-18 10:11:40,100,1200,course_copy_importer,,1
+ 102,1251,Course,imported,,2022-02-18 10:10:40,2022-02-18 10:11:40,100,1120,master_course_import,3,1
data/spec/support/fixtures/reports/users.csv CHANGED
@@ -1,4 +1,5 @@
  canvas_user_id,user_id,integration_id,authentication_provider_id,login_id,first_name,last_name,full_name,sortable_name,short_name,email,status,created_by_sis
  1,sis_id_1,,123,spiderdude1337,Patches,the Spider,Patches the Spider,"the Spider, Patches",,poorbastard@yharnam.com,active,FALSE
- 2,sis_id_2,,1234,retireddude109,Hunter,Djura,Hunter Djura,"Djura, Hunter",,dreamer@yharnam.com,active,FALSE
- 2,sis_id_2,,1234,retireddude110,Hunter,Djura,Hunter Djura,"Djura, Hunter",,dreamer@yharnam.com,active,FALSE
+ 2,,,1234,retireddude109,Hunter,Djura,Hunter Djura,"Djura, Hunter",,dreamer@yharnam.com,active,FALSE
+ 2,sis_id_2,,1234,retireddude110,Hunter,Djura,Hunter Djura,"Djura, Hunter",,dreamer@yharnam.com,active,FALSE
+ 2,,,1234,retireddude110,Hunter,Djura,Hunter Djura,"Djura, Hunter",,dreamer@yharnam.com,active,FALSE