canvas_sync 0.16.4 → 0.16.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/db/migrate/20170915210836_create_canvas_sync_job_log.rb +12 -31
- data/db/migrate/20180725155729_add_job_id_to_canvas_sync_job_logs.rb +4 -13
- data/db/migrate/20190916154829_add_fork_count_to_canvas_sync_job_logs.rb +3 -11
- data/lib/canvas_sync.rb +7 -27
- data/lib/canvas_sync/importers/bulk_importer.rb +7 -4
- data/lib/canvas_sync/job.rb +8 -2
- data/lib/canvas_sync/job_chain.rb +46 -1
- data/lib/canvas_sync/jobs/fork_gather.rb +27 -12
- data/lib/canvas_sync/jobs/report_starter.rb +1 -1
- data/lib/canvas_sync/jobs/sync_provisioning_report_job.rb +4 -4
- data/lib/canvas_sync/jobs/sync_simple_table_job.rb +4 -4
- data/lib/canvas_sync/misc_helper.rb +15 -0
- data/lib/canvas_sync/processors/assignment_groups_processor.rb +3 -2
- data/lib/canvas_sync/processors/assignments_processor.rb +3 -2
- data/lib/canvas_sync/processors/context_module_items_processor.rb +3 -2
- data/lib/canvas_sync/processors/context_modules_processor.rb +3 -2
- data/lib/canvas_sync/processors/normal_processor.rb +2 -1
- data/lib/canvas_sync/processors/provisioning_report_processor.rb +10 -2
- data/lib/canvas_sync/processors/submissions_processor.rb +3 -2
- data/lib/canvas_sync/version.rb +1 -1
- data/spec/canvas_sync/jobs/fork_gather_spec.rb +9 -9
- data/spec/canvas_sync/jobs/sync_provisioning_report_job_spec.rb +2 -2
- data/spec/canvas_sync/jobs/sync_simple_table_job_spec.rb +1 -1
- data/spec/dummy/app/models/account.rb +3 -0
- data/spec/dummy/app/models/pseudonym.rb +14 -0
- data/spec/dummy/app/models/submission.rb +1 -0
- data/spec/dummy/app/models/user.rb +1 -0
- data/spec/dummy/db/migrate/20201016181346_create_pseudonyms.rb +24 -0
- data/spec/dummy/db/schema.rb +16 -4
- data/spec/dummy/db/test.sqlite3 +0 -0
- data/spec/dummy/log/development.log +1248 -0
- data/spec/dummy/log/test.log +43258 -0
- data/spec/support/fixtures/reports/provisioning_csv_unzipped/courses.csv +3 -0
- data/spec/support/fixtures/reports/provisioning_csv_unzipped/users.csv +4 -0
- metadata +22 -8
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
-  metadata.gz:
-  data.tar.gz:
+SHA256:
+  metadata.gz: 9be3b81c22d5b45a02d88fa29b5d57afa152ee7dec6ce0ecf3d81c041507c619
+  data.tar.gz: 0c42577064cbf018f8fd8d5f665e1adfbc542e38e704a675e735491543b3ab40
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9ea295b8cc43aa6b27bf6067cf684d024430a9fd85608bdc0a243113920b1b65b72c257e6a99644afb5d20455ecf2265949ce941a9ae1d9bea160ab1bee803ba
+  data.tar.gz: e63b7f557e792cb850a5799753f9effe436ad391bc6e4442768683b8c4d7c8eea1d1e9bbd6419127f2f4401f6cce6129019fb73192496f7efc1d731d7ef48847
data/db/migrate/20170915210836_create_canvas_sync_job_log.rb
CHANGED
@@ -1,35 +1,16 @@
-        t.text :job_arguments
+class CreateCanvasSyncJobLog < CanvasSync::MiscHelper::MigrationClass
+  def change
+    create_table :canvas_sync_job_logs do |t|
+      t.datetime :started_at
+      t.datetime :completed_at
+      t.string :exception
+      t.text :backtrace
+      t.string :job_class
+      t.string :status
+      t.text :metadata
+      t.text :job_arguments

-      end
-    end
-  end
-else
-  class CreateCanvasSyncJobLog < ActiveRecord::Migration
-    def change
-      create_table :canvas_sync_job_logs do |t|
-        t.datetime :started_at
-        t.datetime :completed_at
-        t.string :exception
-        t.text :backtrace
-        t.string :job_class
-        t.string :status
-        t.text :metadata
-        t.text :job_arguments
-        t.timestamps
-      end
+      t.timestamps
     end
   end
 end
data/db/migrate/20180725155729_add_job_id_to_canvas_sync_job_logs.rb
CHANGED
@@ -1,15 +1,6 @@
-      add_index :canvas_sync_job_logs, :job_id
-    end
-  end
-else
-  class AddJobIdToCanvasSyncJobLogs < ActiveRecord::Migration
-    def change
-      add_column :canvas_sync_job_logs, :job_id, :string
-      add_index :canvas_sync_job_logs, :job_id
-    end
+class AddJobIdToCanvasSyncJobLogs < CanvasSync::MiscHelper::MigrationClass
+  def change
+    add_column :canvas_sync_job_logs, :job_id, :string
+    add_index :canvas_sync_job_logs, :job_id
   end
 end
data/db/migrate/20190916154829_add_fork_count_to_canvas_sync_job_logs.rb
CHANGED
@@ -1,13 +1,5 @@
-      add_column :canvas_sync_job_logs, :fork_count, :integer
-    end
-  end
-else
-  class AddForkCountToCanvasSyncJobLogs < ActiveRecord::Migration
-    def change
-      add_column :canvas_sync_job_logs, :fork_count, :integer
-    end
+class AddForkCountToCanvasSyncJobLogs < CanvasSync::MiscHelper::MigrationClass
+  def change
+    add_column :canvas_sync_job_logs, :fork_count, :integer
   end
 end
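All three migrations now inherit from CanvasSync::MiscHelper::MigrationClass, which is introduced later in this diff (data/lib/canvas_sync/misc_helper.rb). A minimal sketch of why that removes the old Rails-version conditional; the migration name and column here are hypothetical:

    # MigrationClass resolves once, based on the running Rails version:
    #   Rails < 5.0  -> ActiveRecord::Migration
    #   Rails >= 5.0 -> ActiveRecord::Migration[4.2]
    class AddHypotheticalColumnToCanvasSyncJobLogs < CanvasSync::MiscHelper::MigrationClass
      def change
        add_column :canvas_sync_job_logs, :hypothetical_column, :string
      end
    end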
data/lib/canvas_sync.rb
CHANGED
@@ -2,6 +2,7 @@ require "bearcat"

 require "canvas_sync/version"
 require "canvas_sync/engine"
+require "canvas_sync/misc_helper"
 require "canvas_sync/class_callback_executor"
 require "canvas_sync/job"
 require "canvas_sync/job_chain"
@@ -99,6 +100,7 @@ module CanvasSync
       invoke_next(job_chain)
     end

+    # @deprecated
     def duplicate_chain(job_chain)
       Marshal.load(Marshal.dump(job_chain))
     end
@@ -109,35 +111,13 @@
     #
     # @param job_chain [Hash] A chain of jobs to execute
     def invoke_next(job_chain, extra_options: {})
-      job_chain = job_chain
-
-      return if job_chain[:jobs].empty?
-
-      # Make sure all job classes are serialized as strings
-      job_chain[:jobs].each { |job| job[:job] = job[:job].to_s }
-
-      duped_job_chain = Marshal.load(Marshal.dump(job_chain))
-      jobs = duped_job_chain[:jobs]
-      next_job = jobs.shift
-      next_job_class = next_job[:job].constantize
-      next_options = next_job[:options] || {}
-      next_options.merge!(extra_options)
-      next_job_class.perform_later(duped_job_chain, next_options)
+      job_chain = JobChain.new(job_chain) unless job_chain.is_a?(JobChain)
+      job_chain.perform_next(extra_options)
     end

-    def fork(job_log, job_chain, keys: [])
-      job_chain = job_chain
-
-      duped_job_chain = Marshal.load(Marshal.dump(job_chain))
-      duped_job_chain[:global_options][:fork_path] ||= []
-      duped_job_chain[:global_options][:fork_keys] ||= []
-      duped_job_chain[:global_options][:fork_path] << job_log.job_id
-      duped_job_chain[:global_options][:fork_keys] << keys.map(&:to_s)
-      duped_job_chain[:global_options][:on_failure] ||= 'CanvasSync::Jobs::ForkGather.handle_branch_error'
-      sub_items = yield duped_job_chain
-      sub_count = sub_items.respond_to?(:count) ? sub_items.count : sub_items
-      job_log.fork_count = sub_count
-      sub_items
+    def fork(job_log, job_chain, keys: [], &blk)
+      job_chain = JobChain.new(job_chain) unless job_chain.is_a?(JobChain)
+      job_chain.fork(job_log, keys: keys, &blk)
     end

     # Given a Model or Relation, scope it down to items that should be synced
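With this refactor, CanvasSync.invoke_next and CanvasSync.fork only wrap the incoming Hash in a JobChain and delegate to it. A rough usage sketch, assuming a chain assembled by the host application (the job class names are illustrative, not part of the gem):

    chain = {
      jobs: [
        { job: "MyApp::PrepareSyncJob", options: {} },   # hypothetical job
        { job: "MyApp::FinalizeSyncJob", options: {} },  # hypothetical job
      ],
      global_options: { account_id: "self" },
    }

    # Equivalent to CanvasSync::JobChain.new(chain).perform_next({}):
    # the first entry is shifted off and enqueued with the rest of the chain.
    CanvasSync.invoke_next(chain)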
data/lib/canvas_sync/importers/bulk_importer.rb
CHANGED
@@ -64,13 +64,12 @@ module CanvasSync
         columns = columns.dup

         update_conditions = {
-          condition: condition_sql(klass, columns),
+          condition: condition_sql(klass, columns, import_args[:sync_start_time]),
           columns: columns
         }
         update_conditions[:conflict_target] = conflict_target if conflict_target

         options = { validate: false, on_duplicate_key_update: update_conditions }.merge(import_args)
-
         options.delete(:on_duplicate_key_update) if options.key?(:on_duplicate_key_ignore)
         klass.import(columns, rows, options)
       end
@@ -85,10 +84,14 @@
       # started_at = Time.now
       # run_the_users_sync!
       # changed = User.where("updated_at >= ?", started_at)
-      def self.condition_sql(klass, columns)
+      def self.condition_sql(klass, columns, report_start)
        columns_str = columns.map { |c| "#{klass.quoted_table_name}.#{c}" }.join(", ")
        excluded_str = columns.map { |c| "EXCLUDED.#{c}" }.join(", ")
-        "(#{columns_str}) IS DISTINCT FROM (#{excluded_str})"
+        condition_sql = "(#{columns_str}) IS DISTINCT FROM (#{excluded_str})"
+        if klass.column_names.include?("updated_at") && report_start
+          condition_sql += " AND #{klass.quoted_table_name}.updated_at < '#{report_start}'"
+        end
+        condition_sql
       end

       def self.batch_size
data/lib/canvas_sync/job.rb
CHANGED
@@ -3,6 +3,8 @@ require "active_job"
 module CanvasSync
   # Inherit from this class to build a Job that will log to the canvas_sync_job_logs table
   class Job < ActiveJob::Base
+    attr_reader :job_chain, :job_log
+
     before_enqueue do |job|
       create_job_log(job)
     end
@@ -13,7 +15,11 @@
       @job_log.started_at = Time.now
       @job_log.save

+      if job.arguments[0].is_a?(Hash) && job.arguments[0].include?(:jobs)
+        # @job_chain = JobChain.new(job.arguments[0])
+        @job_chain = job.arguments[0]
+        job.arguments[0] = @job_chain
+      end

       begin
         block.call
@@ -22,7 +28,7 @@
         @job_log.exception = "#{e.class}: #{e.message}"
         @job_log.backtrace = e.backtrace.join('\n')
         @job_log.status = JobLog::ERROR_STATUS
-        if @job_chain&.
+        if @job_chain&.dig(:global_options, :on_failure)&.present?
           begin
             class_name, method = @job_chain[:global_options][:on_failure].split('.')
             klass = class_name.constantize
data/lib/canvas_sync/job_chain.rb
CHANGED
@@ -45,7 +45,52 @@ module CanvasSync
     end

     def process!(extra_options: {})
+      perform_next(extra_options)
+    end
+
+    def duplicate
+      self.class.new(Marshal.load(Marshal.dump(chain_data)))
+    end
+
+    def normalize!
+      @chain_data[:global_options] ||= {}
+    end
+
+    def serialize
+      normalize!
+      chain_data
+    end
+
+    def perform_next(extra_options = {})
+      return if jobs.empty?
+
+      # Make sure all job classes are serialized as strings
+      jobs.each { |job| job[:job] = job[:job].to_s }
+
+      duped_job_chain = duplicate
+
+      jobs = duped_job_chain[:jobs]
+      next_job = jobs.shift
+      next_job_class = next_job[:job].constantize
+      next_options = next_job[:options] || {}
+      next_options.merge!(extra_options)
+      next_job_class.perform_later(duped_job_chain.serialize, next_options)
+    end
+
+    def fork(job_log, keys: [])
+      duped_job_chain = duplicate
+      duped_job_chain[:fork_state] ||= {}
+      duped_job_chain[:fork_state][:forking_path] ||= []
+      duped_job_chain[:fork_state][:pre_fork_globals] ||= []
+
+      duped_job_chain[:fork_state][:forking_path] << job_log.job_id
+      duped_job_chain[:fork_state][:pre_fork_globals] << global_options
+      # duped_job_chain[:global_options][:on_failure] ||= ['CanvasSync::Jobs::ForkGather.handle_branch_error']
+
+      sub_items = yield duped_job_chain
+      sub_count = sub_items.respond_to?(:count) ? sub_items.count : sub_items
+      job_log.update!(fork_count: sub_count)
+      sub_items
     end

     private
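The chain bookkeeping that used to live in CanvasSync.invoke_next/fork now lives on JobChain itself. A rough sketch of the object's surface, using an illustrative chain (the job class name is hypothetical):

    chain = CanvasSync::JobChain.new({
      jobs: [{ job: "MyApp::FirstSyncJob", options: {} }],
      global_options: { account_id: "self" },
    })

    chain.serialize      # normalize! the data and return the underlying Hash
    chain.duplicate      # deep copy via Marshal, used by #fork to branch safely
    chain.perform_next   # shift the first job off and perform_later it with the rest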
data/lib/canvas_sync/jobs/fork_gather.rb
CHANGED
@@ -4,25 +4,40 @@ module CanvasSync
       def perform(job_chain, options)
         forked_job = self.class.forked_at_job(job_chain)

-        forked_job.
-        forked_job.
+        while true
+          if forked_job.present?
+            forked_job.with_lock do
+              forked_job.fork_count -= 1
+              forked_job.save!
+            end
+
+            if forked_job.fork_count <= 0
+              pfgs = job_chain[:fork_state][:pre_fork_globals].pop
+              job_chain[:global_options] = pfgs

+              if options[:gather_all]
+                # If we want to gather all, repeat for the next level fork
+                forked_job = self.class.forked_at_job(job_chain)
+              else
+                forked_job = nil
+              end
+            else
+              # If a fork was found and it isn't complete, break the loop before continuing the chain
+              break
             end
+
+            # Repeat this logic for [if gather_all] the next fork up, or [if not gather_all] nil
+            next
           end
+
+          # If there is no current fork (either not in a fork, or all forks were closed), continue the chain
           CanvasSync.invoke_next(job_chain)
+          break
         end
       end

       def self.handle_branch_error(e, job_chain:, skip_invoke: false, **kwargs)
-        return nil unless job_chain&.
+        return nil unless job_chain&.dig(:fork_state, :forking_path).present?

         duped_chain = CanvasSync.duplicate_chain(job_chain)
         job_list = duped_chain[:jobs]
@@ -46,7 +61,7 @@
       protected

       def self.forked_at_job(job_chain)
-        fork_item = (job_chain
+        fork_item = (job_chain.dig(:fork_state, :forking_path) || []).pop

         if fork_item.present?
           CanvasSync::JobLog.find_by(job_id: fork_item)
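Each forked branch ends in ForkGather: it decrements the forking job's fork_count under a row lock, and only the branch that reaches zero restores the pre-fork global options and resumes the chain. A toy sketch of that counting logic (plain Ruby, no ActiveRecord):

    fork_count = 3                 # parent job_log.fork_count after forking 3 branches
    finish_branch = lambda do
      fork_count -= 1              # done inside forked_job.with_lock in the real job
      fork_count <= 0              # true only for the last branch to finish
    end

    finish_branch.call   # => false, chain stays paused
    finish_branch.call   # => false
    finish_branch.call   # => true, this branch restores globals and invokes the next job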
data/lib/canvas_sync/jobs/report_starter.rb
CHANGED
@@ -12,7 +12,7 @@
       # @return [nil]
       def perform(job_chain, report_name, report_params, processor, options, allow_redownloads: false)
         account_id = options[:account_id] || job_chain[:global_options][:account_id] || "self"
+        options[:sync_start_time] = DateTime.now.utc.iso8601
         report_id = if allow_redownloads
           get_cached_report(job_chain, account_id, report_name, report_params)
         else
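The timestamp recorded here is what eventually reaches the importer: it rides along in the report options, and the processors below hand those options to BulkImporter as import_args, where sync_start_time feeds the new updated_at guard in condition_sql. Sketch of the value being stored (ActiveSupport provides DateTime#utc here):

    options[:sync_start_time] = DateTime.now.utc.iso8601  # e.g. "2020-10-16T18:13:46Z"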
data/lib/canvas_sync/jobs/sync_provisioning_report_job.rb
CHANGED
@@ -8,15 +8,15 @@
       # models to sync.
       def perform(job_chain, options)
         if options[:term_scope]
-          sub_reports = CanvasSync.fork(@job_log, job_chain, keys: [:canvas_term_id]) do |
+          sub_reports = CanvasSync.fork(@job_log, job_chain, keys: [:canvas_term_id]) do |fork_template|
             Term.send(options[:term_scope]).find_each.map do |term|
+              fork = fork_template.duplicate
               # Deep copy the job_chain so each report gets the correct term id passed into
               # its options with no side effects
               term_id = get_term_id(term)
-              duped_job_chain[:global_options][:canvas_term_id] = term_id
+              fork[:global_options][:canvas_term_id] = term_id
               {
-                job_chain:
+                job_chain: fork.serialize,
                 params: report_params(options, term_id),
                 options: options,
               }
data/lib/canvas_sync/jobs/sync_simple_table_job.rb
CHANGED
@@ -9,15 +9,15 @@
       # @param options [Hash]
       def perform(job_chain, options)
         if options[:term_scope]
-          sub_reports = CanvasSync.fork(@job_log, job_chain, keys: [:canvas_term_id]) do |
+          sub_reports = CanvasSync.fork(@job_log, job_chain, keys: [:canvas_term_id]) do |fork_template|
             Term.send(options[:term_scope]).find_each.map do |term|
+              fork = fork_template.duplicate
               # Deep copy the job_chain so each report gets the correct term id passed into
               # its options with no side effects
               term_id = get_term_id(term)
-              duped_job_chain[:global_options][:canvas_term_id] = term_id
+              fork[:global_options][:canvas_term_id] = term_id
               {
-                job_chain:
+                job_chain: fork.serialize,
                 params: report_params(options, term_id),
                 options: options,
               }
data/lib/canvas_sync/misc_helper.rb
ADDED
@@ -0,0 +1,15 @@
+require 'active_record'
+
+module CanvasSync
+  module MiscHelper
+    MigrationClass = Rails.version < '5.0' ? ActiveRecord::Migration : ActiveRecord::Migration[4.2]
+
+    def self.to_boolean(v)
+      if Rails.version < '5.0'
+        ActiveRecord::Type::Boolean.new.type_cast_from_user(v)
+      else
+        ActiveRecord::Type::Boolean.new.deserialize(v)
+      end
+    end
+  end
+end
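A quick sketch of the new boolean helper; the results follow ActiveRecord's standard boolean casting rules on either side of the Rails 5.0 branch:

    CanvasSync::MiscHelper.to_boolean("true")   # => true
    CanvasSync::MiscHelper.to_boolean("0")      # => false
    CanvasSync::MiscHelper.to_boolean(nil)      # => nil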
data/lib/canvas_sync/processors/assignment_groups_processor.rb
CHANGED
@@ -8,15 +8,16 @@
     # @param options [Hash]
     class AssignmentGroupsProcessor < ReportProcessor
       def self.process(report_file_path, _options, report_id)
-        new(report_file_path)
+        new(report_file_path, _options)
       end

-      def initialize(report_file_path)
+      def initialize(report_file_path, options)
         CanvasSync::Importers::BulkImporter.import(
           report_file_path,
           mapping[:assignment_groups][:report_columns],
           AssignmentGroup,
           mapping[:assignment_groups][:conflict_target].to_sym,
+          import_args: options
         )
       end
     end
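The same two-line change is applied to each report processor that follows: the report options (which now include sync_start_time from the report starter) are forwarded to BulkImporter.import as import_args. A condensed, illustrative sketch of the call path:

    # Values are placeholders; mapping and report data come from the gem at runtime.
    options = { sync_start_time: "2020-10-16T18:13:46Z" }   # set by ReportStarter

    CanvasSync::Importers::BulkImporter.import(
      report_file_path,     # path to the downloaded report CSV
      report_columns,       # column mapping for the model
      AssignmentGroup,      # model being upserted
      conflict_target,      # e.g. :canvas_assignment_group_id
      import_args: options  # sync_start_time ends up in condition_sql's updated_at guard
    )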
data/lib/canvas_sync/processors/assignments_processor.rb
CHANGED
@@ -8,15 +8,16 @@
     # @param options [Hash]
     class AssignmentsProcessor < ReportProcessor
       def self.process(report_file_path, _options, report_id)
-        new(report_file_path)
+        new(report_file_path, _options)
       end

-      def initialize(report_file_path)
+      def initialize(report_file_path, options)
         CanvasSync::Importers::BulkImporter.import(
           report_file_path,
           mapping[:assignments][:report_columns],
           Assignment,
           mapping[:assignments][:conflict_target].to_sym,
+          import_args: options
         )
       end
     end
data/lib/canvas_sync/processors/context_module_items_processor.rb
CHANGED
@@ -8,15 +8,16 @@
     # @param options [Hash]
     class ContextModuleItemsProcessor < ReportProcessor
       def self.process(report_file_path, _options, report_id)
-        new(report_file_path)
+        new(report_file_path, _options)
       end

-      def initialize(report_file_path)
+      def initialize(report_file_path, options)
         CanvasSync::Importers::BulkImporter.import(
           report_file_path,
           mapping[:context_module_items][:report_columns],
           ContextModuleItem,
           mapping[:context_module_items][:conflict_target].to_sym,
+          import_args: options
         )
       end
     end