canvas_sync 0.17.19 → 0.17.23.beta4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. checksums.yaml +4 -4
  2. data/README.md +45 -1
  3. data/lib/canvas_sync/concerns/sync_mapping.rb +114 -0
  4. data/lib/canvas_sync/generators/install_generator.rb +1 -0
  5. data/lib/canvas_sync/generators/templates/migrations/create_grading_period_groups.rb +22 -0
  6. data/lib/canvas_sync/generators/templates/migrations/create_grading_periods.rb +22 -0
  7. data/lib/canvas_sync/generators/templates/migrations/create_user_observers.rb +17 -0
  8. data/lib/canvas_sync/generators/templates/models/grading_period.rb +8 -0
  9. data/lib/canvas_sync/generators/templates/models/grading_period_group.rb +9 -0
  10. data/lib/canvas_sync/generators/templates/models/user_observer.rb +11 -0
  11. data/lib/canvas_sync/importers/bulk_importer.rb +23 -18
  12. data/lib/canvas_sync/job_batches/chain_builder.rb +1 -1
  13. data/lib/canvas_sync/jobs/report_checker.rb +37 -4
  14. data/lib/canvas_sync/jobs/report_starter.rb +2 -2
  15. data/lib/canvas_sync/processors/assignment_groups_processor.rb +1 -7
  16. data/lib/canvas_sync/processors/assignments_processor.rb +1 -7
  17. data/lib/canvas_sync/processors/context_module_items_processor.rb +1 -7
  18. data/lib/canvas_sync/processors/context_modules_processor.rb +1 -7
  19. data/lib/canvas_sync/processors/model_mappings.yml +68 -0
  20. data/lib/canvas_sync/processors/normal_processor.rb +3 -3
  21. data/lib/canvas_sync/processors/provisioning_report_processor.rb +21 -63
  22. data/lib/canvas_sync/processors/report_processor.rb +14 -9
  23. data/lib/canvas_sync/processors/submissions_processor.rb +1 -7
  24. data/lib/canvas_sync/record.rb +4 -0
  25. data/lib/canvas_sync/version.rb +1 -1
  26. data/lib/canvas_sync.rb +4 -1
  27. data/spec/canvas_sync/processors/provisioning_report_processor_spec.rb +40 -0
  28. data/spec/dummy/app/models/grading_period.rb +14 -0
  29. data/spec/dummy/app/models/grading_period_group.rb +15 -0
  30. data/spec/dummy/app/models/user_observer.rb +17 -0
  31. data/spec/dummy/db/migrate/20210907233329_create_user_observers.rb +23 -0
  32. data/spec/dummy/db/migrate/20210907233330_create_grading_periods.rb +28 -0
  33. data/spec/dummy/db/migrate/20210907233331_create_grading_period_groups.rb +28 -0
  34. data/spec/dummy/db/schema.rb +42 -1
  35. data/spec/dummy/log/development.log +1167 -0
  36. data/spec/dummy/log/test.log +6693 -0
  37. data/spec/support/fixtures/reports/grading_period_groups.csv +2 -0
  38. data/spec/support/fixtures/reports/grading_periods.csv +3 -0
  39. data/spec/support/fixtures/reports/user_observers.csv +3 -0
  40. metadata +33 -18
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: d995ce469310b7b54158745305c968edd4c7a910bcad87891ffabfa29cd22a30
-  data.tar.gz: 83a0bb75ceb290402137f2531e1925d4872a0e272fcabf75a6c3451b3de5f721
+  metadata.gz: c53a6ea9523ce389c70e26035183573de72ee06c547e380842216b879a6d8bdc
+  data.tar.gz: 166c315f6ba7b0efa337c55a56b8cd418e080f8f98393c4eaed1fd9fdc75a862
 SHA512:
-  metadata.gz: 9287196092064977d34259de638cfd41cf189f6dc9b6a97c6fd507d2555f8f3b0527643d38991765d56bf428856407608ef0c1834b48b6a6dd3201ed232b9879
-  data.tar.gz: a751ad1e08d3feb2716e41d4c552f983f2d83590321dc4d1cc2a18d2f800859587fa1884272bbdbaff0cc4aefe867c0af8f0dfb24c2dc6aba25190e7a190449a
+  metadata.gz: f7137c46728c087f7e53902099744c4545db67d01da9117f243bbdf9e6ab099c5668a9f9be44752b6d4a4ca4208d1e98ebb2831e777be531416f01cc3befd21c
+  data.tar.gz: 62eecd4a5581297f95ee82c2e81c5c7821565defc9885024153ab17a708c00b5eda22f18e83773decb1884a73d6a7a74453d108c26e50f8f868934cb03a60e54
data/README.md CHANGED
@@ -190,7 +190,30 @@ Overrides are useful for two scenarios:
 - You have an existing application where the column names do not match up with what CanvasSync expects
 - You want to sync some other column in the report that CanvasSync is not configured to sync
 
-In order to create an override, place a file called `canvas_sync_provisioning_mapping.yml` in your Rails `config` directory. Define the tables and columns you want to override using the following format:
+Mappings can be modified by editing the Model class like so:
+```ruby
+class User < ApplicationRecord
+  include CanvasSync::Record
+
+  sync_mapping(reset: false) do # `reset: false` is the default
+    # The mapping can be totally cleared with `reset: true` in the `sync_mapping` call, or like so:
+    reset_links
+
+    # Add a new column:
+    link_column :column_in_report => :column_in_database, type: :datetime
+
+    # If the column name on the report and in the DB are the same, a shorthand can be used:
+    link_column :omit_from_final_grade, type: :datetime
+
+    # If the defaults define a column you don't want synced, you can remove it from the mapping:
+    unlink_column :column_in_database
+  end
+
+  # ...
+end
+```
+
+You can also create a file called `canvas_sync_provisioning_mapping.yml` in your Rails `config` directory; however, this approach requires you to re-specify the complete table definition in order to modify it. Define the tables and columns you want to override using the following format:
 
 ```ruby
 users:
@@ -383,6 +406,27 @@ Available config options (if you add more, please update this!):
 
 * `config.classes_to_only_log_errors_on` - use this if you are utilizing the `CanvasSync::JobLog` table, but want certain classes to only persist in the `job_logs` table if an error is encountered. This is useful if you've got a very frequently used job that's filling up your database, and only really care about tracking failures.
 
+## Global Options
+
+You can pass in global options to a job chain. Global options are added to the `batch_context` and referenced by various internal processes. Pass them into a job chain using the `options` param, nested in a `:global` key: `options: { global: {...} }`.
+
+* `report_timeout` (integer) - Number of days until a Canvas report should time out. Default is 1.
+* `report_compilation_timeout` - Number of days until compilation of a Canvas report should time out. Default is 1 hour. You can likely pass a float to achieve sub-day timeouts, but this is untested.
+* `report_max_tries` (integer) - The number of times to attempt a report before giving up. A report is considered failed if it has an 'error' status in Canvas or is deleted.
+
+This is an example job chain with global options:
+
+```ruby
+job_chain = CanvasSync.default_provisioning_report_chain(
+  MODELS_TO_SYNC,
+  term_scope: :active,
+  full_sync_every: 'sunday',
+  options: { global: { report_timeout: 2 } }
+)
+```
+
 ## Handling Job errors
 
 If you need custom handling for when a CanvasSync Job fails, you can add an `:on_failure` option to you Job Chain's `:global_options`.
data/lib/canvas_sync/concerns/sync_mapping.rb ADDED
@@ -0,0 +1,114 @@
+module CanvasSync::Concerns
+  module SyncMapping
+    extend ActiveSupport::Concern
+
+    class_methods do
+      def sync_mapping(key = nil, reset: false, &blk)
+        key ||= Mapping.normalize_model_name(self)
+        key = key.to_s
+        existing_map = get_sync_mapping(key)
+        mapper = Mapping.new(existing_map&.deep_dup || {}.with_indifferent_access)
+        mapper.reset_links if reset
+        mapper.instance_exec(&blk)
+        @sync_mappings[key] = mapper.map_def.freeze
+      end
+
+      def get_sync_mapping(key = nil)
+        key ||= Mapping.normalize_model_name(self)
+        key = key.to_s
+        @sync_mappings ||= {}
+        @sync_mappings[key] || superclass.try(:get_sync_mapping, key) || Mapping.default_for(key)
+      end
+    end
+
+    class Mapping
+      attr_reader :map_def
+
+      def initialize(map_def, model: nil)
+        @model = model
+        @map_def = map_def
+        @map_def[:conflict_target] ||= []
+        @map_def[:report_columns] ||= {}
+      end
+
+      def self.normalize_model_name(model)
+        model = model.name unless model.is_a?(String)
+        model.pluralize.underscore
+      end
+
+      def self.default_for(key)
+        default_mappings[key]
+      end
+
+      def self.default_mappings
+        @mappings ||= begin
+          maps = {}
+          default_v1_mappings.each do |mname, legacy|
+            m = maps[mname] = {}
+
+            m[:conflict_target] = Array(legacy[:conflict_target]).map(&:to_sym).map do |lct|
+              legacy[:report_columns][lct][:database_column_name]
+            end
+
+            m[:report_columns] = {}
+            legacy[:report_columns].each do |rcol, opts|
+              m[:report_columns][opts[:database_column_name]] = opts.except(:database_column_name).merge!(
+                report_column: rcol,
+              ).freeze
+            end
+          end
+          maps.with_indifferent_access.freeze
+        end
+      end
+
+      def self.default_v1_mappings
+        @legacy_mappings ||= begin
+          mapping = YAML.load_file(File.join(__dir__, '../processors', "model_mappings.yml")).deep_symbolize_keys!
+          override_filepath = Rails.root.join("config/canvas_sync_provisioning_mapping.yml")
+
+          if File.file?(override_filepath)
+            override = YAML.load_file(override_filepath).deep_symbolize_keys!
+            mapping = mapping.merge(override)
+          end
+
+          mapping.freeze
+        end
+      end
+
+      def conflict_target(*columns)
+        if columns.count == 0
+          @map_def[:conflict_target]
+        else
+          @map_def[:conflict_target] = columns.flatten.compact
+        end
+      end
+
+      def reset_links
+        @map_def[:report_columns] = {}.with_indifferent_access
+      end
+
+      def unlink_column(key)
+        @map_def.delete(key)
+      end
+
+      def link_column(m, type: nil, &blk)
+        if m.is_a?(Hash)
+          raise "Hash should have exactly 1 entry" if m && m.count != 1
+          @map_def[:report_columns][m.values[0]] = {
+            report_column: m.keys[0],
+            type: type,
+            transform: blk,
+          }
+        elsif m.is_a?(Symbol)
+          @map_def[:report_columns][m] = {
+            report_column: m,
+            type: type,
+            transform: blk,
+          }
+        else
+          raise "Cannot handle argument of type #{m.class}"
+        end
+      end
+    end
+  end
+end
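For orientation (this sketch is not part of the diff): assuming a hypothetical `User` model that includes `CanvasSync::Record`, the DSL above can be exercised roughly like this, with `integration_id` / `sis_integration_id` as made-up column names:

```ruby
# Hypothetical usage sketch -- the model and column names are illustrative only.
class User < ApplicationRecord
  include CanvasSync::Record # brings in CanvasSync::Concerns::SyncMapping

  sync_mapping do
    # Map the report's `integration_id` column onto a `sis_integration_id` DB column.
    link_column :integration_id => :sis_integration_id, type: :string
  end
end

# The merged definition is keyed by database column and can be inspected at runtime;
# for the entry above it should hold roughly { report_column: :integration_id, type: :string, transform: nil }.
User.get_sync_mapping[:report_columns][:sis_integration_id]
```

Because `sync_mapping` starts from `get_sync_mapping` (which falls back to the v1 defaults loaded from `model_mappings.yml`), the default columns stay in place unless `reset: true` or `reset_links` is used.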
data/lib/canvas_sync/generators/install_generator.rb CHANGED
@@ -48,6 +48,7 @@ module CanvasSync
       models.each do |model|
         migration_template "migrations/create_#{model}.rb", "db/migrate/create_#{model}.rb"
         template "models/#{model.singularize}.rb", "app/models/#{model.singularize}.rb"
+      rescue
       end
     end
   end
data/lib/canvas_sync/generators/templates/migrations/create_grading_period_groups.rb ADDED
@@ -0,0 +1,22 @@
+# <%= autogenerated_migration_warning %>
+
+class CreateGradingPeriodGroups < ActiveRecord::Migration[5.1]
+  def change
+    create_table :grading_period_groups do |t|
+      t.bigint :canvas_id, null: false
+      t.bigint :canvas_course_id
+      t.bigint :canvas_account_id
+      t.string :title
+      t.boolean :weighted
+      t.boolean :display_totals_for_all_grading_periods
+
+      t.string :workflow_state
+
+      t.timestamps
+    end
+
+    add_index :grading_period_groups, :canvas_id, unique: true
+    add_index :grading_period_groups, :canvas_course_id
+    add_index :grading_period_groups, :canvas_account_id
+  end
+end
data/lib/canvas_sync/generators/templates/migrations/create_grading_periods.rb ADDED
@@ -0,0 +1,22 @@
+# <%= autogenerated_migration_warning %>
+
+class CreateGradingPeriods < ActiveRecord::Migration[5.1]
+  def change
+    create_table :grading_periods do |t|
+      t.bigint :canvas_id, null: false
+      t.string :title
+      t.float :weight
+      t.datetime :start_date
+      t.datetime :end_date
+      t.datetime :close_date
+      t.bigint :canvas_grading_period_group_id
+
+      t.string :workflow_state
+
+      t.timestamps
+    end
+
+    add_index :grading_periods, :canvas_id, unique: true
+    add_index :grading_periods, :canvas_grading_period_group_id
+  end
+end
data/lib/canvas_sync/generators/templates/migrations/create_user_observers.rb ADDED
@@ -0,0 +1,17 @@
+# <%= autogenerated_migration_warning %>
+
+class CreateUserObservers < ActiveRecord::Migration[5.1]
+  def change
+    create_table :user_observers do |t|
+      t.bigint :observing_user_id
+      t.bigint :observed_user_id
+      t.string :workflow_state
+
+      t.timestamps
+    end
+
+    add_index :user_observers, [:observed_user_id, :observing_user_id], unique: true
+    add_index :user_observers, :observing_user_id
+    add_index :user_observers, :observed_user_id
+  end
+end
data/lib/canvas_sync/generators/templates/models/grading_period.rb ADDED
@@ -0,0 +1,8 @@
+# <%= autogenerated_model_warning %>
+
+class GradingPeriod < ApplicationRecord
+  include CanvasSync::Record
+
+  validates :canvas_id, uniqueness: true, presence: true
+  belongs_to :grading_period_group, primary_key: :canvas_id, foreign_key: :canvas_grading_period_group_id, optional: true
+end
data/lib/canvas_sync/generators/templates/models/grading_period_group.rb ADDED
@@ -0,0 +1,9 @@
+# <%= autogenerated_model_warning %>
+
+class GradingPeriodGroup < ApplicationRecord
+  include CanvasSync::Record
+
+  validates :canvas_id, uniqueness: true, presence: true
+  belongs_to :course, primary_key: :canvas_id, foreign_key: :canvas_course_id, optional: true
+  belongs_to :account, primary_key: :canvas_id, foreign_key: :canvas_account_id, optional: true
+end
data/lib/canvas_sync/generators/templates/models/user_observer.rb ADDED
@@ -0,0 +1,11 @@
+# <%= autogenerated_model_warning %>
+
+class UserObserver < ApplicationRecord
+  include CanvasSync::Record
+  include CanvasSync::Concerns::ApiSyncable
+
+  validates :canvas_id, uniqueness: true, presence: true
+
+  belongs_to :observing_user, primary_key: :canvas_id, foreign_key: :observing_user_id, class_name: 'User', optional: true
+  belongs_to :observed_user, primary_key: :canvas_id, foreign_key: :observed_user_id, class_name: 'User', optional: true
+end
data/lib/canvas_sync/importers/bulk_importer.rb CHANGED
@@ -24,38 +24,43 @@ module CanvasSync
     end
 
     def self.perform_in_batches(report_file_path, mapping, klass, conflict_target, import_args: {})
-      csv_column_names = mapping.keys
-      database_column_names = mapping.values.map { |value| value[:database_column_name] }
+      csv_column_names = mapping.values.map { |value| value[:report_column].to_s }
+      database_column_names = mapping.keys
 
-      puts mapping.inspect
-
-      conflict_target = Array(conflict_target).map(&:to_sym)
-      database_conflict_column_name = conflict_target.map{|ct| mapping[ct][:database_column_name] }
+      conflict_target = Array(conflict_target).map(&:to_s)
+      conflict_target_indices = conflict_target.map{|ct| database_column_names.index(ct) }
 
       row_ids = {}
      batcher = CanvasSync::BatchProcessor.new(of: batch_size) do |batch|
        row_ids = {}
-        perform_import(klass, database_column_names, batch, database_conflict_column_name, import_args)
+        perform_import(klass, database_column_names, batch, conflict_target, import_args)
      end
 
      row_buffer_out = ->(row) {
+        formatted_row = mapping.map do |db_col, col_def|
+          value = nil
+          value = row[col_def[:report_column]] if col_def[:report_column]
+
+          if col_def[:type]
+            if col_def[:type].to_sym == :datetime
+              # TODO: add some timezone config to the mapping.
+              # In cases where the timestamp or date doesn't include a timezone, you should be able to specify one
+              value = DateTime.parse(value).utc rescue nil # rubocop:disable Style/RescueModifier
+            end
+          end
+
+          value = col_def[:transform].call(value, row) if col_def[:transform]
+
+          value
+        end
+
        if conflict_target.present?
-          key = conflict_target.map{|ct| row[ct] }
+          key = conflict_target_indices.map{|ct| formatted_row[ct] }
          next if row_ids[key]
 
          row_ids[key] = true
        end
 
-        formatted_row = csv_column_names.map do |column|
-          if mapping[column][:type].to_sym == :datetime
-            # TODO: add some timezone config to the mapping.
-            # In cases where the timestamp or date doesn't include a timezone, you should be able to specify one
-            DateTime.parse(row[column]).utc rescue nil # rubocop:disable Style/RescueModifier
-          else
-            row[column]
-          end
-        end
-
        batcher << formatted_row
      }
 
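To make the new row-shaping logic concrete, here is a small standalone sketch (not from the gem) of how a mapping in the new shape — keyed by database column, each entry carrying `report_column`, `type`, and an optional `transform` lambda — turns one CSV row into an import row. The column names and values are made up; `DateTime#utc` is assumed to come from ActiveSupport, as it does in the Rails environment the gem runs in.

```ruby
require "active_support/all" # provides DateTime#utc, as in a Rails app

# Illustrative mapping in the new format (database column => column definition).
mapping = {
  canvas_user_id: { report_column: "canvas_user_id", type: :bigint, transform: nil },
  created_at:     { report_column: "created_date", type: :datetime, transform: nil },
  name:           { report_column: "full_name", type: :string,
                    transform: ->(value, _row) { value&.strip } },
}

row = {
  "canvas_user_id" => "101",
  "created_date"   => "2021-09-07T23:33:29Z",
  "full_name"      => " Jane Doe ",
}

# Mirrors the row_buffer_out block above: read the report column, coerce datetimes,
# then apply the per-column transform if one is defined. Other types pass through untouched.
formatted_row = mapping.map do |_db_col, col_def|
  value = col_def[:report_column] ? row[col_def[:report_column]] : nil
  value = (DateTime.parse(value).utc rescue nil) if col_def[:type] == :datetime
  value = col_def[:transform].call(value, row) if col_def[:transform]
  value
end

p formatted_row # => ["101", Tue, 07 Sep 2021 23:33:29 +0000, "Jane Doe"] (formatting may vary)
```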
data/lib/canvas_sync/job_batches/chain_builder.rb CHANGED
@@ -40,7 +40,7 @@ module CanvasSync
     def insert_at(position, new_jobs)
       chain = self.class.get_chain_parameter(base_job)
       new_jobs = [new_jobs] unless new_jobs.is_a?(Array)
-      chain.insert(-1, *new_jobs)
+      chain.insert(position, *new_jobs)
     end
 
     def insert(new_jobs, **kwargs)
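The one-line fix above makes `insert_at` honor its `position` argument instead of always appending. A hedged sketch of the intended use, assuming a chain built with `CanvasSync.default_provisioning_report_chain`; `MyApp::PostSyncJob` is a made-up job class and `MODELS_TO_SYNC` is whatever model list your app already syncs:

```ruby
chain = CanvasSync.default_provisioning_report_chain(MODELS_TO_SYNC)

# Before the fix this job was always appended to the end of the chain;
# with the fix it is spliced in at index 1 of the chain definition.
chain.insert_at(1, MyApp::PostSyncJob)

chain.process!
```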
data/lib/canvas_sync/jobs/report_checker.rb CHANGED
@@ -4,8 +4,9 @@ module CanvasSync
     # Re-enqueues itself if the report is still processing on Canvas.
     # Enqueues the ReportProcessor when the report has completed.
     class ReportChecker < CanvasSync::Job
-      REPORT_TIMEOUT = 12.hours
+      REPORT_TIMEOUT = 24.hours
       COMPILATION_TIMEOUT = 1.hour
+      MAX_TRIES = 3
 
       # @param report_name [Hash] e.g., 'provisioning_csv'
       # @param report_id [Integer]
@@ -13,6 +14,7 @@ module CanvasSync
       # @param options [Hash] hash of options that will be passed to the job processor
       # @return [nil]
       def perform(report_name, report_id, processor, options, checker_context = {}) # rubocop:disable Metrics/AbcSize
+        max_tries = options[:report_max_tries] || batch_context[:report_max_tries] || MAX_TRIES
         account_id = options[:account_id] || batch_context[:account_id] || "self"
         report_status = CanvasSync.get_canvas_sync_client(batch_context)
                                    .report_status(account_id, report_name, report_id)
@@ -27,9 +29,17 @@ module CanvasSync
             report_id,
           )
         when "error", "deleted"
-          message = "Report failed to process; status was #{report_status} for report_name: #{report_name}, report_id: #{report_id}" # rubocop:disable Metrics/LineLength
+          checker_context[:failed_attempts] ||= 0
+          checker_context[:failed_attempts] += 1
+          failed_attempts = checker_context[:failed_attempts]
+          message = "Report failed to process; status was #{report_status} for report_name: #{report_name}, report_id: #{report_id}, #{current_organization.name}. This report has now failed #{checker_context[:failed_attempts]} time." # rubocop:disable Metrics/LineLength
           Rails.logger.error(message)
-          raise message
+          if failed_attempts >= max_tries
+            Rails.logger.error("This report has failed #{failed_attempts} times. Giving up.")
+            raise message
+          else
+            restart_report(options, report_name, processor, checker_context)
+          end
         else
           report_timeout = parse_timeout(options[:report_timeout] || batch_context[:report_timeout] || REPORT_TIMEOUT)
           if timeout_met?(options[:sync_start_time], report_timeout)
@@ -51,7 +61,7 @@ module CanvasSync
             report_id,
             processor,
             options,
-            checker_context,
+            checker_context
           )
         end
       end
@@ -66,6 +76,29 @@ module CanvasSync
       def parse_timeout(val)
         val
       end
+
+      def restart_report(options, report_name, processor, checker_context)
+        account_id = options[:account_id] || batch_context[:account_id] || "self"
+        options[:sync_start_time] = DateTime.now.utc.iso8601
+        new_context = {}
+        new_context[:failed_attempts] = checker_context[:failed_attempts]
+        report_id = start_report(account_id, report_name, options[:report_params])
+        CanvasSync::Jobs::ReportChecker
+          .set(wait: report_checker_wait_time)
+          .perform_later(
+            report_name,
+            report_id,
+            processor,
+            options,
+            new_context
+          )
+      end
+
+      def start_report(account_id, report_name, report_params)
+        report = CanvasSync.get_canvas_sync_client(batch_context)
+                   .start_report(account_id, report_name, report_params)
+        report["id"]
+      end
     end
   end
 end
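Tying this back to the README change above: the retry limit (`report_max_tries`), like `report_timeout`, is read from the job options or the batch context, so it can be supplied through a chain's global options. A hedged configuration sketch with illustrative values; `MODELS_TO_SYNC` is whatever model list your app already syncs:

```ruby
job_chain = CanvasSync.default_provisioning_report_chain(
  MODELS_TO_SYNC,
  term_scope: :active,
  # Reports time out after 2 days and are retried up to 5 times before the job raises.
  options: { global: { report_timeout: 2, report_max_tries: 5 } }
)
job_chain.process!
```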