canvas_sync 0.17.18 → 0.17.23.beta2

Files changed (41)
  1. checksums.yaml +4 -4
  2. data/README.md +45 -1
  3. data/lib/canvas_sync/concerns/sync_mapping.rb +112 -0
  4. data/lib/canvas_sync/generators/install_generator.rb +1 -0
  5. data/lib/canvas_sync/generators/templates/migrations/create_grading_period_groups.rb +22 -0
  6. data/lib/canvas_sync/generators/templates/migrations/create_grading_periods.rb +22 -0
  7. data/lib/canvas_sync/generators/templates/migrations/create_user_observers.rb +17 -0
  8. data/lib/canvas_sync/generators/templates/models/grading_period.rb +8 -0
  9. data/lib/canvas_sync/generators/templates/models/grading_period_group.rb +9 -0
  10. data/lib/canvas_sync/generators/templates/models/user_observer.rb +11 -0
  11. data/lib/canvas_sync/importers/bulk_importer.rb +27 -16
  12. data/lib/canvas_sync/job_batches/chain_builder.rb +1 -1
  13. data/lib/canvas_sync/jobs/begin_sync_chain_job.rb +1 -1
  14. data/lib/canvas_sync/jobs/report_checker.rb +37 -4
  15. data/lib/canvas_sync/jobs/report_starter.rb +2 -2
  16. data/lib/canvas_sync/processors/assignment_groups_processor.rb +1 -7
  17. data/lib/canvas_sync/processors/assignments_processor.rb +1 -7
  18. data/lib/canvas_sync/processors/context_module_items_processor.rb +1 -7
  19. data/lib/canvas_sync/processors/context_modules_processor.rb +1 -7
  20. data/lib/canvas_sync/processors/model_mappings.yml +68 -0
  21. data/lib/canvas_sync/processors/normal_processor.rb +3 -3
  22. data/lib/canvas_sync/processors/provisioning_report_processor.rb +21 -63
  23. data/lib/canvas_sync/processors/report_processor.rb +14 -9
  24. data/lib/canvas_sync/processors/submissions_processor.rb +1 -7
  25. data/lib/canvas_sync/record.rb +4 -0
  26. data/lib/canvas_sync/version.rb +1 -1
  27. data/lib/canvas_sync.rb +4 -1
  28. data/spec/canvas_sync/processors/provisioning_report_processor_spec.rb +40 -0
  29. data/spec/dummy/app/models/grading_period.rb +14 -0
  30. data/spec/dummy/app/models/grading_period_group.rb +15 -0
  31. data/spec/dummy/app/models/user_observer.rb +17 -0
  32. data/spec/dummy/db/migrate/20210907233329_create_user_observers.rb +23 -0
  33. data/spec/dummy/db/migrate/20210907233330_create_grading_periods.rb +28 -0
  34. data/spec/dummy/db/migrate/20210907233331_create_grading_period_groups.rb +28 -0
  35. data/spec/dummy/db/schema.rb +42 -1
  36. data/spec/dummy/log/development.log +1167 -0
  37. data/spec/dummy/log/test.log +4734 -0
  38. data/spec/support/fixtures/reports/grading_period_groups.csv +2 -0
  39. data/spec/support/fixtures/reports/grading_periods.csv +3 -0
  40. data/spec/support/fixtures/reports/user_observers.csv +3 -0
  41. metadata +33 -18
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 30e1a16308bbc8795dc671c19f35d29577665213261829d3180a9efeefab4adf
-  data.tar.gz: bade0d8907b22bf252b85ba4067798ebd3b6582deed06adfccf7a8951a6164b4
+  metadata.gz: 0d5da2edfe978cc9fec69714ce7a1bd90cce3977023478473a7999143e78df90
+  data.tar.gz: 510f341eb1d090275b302e971a6f26c59e046da7699cfaee567a34c23ce751c8
 SHA512:
-  metadata.gz: 4a155b415154694a42c218bd5ef91f21ca7cdcc81967cc2f2c16a180eebfb32d1b279c72ea2eab58beaabb8e8a8905fe4ef4086ce758c9ee14b1583eccf06849
-  data.tar.gz: 3269c39188c086e7b3b03d6280414fbdc7a32c6780af5744da08f3f9432f4eb5ed993c3af4d22e2fe477d92ca8e4d705c62ba9e9005559a9e1145922f565aa4f
+  metadata.gz: 891a38e3bd240de7c5c248d14bc21f84bc1336a19fb1509e74769076d7059bc4e3a2b7b8764cafe3aba8b6127d2278241a30b8f78f0735467408c269710dab31
+  data.tar.gz: fc71f424836699e98cfa39ee435e919e81cb9bcc12deef30f4597806746ec8047136efc180db31ad906dc309d62526584fbdd17db04d2fe7b9b7495402f869a9
data/README.md CHANGED
@@ -190,7 +190,30 @@ Overrides are useful for two scenarios:
 - You have an existing application where the column names do not match up with what CanvasSync expects
 - You want to sync some other column in the report that CanvasSync is not configured to sync
 
-In order to create an override, place a file called `canvas_sync_provisioning_mapping.yml` in your Rails `config` directory. Define the tables and columns you want to override using the following format:
+Mappings can be modified by editing the model class, like so:
+```ruby
+class User < ApplicationRecord
+  include CanvasSync::Record
+
+  sync_mapping(reset: false) do # `reset: false` is the default
+    # The mapping can be totally cleared with `reset: true` in the `sync_mapping` call, or like so:
+    reset_links
+
+    # Add a new column:
+    link_column :column_in_report => :column_in_database, type: :datetime
+
+    # If the column name on the report and in the DB are the same, a shorthand can be used:
+    link_column :omit_from_final_grade, type: :datetime
+
+    # If the defaults define a column you don't want synced, you can remove it from the mapping:
+    unlink_column :column_in_database
+  end
+
+  # ...
+end
+```
+
+You can also create a file called `canvas_sync_provisioning_mapping.yml` in your Rails `config` directory. However, this approach requires you to re-specify the complete table in order to modify it. Define the tables and columns you want to override using the following format:
 
 ```ruby
 users:
@@ -383,6 +406,27 @@ Available config options (if you add more, please update this!):
 
 * `config.classes_to_only_log_errors_on` - use this if you are utilizing the `CanvasSync::JobLog` table, but want certain classes to only persist in the `job_logs` table if an error is encountered. This is useful if you've got a very frequently used job that's filling up your database, and only really care about tracking failures.
 
+## Global Options
+You can pass global options to a job chain. Global options are added to the batch_context and referenced by
+various internal processes.
+
+Pass global options into a job chain using the options param nested in a :global key:
+  options: { global: {...} }
+
+report_timeout (integer): Number of days until a Canvas report should time out. Default is 1.
+report_compilation_timeout (integer): Time until report compilation should time out. Default is 1 hour.
+  You can likely pass a float to achieve sub-day timeouts, but this is not tested.
+report_max_tries (integer): The number of times to attempt a report before giving up. A report is considered failed
+  if it has an 'error' status in Canvas or is deleted.
+
+This is an example job chain with global options:
+  job_chain = CanvasSync.default_provisioning_report_chain(
+    MODELS_TO_SYNC,
+    term_scope: :active,
+    full_sync_every: 'sunday',
+    options: { global: { report_timeout: 2 } }
+  )
+
 ## Handling Job errors
 
 If you need custom handling for when a CanvasSync Job fails, you can add an `:on_failure` option to your Job Chain's `:global_options`.
data/lib/canvas_sync/concerns/sync_mapping.rb ADDED
@@ -0,0 +1,112 @@
+module CanvasSync::Concerns
+  module SyncMapping
+    extend ActiveSupport::Concern
+
+    class_methods do
+      def sync_mapping(key = nil, reset: false, &blk)
+        key ||= Mapping.normalize_model_name(self)
+        key = key.to_s
+        existing_map = get_sync_mapping(key)
+        mapper = Mapping.new(existing_map&.deep_dup || {}.with_indifferent_access)
+        mapper.reset_links if reset
+        mapper.instance_exec(&blk)
+        @sync_mappings[key] = mapper.map_def.freeze
+      end
+
+      def get_sync_mapping(key = nil)
+        key ||= Mapping.normalize_model_name(self)
+        key = key.to_s
+        @sync_mappings ||= {}
+        @sync_mappings[key] || superclass.try(:get_sync_mapping, key) || Mapping.default_for(key)
+      end
+    end
+
+    class Mapping
+      attr_reader :map_def
+
+      def initialize(map_def = {}, model: nil)
+        @map_def = map_def
+        @model = model
+      end
+
+      def self.normalize_model_name(model)
+        model = model.name unless model.is_a?(String)
+        model.pluralize.underscore
+      end
+
+      def self.default_for(key)
+        default_mappings[key]
+      end
+
+      def self.default_mappings
+        @mappings ||= begin
+          maps = {}
+          default_v1_mappings.each do |mname, legacy|
+            m = maps[mname] = {}
+
+            m[:conflict_target] = Array(legacy[:conflict_target]).map(&:to_sym).map do |lct|
+              legacy[:report_columns][lct][:database_column_name]
+            end
+
+            m[:report_columns] = {}
+            legacy[:report_columns].each do |rcol, opts|
+              m[:report_columns][opts[:database_column_name]] = opts.except(:database_column_name).merge!(
+                report_column: rcol,
+              ).freeze
+            end
+          end
+          maps.with_indifferent_access.freeze
+        end
+      end
+
+      def self.default_v1_mappings
+        @legacy_mappings ||= begin
+          mapping = YAML.load_file(File.join(__dir__, '../processors', "model_mappings.yml")).deep_symbolize_keys!
+          override_filepath = Rails.root.join("config/canvas_sync_provisioning_mapping.yml")
+
+          if File.file?(override_filepath)
+            override = YAML.load_file(override_filepath).deep_symbolize_keys!
+            mapping = mapping.merge(override)
+          end
+
+          mapping.freeze
+        end
+      end
+
+      def conflict_target(*columns)
+        if columns.count == 0
+          @map_def[:conflict_target]
+        else
+          @map_def[:conflict_target] = columns.flatten.compact
+        end
+      end
+
+      def reset_links
+        @map_def = {}
+      end
+
+      def unlink_column(key)
+        @map_def.delete(key)
+      end
+
+      def link_column(m, type: nil, &blk)
+        if m.is_a?(Hash)
+          raise "Hash should have exactly 1 entry" if m && m.count != 1
+          @map_def[:report_columns][m.values[0]] = {
+            report_column: m.keys[0],
+            type: type,
+            transform: blk,
+          }
+        elsif m.is_a?(Symbol)
+          @map_def[:report_columns][m] = {
+            report_column: m,
+            type: type,
+            transform: blk,
+          }
+        else
+          raise "Cannot handle argument of type #{m.class}"
+        end
+      end
+    end
+  end
+end
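For context, the `sync_mapping` DSL added above is meant to be used from a model that includes `CanvasSync::Record`. A minimal usage sketch follows; the `Course` model and its column names are illustrative, not taken from this diff:

```ruby
# Minimal usage sketch of CanvasSync::Concerns::SyncMapping (assumed to be
# pulled in via CanvasSync::Record). Model and column names are illustrative.
class Course < ApplicationRecord
  include CanvasSync::Record

  sync_mapping do
    # Upsert on the local canvas_id column.
    conflict_target :canvas_id

    # Map the report's "course_id" column onto the local canvas_id column.
    link_column :course_id => :canvas_id, type: :integer

    # A block receives (value, row) and can massage the raw report value.
    link_column :name do |value, _row|
      value&.strip
    end
  end
end

# The resolved mapping (database columns => { report_column:, type:, transform: })
# can be inspected with:
Course.get_sync_mapping
```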
data/lib/canvas_sync/generators/install_generator.rb CHANGED
@@ -48,6 +48,7 @@ module CanvasSync
         models.each do |model|
           migration_template "migrations/create_#{model}.rb", "db/migrate/create_#{model}.rb"
           template "models/#{model.singularize}.rb", "app/models/#{model.singularize}.rb"
+        rescue
         end
       end
     end
data/lib/canvas_sync/generators/templates/migrations/create_grading_period_groups.rb ADDED
@@ -0,0 +1,22 @@
+# <%= autogenerated_migration_warning %>
+
+class CreateGradingPeriodGroups < ActiveRecord::Migration[5.1]
+  def change
+    create_table :grading_period_groups do |t|
+      t.bigint :canvas_id, null: false
+      t.bigint :canvas_course_id
+      t.bigint :canvas_account_id
+      t.string :title
+      t.boolean :weighted
+      t.boolean :display_totals_for_all_grading_periods
+
+      t.string :workflow_state
+
+      t.timestamps
+    end
+
+    add_index :grading_period_groups, :canvas_id, unique: true
+    add_index :grading_period_groups, :canvas_course_id
+    add_index :grading_period_groups, :canvas_account_id
+  end
+end
data/lib/canvas_sync/generators/templates/migrations/create_grading_periods.rb ADDED
@@ -0,0 +1,22 @@
+# <%= autogenerated_migration_warning %>
+
+class CreateGradingPeriods < ActiveRecord::Migration[5.1]
+  def change
+    create_table :grading_periods do |t|
+      t.bigint :canvas_id, null: false
+      t.string :title
+      t.float :weight
+      t.datetime :start_date
+      t.datetime :end_date
+      t.datetime :close_date
+      t.bigint :canvas_grading_period_group_id
+
+      t.string :workflow_state
+
+      t.timestamps
+    end
+
+    add_index :grading_periods, :canvas_id, unique: true
+    add_index :grading_periods, :canvas_grading_period_group_id
+  end
+end
data/lib/canvas_sync/generators/templates/migrations/create_user_observers.rb ADDED
@@ -0,0 +1,17 @@
+# <%= autogenerated_migration_warning %>
+
+class CreateUserObservers < ActiveRecord::Migration[5.1]
+  def change
+    create_table :user_observers do |t|
+      t.bigint :observing_user_id
+      t.bigint :observed_user_id
+      t.string :workflow_state
+
+      t.timestamps
+    end
+
+    add_index :user_observers, [:observed_user_id, :observing_user_id], unique: true
+    add_index :user_observers, :observing_user_id
+    add_index :user_observers, :observed_user_id
+  end
+end
data/lib/canvas_sync/generators/templates/models/grading_period.rb ADDED
@@ -0,0 +1,8 @@
+# <%= autogenerated_model_warning %>
+
+class GradingPeriod < ApplicationRecord
+  include CanvasSync::Record
+
+  validates :canvas_id, uniqueness: true, presence: true
+  belongs_to :grading_period_group, primary_key: :canvas_id, foreign_key: :canvas_grading_period_group_id, optional: true
+end
data/lib/canvas_sync/generators/templates/models/grading_period_group.rb ADDED
@@ -0,0 +1,9 @@
+# <%= autogenerated_model_warning %>
+
+class GradingPeriodGroup < ApplicationRecord
+  include CanvasSync::Record
+
+  validates :canvas_id, uniqueness: true, presence: true
+  belongs_to :course, primary_key: :canvas_id, foreign_key: :canvas_course_id, optional: true
+  belongs_to :account, primary_key: :canvas_id, foreign_key: :canvas_account_id, optional: true
+end
data/lib/canvas_sync/generators/templates/models/user_observer.rb ADDED
@@ -0,0 +1,11 @@
+# <%= autogenerated_model_warning %>
+
+class UserObserver < ApplicationRecord
+  include CanvasSync::Record
+  include CanvasSync::Concerns::ApiSyncable
+
+  validates :canvas_id, uniqueness: true, presence: true
+
+  belongs_to :observing_user, primary_key: :canvas_id, foreign_key: :observing_user_id, class_name: 'User', optional: true
+  belongs_to :observed_user, primary_key: :canvas_id, foreign_key: :observed_user_id, class_name: 'User', optional: true
+end
data/lib/canvas_sync/importers/bulk_importer.rb CHANGED
@@ -24,30 +24,41 @@ module CanvasSync
       end
 
       def self.perform_in_batches(report_file_path, mapping, klass, conflict_target, import_args: {})
-        csv_column_names = mapping.keys
-        database_column_names = mapping.values.map { |value| value[:database_column_name] }
-        database_conflict_column_name = conflict_target ? mapping[conflict_target][:database_column_name] : nil
+        csv_column_names = mapping.values.map { |value| value[:report_column].to_s }
+        database_column_names = mapping.keys
+
+        conflict_target = Array(conflict_target).map(&:to_s)
+        conflict_target_indices = conflict_target.map{|ct| database_column_names.index(ct) }
 
         row_ids = {}
         batcher = CanvasSync::BatchProcessor.new(of: batch_size) do |batch|
           row_ids = {}
-          perform_import(klass, database_column_names, batch, database_conflict_column_name, import_args)
+          perform_import(klass, database_column_names, batch, conflict_target, import_args)
         end
 
         row_buffer_out = ->(row) {
-          if conflict_target
-            next if row_ids[row[conflict_target]]
-            row_ids[row[conflict_target]] = true
+          formatted_row = mapping.map do |db_col, col_def|
+            value = nil
+            value = row[col_def[:report_column]] if col_def[:report_column]
+
+            if col_def[:type]
+              if col_def[:type].to_sym == :datetime
+                # TODO: add some timezone config to the mapping.
+                # In cases where the timestamp or date doesn't include a timezone, you should be able to specify one
+                value = DateTime.parse(value).utc rescue nil # rubocop:disable Style/RescueModifier
+              end
+            end
+
+            value = col_def[:transform].call(value, row) if col_def[:transform]
+
+            value
           end
 
-          formatted_row = csv_column_names.map do |column|
-            if mapping[column][:type].to_sym == :datetime
-              # TODO: add some timezone config to the mapping.
-              # In cases where the timestamp or date doesn't include a timezone, you should be able to specify one
-              DateTime.parse(row[column]).utc rescue nil # rubocop:disable Style/RescueModifier
-            else
-              row[column]
-            end
+          if conflict_target.present?
+            key = conflict_target_indices.map{|ct| formatted_row[ct] }
+            next if row_ids[key]
+
+            row_ids[key] = true
           end
 
           batcher << formatted_row
@@ -79,7 +90,7 @@ module CanvasSync
           condition: condition_sql(klass, columns, import_args[:sync_start_time]),
           columns: columns
         }
-        update_conditions[:conflict_target] = conflict_target if conflict_target
+        update_conditions[:conflict_target] = conflict_target if conflict_target.present?
 
         options = { validate: false, on_duplicate_key_update: update_conditions }.merge(import_args)
         options.delete(:on_duplicate_key_update) if options.key?(:on_duplicate_key_ignore)
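To make the rewritten import path concrete, `perform_in_batches` now receives a mapping keyed by database column (as produced by the `SyncMapping` concern above) rather than by report column. A rough sketch of that shape, with illustrative column names:

```ruby
# Approximate shape of the mapping consumed by the new perform_in_batches.
# Keys are database columns; each entry names the report column plus an
# optional type cast and transform lambda. Column names are illustrative.
mapping = {
  "canvas_id"  => { report_column: :course_id, type: :integer },
  "name"       => { report_column: :name, transform: ->(value, _row) { value&.strip } },
  "created_at" => { report_column: :created_at, type: :datetime },
}

# database_column_names => ["canvas_id", "name", "created_at"]
# csv_column_names      => ["course_id", "name", "created_at"]
# With conflict_target = ["canvas_id"], rows in a batch that repeat the same
# canvas_id value are de-duplicated via the row_ids bookkeeping shown above.
```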
data/lib/canvas_sync/job_batches/chain_builder.rb CHANGED
@@ -40,7 +40,7 @@ module CanvasSync
       def insert_at(position, new_jobs)
         chain = self.class.get_chain_parameter(base_job)
         new_jobs = [new_jobs] unless new_jobs.is_a?(Array)
-        chain.insert(-1, *new_jobs)
+        chain.insert(position, *new_jobs)
       end
 
       def insert(new_jobs, **kwargs)
data/lib/canvas_sync/jobs/begin_sync_chain_job.rb CHANGED
@@ -46,7 +46,7 @@ module CanvasSync
           m = Regexp.last_match
           day = m[1]
           skip = m[2] || "1"
-          Date.new.send(:"#{day}?") && last_full_sync.end_of_day <= (skip.to_i.weeks.ago.end_of_day)
+          DateTime.now.send(:"#{day}?") && last_full_sync.end_of_day <= (skip.to_i.weeks.ago.end_of_day)
         when opt.match?(%r{^(\d+)\%$})
           m = Regexp.last_match
           rand(100) < m[1].to_i
data/lib/canvas_sync/jobs/report_checker.rb CHANGED
@@ -4,8 +4,9 @@ module CanvasSync
     # Re-enqueues itself if the report is still processing on Canvas.
     # Enqueues the ReportProcessor when the report has completed.
     class ReportChecker < CanvasSync::Job
-      REPORT_TIMEOUT = 12.hours
+      REPORT_TIMEOUT = 24.hours
       COMPILATION_TIMEOUT = 1.hour
+      MAX_TRIES = 3
 
       # @param report_name [Hash] e.g., 'provisioning_csv'
       # @param report_id [Integer]
@@ -13,6 +14,7 @@ module CanvasSync
      # @param options [Hash] hash of options that will be passed to the job processor
      # @return [nil]
      def perform(report_name, report_id, processor, options, checker_context = {}) # rubocop:disable Metrics/AbcSize
+        max_tries = options[:report_max_tries] || batch_context[:report_max_tries] || MAX_TRIES
        account_id = options[:account_id] || batch_context[:account_id] || "self"
        report_status = CanvasSync.get_canvas_sync_client(batch_context)
                                  .report_status(account_id, report_name, report_id)
@@ -27,9 +29,17 @@ module CanvasSync
            report_id,
          )
        when "error", "deleted"
-          message = "Report failed to process; status was #{report_status} for report_name: #{report_name}, report_id: #{report_id}" # rubocop:disable Metrics/LineLength
+          checker_context[:failed_attempts] ||= 0
+          checker_context[:failed_attempts] += 1
+          failed_attempts = checker_context[:failed_attempts]
+          message = "Report failed to process; status was #{report_status} for report_name: #{report_name}, report_id: #{report_id}, #{current_organization.name}. This report has now failed #{checker_context[:failed_attempts]} time." # rubocop:disable Metrics/LineLength
          Rails.logger.error(message)
-          raise message
+          if failed_attempts >= max_tries
+            Rails.logger.error("This report has failed #{failed_attempts} times. Giving up.")
+            raise message
+          else
+            restart_report(options, report_name, processor, checker_context)
+          end
        else
          report_timeout = parse_timeout(options[:report_timeout] || batch_context[:report_timeout] || REPORT_TIMEOUT)
          if timeout_met?(options[:sync_start_time], report_timeout)
@@ -51,7 +61,7 @@ module CanvasSync
            report_id,
            processor,
            options,
-            checker_context,
+            checker_context
          )
        end
      end
@@ -66,6 +76,29 @@ module CanvasSync
      def parse_timeout(val)
        val
      end
+
+      def restart_report(options, report_name, processor, checker_context)
+        account_id = options[:account_id] || batch_context[:account_id] || "self"
+        options[:sync_start_time] = DateTime.now.utc.iso8601
+        new_context = {}
+        new_context[:failed_attempts] = checker_context[:failed_attempts]
+        report_id = start_report(account_id, report_name, options[:report_params])
+        CanvasSync::Jobs::ReportChecker
+          .set(wait: report_checker_wait_time)
+          .perform_later(
+            report_name,
+            report_id,
+            processor,
+            options,
+            new_context
+          )
+      end
+
+      def start_report(account_id, report_name, report_params)
+        report = CanvasSync.get_canvas_sync_client(batch_context)
+                           .start_report(account_id, report_name, report_params)
+        report["id"]
+      end
    end
  end
 end
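Tying the retry changes back to the README's global options: `report_max_tries` resolves from the job's options, then the batch context, then the `MAX_TRIES` default of 3, so a chain-wide override can be passed like this (a sketch based on the README example above; the model list and the value 5 are arbitrary):

```ruby
# Sketch: raising the report retry limit for a whole provisioning sync chain.
# ReportChecker resolves max_tries as
#   options[:report_max_tries] || batch_context[:report_max_tries] || MAX_TRIES
# and global options are added to the batch_context.
job_chain = CanvasSync.default_provisioning_report_chain(
  %w[users courses enrollments],   # illustrative model list
  term_scope: :active,
  options: { global: { report_max_tries: 5, report_timeout: 2 } }
)
# Enqueue the chain as usual from here.
```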