canvas_sync 0.26.1 → 0.27.1.beta2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69) hide show
  1. checksums.yaml +4 -4
  2. data/README.md +117 -20
  3. data/app/controllers/canvas_sync/api/v1/live_events_controller.rb +1 -0
  4. data/lib/canvas_sync/config.rb +1 -1
  5. data/lib/canvas_sync/importers/bulk_importer.rb +2 -0
  6. data/lib/canvas_sync/jobs/begin_sync_chain_job.rb +1 -1
  7. data/lib/canvas_sync/jobs/beta_cleanup/create_temp_tables_job.rb +30 -0
  8. data/lib/canvas_sync/jobs/beta_cleanup/delete_related_records_job.rb +125 -0
  9. data/lib/canvas_sync/jobs/beta_cleanup/delete_temp_tables_job.rb +16 -0
  10. data/lib/canvas_sync/jobs/report_starter.rb +33 -46
  11. data/lib/canvas_sync/jobs/report_sync_task.rb +273 -0
  12. data/lib/canvas_sync/jobs/sync_accounts_job.rb +10 -7
  13. data/lib/canvas_sync/jobs/sync_assignment_groups_job.rb +2 -15
  14. data/lib/canvas_sync/jobs/sync_assignment_overrides_job.rb +26 -14
  15. data/lib/canvas_sync/jobs/sync_assignments_job.rb +2 -15
  16. data/lib/canvas_sync/jobs/sync_content_migrations_job.rb +2 -15
  17. data/lib/canvas_sync/jobs/sync_context_module_items_job.rb +2 -15
  18. data/lib/canvas_sync/jobs/sync_context_modules_job.rb +2 -15
  19. data/lib/canvas_sync/jobs/sync_course_progresses_job.rb +2 -16
  20. data/lib/canvas_sync/jobs/sync_provisioning_report_job.rb +135 -14
  21. data/lib/canvas_sync/jobs/sync_rubric_assessments_job.rb +2 -10
  22. data/lib/canvas_sync/jobs/sync_rubric_associations_job.rb +2 -10
  23. data/lib/canvas_sync/jobs/sync_rubrics_job.rb +2 -10
  24. data/lib/canvas_sync/jobs/sync_scores_job.rb +2 -13
  25. data/lib/canvas_sync/jobs/sync_submissions_job.rb +9 -18
  26. data/lib/canvas_sync/jobs/term_batches_job.rb +4 -2
  27. data/lib/canvas_sync/version.rb +1 -1
  28. data/lib/canvas_sync.rb +31 -4
  29. data/spec/canvas_sync/canvas_sync_spec.rb +62 -22
  30. data/spec/canvas_sync/jobs/report_starter_spec.rb +102 -55
  31. data/spec/canvas_sync/jobs/report_sync_task_spec.rb +367 -0
  32. data/spec/canvas_sync/jobs/sync_provisioning_report_job_spec.rb +24 -35
  33. data/spec/canvas_sync/processors/assignment_groups_processor_spec.rb +3 -4
  34. data/spec/canvas_sync/processors/assignment_overrides_processor_spec.rb +7 -4
  35. data/spec/canvas_sync/processors/assignments_processor_spec.rb +3 -4
  36. data/spec/canvas_sync/processors/content_migrations_processor_spec.rb +3 -4
  37. data/spec/canvas_sync/processors/context_module_items_processor_spec.rb +4 -5
  38. data/spec/canvas_sync/processors/context_modules_processor_spec.rb +3 -4
  39. data/spec/canvas_sync/processors/course_completion_report_processor_spec.rb +7 -4
  40. data/spec/canvas_sync/processors/provisioning_report_processor_spec.rb +46 -24
  41. data/spec/canvas_sync/processors/rubric_assessments_spec.rb +3 -4
  42. data/spec/canvas_sync/processors/rubric_associations_spec.rb +3 -4
  43. data/spec/canvas_sync/processors/rubrics_processor_spec.rb +3 -4
  44. data/spec/canvas_sync/processors/submissions_processor_spec.rb +3 -4
  45. data/spec/factories/account_factory.rb +1 -1
  46. metadata +7 -33
  47. data/lib/canvas_sync/jobs/report_checker.rb +0 -108
  48. data/lib/canvas_sync/jobs/report_processor_job.rb +0 -35
  49. data/lib/canvas_sync/processors/assignment_groups_processor.rb +0 -19
  50. data/lib/canvas_sync/processors/assignment_overrides_processor.rb +0 -41
  51. data/lib/canvas_sync/processors/assignments_processor.rb +0 -19
  52. data/lib/canvas_sync/processors/content_migrations_processor.rb +0 -19
  53. data/lib/canvas_sync/processors/context_module_items_processor.rb +0 -19
  54. data/lib/canvas_sync/processors/context_modules_processor.rb +0 -19
  55. data/lib/canvas_sync/processors/course_completion_report_processor.rb +0 -20
  56. data/lib/canvas_sync/processors/provisioning_report_processor.rb +0 -149
  57. data/lib/canvas_sync/processors/rubric_assessments_processor.rb +0 -19
  58. data/lib/canvas_sync/processors/rubric_associations_processor.rb +0 -19
  59. data/lib/canvas_sync/processors/rubrics_processor.rb +0 -19
  60. data/lib/canvas_sync/processors/submissions_processor.rb +0 -19
  61. data/spec/canvas_sync/jobs/report_checker_spec.rb +0 -57
  62. data/spec/canvas_sync/jobs/report_processor_job_spec.rb +0 -25
  63. data/spec/canvas_sync/jobs/sync_assignment_groups_job_spec.rb +0 -18
  64. data/spec/canvas_sync/jobs/sync_assignments_job_spec.rb +0 -30
  65. data/spec/canvas_sync/jobs/sync_content_migrations_job_spec.rb +0 -30
  66. data/spec/canvas_sync/jobs/sync_context_module_items_job_spec.rb +0 -30
  67. data/spec/canvas_sync/jobs/sync_context_modules_job_spec.rb +0 -30
  68. data/spec/canvas_sync/jobs/sync_scores_job_spec.rb +0 -15
  69. data/spec/canvas_sync/jobs/sync_submissions_job_spec.rb +0 -23
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 4df6839f29a0cd2bf0dfe0f3203a117ccc1bc14c1422a2d2bbb8020fed7a29e5
4
- data.tar.gz: f07f9efcf576dce523e34dd4ee77819ed5ceb502265fb0f167659a34e53db8c2
3
+ metadata.gz: 50549b542e0425d35fc1effb9f505ce4a1d75e1517eb1e32b2a8bbffb00ee7de
4
+ data.tar.gz: 6442ed0b393e7e234984eb78dced6ef82aa388767ffba09f483de78b411e4ff5
5
5
  SHA512:
6
- metadata.gz: a633c8a2c949083c1b5d177235283dda2a959d6471519034265e585daf3b2305b4f7832cc8a8ee0b9b2839ba8625834fc37b7c4a34fb3d3be2c563d638b433ab
7
- data.tar.gz: 4f173ff8a8437776e2cb37ce321c8fc826ee01119a6e5ec6f05d54793d303d570d51ae63de2b2a03b0e531740597d5b752a10cef7bc4a325f736ced1595f6ec0
6
+ metadata.gz: 7c5c55806050f7fb686d7331d39b611def2783c0f9db57c6437cea99df30d69038671b3fc9b7b1bc9e4c4065e4a1ce8eb164db92cf4a40399c823e2d1e0f28a7
7
+ data.tar.gz: f743ab5316387d8a0519e29d8675e2246ace7cefdb8d9acff9a5d2ed42e3cc0fce55287a2c779105f7b51502331aac3cb75bb900a38096247626b57274c92c47
data/README.md CHANGED
@@ -125,9 +125,9 @@ These jobs can also be generated from template using `bin/rails generate canvas_
125
125
 
126
126
  This gem also helps with syncing and processing other reports if needed. In order to do so, you must:
127
127
 
128
- - Define a `Processor` class that implements a `process` method for handling the results of the report
129
- - Integrate your reports with the `ReportStarter`
130
- - Tell the gem what jobs to run
128
+ - Create a job class that extends `CanvasSync::Jobs::ReportSyncTask`
129
+ - Define the report name and implement a `process` method for handling the results
130
+ - Optionally override `report_parameters` for custom report parameters
131
131
 
132
132
  ### `updated_after`
133
133
  An `updated_after` param may be passed when triggering a provision or making a chain:
@@ -183,38 +183,73 @@ chain.insert({ job: SomeOtherJob }, after: 'CanvasSync::Jobs::SyncCoursesJob') #
183
183
  chain.get_sub_chain('CanvasSync::Jobs::SyncTermsJob')
184
184
  ```
185
185
 
186
- ### Processor
186
+ ### Custom Report Sync Jobs
187
187
 
188
- Your processor class must implement a `process` class method that receives a `report_file_path` and a hash of `options`. (See the `CanvasSync::Processors::ProvisioningReportProcessor` for an example.) The gem handles the work of enqueueing and downloading the report and then passes the file path to your class to process as needed. A simple example might be:
188
+ To create a custom report sync job, extend `CanvasSync::Jobs::ReportSyncTask` and define your report. The gem automatically handles:
189
+ - Starting the Canvas report
190
+ - Polling for completion
191
+ - Downloading the report file
192
+ - Error handling and retries
193
+ - Timeout management
194
+
195
+ Let's say we have a custom Canvas report called "my_really_cool_report_csv". Here's how to create a sync job:
189
196
 
190
197
  ```ruby
191
- class MyCoolProcessor
192
- def self.process(report_file_path, options)
193
- puts "I downloaded a report to #{report_file_path}! Isn't that neat!"
198
+ class MyReallyCoolReportJob < CanvasSync::Jobs::ReportSyncTask
199
+ # Define the Canvas report name
200
+ report_name "my_really_cool_report_csv"
201
+
202
+ # Optional: Override report parameters
203
+ def report_parameters
204
+ super.merge(
205
+ "course_ids" => [1,2,3],
206
+ "param2" => "value"
207
+ )
208
+ end
209
+
210
+ # Implement the process method to handle the downloaded report
211
+ def process(file)
212
+ # file is the path to the downloaded report
213
+ puts "I downloaded a report to #{file}! Isn't that neat!"
214
+ # Add your processing logic here, e.g.:
215
+ # do_bulk_import(file, MyModel)
194
216
  end
195
217
  end
196
218
  ```
197
219
 
198
- ### Report starter
220
+ #### Examples
199
221
 
200
- You must implement a job that will enqueue a report starter for your report. (TODO: would be nice to make some sort of builder for this, so you just define the report and its params and then the gem runs it in a pre-defined job.)
222
+ For simple CSV reports (single model):
223
+ ```ruby
224
+ class SyncRubricAssessmentsJob < CanvasSync::Jobs::ReportSyncTask
225
+ report_name "rubric_assessments_csv"
201
226
 
202
- Let's say we have a custom Canvas report called "my_really_cool_report_csv". First, we would need to create a job class that will enqueue a report starter.
227
+ def process(file)
228
+ do_bulk_import(file, RubricAssessment)
229
+ end
230
+ end
231
+ ```
203
232
 
233
+ For ZIP reports (multiple models like provisioning reports):
204
234
  ```ruby
205
- class MyReallyCoolReportJob < CanvasSync::Jobs::ReportStarter
206
- def perform(options)
207
- super(
208
- 'my_really_cool_report_csv', # Report name
209
- { "parameters[param1]" => true }, # Report parameters
210
- MyCoolProcessor.to_s, # Your processor class as a string
211
- options
212
- )
235
+ class SyncProvisioningReportJob < CanvasSync::Jobs::ReportSyncTask
236
+ report_name "provisioning_csv"
237
+
238
+ def report_parameters
239
+ ...
240
+ end
241
+
242
+ def process(file)
243
+ # Handle ZIP extraction and process each model
244
+ # See lib/canvas_sync/jobs/sync_provisioning_report_job.rb for full example
213
245
  end
214
246
  end
215
247
  ```
216
248
 
217
- You can also see examples in `lib/canvas_sync/jobs/sync_users_job.rb` and `lib/canvas_sync/jobs/sync_provisioning_report.rb`.
249
+ You can also see more examples in:
250
+ - `lib/canvas_sync/jobs/sync_provisioning_report_job.rb` (ZIP report with multiple models)
251
+ - `lib/canvas_sync/jobs/sync_rubric_assessments_job.rb` (Simple CSV report)
252
+ - `lib/canvas_sync/jobs/sync_course_progresses_job.rb` (Simple CSV report)
218
253
 
219
254
  ### Batching
220
255
 
@@ -322,6 +357,68 @@ end
322
357
 
323
358
  `before_jit_sync` is provided as well, but its use case is niche. It can `throw :jit_found, record` to abort the rest of the JIT process and instead return a specific record. Again, should be quite niche.
324
359
 
360
+ ### UserViaPseudonym
361
+
362
+ CanvasSync provides the `CanvasSync::Concerns::UserViaPseudonym` concern for models that need to associate with Canvas users through their pseudonym (login) records. This is particularly useful so that records will always be linked to the correct user, regardless of whether users are merged in Canvas.
363
+
364
+ #### Basic Usage
365
+
366
+ Include the concern in your model and use the `belongs_to_user` class method:
367
+
368
+ ```ruby
369
+ class MyModel < ApplicationRecord
370
+ include CanvasSync::Concerns::UserViaPseudonym
371
+
372
+ # Associate with user using Canvas ID
373
+ belongs_to_user :teacher
374
+
375
+ # Or associate using SIS ID
376
+ belongs_to_user :student, using_sis_id: true
377
+ end
378
+ ```
379
+
380
+ This translates largely into:
381
+ ```ruby
382
+ belongs_to :teacher_pseudonym, class_name: "Pseudonym"
383
+ has_one :teacher, through: :pseudonym, class_name: "User"
384
+
385
+ def user=(u)
386
+ self.pseudonym = u.pseudonyms.active.last || u.pseudonyms.last
387
+ end
388
+ ```
389
+
390
+ #### Cached User IDs (Performance Optimization)
391
+
392
+ For better performance, you can add a cached user ID column to your table. This eliminates the need for joins when you just need the user ID:
393
+
394
+ ```ruby
395
+ # In your migration:
396
+ add_column :my_models, :cached_teacher_user_id, :bigint
397
+ add_index :my_models, :cached_teacher_user_id
398
+
399
+ # In your model:
400
+ class MyModel < ApplicationRecord
401
+ include CanvasSync::Concerns::UserViaPseudonym
402
+
403
+ belongs_to_user :teacher, cache_column: :cached_teacher_user_id
404
+ end
405
+ ```
406
+
407
+ ##### Refreshing Cached User IDs
408
+
409
+ When using cached columns, you can refresh them in bulk:
410
+
411
+ ```ruby
412
+ # Refresh all records
413
+ MyModel.update_cached_user_ids!
414
+
415
+ # Or refresh specific records
416
+ MyModel.where(some_condition: true).update_cached_user_ids!
417
+ ```
418
+
419
+ CanvasSync will also do this automatically after syncing.
420
+
421
+
325
422
  ### Job Batching
326
423
  CanvasSync adds a `CanvasSync::JobBatches` module. It adds Sidekiq/sidekiq-batch like support for Job Batches.
327
424
  It integrates automatically with both Sidekiq and ActiveJob. The API is highly similar to the Sidekiq-batch implementation,
@@ -111,6 +111,7 @@ module CanvasSync::Api
111
111
  end
112
112
 
113
113
  def switch_tenant(&block)
114
+ # TODO Move detection of this param into the PandaPal Apartment Elevator
114
115
  if defined?(PandaPal) && (org = params[:organization] || params[:org]).present?
115
116
  org = PandaPal::Organization.find(org)
116
117
  if org.respond_to?(:switch_tenant)
@@ -2,7 +2,7 @@ module CanvasSync
2
2
  class Config
3
3
  include ActiveSupport::Configurable
4
4
 
5
- config_accessor(:classes_to_only_log_errors_on) { ["CanvasSync::Jobs::ReportChecker"] }
5
+ config_accessor(:classes_to_only_log_errors_on) { ["CanvasSync::Jobs::ReportChecker", "CanvasSync::Jobs::ReportSyncTask::CheckerJob"] }
6
6
  config_accessor(:redis_key_prefix) { "cs" }
7
7
  end
8
8
  end
@@ -1,3 +1,5 @@
1
+ require "activerecord-import"
2
+
1
3
  module CanvasSync
2
4
  module Importers
3
5
  class BulkImporter
@@ -45,7 +45,7 @@ module CanvasSync
45
45
  b.on(:success, "#{self.class.to_s}.batch_completed", sync_batch_id: sync_batch.id)
46
46
  b.context = globals
47
47
  b.jobs do
48
- JobBatches::SerialBatchJob.perform_now(chain_definition)
48
+ JobBatches::SerialBatchJob.perform_now(chain_definition, description: "Top Serial Batch")
49
49
  end
50
50
  sync_batch.update(batch_bid: b.bid)
51
51
  end
@@ -0,0 +1,30 @@
1
+ module CanvasSync
2
+ module Jobs::BetaCleanup
3
+ # This job creates "temporary" tables that holds the records that were updated since a given date
4
+ # and deletes those records from the main tables.
5
+ # The tables are actual tables and not the temporary tables in the DB sense
6
+ # because they need to be reused in other contexts.
7
+ # They are deleted in a different job at a later point
8
+ class CreateTempTablesJob < CanvasSync::Job
9
+ def perform(options = {})
10
+ canvas_tables = options[:models]
11
+
12
+ return if canvas_tables.empty?
13
+
14
+ canvas_tables.each do |table_name|
15
+ model = table_name.singularize.camelize.constantize
16
+
17
+ updated_after = options[:updated_after] || 2.weeks.ago
18
+ ActiveRecord::Base.connection.drop_table("beta_#{table_name}", if_exists: true)
19
+ ActiveRecord::Base.connection.exec_query(<<~SQL
20
+ CREATE TABLE beta_#{table_name} AS
21
+ SELECT * FROM #{model.quoted_table_name} WHERE updated_at >= '#{updated_after}';
22
+ SQL
23
+ )
24
+ model.where("updated_at >= '#{updated_after}'").delete_all
25
+ end
26
+ end
27
+ end
28
+ end
29
+ end
30
+
@@ -0,0 +1,125 @@
1
+ module CanvasSync
2
+ module Jobs::BetaCleanup
3
+ class DeleteRelatedRecordsJob < CanvasSync::Job
4
+ # This method assumes that the main tables have been dumped into temporary tables (until provided date range, e.g 2 weeks)
5
+ # and that the main tables have been re-synced
6
+ # Considering these two criteria, assuming we have the main table now with
7
+ # records [1, 2, 3] and the temp table with records [1, 2, 3, 4], this means that
8
+ # record '4' needs to be taken out of related tables.
9
+ # This could for example be a record with a 'canvas_user_id = 4' in a table that is not part of CanvasSync models
10
+ # When this is implemented in CanvasSync, we could store the cleanup id and prefix the table name with it
11
+ # e.g: beta_cleanup_1_users
12
+ def delete_matching_records_between_main_and_temp
13
+ @canvas_tables.each do |table_name|
14
+ model = table_name.singularize.camelize.constantize
15
+
16
+ # here we can safely use the id to find the records we want - the primary keys are preserved in the temporary table
17
+ ActiveRecord::Base.connection.exec_query(<<~SQL
18
+ DELETE FROM beta_#{table_name} WHERE canvas_id IN (SELECT canvas_id FROM #{model.quoted_table_name});
19
+ SQL
20
+ )
21
+ end
22
+ end
23
+
24
+ # possible foreign keys per table. Some LTI tables may or may not always use the Canvas ID
25
+ #
26
+ # Some tables like user_observers have multiple user_id in them (e.g 'observed_user_id' and 'observing_user_id')
27
+ # Not sure if this matters here, to investigate
28
+ def foreign_keys
29
+ @foreign_keys ||= @canvas_tables.map do |table_name|
30
+ model_name = table_name.singularize
31
+ fks = ["#{model_name}_id", "canvas_#{model_name}_id"]
32
+ fks.each { |fk| foreign_key_to_table[fk] = table_name }
33
+ fks
34
+ end.flatten
35
+ end
36
+
37
+ # Find better names for these next two methods
38
+ # fk => table_name (1:1)
39
+ def foreign_key_to_table
40
+ @foreign_key_to_table ||= {}.with_indifferent_access
41
+ end
42
+
43
+ # fk => table_name[] (1:many)
44
+ def foreign_key_to_tables
45
+ h = {}.with_indifferent_access
46
+ ActiveRecord::Base.connection.tables.map do |lti_table|
47
+ # ignore the models we just synced or the beta tables
48
 + # This may not be super reliable in case someone names a table 'beta_xyz'
49
+ next if @canvas_tables.include?(lti_table.gsub(/^beta_/, ''))
50
+
51
+ columns = ActiveRecord::Base.connection.columns(lti_table).map(&:name)
52
+ cols_in_table = foreign_keys & columns
53
+
54
+ next if cols_in_table.empty?
55
+
56
+ cols_in_table.map do |col|
57
+ h[col] ||= []
58
+ h[col] << lti_table
59
+ end
60
+ end
61
+ @foreign_key_to_tables ||= h
62
+ end
63
+
64
+ def active_tables
65
+ # check the count of each "beta_" table
66
+ active_tables_sql = @canvas_tables.map do |table|
67
+ "SELECT 'beta_#{table}' AS table_name FROM beta_#{table} GROUP BY table_name HAVING COUNT(*) > 0"
68
+ end.join(' UNION ALL ')
69
+
70
+ # active tables are tables with at least one record
71
+ # If a beta table has no rows, we can ignore it in the next step
72
+ active_tables = ActiveRecord::Base.connection.exec_query(active_tables_sql).rows.flatten
73
+ end
74
+
75
+ def perform(options = {})
76
+ canvas_tables = options[:models] || []
77
+ @canvas_tables = canvas_tables
78
+
79
+ return [] if canvas_tables.empty?
80
+
81
+ delete_matching_records_between_main_and_temp
82
+
83
+ # every table is cleaned by foreign key to be idempotent in case of job failures (instead of cleaned by table)
84
+ # we can not clean by table because multiple tables may be using the same 'user_id' foreign key for example
85
+ foreign_key_to_tables.each do |fk, table_names|
86
+ temp_related_table = "beta_#{foreign_key_to_table[fk]}"
87
+ next unless active_tables.include?(temp_related_table)
88
+
89
+ pk = fk.include?('canvas') ? 'canvas_id' : 'id'
90
+
91
+ # Ideally this commented CTE is used so this process is idempotent but there is an issue when there are two tables with, for example user_id & canvas_user_id respectively
92
 + # The user_id foreign key will be processed and will delete the temporary records, which means the second foreign key, canvas_user_id, will not be able to retrieve any records from it
93
+ # I don't think idempotency is such a big deal here because:
94
+ # 1) this runs only in beta instances
95
+ # 2) it runs for data within (most likely) the last 2 weeks, which means not that many records
96
+ # In the meantime, we can use the CTE that does not delete records
97
+ =begin
98
+ sql = <<~SQL
99
+ WITH stale_records AS (
100
+ DELETE FROM #{temp_related_table}
101
+ WHERE #{pk} IN (
102
+ SELECT #{pk} FROM #{temp_related_table} ORDER BY #{pk} LIMIT 1000
103
+ )
104
+ RETURNING #{pk}
105
+ ),
106
+ SQL
107
+ =end
108
+ sql = <<~SQL
109
+ WITH stale_records AS (
110
+ SELECT #{pk} FROM #{temp_related_table} ORDER BY #{pk} LIMIT 1000
111
+ ),
112
+ SQL
113
+
114
+ # using CTEs for multiple deletion statements in one query
115
+ # This allows using the same 'stale_records' CTE for all of the delete statements
116
+ sql << table_names.map { |table_name| "#{table_name}_del AS ( DELETE FROM #{table_name} WHERE #{fk} IN (SELECT #{pk} FROM stale_records) )" }.join(",\n")
117
+ sql << 'SELECT count(*) AS deleted_count FROM stale_records'
118
+ ActiveRecord::Base.transaction do # transaction to use the CTE multiple times
119
+ ActiveRecord::Base.connection.exec_query(sql)
120
+ end
121
+ end
122
+ end
123
+ end
124
+ end
125
+ end
@@ -0,0 +1,16 @@
1
+ module CanvasSync
2
+ module Jobs::BetaCleanup
3
+ class DeleteTempTablesJob < CanvasSync::Job
4
+
5
+ def perform(options = {})
6
+ tables = options[:models] || []
7
+ return if tables.empty?
8
+
9
+ tables.each do |table_name|
10
+ ActiveRecord::Base.connection.drop_table(table_name, if_exists: true)
11
+ end
12
+ end
13
+ end
14
+ end
15
+ end
16
+
@@ -1,8 +1,12 @@
1
+ require_relative "./report_sync_task"
2
+
1
3
  module CanvasSync
2
4
  module Jobs
3
- # Starts a Canvas report and enqueues a ReportChecker
5
+ # @deprecated Use ReportSyncTask instead
6
+ # This class is now a shim that delegates to LegacyReportShimTask for backwards compatibility.
7
+ # ReportChecker and ReportProcessorJob are no longer used when going through this shim.
4
8
  class ReportStarter < CanvasSync::Job
5
- # @param report_name [Hash] e.g., 'provisioning_csv'
9
+ # @param report_name [String] e.g., 'provisioning_csv'
6
10
  # @param report_params [Hash] The Canvas report parameters
7
11
  # @param processor [String] a stringified report processor class name
8
12
  # @param options [Hash] hash of options that will be passed to the job processor
@@ -10,27 +14,17 @@ module CanvasSync
10
14
  # so that any later jobs in the chain will use the same generated report
11
15
  # @return [nil]
12
16
  def perform(report_name, report_params, processor, options, allow_redownloads: false)
13
- account_id = options[:account_id] || batch_context[:account_id] || "self"
14
- options[:sync_start_time] = DateTime.now.utc.iso8601
15
- options[:report_params] = report_params
16
- report_id = start_report(account_id, report_name, report_params)
17
- # TODO: Restore report caching support (does nayone actually use it?)
18
- # report_id = if allow_redownloads
19
- # get_cached_report(account_id, report_name, report_params)
20
- # else
21
- # start_report(account_id, report_name, report_params)
22
- # end
17
+ # Merge the legacy configuration into options that will be passed through batch_context
18
+ merged_options = options.merge({
19
+ legacy_report_starter: {
20
+ report: report_name,
21
+ params: report_params,
22
+ processor: processor,
23
+ },
24
+ })
23
25
 
24
- batch = JobBatches::Batch.new
25
- batch.description = "CanvasSync #{report_name} Fiber"
26
- batch.jobs do
27
- CanvasSync::Jobs::ReportChecker.set(wait: report_checker_wait_time).perform_later(
28
- report_name,
29
- report_id,
30
- processor.to_s,
31
- options
32
- )
33
- end
26
+ # Call perform_later on the shim task class
27
+ LegacyReportShimTask.perform_later(merged_options)
34
28
  end
35
29
 
36
30
  protected
@@ -40,35 +34,28 @@ module CanvasSync
40
34
  # merge_report_params(options, params, {}) is used. That doesn't work in Ruby 3.
41
35
  # In order to maintain compatibility with 2 and with any apps, this oddness is needed
42
36
  def merge_report_params(options, params={}, _kw_placeholder=nil, term_scope: true)
43
- term_scope = options[:canvas_term_id] || batch_context[:canvas_term_id] if term_scope == true
44
- if term_scope.present?
45
- params[:enrollment_term_id] = term_scope
46
- end
47
- if (updated_after = batch_context[:updated_after]).present?
48
- params[:updated_after] = updated_after
49
- end
50
- params.merge!(options[:report_params]) if options[:report_params].present?
51
- params.merge!(options[:report_parameters]) if options[:report_parameters].present?
52
- { parameters: params }
37
+ LegacyReportShimTask.merge_report_params(options, params, term_scope: term_scope)
53
38
  end
39
+ end
54
40
 
55
- private
41
+ # Internal shim task that reads configuration from batch_context to support the old ReportStarter API
42
+ class LegacyReportShimTask < CanvasSync::Jobs::ReportSyncTask
43
+ def report_name
44
+ options[:legacy_report_starter][:report]
45
+ end
56
46
 
57
- def get_cached_report(job_chain, account_id, report_name, report_params)
58
- # TODO: job_chain[:global_options] is no longer available and batch_context won't work for this
59
- if job_chain[:global_options][report_name].present?
60
- job_chain[:global_options][report_name]
61
- else
62
- report_id = start_report(job_chain, account_id, report_name, report_params)
63
- job_chain[:global_options][report_name] = report_id
64
- report_id
65
- end
47
+ def report_parameters
48
+ options[:legacy_report_starter][:params]
66
49
  end
67
50
 
68
- def start_report(account_id, report_name, report_params)
69
- report = CanvasSync.get_canvas_sync_client(batch_context)
70
- .start_report(account_id, report_name, report_params)
71
- report["id"]
51
+ def process(file)
52
+ processor_class_name = options[:legacy_report_starter][:processor]
53
+ processor_class = processor_class_name.constantize
54
+ account_id = options[:account_id] || batch_context[:account_id] || "self"
55
+
56
+ # The old processor signature: process(file_path, options, report_id)
57
+ # Note: The third param was report_id in ReportProcessorJob but was account_id in practice
58
+ processor_class.process(file, options, account_id)
72
59
  end
73
60
  end
74
61
  end