canvas_sync 0.3.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 8a91540c9a7c6e087d7b9c3f21340e689c0b77da
-  data.tar.gz: 9aad571654e1fe0b167f2af732bb1f49f4204a1f
+  metadata.gz: 6140a1f5a19c611e6ef92944993bcb0fb676e333
+  data.tar.gz: b26ca4c005bcdaa8fdfd4d6dc4f99342f391601a
 SHA512:
-  metadata.gz: 0654cbfc7984fca42f257a9ba0644fb21edf01b3d12d65ab53af9ca5698760c416168208162ab63b8fd98ce992c220ab26303218ed189f31b21d94539c5c0414
-  data.tar.gz: 9a256cb21575fb57108c39570a5259a967dfc4aa509e2bd48790e8f27aa35b72cc94f1cff7b15db877a72941e8ff7a1783b9635e1e7ecfb35f652e88f6ff6aca
+  metadata.gz: 2c27000087b00e839a5eb39fec4c03d08c193427ac19cfb8e9757f3103f5167a7f48fdd2f34af75f6257a6a8555d109f92494df2bf89b20e6c75ed6895b79b7b
+  data.tar.gz: 17987763dd6c6e7b47fe1815fa835cd57b4b1f01997879fd8f8279607b3f64b75b5c22eadeaaeeb8e3d0192235edcac69d7972a52e20aebab1a51a6842995700
@@ -4,5 +4,8 @@ module CanvasSync
   # Use this model to track failures, job run times, and metadata about a job.
   class JobLog < ApplicationRecord
     serialize :job_arguments, Array
+
+    ERROR_STATUS = 'error'
+    SUCCESS_STATUS = 'success'
   end
 end
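The new status constants give JobLog rows a queryable outcome field. A minimal console sketch, assuming the generated job_logs table has a string status column (the migration itself is not part of this diff):

    # Hypothetical console usage; assumes job_logs has a `status` string column.
    failed = CanvasSync::JobLog.where(status: CanvasSync::JobLog::ERROR_STATUS)
    failed.find_each do |log|
      # exception and backtrace are filled in by the around_perform hook shown further down
      puts "#{log.job_class}: #{log.exception}"
    end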
@@ -13,13 +13,13 @@ module CanvasSync
       # @param klass [Object] e.g., User
       # @param conflict_target [Symbol] represents the database column that will determine if we need to update
       # or insert a given row. e.g.,: canvas_user_id
-      # @param exclude_duplicates [Boolean] importing will break if the file has any duplicate rows. Set this
-      # to true in order to have the bulk importer filter those out.
+      # @param import_args [Hash] Any arguments passed here will be passed through to ActiveRecord::BulkImport.
+      # Note: passing the key [:on_duplicate_key_ignore] will override the default behavior of [:on_duplicate_key_update]
       # @yieldparam [Array] row if a block is passed in it will yield the current row from the CSV.
       # This can be used if you need to filter or massage the data in any way.
-      def self.import(report_file_path, mapping, klass, conflict_target, exclude_duplicates=false)
+      def self.import(report_file_path, mapping, klass, conflict_target, import_args: {})
         csv_column_names = mapping.keys
-        database_column_names = mapping.values.map{ |value| value[:database_column_name] }
+        database_column_names = mapping.values.map {|value| value[:database_column_name]}
         rows = []
         row_ids = {}
 
@@ -27,38 +27,41 @@ module CanvasSync
           row = yield(row) if block_given?
           next if row.nil?
 
-          if exclude_duplicates
-            next if row_ids[row[conflict_target]]
-            row_ids[row[conflict_target]] = true
-          end
+          next if row_ids[row[conflict_target]]
+          row_ids[row[conflict_target]] = true
 
           rows << csv_column_names.map do |column|
             if mapping[column][:type] == :datetime
-              DateTime.parse(row[column]).utc if row[column].present?
+              # todo - add some timezone config to the mapping.
+              # In cases where the timestamp or date doesn't include a timezone, you should be able to specify one
+              DateTime.parse(row[column]).utc rescue ''
             else
               row[column]
             end
          end
 
           if rows.length >= batch_size
-            perform_import(klass, database_column_names, rows, conflict_target)
+            perform_import(klass, database_column_names, rows, conflict_target, import_args)
             rows = []
+            row_ids = {}
           end
         end
 
-        perform_import(klass, database_column_names, rows, conflict_target)
+        perform_import(klass, database_column_names, rows, conflict_target, import_args)
       end
 
       private
 
-      def self.perform_import(klass, columns, rows, conflict_target)
+      def self.perform_import(klass, columns, rows, conflict_target, import_args={})
         return if rows.length == 0
         columns = columns.dup
-        klass.import(columns, rows, validate: false, on_duplicate_key_update: {
-          conflict_target: conflict_target,
-          condition: condition_sql(klass, columns),
-          columns: columns
-        })
+        options = {validate: false, on_duplicate_key_update: {
+          conflict_target: conflict_target,
+          condition: condition_sql(klass, columns),
+          columns: columns
+        }}.merge(import_args)
+        options.delete(:on_duplicate_key_update) if options.has_key?(:on_duplicate_key_ignore)
+        klass.import(columns, rows, options)
       end
 
       # This method generates SQL that looks like:
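With the import_args parameter documented above, callers can now opt out of the default upsert and ignore conflicting rows instead. A hypothetical call site (the importer class name and the mapping hash are assumed, not shown in this hunk):

    # Hypothetical caller; class and mapping names are illustrative.
    CanvasSync::Importers::BulkImporter.import(
      report_file_path,
      mapping[:users][:report_columns],
      User,
      mapping[:users][:conflict_target].to_sym,
      import_args: { on_duplicate_key_ignore: true }
    )

Because perform_import merges import_args over the defaults and then deletes :on_duplicate_key_update whenever :on_duplicate_key_ignore is present, the options handed to klass.import resolve to { validate: false, on_duplicate_key_ignore: true } for this call.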
@@ -72,8 +75,8 @@ module CanvasSync
       # run_the_users_sync!
       # changed = User.where("updated_at >= ?", started_at)
       def self.condition_sql(klass, columns)
-        columns_str = columns.map { |c| "#{klass.quoted_table_name}.#{c}" }.join(", ")
-        excluded_str = columns.map { |c| "EXCLUDED.#{c}" }.join(", ")
+        columns_str = columns.map {|c| "#{klass.quoted_table_name}.#{c}"}.join(", ")
+        excluded_str = columns.map {|c| "EXCLUDED.#{c}"}.join(", ")
         "(#{columns_str}) IS DISTINCT FROM (#{excluded_str})"
       end
 
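For illustration, condition_sql builds the comparison used as the upsert's update condition, so rows whose incoming values already match are left untouched. With a hypothetical model and column list on PostgreSQL the return value would look like:

    # Hypothetical: condition_sql(User, [:name, :email])
    # => '("users".name, "users".email) IS DISTINCT FROM (EXCLUDED.name, EXCLUDED.email)'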
@@ -5,9 +5,9 @@ module CanvasSync
   class Job < ActiveJob::Base
     around_perform do |job, block|
       @job_log = CanvasSync::JobLog.create(
-          started_at: Time.now,
-          job_class: self.class.name,
-          job_arguments: job.arguments
+        started_at: Time.now,
+        job_class: self.class.name,
+        job_arguments: job.arguments
       )
 
       begin
@@ -15,10 +15,16 @@ module CanvasSync
       rescue => e
         @job_log.exception = "#{e.class}: #{e.message}"
         @job_log.backtrace = e.backtrace
+        @job_log.status = JobLog::ERROR_STATUS
         raise e
       ensure
-        @job_log.completed_at = Time.now
-        @job_log.save!
+        if @job_log.job_class == 'CanvasSync::Jobs::ReportChecker' && @job_log.status != JobLog::ERROR_STATUS
+          @job_log.destroy
+        else
+          @job_log.completed_at = Time.now
+          @job_log.status ||= JobLog::SUCCESS_STATUS
+          @job_log.save!
+        end
       end
     end
 
@@ -13,6 +13,7 @@ module CanvasSync
       # @param options [Hash] hash of options that will be passed to the job processor
       # @return [nil]
       def perform(job_chain, report_name, report_url, processor, options)
+        @job_log.update_attributes(job_class: processor)
         download(report_name, report_url) do |file_path|
           options = options.merge({
             legacy_support: job_chain[:global_options][:legacy_support],
@@ -62,8 +62,7 @@ module CanvasSync
           report_file_path,
           mapping[:users][:report_columns],
           User,
-          mapping[:users][:conflict_target].to_sym,
-          true
+          mapping[:users][:conflict_target].to_sym
         )
       end
 
@@ -81,8 +80,7 @@ module CanvasSync
           report_file_path,
           mapping[:enrollments][:report_columns],
           Enrollment,
-          mapping[:enrollments][:conflict_target].to_sym,
-          true
+          mapping[:enrollments][:conflict_target].to_sym
        )
       end
 
@@ -1,3 +1,3 @@
 module CanvasSync
-  VERSION = "0.3.0"
+  VERSION = '0.3.1'
 end
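Applications pick up 0.3.1 by bumping the dependency; a typical Gemfile line (the constraint style is the application's choice):

    gem 'canvas_sync', '~> 0.3.1'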
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: canvas_sync
 version: !ruby/object:Gem::Version
-  version: 0.3.0
+  version: 0.3.1
 platform: ruby
 authors:
 - Nate Collings
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-11-09 00:00:00.000000000 Z
+date: 2017-11-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler