bridge_cache 0.3.14 → 0.3.20

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only.
Files changed (70)
  1. checksums.yaml +4 -4
  2. data/Rakefile +1 -1
  3. data/app/lib/bridge_cache/data/bridge_model.rb +76 -68
  4. data/app/lib/bridge_cache/jobs/cleanup_job.rb +6 -4
  5. data/app/lib/bridge_cache/jobs/import_check.rb +32 -16
  6. data/app/lib/bridge_cache/jobs/import_data.rb +24 -17
  7. data/app/lib/bridge_cache/jobs/import_row.rb +7 -5
  8. data/app/lib/bridge_cache/jobs/import_start.rb +13 -13
  9. data/app/lib/bridge_cache/plugins/csv_dump.rb +89 -79
  10. data/app/lib/bridge_cache/plugins/data_transform.rb +18 -13
  11. data/app/lib/bridge_cache/utils/worker_helper.rb +9 -6
  12. data/app/lib/bridge_cache/webhooks/message.rb +13 -11
  13. data/app/lib/bridge_cache/webhooks/request.rb +10 -8
  14. data/app/models/bridge_cache/affiliated_sub_account.rb +12 -7
  15. data/app/models/bridge_cache/course_template.rb +21 -9
  16. data/app/models/bridge_cache/custom_field.rb +4 -2
  17. data/app/models/bridge_cache/domain.rb +14 -7
  18. data/app/models/bridge_cache/enrollment.rb +4 -2
  19. data/app/models/bridge_cache/enrollment_profile.rb +4 -4
  20. data/app/models/bridge_cache/grant.rb +4 -2
  21. data/app/models/bridge_cache/group.rb +10 -5
  22. data/app/models/bridge_cache/group_enrollment.rb +5 -3
  23. data/app/models/bridge_cache/host.rb +3 -2
  24. data/app/models/bridge_cache/learner_custom_field_value.rb +4 -2
  25. data/app/models/bridge_cache/live_course.rb +15 -6
  26. data/app/models/bridge_cache/live_course_enrollment.rb +8 -4
  27. data/app/models/bridge_cache/live_course_session.rb +9 -4
  28. data/app/models/bridge_cache/live_course_session_registration.rb +9 -5
  29. data/app/models/bridge_cache/membership.rb +5 -3
  30. data/app/models/bridge_cache/program.rb +17 -7
  31. data/app/models/bridge_cache/program_enrollment.rb +4 -2
  32. data/app/models/bridge_cache/program_group_enrollment.rb +4 -2
  33. data/app/models/bridge_cache/program_item.rb +2 -1
  34. data/app/models/bridge_cache/tag.rb +3 -2
  35. data/app/models/bridge_cache/tagging.rb +5 -3
  36. data/app/models/bridge_cache/user.rb +20 -10
  37. data/app/models/bridge_cache/user_course_visit.rb +6 -3
  38. data/db/migrate/20170517203244_create_bridge_cache_domains.rb +1 -0
  39. data/db/migrate/20170517203328_create_bridge_cache_users.rb +1 -0
  40. data/db/migrate/20170517203421_create_bridge_cache_programs.rb +1 -0
  41. data/db/migrate/20170517204055_create_bridge_cache_custom_fields.rb +1 -0
  42. data/db/migrate/20170517204248_create_bridge_cache_grants.rb +1 -0
  43. data/db/migrate/20170518211404_create_bridge_cache_enrollments.rb +1 -0
  44. data/db/migrate/20170518211627_create_bridge_cache_groups.rb +1 -0
  45. data/db/migrate/20170518211703_create_bridge_cache_program_items.rb +1 -0
  46. data/db/migrate/20170518211714_create_bridge_cache_program_enrollments.rb +1 -0
  47. data/db/migrate/20170518211736_create_bridge_cache_program_group_enrollments.rb +1 -0
  48. data/db/migrate/20170518211748_create_bridge_cache_tags.rb +1 -0
  49. data/db/migrate/20170518211754_create_bridge_cache_taggings.rb +1 -0
  50. data/db/migrate/20170518211817_create_bridge_cache_course_templates.rb +1 -0
  51. data/db/migrate/20170523164248_create_bridge_cache_group_enrollments.rb +1 -0
  52. data/db/migrate/20170523165014_create_bridge_cache_memberships.rb +1 -0
  53. data/db/migrate/20170615145554_create_bridge_cache_learner_custom_field_values.rb +1 -0
  54. data/db/migrate/20171017211850_create_affiliated_sub_accounts.rb +1 -0
  55. data/db/migrate/20171019195443_create_live_courses.rb +1 -0
  56. data/db/migrate/20171019195503_create_live_course_enrollments.rb +1 -0
  57. data/db/migrate/20171019195512_create_live_course_session_registrations.rb +1 -0
  58. data/db/migrate/20171019195516_create_live_course_sessions.rb +1 -0
  59. data/db/migrate/20171031165231_add_indexes.rb +5 -2
  60. data/db/migrate/20180316205458_create_bridge_cache_enrollment_profiles.rb +1 -0
  61. data/db/migrate/20180320190443_add_index_to_enrollment_profile.rb +1 -0
  62. data/db/migrate/20180406204906_add_uniq_constraints.rb +43 -21
  63. data/db/migrate/20180801162929_create_bridge_cache_hosts.rb +1 -0
  64. data/db/migrate/20180925210056_add_bridge_timestamps_to_bridge_models.rb +2 -0
  65. data/db/migrate/20190801163930_create_user_course_visits.rb +3 -1
  66. data/db/migrate/20200717142423_add_hris_id_to_bridge_cache_users.rb +7 -0
  67. data/db/migrate/20201102153622_change_external_course_id_to_string.rb +5 -0
  68. data/lib/bridge_cache.rb +1 -0
  69. data/lib/bridge_cache/version.rb +1 -1
  70. metadata +23 -13
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: eea899b543665aad075f716c90a25364439044cbd691ab481b084729bf915a56
-  data.tar.gz: 1dac6755245cb45fa1c0f8923e56ff6fd01eb6dc5328abed7c28e3dffa476f67
+  metadata.gz: c515d7e48aed8e9e9d05b6cec9d956074704206846744fca065335b23a389609
+  data.tar.gz: 30c0dee61a1f5dd8120a2c495ad8b625fe7e1da3ba270bba101b64a6be3987ec
 SHA512:
-  metadata.gz: 8211bf6a5a48b13b50822a451c150167a718d96f994550ff69c9fca71f6d3f493cffab97d7db53109d89f4ff90f0646e94b15c0ef510567b8f4e71a665cf48e9
-  data.tar.gz: c3779fc81eb4a86ddde1a8ae13e7c9ba3162c7e773df8c53a1802170100725a5bfaf3a2c99d92b56e4a38c6e4e89d93a7c56b5ad9795b50f64c27a2e7ad35edf
+  metadata.gz: 908acbd97a28b594ae5295900064a94cf8a905c7ef19ef066b7e92e11c39baa86297cee0704903dc70c5bdee19b68195c182322a85e690b34ba9b0d1c640ea62
+  data.tar.gz: b6ad05dc8cf847bfbbea1d262abea61131368b865827e316055a263bf3e23114875f6c78bbe0c6976c241d6833b2a586dc61b6624344b9adf77cf877f02ebddb
data/Rakefile CHANGED
@@ -14,7 +14,7 @@ RDoc::Task.new(:rdoc) do |rdoc|
   rdoc.rdoc_files.include('lib/**/*.rb')
 end
 
-APP_RAKEFILE = File.expand_path('../test/dummy/Rakefile', __FILE__)
+APP_RAKEFILE = File.expand_path('test/dummy/Rakefile', __dir__)
 load 'rails/tasks/engine.rake'
 
 load 'rails/tasks/statistics.rake'
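Note on the APP_RAKEFILE change: the two forms are equivalent, because __dir__ is the expanded directory of the current file, so the rewrite only drops the '../' hop that the __FILE__ idiom needed. A quick sketch (the gem path is a placeholder):

    # Old and new forms resolve to the same absolute path.
    File.expand_path('../test/dummy/Rakefile', __FILE__) # => "/path/to/gem/test/dummy/Rakefile"
    File.expand_path('test/dummy/Rakefile', __dir__)     # => "/path/to/gem/test/dummy/Rakefile"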
data/app/lib/bridge_cache/data/bridge_model.rb CHANGED
@@ -1,92 +1,100 @@
-module BridgeCache::Data
-  module BridgeModel
-    EXCLUDED_COLUMNS = Set['id', 'created_at', 'updated_at']
-    OVERRIDDEN_COLUMNS = { 'bridge_created_at' => 'created_at'.freeze, 'bridge_updated_at' => 'updated_at'.freeze }
+# rubocop:disable Naming/PredicateName,Metrics/MethodLength
+module BridgeCache
+  module Data
+    module BridgeModel
+      EXCLUDED_COLUMNS = Set['id', 'created_at', 'updated_at']
+      OVERRIDDEN_COLUMNS = { 'bridge_created_at' => 'created_at'.freeze,
+                             'bridge_updated_at' => 'updated_at'.freeze }.freeze
+
+      def adjusted_table_name
+        if BridgeCache.use_internal_database
+          table_name
+        else
+          to_s.demodulize.underscore.pluralize
+        end
+      end
 
-    def adjusted_table_name
-      if BridgeCache.use_internal_database
-        table_name
-      else
-        to_s.demodulize.underscore.pluralize
+      def self.extended(base)
+        base.class_eval do
+          scope :in_domain, ->(domain_id) { for_domain(domain_id) }
+          scope :active, -> { is_active? }
+          self.table_name = adjusted_table_name
+        end
       end
-    end
 
-    def self.extended(base)
-      base.class_eval do
-        scope :in_domain, ->(domain_id) { for_domain(domain_id) }
-        scope :active, -> { is_active? }
-        self.table_name = adjusted_table_name
+      def format_import_row(row)
+        row
       end
-    end
 
-    def format_import_row(row)
-      row
-    end
+      def import_from_csv(file_path)
+        BridgeCache::Plugins::CSVDump.dump_to_table(self, file_path)
+      end
 
-    def import_from_csv(file_path)
-      BridgeCache::Plugins::CSVDump.dump_to_table(self, file_path)
-    end
+      def create_from_csv_row(row)
+        BridgeCache::Plugins::CSVDump.dump_row(self, row)
+      end
 
-    def create_from_csv_row(row)
-      BridgeCache::Plugins::CSVDump.dump_row(self, row)
-    end
+      def cleanup(row_ids)
+        # Implement this method in your model if you want to do any sort of post creation cleanup.
+        # See tagging.rb for an example.
+      end
 
-    def cleanup(row_ids)
-      # Implement this method in your model if you want to do any sort of post creation cleanup.
-      # See tagging.rb for an example.
-    end
+      def webhook_updated(message)
+        create_or_update_from_webhook(message)
+      end
 
-    def webhook_updated(message)
-      create_or_update_from_webhook(message)
-    end
+      def webhook_created(message)
+        create_or_update_from_webhook(message)
+      end
 
-    def webhook_created(message)
-      create_or_update_from_webhook(message)
-    end
+      def webhook_deleted(message)
+        message.payload_class.where(bridge_id: message.resource_object['id']).destroy_all
+      end
 
-    def webhook_deleted(message)
-      message.payload_class.where(bridge_id: message.resource_object['id']).destroy_all
-    end
+      def webhook_completed(_message)
+        raise 'Method not implemented'
+      end
 
-    def webhook_completed(_message)
-      raise 'Method not implemented'
-    end
+      def for_domain(domain_id)
+        where(domain_id: domain_id)
+      end
 
-    def for_domain(domain_id)
-      where(domain_id: domain_id)
-    end
+      def uniq_constraint_name
+        "#{table_name.downcase}_bridge_uniq"
+      end
 
-    def uniq_constraint_name
-      "#{table_name.downcase}_bridge_uniq"
-    end
+      def unique_column_names
+        %w[bridge_id]
+      end
 
-    def unique_column_names
-      %w[bridge_id]
-    end
+      def is_active?
+        where(deleted_at: nil) if column_names.include? 'deleted_at'
+      end
 
-    def is_active?
-      where(deleted_at: nil) if column_names.include? 'deleted_at'
-    end
+      def csv_mapping
+        columns = column_names.as_json
+        mapping = {}
 
-    def csv_mapping
-      columns = column_names.as_json
-      mapping = {}
-      columns.each do |column|
-        next if EXCLUDED_COLUMNS.include? column
-        if OVERRIDDEN_COLUMNS.key?(column)
-          mapping[OVERRIDDEN_COLUMNS[column]] = column
-        else
-          mapping[column] = column
+        columns.each do |column|
+          next if EXCLUDED_COLUMNS.include? column
+
+          if OVERRIDDEN_COLUMNS.key?(column)
+            mapping[OVERRIDDEN_COLUMNS[column]] = column
+          else
+            mapping[column] = column
+          end
         end
+
+        mapping['bridge_id'] = 'bridge_id'
+        mapping
       end
-      mapping['bridge_id'] = 'bridge_id'
-      mapping
-    end
 
-    private
+      private
 
-    def create_or_update_from_webhook(message)
-      message.payload_class.create_from_csv_row(message.resource_object)
+      def create_or_update_from_webhook(message)
+        message.payload_class.create_from_csv_row(message.resource_object)
+      end
     end
   end
 end
+# rubocop:enable Naming/PredicateName,Metrics/MethodLength
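Most of this release is this kind of namespace flattening: compact definitions like module BridgeCache::Data are rewritten as nested modules, apparently to satisfy Rubocop, with behavior unchanged. BridgeModel remains the shared concern every model in the gem extends: it wires up the in_domain and active scopes, points table_name at the internal or external table, and feeds the CSV import via csv_mapping. A minimal sketch of a consuming model (Widget and its base class are hypothetical; the real models live under data/app/models/bridge_cache/):

    module BridgeCache
      # Hypothetical model for illustration; see user.rb and friends for real ones.
      class Widget < ActiveRecord::Base
        extend BridgeCache::Data::BridgeModel
      end
    end

    # csv_mapping keys are CSV headers and values are database columns: the
    # CSV's created_at header lands in the bridge_created_at column, while the
    # model's own id/created_at/updated_at columns are excluded.
    BridgeCache::Widget.in_domain(42).active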
data/app/lib/bridge_cache/jobs/cleanup_job.rb CHANGED
@@ -1,7 +1,9 @@
-module BridgeCache::Jobs
-  class CleanupJob < ActiveJob::Base
-    def perform(model, row_ids)
-      "BridgeCache::#{model.camelcase}".constantize.cleanup(row_ids)
+module BridgeCache
+  module Jobs
+    class CleanupJob < ActiveJob::Base
+      def perform(model, row_ids)
+        "BridgeCache::#{model.camelcase}".constantize.cleanup(row_ids)
+      end
     end
   end
 end
data/app/lib/bridge_cache/jobs/import_check.rb CHANGED
@@ -1,22 +1,38 @@
-module BridgeCache::Jobs
-  class ImportCheck < ActiveJob::Base
-    queue_as :default
-    include BridgeCache::Utils::WorkerHelper
+# rubocop:disable Metrics/AbcSize,Metrics/MethodLength
+module BridgeCache
+  module Jobs
+    class ImportCheck < ActiveJob::Base
+      queue_as :default
+      include BridgeCache::Utils::WorkerHelper
 
-    ATTEMPTS_THRESHOLD = 30
+      def max_import_attempts
+        ENV['BRIDGE_CACHE_MAX_IMPORT_ATTEMPTS'].to_i || 30
+      end
+
+      def remote_data
+        @remote_data ||= BridgeBlueprint::RemoteData.new(@account_settings[:url], @account_settings[:api_key],
+                                                         @account_settings[:api_secret])
+      end
+
+      def perform(account_settings, attempts, chain = [], retries = 0)
+        @account_settings = account_settings.with_indifferent_access
+
+        if attempts > max_import_attempts
+          Rails.logger.fatal("Bridge data dump failed to download after #{(max_import_attempts * 30) / 60} minutes")
+        elsif remote_data.status == BridgeBlueprint::Constants::STATUS_COMPLETE
+          BridgeCache::Jobs::ImportData.set(queue: queue_name).perform_later(@account_settings, chain)
+        elsif remote_data.status == BridgeBlueprint::Constants::STATUS_PENDING
+          BridgeCache::Jobs::ImportCheck.set(queue: queue_name, wait: 30.seconds).perform_later(@account_settings,
+                                                                                                attempts + 1, chain)
+        elsif remote_data.status == BridgeBlueprint::Constants::STATUS_FAILED ||
+              remote_data.status == BridgeBlueprint::Constants::STATUS_NOT_FOUND
+
+          raise 'Bridge data dump download failed' if retries < 1
 
-    def perform(account_settings, attempts, chain = [])
-      account_settings = account_settings.with_indifferent_access
-      remote_data = BridgeBlueprint::RemoteData.new(account_settings[:url], account_settings[:api_key], account_settings[:api_secret])
-      if attempts > ATTEMPTS_THRESHOLD
-        Rails.logger.fatal("Bridge data dump failed to download after #{(ATTEMPTS_THRESHOLD * 30) / 60} minutes")
-      elsif remote_data.status == BridgeBlueprint::Constants::STATUS_COMPLETE
-        BridgeCache::Jobs::ImportData.set(queue: queue_name).perform_later(account_settings, chain)
-      elsif remote_data.status == BridgeBlueprint::Constants::STATUS_PENDING
-        BridgeCache::Jobs::ImportCheck.set(queue: queue_name, wait: 30.seconds).perform_later(account_settings, attempts + 1, chain)
-      elsif remote_data.status == BridgeBlueprint::Constants::STATUS_FAILED || remote_data.status == BridgeBlueprint::Constants::STATUS_NOT_FOUND
-        raise 'Bridge data dump download failed'
+          BridgeCache::Jobs::ImportStart.set(queue: queue_name).perform_later(@account_settings, chain, retries - 1)
+        end
       end
     end
   end
 end
+# rubocop:enable Metrics/AbcSize,Metrics/MethodLength
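A behavioral note on max_import_attempts, which replaces ATTEMPTS_THRESHOLD: in Ruby, nil.to_i is 0 and to_i never returns nil, so the || 30 fallback is unreachable. With BRIDGE_CACHE_MAX_IMPORT_ATTEMPTS unset, the cap becomes 0 and the poll gives up after a single pending check. If the old default of 30 is wanted, one way to keep it would be a sketch like this (not part of the gem):

    # Hypothetical variant: string default first, then conversion.
    def max_import_attempts
      ENV.fetch('BRIDGE_CACHE_MAX_IMPORT_ATTEMPTS', '30').to_i
    end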
data/app/lib/bridge_cache/jobs/import_data.rb CHANGED
@@ -1,23 +1,30 @@
-module BridgeCache::Jobs
-  class ImportData < ActiveJob::Base
-    include BridgeCache::Utils::WorkerHelper
-    queue_as :default
+# rubocop:disable Metrics/AbcSize,Metrics/MethodLength
+module BridgeCache
+  module Jobs
+    class ImportData < ActiveJob::Base
+      include BridgeCache::Utils::WorkerHelper
+      queue_as :default
 
-    def perform(account_settings, chain = [])
-      account_settings = account_settings.with_indifferent_access
-      remote_data = BridgeBlueprint::RemoteData.new(account_settings[:url], account_settings[:api_key], account_settings[:api_secret])
-      Dir.mktmpdir do |dir|
-        path = "#{dir}/data_dump"
-        remote_data.store_file(path)
-        import = BridgeBlueprint::DataDump.new(path)
-        models = account_settings[:models] || BridgeCache.resolved_models(account_settings[:is_subaccount])
-        models.each do |model|
-          ids = []
-          ids << BridgeCache::Plugins::CSVDump.bulk_import(import, model)
-          BridgeCache::Jobs::CleanupJob.set(queue: queue_name).perform_later(model, ids)
+      def perform(account_settings, chain = [])
+        account_settings = account_settings.with_indifferent_access
+        remote_data = BridgeBlueprint::RemoteData.new(account_settings[:url], account_settings[:api_key],
+                                                      account_settings[:api_secret])
+        Dir.mktmpdir do |dir|
+          path = "#{dir}/data_dump"
+          remote_data.store_file(path)
+          import = BridgeBlueprint::DataDump.new(path)
+          models = account_settings[:models] || BridgeCache.resolved_models(account_settings[:is_subaccount])
+
+          models.each do |model|
+            ids = []
+            ids << BridgeCache::Plugins::CSVDump.bulk_import(import, model)
+            BridgeCache::Jobs::CleanupJob.set(queue: queue_name).perform_later(model, ids)
+          end
         end
+
+        BridgeCache::Utils::WorkerHelper.invoke_next(chain)
       end
-      BridgeCache::Utils::WorkerHelper.invoke_next(chain)
     end
   end
 end
+# rubocop:enable Metrics/AbcSize,Metrics/MethodLength
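account_settings[:models] lets a caller restrict an import to a subset of models; otherwise BridgeCache.resolved_models supplies the full set. Each entry is an underscored, singular model name, which CSVDump.bulk_import camelcases and constantizes. A sketch of a narrowed import (the URL and env var names are hypothetical):

    settings = {
      url: 'https://example.bridgeapp.com',
      api_key: ENV['BRIDGE_API_KEY'],
      api_secret: ENV['BRIDGE_API_SECRET'],
      models: %w[user enrollment] # import only these two models
    }
    BridgeCache::Jobs::ImportData.perform_later(settings)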
data/app/lib/bridge_cache/jobs/import_row.rb CHANGED
@@ -1,9 +1,11 @@
-module BridgeCache::Jobs
-  class ImportRow < ActiveJob::Base
-    queue_as :default
+module BridgeCache
+  module Jobs
+    class ImportRow < ActiveJob::Base
+      queue_as :default
 
-    def perform(model, row)
-      "BridgeCache::#{model.camelcase}".constantize.create_from_csv_row(row)
+      def perform(model, row)
+        "BridgeCache::#{model.camelcase}".constantize.create_from_csv_row(row)
+      end
     end
   end
 end
data/app/lib/bridge_cache/jobs/import_start.rb CHANGED
@@ -1,17 +1,17 @@
-module BridgeCache::Jobs
-  class ImportStart < ActiveJob::Base
-    queue_as :default
+module BridgeCache
+  module Jobs
+    class ImportStart < ActiveJob::Base
+      queue_as :default
 
-    def perform(account_settings, chain = [])
-      data = BridgeBlueprint::RemoteData.new(account_settings[:url], account_settings[:api_key], account_settings[:api_secret])
-      data.start_data_report
-      BridgeCache::Jobs::ImportCheck
-        .set(queue: queue_name, wait: 30.seconds)
-        .perform_later(
-          account_settings,
-          0,
-          chain
-        )
+      def perform(account_settings, chain = [], retries = 0)
+        data = BridgeBlueprint::RemoteData.new(account_settings[:url], account_settings[:api_key],
+                                               account_settings[:api_secret])
+        data.start_data_report
+
+        BridgeCache::Jobs::ImportCheck
+          .set(queue: queue_name, wait: 30.seconds)
+          .perform_later(account_settings, 0, chain, retries)
+      end
    end
  end
 end
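Taken together, the jobs now form a retryable pipeline: ImportStart requests a data report and enqueues ImportCheck; ImportCheck polls every 30 seconds until the dump is complete, restarting the pipeline on failure while retries remain; ImportData downloads and imports the dump, then calls WorkerHelper.invoke_next(chain). A sketch of kicking it off, reusing the hypothetical settings hash from the ImportData note above (the chain format is defined in worker_helper.rb, whose diff is not shown here in full):

    # Allow two full restarts before ImportCheck raises
    # 'Bridge data dump download failed'.
    BridgeCache::Jobs::ImportStart.perform_later(settings, [], 2)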
data/app/lib/bridge_cache/plugins/csv_dump.rb CHANGED
@@ -1,97 +1,107 @@
 require 'csv'
 
-module BridgeCache::Plugins
-  class CSVDump
-    MAX_ROW_INTERVAL = 5000
-
-    def self.bulk_import(iterator, model)
-      ids = []
-      rows = []
-      klass = "BridgeCache::#{model.camelcase}".constantize
-
-      csv_column_names = klass.csv_mapping.keys
-      database_column_names = klass.csv_mapping.values
-
-      iterator.each_row(model.pluralize) do |row|
-        row = remove_bad_columns(klass, BridgeCache::Plugins::DataTransform.set_bridge_id(row).to_h)
-        row = klass.format_import_row(row)
-        rows << csv_column_names.map { |column| row[column] }
-        ids << row['bridge_id'] if row['bridge_id'].present? # Some CSV's do not have an ID column
-
-        if rows.length >= BridgeCache.batch_size
-          perform_bulk_import(klass, database_column_names, rows)
-          rows = []
+# rubocop:disable Metrics/MethodLength,Metrics/AbcSize
+module BridgeCache
+  module Plugins
+    class CSVDump
+      MAX_ROW_INTERVAL = 5000
+
+      def self.bulk_import(iterator, model)
+        ids = []
+        rows = []
+        klass = "BridgeCache::#{model.camelcase}".constantize
+
+        csv_column_names = klass.csv_mapping.keys
+        database_column_names = klass.csv_mapping.values
+
+        iterator.each_row(model.pluralize) do |row|
+          row = remove_bad_columns(klass, BridgeCache::Plugins::DataTransform.set_bridge_id(row).to_h)
+          row = klass.format_import_row(row)
+          rows << csv_column_names.map { |column| row[column] }
+          ids << row['bridge_id'] if row['bridge_id'].present? # Some CSV's do not have an ID column
+
+          if rows.length >= BridgeCache.batch_size
+            perform_bulk_import(klass, database_column_names, rows)
+            rows = []
+          end
        end
+
+        perform_bulk_import(klass, database_column_names, rows)
+        ids
       end
 
-      perform_bulk_import(klass, database_column_names, rows)
-      ids
-    end
+      def self.dump_to_table(clazz, file_path)
+        count = 1
+        total = 0
+        rows = []
 
-    def self.dump_to_table(clazz, file_path)
-      count = 1
-      total = 0
-      rows = []
-      CSV.foreach(file_path, headers: true) do |_row|
-        total += 1
-      end
-      CSV.foreach(file_path, headers: true) do |row|
-        rows << initialze_row(clazz, row) if count < MAX_ROW_INTERVAL
-        if count % MAX_ROW_INTERVAL == 0 || count == total
-          dump_rows(rows)
-          count = 0
-          rows = []
+        CSV.foreach(file_path, headers: true) do |_row|
+          total += 1
+        end
+
+        CSV.foreach(file_path, headers: true) do |row|
+          rows << initialze_row(clazz, row) if count < MAX_ROW_INTERVAL
+
+          if (count % MAX_ROW_INTERVAL).zero? || count == total
+            dump_rows(rows)
+
+            count = 0
+            rows = []
+          end
+
+          count += 1
        end
-        count += 1
       end
-    end
 
-    def self.dump_row(clazz, row)
-      instance = initialze_row(clazz, row)
-      dump_rows([instance])
-    end
+      def self.dump_row(clazz, row)
+        instance = initialze_row(clazz, row)
+        dump_rows([instance])
+      end
 
-    def self.perform_bulk_import(klass, columns, rows)
-      return if rows.empty?
-      columns = columns.dup
-      klass.import(columns, rows, validate: false, on_duplicate_key_update: {
-        conflict_target: klass.unique_column_names,
-        condition: condition_sql(klass, columns),
-        columns: columns
-      })
-    end
+      def self.perform_bulk_import(klass, columns, rows)
+        return if rows.empty?
 
-    # This method generates SQL that looks like:
-    # (users.sis_id, users.email) IS DISTINCT FROM (EXCLUDED.sis_id, EXCLUDED.email)
-    #
-    # This prevents activerecord-import from setting the `updated_at` column for
-    # rows that haven't actually changed. This allows you to query for rows that have changed
-    # by doing something like:
-    #
-    # started_at = Time.now
-    # run_the_users_sync!
-    # changed = User.where("updated_at >= ?", started_at)
-    def self.condition_sql(klass, columns)
-      columns_str = columns.map { |c| "#{klass.quoted_table_name}.#{c}" }.join(', ')
-      excluded_str = columns.map { |c| "EXCLUDED.#{c}" }.join(', ')
-      "(#{columns_str}) IS DISTINCT FROM (#{excluded_str})"
-    end
+        columns = columns.dup
+        klass.import(columns, rows, validate: false, on_duplicate_key_update: {
+                       conflict_target: klass.unique_column_names,
+                       condition: condition_sql(klass, columns),
+                       columns: columns
+                     })
+      end
 
-    def self.initialze_row(clazz, row)
-      instance = clazz.find_or_create_by(bridge_id: row['id'])
-      instance.assign_attributes(remove_bad_columns(clazz, BridgeCache::Plugins::DataTransform.set_bridge_id(row).to_h))
-      instance
-    end
+      # This method generates SQL that looks like:
+      # (users.sis_id, users.email) IS DISTINCT FROM (EXCLUDED.sis_id, EXCLUDED.email)
+      #
+      # This prevents activerecord-import from setting the `updated_at` column for
+      # rows that haven't actually changed. This allows you to query for rows that have changed
+      # by doing something like:
+      #
+      # started_at = Time.now
+      # run_the_users_sync!
+      # changed = User.where("updated_at >= ?", started_at)
+      def self.condition_sql(klass, columns)
+        columns_str = columns.map { |c| "#{klass.quoted_table_name}.#{c}" }.join(', ')
+        excluded_str = columns.map { |c| "EXCLUDED.#{c}" }.join(', ')
+        "(#{columns_str}) IS DISTINCT FROM (#{excluded_str})"
+      end
 
-    def self.dump_rows(rows)
-      rows.each do |row|
-        row.save! if row.changed?
+      def self.initialze_row(clazz, row)
+        instance = clazz.find_or_create_by(bridge_id: row['id'])
+        instance.assign_attributes(remove_bad_columns(clazz,
+                                                      BridgeCache::Plugins::DataTransform.set_bridge_id(row).to_h))
+        instance
+      end
+
+      def self.dump_rows(rows)
+        rows.each do |row|
+          row.save! if row.changed?
+        end
       end
-    end
 
-    def self.remove_bad_columns(clazz, row)
-      row = row.delete_if { |key, _value| !clazz.column_names.include?(key) }
-      row
+      def self.remove_bad_columns(clazz, row)
+        row.delete_if { |key, _value| !clazz.column_names.include?(key) }
+      end
    end
  end
 end
+# rubocop:enable Metrics/MethodLength,Metrics/AbcSize
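To make the condition_sql comment concrete: for hypothetical columns %w[bridge_id name email] on a model whose table is named users, the generated fragment is

    ("users".bridge_id, "users".name, "users".email)
      IS DISTINCT FROM (EXCLUDED.bridge_id, EXCLUDED.name, EXCLUDED.email)

so the activerecord-import upsert only rewrites rows whose incoming values differ, and updated_at stays put for untouched rows. That keeps the changed-rows query from the comment cheap (model and table are illustrative; the gem's internal tables carry a bridge_cache_ prefix):

    started_at = Time.now
    BridgeCache::Plugins::CSVDump.bulk_import(data_dump, 'user') # data_dump: a BridgeBlueprint::DataDump
    changed = BridgeCache::User.where('updated_at >= ?', started_at)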