bridge_cache 0.3.14 → 0.3.20
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- checksums.yaml +4 -4
- data/Rakefile +1 -1
- data/app/lib/bridge_cache/data/bridge_model.rb +76 -68
- data/app/lib/bridge_cache/jobs/cleanup_job.rb +6 -4
- data/app/lib/bridge_cache/jobs/import_check.rb +32 -16
- data/app/lib/bridge_cache/jobs/import_data.rb +24 -17
- data/app/lib/bridge_cache/jobs/import_row.rb +7 -5
- data/app/lib/bridge_cache/jobs/import_start.rb +13 -13
- data/app/lib/bridge_cache/plugins/csv_dump.rb +89 -79
- data/app/lib/bridge_cache/plugins/data_transform.rb +18 -13
- data/app/lib/bridge_cache/utils/worker_helper.rb +9 -6
- data/app/lib/bridge_cache/webhooks/message.rb +13 -11
- data/app/lib/bridge_cache/webhooks/request.rb +10 -8
- data/app/models/bridge_cache/affiliated_sub_account.rb +12 -7
- data/app/models/bridge_cache/course_template.rb +21 -9
- data/app/models/bridge_cache/custom_field.rb +4 -2
- data/app/models/bridge_cache/domain.rb +14 -7
- data/app/models/bridge_cache/enrollment.rb +4 -2
- data/app/models/bridge_cache/enrollment_profile.rb +4 -4
- data/app/models/bridge_cache/grant.rb +4 -2
- data/app/models/bridge_cache/group.rb +10 -5
- data/app/models/bridge_cache/group_enrollment.rb +5 -3
- data/app/models/bridge_cache/host.rb +3 -2
- data/app/models/bridge_cache/learner_custom_field_value.rb +4 -2
- data/app/models/bridge_cache/live_course.rb +15 -6
- data/app/models/bridge_cache/live_course_enrollment.rb +8 -4
- data/app/models/bridge_cache/live_course_session.rb +9 -4
- data/app/models/bridge_cache/live_course_session_registration.rb +9 -5
- data/app/models/bridge_cache/membership.rb +5 -3
- data/app/models/bridge_cache/program.rb +17 -7
- data/app/models/bridge_cache/program_enrollment.rb +4 -2
- data/app/models/bridge_cache/program_group_enrollment.rb +4 -2
- data/app/models/bridge_cache/program_item.rb +2 -1
- data/app/models/bridge_cache/tag.rb +3 -2
- data/app/models/bridge_cache/tagging.rb +5 -3
- data/app/models/bridge_cache/user.rb +20 -10
- data/app/models/bridge_cache/user_course_visit.rb +6 -3
- data/db/migrate/20170517203244_create_bridge_cache_domains.rb +1 -0
- data/db/migrate/20170517203328_create_bridge_cache_users.rb +1 -0
- data/db/migrate/20170517203421_create_bridge_cache_programs.rb +1 -0
- data/db/migrate/20170517204055_create_bridge_cache_custom_fields.rb +1 -0
- data/db/migrate/20170517204248_create_bridge_cache_grants.rb +1 -0
- data/db/migrate/20170518211404_create_bridge_cache_enrollments.rb +1 -0
- data/db/migrate/20170518211627_create_bridge_cache_groups.rb +1 -0
- data/db/migrate/20170518211703_create_bridge_cache_program_items.rb +1 -0
- data/db/migrate/20170518211714_create_bridge_cache_program_enrollments.rb +1 -0
- data/db/migrate/20170518211736_create_bridge_cache_program_group_enrollments.rb +1 -0
- data/db/migrate/20170518211748_create_bridge_cache_tags.rb +1 -0
- data/db/migrate/20170518211754_create_bridge_cache_taggings.rb +1 -0
- data/db/migrate/20170518211817_create_bridge_cache_course_templates.rb +1 -0
- data/db/migrate/20170523164248_create_bridge_cache_group_enrollments.rb +1 -0
- data/db/migrate/20170523165014_create_bridge_cache_memberships.rb +1 -0
- data/db/migrate/20170615145554_create_bridge_cache_learner_custom_field_values.rb +1 -0
- data/db/migrate/20171017211850_create_affiliated_sub_accounts.rb +1 -0
- data/db/migrate/20171019195443_create_live_courses.rb +1 -0
- data/db/migrate/20171019195503_create_live_course_enrollments.rb +1 -0
- data/db/migrate/20171019195512_create_live_course_session_registrations.rb +1 -0
- data/db/migrate/20171019195516_create_live_course_sessions.rb +1 -0
- data/db/migrate/20171031165231_add_indexes.rb +5 -2
- data/db/migrate/20180316205458_create_bridge_cache_enrollment_profiles.rb +1 -0
- data/db/migrate/20180320190443_add_index_to_enrollment_profile.rb +1 -0
- data/db/migrate/20180406204906_add_uniq_constraints.rb +43 -21
- data/db/migrate/20180801162929_create_bridge_cache_hosts.rb +1 -0
- data/db/migrate/20180925210056_add_bridge_timestamps_to_bridge_models.rb +2 -0
- data/db/migrate/20190801163930_create_user_course_visits.rb +3 -1
- data/db/migrate/20200717142423_add_hris_id_to_bridge_cache_users.rb +7 -0
- data/db/migrate/20201102153622_change_external_course_id_to_string.rb +5 -0
- data/lib/bridge_cache.rb +1 -0
- data/lib/bridge_cache/version.rb +1 -1
- metadata +23 -13
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c515d7e48aed8e9e9d05b6cec9d956074704206846744fca065335b23a389609
+  data.tar.gz: 30c0dee61a1f5dd8120a2c495ad8b625fe7e1da3ba270bba101b64a6be3987ec
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 908acbd97a28b594ae5295900064a94cf8a905c7ef19ef066b7e92e11c39baa86297cee0704903dc70c5bdee19b68195c182322a85e690b34ba9b0d1c640ea62
+  data.tar.gz: b6ad05dc8cf847bfbbea1d262abea61131368b865827e316055a263bf3e23114875f6c78bbe0c6976c241d6833b2a586dc61b6624344b9adf77cf877f02ebddb
data/Rakefile
CHANGED
@@ -14,7 +14,7 @@ RDoc::Task.new(:rdoc) do |rdoc|
   rdoc.rdoc_files.include('lib/**/*.rb')
 end
 
-APP_RAKEFILE = File.expand_path('
+APP_RAKEFILE = File.expand_path('test/dummy/Rakefile', __dir__)
 load 'rails/tasks/engine.rake'
 
 load 'rails/tasks/statistics.rake'
data/app/lib/bridge_cache/data/bridge_model.rb
CHANGED
@@ -1,92 +1,100 @@
+# rubocop:disable Naming/PredicateName,Metrics/MethodLength
+module BridgeCache
+  module Data
+    module BridgeModel
+      EXCLUDED_COLUMNS = Set['id', 'created_at', 'updated_at']
+      OVERRIDDEN_COLUMNS = { 'bridge_created_at' => 'created_at'.freeze,
+                             'bridge_updated_at' => 'updated_at'.freeze }.freeze
+
+      def adjusted_table_name
+        if BridgeCache.use_internal_database
+          table_name
+        else
+          to_s.demodulize.underscore.pluralize
+        end
+      end
 
+      def self.extended(base)
+        base.class_eval do
+          scope :in_domain, ->(domain_id) { for_domain(domain_id) }
+          scope :active, -> { is_active? }
+          self.table_name = adjusted_table_name
+        end
       end
-      end
 
-      scope :in_domain, ->(domain_id) { for_domain(domain_id) }
-      scope :active, -> { is_active? }
-      self.table_name = adjusted_table_name
+      def format_import_row(row)
+        row
       end
-      end
 
+      def import_from_csv(file_path)
+        BridgeCache::Plugins::CSVDump.dump_to_table(self, file_path)
+      end
 
+      def create_from_csv_row(row)
+        BridgeCache::Plugins::CSVDump.dump_row(self, row)
+      end
 
+      def cleanup(row_ids)
+        # Implement this method in your model if you want to do any sort of post creation cleanup.
+        # See tagging.rb for an example.
+      end
 
-      end
+      def webhook_updated(message)
+        create_or_update_from_webhook(message)
+      end
 
+      def webhook_created(message)
+        create_or_update_from_webhook(message)
+      end
 
+      def webhook_deleted(message)
+        message.payload_class.where(bridge_id: message.resource_object['id']).destroy_all
+      end
 
+      def webhook_completed(_message)
+        raise 'Method not implemented'
+      end
 
+      def for_domain(domain_id)
+        where(domain_id: domain_id)
+      end
 
+      def uniq_constraint_name
+        "#{table_name.downcase}_bridge_uniq"
+      end
 
+      def unique_column_names
+        %w[bridge_id]
+      end
 
+      def is_active?
+        where(deleted_at: nil) if column_names.include? 'deleted_at'
+      end
 
+      def csv_mapping
+        columns = column_names.as_json
+        mapping = {}
 
-        mapping[column] = column
+        columns.each do |column|
+          next if EXCLUDED_COLUMNS.include? column
+
+          if OVERRIDDEN_COLUMNS.key?(column)
+            mapping[OVERRIDDEN_COLUMNS[column]] = column
+          else
+            mapping[column] = column
+          end
         end
+
+        mapping['bridge_id'] = 'bridge_id'
+        mapping
       end
-        mapping['bridge_id'] = 'bridge_id'
-        mapping
-      end
 
+      private
 
+      def create_or_update_from_webhook(message)
+        message.payload_class.create_from_csv_row(message.resource_object)
+      end
     end
   end
 end
+# rubocop:enable Naming/PredicateName,Metrics/MethodLength
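The `extended` hook above installs the scopes and table name on any model that extends the module. For orientation, a minimal sketch of a consuming model; `Widget` and its `cleanup` body are hypothetical, invented here for illustration:

  module BridgeCache
    class Widget < ActiveRecord::Base # hypothetical model, not part of the gem
      extend BridgeCache::Data::BridgeModel

      # Optional override: the cleanup stub above is a no-op unless a model
      # implements it (the gem points to tagging.rb as an example).
      def self.cleanup(row_ids)
        where.not(bridge_id: row_ids.flatten).delete_all
      end
    end
  end

  BridgeCache::Widget.in_domain(42).active # scopes installed by the extended hook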
data/app/lib/bridge_cache/jobs/cleanup_job.rb
CHANGED
@@ -1,7 +1,9 @@
-module BridgeCache
+module BridgeCache
+  module Jobs
+    class CleanupJob < ActiveJob::Base
+      def perform(model, row_ids)
+        "BridgeCache::#{model.camelcase}".constantize.cleanup(row_ids)
+      end
     end
   end
 end
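A usage note: the job receives the model as an underscored string and resolves the class by name, and ImportData (below) enqueues it with the array-of-arrays collected from bulk_import. A sketch with placeholder values:

  # 'tagging'.camelcase => 'Tagging', so this call runs
  # BridgeCache::Tagging.cleanup([[1, 2, 3]])
  BridgeCache::Jobs::CleanupJob.perform_later('tagging', [[1, 2, 3]])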
data/app/lib/bridge_cache/jobs/import_check.rb
CHANGED
@@ -1,22 +1,38 @@
+# rubocop:disable Metrics/AbcSize,Metrics/MethodLength
+module BridgeCache
+  module Jobs
+    class ImportCheck < ActiveJob::Base
+      queue_as :default
+      include BridgeCache::Utils::WorkerHelper
 
+      def max_import_attempts
+        ENV['BRIDGE_CACHE_MAX_IMPORT_ATTEMPTS'].to_i || 30
+      end
+
+      def remote_data
+        @remote_data ||= BridgeBlueprint::RemoteData.new(@account_settings[:url], @account_settings[:api_key],
+                                                         @account_settings[:api_secret])
+      end
+
+      def perform(account_settings, attempts, chain = [], retries = 0)
+        @account_settings = account_settings.with_indifferent_access
+
+        if attempts > max_import_attempts
+          Rails.logger.fatal("Bridge data dump failed to download after #{(max_import_attempts * 30) / 60} minutes")
+        elsif remote_data.status == BridgeBlueprint::Constants::STATUS_COMPLETE
+          BridgeCache::Jobs::ImportData.set(queue: queue_name).perform_later(@account_settings, chain)
+        elsif remote_data.status == BridgeBlueprint::Constants::STATUS_PENDING
+          BridgeCache::Jobs::ImportCheck.set(queue: queue_name, wait: 30.seconds).perform_later(@account_settings,
+                                                                                                attempts + 1, chain)
+        elsif remote_data.status == BridgeBlueprint::Constants::STATUS_FAILED ||
+              remote_data.status == BridgeBlueprint::Constants::STATUS_NOT_FOUND
+
+          raise 'Bridge data dump download failed' if retries < 1
 
-      remote_data = BridgeBlueprint::RemoteData.new(account_settings[:url], account_settings[:api_key], account_settings[:api_secret])
-      if attempts > ATTEMPTS_THRESHOLD
-        Rails.logger.fatal("Bridge data dump failed to download after #{(ATTEMPTS_THRESHOLD * 30) / 60} minutes")
-      elsif remote_data.status == BridgeBlueprint::Constants::STATUS_COMPLETE
-        BridgeCache::Jobs::ImportData.set(queue: queue_name).perform_later(account_settings, chain)
-      elsif remote_data.status == BridgeBlueprint::Constants::STATUS_PENDING
-        BridgeCache::Jobs::ImportCheck.set(queue: queue_name, wait: 30.seconds).perform_later(account_settings, attempts + 1, chain)
-      elsif remote_data.status == BridgeBlueprint::Constants::STATUS_FAILED || remote_data.status == BridgeBlueprint::Constants::STATUS_NOT_FOUND
-        raise 'Bridge data dump download failed'
+          BridgeCache::Jobs::ImportStart.set(queue: queue_name).perform_later(@account_settings, chain, retries - 1)
+        end
       end
     end
   end
 end
+# rubocop:enable Metrics/AbcSize,Metrics/MethodLength
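One reviewer-level observation on the new max_import_attempts: ENV['BRIDGE_CACHE_MAX_IMPORT_ATTEMPTS'].to_i evaluates to 0 when the variable is unset (nil.to_i is 0), and 0 is truthy in Ruby, so the || 30 fallback never applies. A hypothetical sketch of the pattern that would express the apparent intent, not the gem's code:

  def max_import_attempts
    # Fall back to '30' before the integer conversion, so an unset
    # variable really does allow 30 polling attempts.
    ENV.fetch('BRIDGE_CACHE_MAX_IMPORT_ATTEMPTS', '30').to_i
  end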
data/app/lib/bridge_cache/jobs/import_data.rb
CHANGED
@@ -1,23 +1,30 @@
+# rubocop:disable Metrics/AbcSize,Metrics/MethodLength
+module BridgeCache
+  module Jobs
+    class ImportData < ActiveJob::Base
+      include BridgeCache::Utils::WorkerHelper
+      queue_as :default
 
+      def perform(account_settings, chain = [])
+        account_settings = account_settings.with_indifferent_access
+        remote_data = BridgeBlueprint::RemoteData.new(account_settings[:url], account_settings[:api_key],
+                                                      account_settings[:api_secret])
+        Dir.mktmpdir do |dir|
+          path = "#{dir}/data_dump"
+          remote_data.store_file(path)
+          import = BridgeBlueprint::DataDump.new(path)
+          models = account_settings[:models] || BridgeCache.resolved_models(account_settings[:is_subaccount])
+
+          models.each do |model|
+            ids = []
+            ids << BridgeCache::Plugins::CSVDump.bulk_import(import, model)
+            BridgeCache::Jobs::CleanupJob.set(queue: queue_name).perform_later(model, ids)
+          end
         end
+
+        BridgeCache::Utils::WorkerHelper.invoke_next(chain)
       end
-      BridgeCache::Utils::WorkerHelper.invoke_next(chain)
     end
   end
 end
+# rubocop:enable Metrics/AbcSize,Metrics/MethodLength
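Every key these jobs read from account_settings appears in this diff: :url, :api_key and :api_secret feed BridgeBlueprint::RemoteData, while :models and :is_subaccount select what to import. A sketch of a kickoff call with placeholder values (the hash shape is inferred from the code above):

  account_settings = {
    url: 'https://example.bridgeapp.com', # placeholder
    api_key: 'KEY',                       # placeholder credentials
    api_secret: 'SECRET',
    models: %w[user enrollment],          # optional; falls back to BridgeCache.resolved_models
    is_subaccount: false
  }
  BridgeCache::Jobs::ImportData.perform_later(account_settings)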
data/app/lib/bridge_cache/jobs/import_row.rb
CHANGED
@@ -1,9 +1,11 @@
-module BridgeCache
+module BridgeCache
+  module Jobs
+    class ImportRow < ActiveJob::Base
+      queue_as :default
 
+      def perform(model, row)
+        "BridgeCache::#{model.camelcase}".constantize.create_from_csv_row(row)
+      end
     end
   end
 end
data/app/lib/bridge_cache/jobs/import_start.rb
CHANGED
@@ -1,17 +1,17 @@
-module BridgeCache
+module BridgeCache
+  module Jobs
+    class ImportStart < ActiveJob::Base
+      queue_as :default
 
-        0,
-      )
+      def perform(account_settings, chain = [], retries = 0)
+        data = BridgeBlueprint::RemoteData.new(account_settings[:url], account_settings[:api_key],
+                                               account_settings[:api_secret])
+        data.start_data_report
+
+        BridgeCache::Jobs::ImportCheck
+          .set(queue: queue_name, wait: 30.seconds)
+          .perform_later(account_settings, 0, chain, retries)
+      end
    end
   end
 end
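Taken together with ImportCheck above, the new retries parameter gives the pipeline a bounded restart loop: ImportStart requests the data report and schedules ImportCheck, which polls every 30 seconds; on STATUS_FAILED or STATUS_NOT_FOUND it re-enqueues ImportStart with retries - 1, and raises once retries < 1. For example, allowing one full restart (account_settings as sketched under ImportData above):

  BridgeCache::Jobs::ImportStart.perform_later(account_settings, [], 1)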
data/app/lib/bridge_cache/plugins/csv_dump.rb
CHANGED
@@ -1,97 +1,107 @@
 require 'csv'
 
-    rows
+# rubocop:disable Metrics/MethodLength,Metrics/AbcSize
+module BridgeCache
+  module Plugins
+    class CSVDump
+      MAX_ROW_INTERVAL = 5000
+
+      def self.bulk_import(iterator, model)
+        ids = []
+        rows = []
+        klass = "BridgeCache::#{model.camelcase}".constantize
+
+        csv_column_names = klass.csv_mapping.keys
+        database_column_names = klass.csv_mapping.values
+
+        iterator.each_row(model.pluralize) do |row|
+          row = remove_bad_columns(klass, BridgeCache::Plugins::DataTransform.set_bridge_id(row).to_h)
+          row = klass.format_import_row(row)
+          rows << csv_column_names.map { |column| row[column] }
+          ids << row['bridge_id'] if row['bridge_id'].present? # Some CSV's do not have an ID column
+
+          if rows.length >= BridgeCache.batch_size
+            perform_bulk_import(klass, database_column_names, rows)
+            rows = []
+          end
         end
+
+        perform_bulk_import(klass, database_column_names, rows)
+        ids
       end
 
+      def self.dump_to_table(clazz, file_path)
+        count = 1
+        total = 0
+        rows = []
 
+        CSV.foreach(file_path, headers: true) do |_row|
+          total += 1
+        end
+
+        CSV.foreach(file_path, headers: true) do |row|
+          rows << initialze_row(clazz, row) if count < MAX_ROW_INTERVAL
+
+          if (count % MAX_ROW_INTERVAL).zero? || count == total
+            dump_rows(rows)
+
+            count = 0
+            rows = []
+          end
+
+          count += 1
         end
-        count += 1
       end
-      end
 
+      def self.dump_row(clazz, row)
+        instance = initialze_row(clazz, row)
+        dump_rows([instance])
+      end
 
-      columns = columns.dup
-      klass.import(columns, rows, validate: false, on_duplicate_key_update: {
-        conflict_target: klass.unique_column_names,
-        condition: condition_sql(klass, columns),
-        columns: columns
-      })
-      end
+      def self.perform_bulk_import(klass, columns, rows)
+        return if rows.empty?
 
-      # started_at = Time.now
-      # run_the_users_sync!
-      # changed = User.where("updated_at >= ?", started_at)
-      def self.condition_sql(klass, columns)
-        columns_str = columns.map { |c| "#{klass.quoted_table_name}.#{c}" }.join(', ')
-        excluded_str = columns.map { |c| "EXCLUDED.#{c}" }.join(', ')
-        "(#{columns_str}) IS DISTINCT FROM (#{excluded_str})"
-      end
+        columns = columns.dup
+        klass.import(columns, rows, validate: false, on_duplicate_key_update: {
+          conflict_target: klass.unique_column_names,
+          condition: condition_sql(klass, columns),
+          columns: columns
+        })
+      end
 
+      # This method generates SQL that looks like:
+      # (users.sis_id, users.email) IS DISTINCT FROM (EXCLUDED.sis_id, EXCLUDED.email)
+      #
+      # This prevents activerecord-import from setting the `updated_at` column for
+      # rows that haven't actually changed. This allows you to query for rows that have changed
+      # by doing something like:
+      #
+      # started_at = Time.now
+      # run_the_users_sync!
+      # changed = User.where("updated_at >= ?", started_at)
+      def self.condition_sql(klass, columns)
+        columns_str = columns.map { |c| "#{klass.quoted_table_name}.#{c}" }.join(', ')
+        excluded_str = columns.map { |c| "EXCLUDED.#{c}" }.join(', ')
+        "(#{columns_str}) IS DISTINCT FROM (#{excluded_str})"
+      end
 
+      def self.initialze_row(clazz, row)
+        instance = clazz.find_or_create_by(bridge_id: row['id'])
+        instance.assign_attributes(remove_bad_columns(clazz,
+                                                      BridgeCache::Plugins::DataTransform.set_bridge_id(row).to_h))
+        instance
+      end
+
+      def self.dump_rows(rows)
+        rows.each do |row|
+          row.save! if row.changed?
+        end
       end
-      end
 
+      def self.remove_bad_columns(clazz, row)
+        row.delete_if { |key, _value| !clazz.column_names.include?(key) }
+      end
    end
  end
 end
+# rubocop:enable Metrics/MethodLength,Metrics/AbcSize
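To connect this class back to csv_mapping in bridge_model.rb: the mapping's keys are column names as they appear in Bridge's CSV dump, and its values are the local table's column names, which is why bulk_import reads each row via csv_mapping.keys but imports under csv_mapping.values. A worked sketch for a hypothetical model whose table has bridge_id, name, bridge_created_at and bridge_updated_at columns:

  BridgeCache::Widget.csv_mapping # hypothetical model, as sketched earlier
  # => {
  #      'name'       => 'name',
  #      'created_at' => 'bridge_created_at', # OVERRIDDEN_COLUMNS remaps the dump's
  #      'updated_at' => 'bridge_updated_at', # timestamps away from Rails' own
  #      'bridge_id'  => 'bridge_id'
  #    }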