draisine 0.7.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (60)
  1. checksums.yaml +7 -0
  2. data/.gitignore +9 -0
  3. data/.rspec +2 -0
  4. data/.travis.yml +4 -0
  5. data/Gemfile +4 -0
  6. data/Guardfile +70 -0
  7. data/LICENSE.txt +21 -0
  8. data/README.md +134 -0
  9. data/Rakefile +6 -0
  10. data/app/controllers/draisine/soap_controller.rb +49 -0
  11. data/bin/console +7 -0
  12. data/bin/setup +6 -0
  13. data/config/routes.rb +4 -0
  14. data/draisine.gemspec +32 -0
  15. data/lib/draisine/active_record.rb +191 -0
  16. data/lib/draisine/auditor/result.rb +48 -0
  17. data/lib/draisine/auditor.rb +130 -0
  18. data/lib/draisine/concerns/array_setter.rb +23 -0
  19. data/lib/draisine/concerns/attributes_mapping.rb +46 -0
  20. data/lib/draisine/concerns/import.rb +36 -0
  21. data/lib/draisine/conflict_detector.rb +38 -0
  22. data/lib/draisine/conflict_resolver.rb +97 -0
  23. data/lib/draisine/engine.rb +6 -0
  24. data/lib/draisine/importer.rb +111 -0
  25. data/lib/draisine/ip_checker.rb +15 -0
  26. data/lib/draisine/jobs/inbound_delete_job.rb +8 -0
  27. data/lib/draisine/jobs/inbound_update_job.rb +8 -0
  28. data/lib/draisine/jobs/job_base.rb +39 -0
  29. data/lib/draisine/jobs/outbound_create_job.rb +7 -0
  30. data/lib/draisine/jobs/outbound_delete_job.rb +7 -0
  31. data/lib/draisine/jobs/outbound_update_job.rb +7 -0
  32. data/lib/draisine/jobs/soap_delete_job.rb +7 -0
  33. data/lib/draisine/jobs/soap_update_job.rb +7 -0
  34. data/lib/draisine/partitioner.rb +73 -0
  35. data/lib/draisine/poller.rb +101 -0
  36. data/lib/draisine/query_mechanisms/base.rb +15 -0
  37. data/lib/draisine/query_mechanisms/default.rb +13 -0
  38. data/lib/draisine/query_mechanisms/last_modified_date.rb +18 -0
  39. data/lib/draisine/query_mechanisms/system_modstamp.rb +18 -0
  40. data/lib/draisine/query_mechanisms.rb +18 -0
  41. data/lib/draisine/registry.rb +22 -0
  42. data/lib/draisine/setup.rb +97 -0
  43. data/lib/draisine/soap_handler.rb +79 -0
  44. data/lib/draisine/syncer.rb +52 -0
  45. data/lib/draisine/type_mapper.rb +105 -0
  46. data/lib/draisine/util/caching_client.rb +73 -0
  47. data/lib/draisine/util/hash_diff.rb +39 -0
  48. data/lib/draisine/util/parse_time.rb +14 -0
  49. data/lib/draisine/util/salesforce_comparisons.rb +53 -0
  50. data/lib/draisine/version.rb +3 -0
  51. data/lib/draisine.rb +48 -0
  52. data/lib/ext/databasedotcom.rb +98 -0
  53. data/lib/generators/draisine/delta_migration_generator.rb +77 -0
  54. data/lib/generators/draisine/integration_generator.rb +53 -0
  55. data/lib/generators/draisine/templates/delta_migration.rb +24 -0
  56. data/lib/generators/draisine/templates/migration.rb +21 -0
  57. data/lib/generators/draisine/templates/model.rb +11 -0
  58. data/salesforce/sample_delete_trigger.apex +7 -0
  59. data/salesforce/sample_test_class_for_delete_trigger.apex +15 -0
  60. metadata +242 -0
@@ -0,0 +1,130 @@
1
+ require "draisine/auditor/result"
2
+
3
+ module Draisine
4
+ class Auditor
5
+ def self.run(model_class:, start_date: Time.current.beginning_of_day, end_date: Time.current, mechanism: :default)
6
+ # TODO: instead of using one huge partition, combine multiple results into one
7
+ partitions = partition(model_class: model_class, start_date: start_date, end_date: end_date, partition_size: 10**12, mechanism: mechanism)
8
+ run_partition(partitions.first)
9
+ end
10
+
11
+ def self.run_partition(partition)
12
+ new(partition).run
13
+ end
14
+
15
+ def self.partition(model_class:, start_date:, end_date:, partition_size: 100, mechanism: :default)
16
+ Partitioner.partition(
17
+ model_class: model_class,
18
+ start_date: start_date,
19
+ end_date: end_date,
20
+ partition_size: partition_size,
21
+ mechanism: mechanism)
22
+ end
23
+
24
+ attr_reader :partition, :model_class, :start_date, :end_date, :result
25
+ def initialize(partition)
26
+ @partition = partition
27
+ @model_class = partition.model_class
28
+ @start_date = partition.start_date
29
+ @end_date = partition.end_date
30
+ end
31
+
32
+ def run
33
+ @result = Result.new
34
+
35
+ check_unpersisted_records
36
+ check_deletes
37
+ check_modifications
38
+
39
+ result.calculate_result!
40
+ rescue => e
41
+ result.error!(e)
42
+ raise
43
+ end
44
+
45
+ def check_unpersisted_records
46
+ return unless partition.unpersisted_ids.present?
47
+
48
+ bad_records = model_class.where(id: partition.unpersisted_ids)
49
+ bad_records.each do |record|
50
+ result.discrepancy(
51
+ type: :local_record_without_salesforce_id,
52
+ salesforce_type: salesforce_object_name,
53
+ salesforce_id: nil,
54
+ local_id: record.id,
55
+ local_type: record.class.name,
56
+ local_attributes: record.attributes)
57
+ end
58
+ end
59
+
60
+ def check_deletes
61
+ return unless partition.deleted_ids.present?
62
+
63
+ ghost_models = model_class.where(salesforce_id: partition.deleted_ids).all
64
+ ghost_models.each do |ghost_model|
65
+ result.discrepancy(
66
+ type: :remote_delete_kept_locally,
67
+ salesforce_type: salesforce_object_name,
68
+ salesforce_id: ghost_model.salesforce_id,
69
+ local_id: ghost_model.id,
70
+ local_type: ghost_model.class.name,
71
+ local_attributes: ghost_model.attributes)
72
+ end
73
+ end
74
+
75
+ def check_modifications
76
+ updated_ids = partition.updated_ids
77
+ return unless updated_ids.present?
78
+
79
+ local_records = model_class.where(salesforce_id: updated_ids).to_a
80
+ remote_records = client.fetch_multiple(salesforce_object_name, updated_ids)
81
+
82
+ local_records_map = build_map(local_records) {|record| record.salesforce_id }
83
+ remote_records_map = build_map(remote_records) {|record| record.Id }
84
+
85
+ missing_ids = updated_ids - local_records_map.keys
86
+ missing_ids.each do |id|
87
+ result.discrepancy(
88
+ type: :remote_record_missing_locally,
89
+ salesforce_type: salesforce_object_name,
90
+ salesforce_id: id,
91
+ remote_attributes: remote_records_map.fetch(id))
92
+ end
93
+
94
+ attr_list = model_class.salesforce_audited_attributes
95
+ local_records_map.each do |salesforce_id, local_record|
96
+ remote_record = remote_records_map[salesforce_id]
97
+ next unless remote_record
98
+ conflict_detector = ConflictDetector.new(local_record, remote_record, attr_list)
99
+
100
+ if conflict_detector.conflict?
101
+ result.discrepancy(
102
+ type: :mismatching_records,
103
+ salesforce_type: salesforce_object_name,
104
+ salesforce_id: salesforce_id,
105
+ local_id: local_record.id,
106
+ local_type: local_record.class.name,
107
+ local_attributes: local_record.salesforce_attributes,
108
+ remote_attributes: remote_record.attributes,
109
+ diff_keys: conflict_detector.diff.diff_keys)
110
+ end
111
+ end
112
+ end
113
+
114
+ protected
115
+
116
+ def client
117
+ Draisine.salesforce_client
118
+ end
119
+
120
+ def build_map(list_of_hashes, &key_block)
121
+ list_of_hashes.each_with_object({}) do |item, rs|
122
+ rs[key_block.call(item)] = item
123
+ end
124
+ end
125
+
126
+ def salesforce_object_name
127
+ model_class.salesforce_object_name
128
+ end
129
+ end
130
+ end
require "active_support/concern"

module Draisine
  module Concerns
    # Class macro that coerces a semicolon-delimited Salesforce
    # multipicklist value into an Array before assignment.
    module ArraySetter
      extend ActiveSupport::Concern

      module ClassMethods
        # Wraps the writer for attr so that nil becomes [] and a String
        # "a;b" becomes ["a", "b"]; other values pass through untouched.
        # Returns the attribute name, mirroring the attr_* macros.
        def salesforce_array_setter(attr)
          coercion = Module.new do
            define_method("#{attr}=") do |value|
              coerced =
                case value
                when nil then []
                when String then value.split(';')
                else value
                end
              super(coerced)
            end
          end
          prepend coercion
          attr
        end
      end
    end
  end
end
require "active_support/concern"

module Draisine
  module Concerns
    # Translates between Salesforce attribute names and local model
    # attribute names via the class-level salesforce_mapping hash.
    module AttributesMapping
      extend ActiveSupport::Concern

      module ClassMethods
        # Hash of { salesforce_name => local_name }.
        attr_accessor :salesforce_mapping

        # Salesforce-side attribute names, memoized.
        def salesforce_synced_attributes
          @salesforce_synced_attributes ||= salesforce_mapping.keys
        end

        # { local_name => salesforce_name }, memoized.
        def salesforce_reverse_mapping
          @salesforce_reverse_mapping ||= salesforce_mapping.invert
        end
      end

      # Renames attribute keys through the mapping; unmapped keys are dropped.
      def salesforce_mapped_attributes(attributes, mapping = self.class.salesforce_mapping)
        mapped = {}
        attributes.slice(*mapping.keys).each do |key, value|
          mapped[mapping.fetch(key)] = value
        end
        mapped
      end

      # Assigns mapped attributes through their writers, skipping any the
      # model has no writer for; each value is normalized first.
      def salesforce_assign_attributes(attributes)
        salesforce_mapped_attributes(attributes.with_indifferent_access).each do |key, value|
          method_name = "#{key}="
          next unless respond_to?(method_name)
          __send__(method_name, Draisine::SalesforceComparisons.salesforce_cleanup(value))
        end
      end

      # Renames local attribute keys back to their Salesforce names.
      def salesforce_reverse_mapped_attributes(attributes)
        salesforce_mapped_attributes(attributes, self.class.salesforce_reverse_mapping)
      end

      # This record's attributes keyed by Salesforce names.
      def salesforce_attributes
        salesforce_reverse_mapped_attributes(attributes)
          .with_indifferent_access
      end
    end
  end
end
require "active_support/concern"

module Draisine
  module Concerns
    # Helpers for importing records from Salesforce without triggering
    # outbound sync callbacks.
    module Import
      extend ActiveSupport::Concern

      module ClassMethods
        # Creates a record with attrs only when no record with this
        # salesforce id exists yet; an existing record is left untouched.
        def import_with_attrs(sf_id, attrs)
          find_or_initialize_by(salesforce_id: sf_id) do |model|
            model.salesforce_update_without_sync(attrs)
          end
        end

        # Creates or updates the record with attrs. With check_modstamp,
        # payloads older than the stored SystemModstamp are ignored.
        def import_or_update_with_attrs(sf_id, attrs, check_modstamp = false)
          find_or_initialize_by(salesforce_id: sf_id).tap do |model|
            model.salesforce_update_without_sync(attrs, check_modstamp)
          end
        end
      end

      # Assigns and saves attributes while outbound sync is suppressed.
      # With check_modstamp, the update is skipped unless our stored
      # SystemModstamp is strictly older than the incoming one (or either
      # side lacks a modstamp).
      def salesforce_update_without_sync(attributes, check_modstamp = false)
        salesforce_skipping_sync do
          incoming_modstamp = attributes["SystemModstamp"]
          current_modstamp = self.attributes["SystemModstamp"]
          applicable = !check_modstamp || !incoming_modstamp ||
            !current_modstamp || current_modstamp < incoming_modstamp
          if applicable
            salesforce_assign_attributes(attributes)
            save!
          end
        end
      end
    end
  end
end
module Draisine
  # Decides whether a local model and its Salesforce counterpart disagree
  # over the given list of attributes.
  class ConflictDetector
    attr_reader :model, :remote_model, :attributes_list

    def initialize(model, remote_model, attributes_list = model.class.salesforce_audited_attributes)
      @model = model
      @remote_model = remote_model
      @attributes_list = attributes_list
    end

    def conflict?
      conflict_type != :no_conflict
    end

    # One of :no_conflict, :mismatching_records, :remote_record_missing,
    # or :local_record_missing.
    def conflict_type
      if model && remote_model
        diff.diff_keys.empty? ? :no_conflict : :mismatching_records
      elsif model
        :remote_record_missing
      elsif remote_model
        :local_record_missing
      else
        :no_conflict
      end
    end

    # Attribute-level diff over attributes_list (nil-valued entries are
    # dropped first). Returns nil unless both records exist. Memoized.
    def diff
      return unless model && remote_model

      @diff ||= HashDiff.sf_diff(
        model.salesforce_attributes.slice(*attributes_list).compact,
        remote_model.attributes.slice(*attributes_list).compact)
    end
  end
end
module Draisine
  # Applies a chosen resolution strategy to a local/remote record conflict.
  class ConflictResolver
    # Strategies reachable through #resolve. Fix: remote_delete was
    # implemented below but missing from this list, which made it
    # unreachable via #resolve; it is now included. Frozen to prevent
    # accidental mutation of the shared constant.
    ALLOWED_RESOLUTIONS = %w[
      remote_push remote_pull remote_delete local_delete merge
    ].freeze

    attr_reader :model_class, :client, :salesforce_object_name,
      :local_id, :salesforce_id

    def initialize(model_class, client, local_id, salesforce_id)
      @model_class = model_class
      @client = client
      @salesforce_object_name = model_class.salesforce_object_name
      @local_id = local_id
      @salesforce_id = salesforce_id
    end

    def conflict?
      ConflictDetector.new(model, remote_model, model_class.salesforce_synced_attributes).conflict?
    end

    # Dispatches to the strategy method named by resolution_type.
    # Raises ArgumentError for unknown strategies.
    def resolve(resolution_type, options = {})
      resolution_type = resolution_type.to_s
      fail ArgumentError, "Unknown resolution type '#{resolution_type}'" unless allowed_resolution?(resolution_type)

      __send__(resolution_type, options)
    end

    def allowed_resolution?(resolution_type)
      ALLOWED_RESOLUTIONS.include?(resolution_type)
    end

    # Overwrites the local record with the remote attributes.
    def remote_pull(_options = {})
      fail ArgumentError, "remote model is required for remote pull" unless remote_model

      model_class.salesforce_inbound_update(remote_model.attributes)
    end

    # Pushes the local record's state to Salesforce (update or create).
    def remote_push(_options = {})
      fail ArgumentError, "local model is required for remote push" unless model

      if model.salesforce_id.present?
        model.salesforce_outbound_update(model.salesforce_attributes)
      else
        model.salesforce_outbound_create
      end
    end

    # Deletes the record on the Salesforce side.
    def remote_delete(_options = {})
      fail ArgumentError, "local model is required for remote delete" unless model

      model.salesforce_outbound_delete
    end

    # Deletes the local record.
    def local_delete(_options = {})
      model_class.salesforce_inbound_delete(salesforce_id)
    end

    # Two-way merge: pushes the selected local attributes out, then pulls
    # the selected remote attributes in (skipping the modstamp check).
    def merge(options)
      fail ArgumentError unless model && remote_model
      assert_required_options!(options, [:local_attributes, :remote_attributes])

      local_attrs_to_merge = options.fetch(:local_attributes)
      remote_attrs_to_merge = options.fetch(:remote_attributes)

      model.salesforce_outbound_update(
        model.salesforce_attributes.slice(*local_attrs_to_merge))
      model.salesforce_inbound_update(
        remote_model.attributes.slice(*remote_attrs_to_merge), false)
    end

    # Finds the local record by local id, falling back to salesforce id.
    def model
      @model ||= if local_id
        model_class.find_by(id: local_id)
      else
        model_class.find_by(salesforce_id: salesforce_id)
      end
    end

    # Fetches the remote record; memoizes false (not nil) when Salesforce
    # reports it missing, so the failed lookup is not retried.
    def remote_model
      return @remote_model unless @remote_model.nil?
      @remote_model = begin
        client.find(salesforce_object_name, salesforce_id)
      rescue Databasedotcom::SalesForceError
        false
      end
    end

    protected

    def assert_required_options!(options, keys)
      keys.each do |key|
        fail ArgumentError, "missing required option #{key}" unless options.key?(key)
      end
    end
  end
end
require 'rails'

module Draisine
  # Rails engine entry point: exposes the gem's controllers and routes
  # to the host application.
  class Engine < ::Rails::Engine
  end
end
module Draisine
  # Bulk-imports Salesforce objects into the local model table, paging
  # through the remote data in Id order with bounded retries.
  class Importer
    attr_reader :model_class

    def initialize(model_class)
      @model_class = model_class
    end

    # Imports every remote record (optionally starting from an id and/or
    # creation date), creating local records that do not exist yet.
    def import(start_id: nil, start_date: nil, batch_size: 500)
      find_each(batch_size: batch_size, start_id: start_id, start_date: start_date) do |sobj|
        attrs = sobj.attributes
        model_class.import_with_attrs(
          attrs.fetch("Id"),
          attrs.slice(*model_class.salesforce_synced_attributes))
      end
    end

    # Imports records created since the newest local record, rewinding the
    # start date by a small window to avoid missing late arrivals.
    def import_new(batch_size: 500, start_date_window_size: 20.minutes)
      newest = model_class.order("salesforce_id DESC").first
      start_id = newest.try(:salesforce_id)
      start_date = newest.try(:CreatedDate)
      start_date -= start_date_window_size if start_date

      import(start_id: start_id, batch_size: batch_size, start_date: start_date)
    end

    # Refreshes only the given fields on already-imported local records.
    def import_fields(batch_size: 500, fields:)
      model_class.find_in_batches(batch_size: batch_size) do |batch|
        attempt do
          sobjects = client.fetch_multiple(model_class.salesforce_object_name, batch.map(&:salesforce_id), batch_size, fields)
          sobjects_by_id = sobjects.map { |sobj| [sobj.Id, sobj] }.to_h
          batch.each do |model|
            sobject = sobjects_by_id[model.salesforce_id]
            next unless sobject
            model.salesforce_assign_attributes(sobject.attributes.slice(*fields))
            model.salesforce_skipping_sync { model.save! }
          end
        end
      end
    end

    protected

    # Pages through remote records in Id order, yielding each sobject.
    # Each page is wrapped in a local transaction.
    def find_each(batch_size:, start_id:, start_date: nil, &block)
      salesforce_model = client.materialize(salesforce_object_name)
      # if we have start_date set, only use id starting from the second query
      last_id = start_id unless start_date

      counter = 0
      loop do
        query = import_query(salesforce_model, salesforce_object_name, batch_size, last_id, start_date)
        collection = attempt { client.query(query) }
        break unless collection.count > 0

        model_class.transaction do
          collection.each { |sobj| yield sobj }
        end

        counter += collection.count
        last_id = collection.last.attributes.fetch("Id")
        logger.info "[#{model_class} import] Imported #{counter} records, last record id #{last_id}"
      end
      logger.info "[#{model_class} import] Finished, imported a total of #{counter} records"
    end

    def client
      Draisine.salesforce_client
    end

    def salesforce_object_name
      model_class.salesforce_object_name
    end

    # Builds a SOQL page query ordered by Id with optional id/date cursors.
    def import_query(salesforce_model, salesforce_object_name, batch_size, start_id = nil, start_date = nil)
      conds = [
        start_id && "Id > '#{start_id}'",
        start_date && "CreatedDate >= #{start_date.iso8601}"
      ].compact
      where_clause = conds.presence && "WHERE #{conds.join(" AND ")}"

      <<-QUERY
        SELECT #{salesforce_model.field_list}
        FROM #{salesforce_object_name}
        #{where_clause}
        ORDER BY Id ASC
        LIMIT #{batch_size}
      QUERY
    end

    # Runs the block, retrying up to times attempts on any error before
    # re-raising. The counter survives retry because `||=` leaves it set.
    def attempt(times = 5)
      attempts ||= 0
      yield
    rescue => e
      attempts += 1
      logger.error "#{e.class}: #{e.message}"
      if attempts < times
        logger.error "Retrying... (attempt ##{attempts})"
        retry
      else
        logger.error "Too many attempts, failing..."
        raise
      end
    end

    def logger
      @logger ||= Rails.logger || Logger.new($stdout)
    end
  end
end
require "ipaddr"

module Draisine
  # Checks whether a caller's IP address belongs to one of the
  # configured networks (e.g. Salesforce's published IP ranges).
  class IpChecker
    attr_reader :ip_ranges

    # ip_ranges - array of CIDR strings such as "10.0.0.0/8".
    def initialize(ip_ranges)
      @ip_ranges = ip_ranges.map { |cidr| IPAddr.new(cidr) }
    end

    # Returns true when ip falls inside any configured range.
    def check(ip)
      candidate = IPAddr.new(ip)
      ip_ranges.any? { |range| range.include?(candidate) }
    end
  end
end
module Draisine
  # Applies a Salesforce-originated delete to the local model class.
  class InboundDeleteJob < Draisine::JobBase
    def _perform(class_name, salesforce_id)
      class_name.constantize.salesforce_inbound_delete(salesforce_id)
    end
  end
end
module Draisine
  # Applies a Salesforce-originated update to the local model class.
  class InboundUpdateJob < Draisine::JobBase
    def _perform(class_name, attributes)
      class_name.constantize.salesforce_inbound_update(attributes)
    end
  end
end
module Draisine
  # Base class for draisine jobs: runs _perform, retrying failures up to
  # Draisine.job_retry_attempts times before handing the error to
  # Draisine.job_error_handler. The retry counter is persisted through
  # serialize/deserialize so it survives requeueing.
  class JobBase < ActiveJob::Base
    queue_as :draisine_job

    def perform(*args)
      _perform(*args)
    # NOTE(review): rescuing Exception also traps SystemExit/SignalException;
    # kept for compatibility, but StandardError is usually what is wanted.
    rescue Exception => ex
      logger.error "#{ex.class}: #{ex}\n#{ex.backtrace.join("\n")}"

      if retry_attempt < retries_count
        @retry_attempt = retry_attempt + 1
        logger.error "Retrying (attempt #{retry_attempt})"
        retry_job
      else
        logger.error "Too many attempts, no more retries"
        Draisine.job_error_handler.call(ex, self, arguments)
      end
    end

    # Subclasses override this with the actual work.
    def _perform(*args)
    end

    def retries_count
      Draisine.job_retry_attempts
    end

    def retry_attempt
      @retry_attempt ||= 0
    end

    # Persists the retry counter alongside ActiveJob's own payload.
    def serialize
      super.merge('_retry_attempt' => retry_attempt)
    end

    # Restores the retry counter. Fix: the original omitted `super`, which
    # dropped ActiveJob's own deserialized state (job_id, queue_name, etc.).
    def deserialize(job_data)
      super
      @retry_attempt = job_data.fetch('_retry_attempt', 0)
    end
  end
end
module Draisine
  # Pushes a locally created record out to Salesforce.
  class OutboundCreateJob < Draisine::JobBase
    def _perform(instance)
      instance.salesforce_outbound_create
    end
  end
end
module Draisine
  # Propagates a local record deletion to Salesforce.
  class OutboundDeleteJob < Draisine::JobBase
    def _perform(instance)
      instance.salesforce_outbound_delete
    end
  end
end
module Draisine
  # Pushes a local record's changed attributes out to Salesforce.
  class OutboundUpdateJob < Draisine::JobBase
    def _perform(instance, changed_attributes)
      instance.salesforce_outbound_update(changed_attributes)
    end
  end
end
module Draisine
  # Handles an inbound SOAP delete notification asynchronously.
  class SoapDeleteJob < Draisine::JobBase
    def _perform(message)
      SoapHandler.new.delete(message)
    end
  end
end
module Draisine
  # Handles an inbound SOAP update notification asynchronously.
  class SoapUpdateJob < Draisine::JobBase
    def _perform(message)
      SoapHandler.new.update(message)
    end
  end
end
module Draisine
  # Serializable slice of an audit run: the salesforce ids updated,
  # deleted, or never persisted within a date range for one model class.
  Partition = Struct.new(:model_class, :start_date, :end_date, :updated_ids, :deleted_ids, :unpersisted_ids) do
    # Accepts the model class itself or its name as a String.
    def initialize(model_class, *args)
      model_class = model_class.constantize if model_class.is_a?(String)
      super(model_class, *args)
    end

    # Serializes with the class name in place of the class object.
    def as_json(*)
      klass, *rest = to_a
      [klass.name, *rest].as_json
    end

    def self.from_json(fields)
      new(*fields)
    end
  end

  # Splits the set of changed records in a date range into fixed-size
  # partitions that can be audited independently.
  class Partitioner
    def self.partition(model_class:, start_date:, end_date:, partition_size: 100, mechanism: :default)
      new(model_class, mechanism).partition(start_date, end_date, partition_size: partition_size)
    end

    attr_reader :model_class, :mechanism

    def initialize(model_class, mechanism = :default)
      @model_class = model_class
      @mechanism = QueryMechanisms.fetch(mechanism).new(model_class)
    end

    # Returns an array of Partition structs; always at least one (possibly
    # empty) partition for the range.
    def partition(start_date, end_date, partition_size: 100)
      # Tag each id with its category so one array can be sliced evenly.
      tagged_ids =
        get_updated_ids(start_date, end_date).map { |id| [:updated, id] } +
        get_deleted_ids(start_date, end_date).map { |id| [:deleted, id] } +
        get_unpersisted_ids(start_date, end_date).map { |id| [:unpersisted, id] }

      return [Partition.new(model_class.name, start_date, end_date)] if tagged_ids.empty?

      tagged_ids.each_slice(partition_size).map do |slice|
        grouped = slice.group_by(&:first).map { |tag, pairs| [tag, pairs.map(&:last)] }.to_h
        Partition.new(model_class.name, start_date, end_date,
          grouped[:updated], grouped[:deleted], grouped[:unpersisted])
      end
    end

    protected

    # Union of the ids the query mechanism reports as updated and the ids
    # of local rows that changed inside the window.
    def get_updated_ids(start_date, end_date)
      mechanism.get_updated_ids(start_date, end_date) |
        model_class
          .where("updated_at >= ? AND updated_at <= ?", start_date, end_date)
          .uniq.pluck(:salesforce_id).compact
    end

    def get_deleted_ids(start_date, end_date)
      mechanism.get_deleted_ids(start_date, end_date)
    end

    # Local ids changed in the window that never got a salesforce id.
    def get_unpersisted_ids(start_date, end_date)
      model_class
        .where("salesforce_id IS NULL OR salesforce_id = ?", '')
        .where("updated_at >= ? and updated_at <= ?", start_date, end_date)
        .pluck(:id)
    end

    def client
      Draisine.salesforce_client
    end
  end
end