draisine 0.7.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +7 -0
  2. data/.gitignore +9 -0
  3. data/.rspec +2 -0
  4. data/.travis.yml +4 -0
  5. data/Gemfile +4 -0
  6. data/Guardfile +70 -0
  7. data/LICENSE.txt +21 -0
  8. data/README.md +134 -0
  9. data/Rakefile +6 -0
  10. data/app/controllers/draisine/soap_controller.rb +49 -0
  11. data/bin/console +7 -0
  12. data/bin/setup +6 -0
  13. data/config/routes.rb +4 -0
  14. data/draisine.gemspec +32 -0
  15. data/lib/draisine/active_record.rb +191 -0
  16. data/lib/draisine/auditor/result.rb +48 -0
  17. data/lib/draisine/auditor.rb +130 -0
  18. data/lib/draisine/concerns/array_setter.rb +23 -0
  19. data/lib/draisine/concerns/attributes_mapping.rb +46 -0
  20. data/lib/draisine/concerns/import.rb +36 -0
  21. data/lib/draisine/conflict_detector.rb +38 -0
  22. data/lib/draisine/conflict_resolver.rb +97 -0
  23. data/lib/draisine/engine.rb +6 -0
  24. data/lib/draisine/importer.rb +111 -0
  25. data/lib/draisine/ip_checker.rb +15 -0
  26. data/lib/draisine/jobs/inbound_delete_job.rb +8 -0
  27. data/lib/draisine/jobs/inbound_update_job.rb +8 -0
  28. data/lib/draisine/jobs/job_base.rb +39 -0
  29. data/lib/draisine/jobs/outbound_create_job.rb +7 -0
  30. data/lib/draisine/jobs/outbound_delete_job.rb +7 -0
  31. data/lib/draisine/jobs/outbound_update_job.rb +7 -0
  32. data/lib/draisine/jobs/soap_delete_job.rb +7 -0
  33. data/lib/draisine/jobs/soap_update_job.rb +7 -0
  34. data/lib/draisine/partitioner.rb +73 -0
  35. data/lib/draisine/poller.rb +101 -0
  36. data/lib/draisine/query_mechanisms/base.rb +15 -0
  37. data/lib/draisine/query_mechanisms/default.rb +13 -0
  38. data/lib/draisine/query_mechanisms/last_modified_date.rb +18 -0
  39. data/lib/draisine/query_mechanisms/system_modstamp.rb +18 -0
  40. data/lib/draisine/query_mechanisms.rb +18 -0
  41. data/lib/draisine/registry.rb +22 -0
  42. data/lib/draisine/setup.rb +97 -0
  43. data/lib/draisine/soap_handler.rb +79 -0
  44. data/lib/draisine/syncer.rb +52 -0
  45. data/lib/draisine/type_mapper.rb +105 -0
  46. data/lib/draisine/util/caching_client.rb +73 -0
  47. data/lib/draisine/util/hash_diff.rb +39 -0
  48. data/lib/draisine/util/parse_time.rb +14 -0
  49. data/lib/draisine/util/salesforce_comparisons.rb +53 -0
  50. data/lib/draisine/version.rb +3 -0
  51. data/lib/draisine.rb +48 -0
  52. data/lib/ext/databasedotcom.rb +98 -0
  53. data/lib/generators/draisine/delta_migration_generator.rb +77 -0
  54. data/lib/generators/draisine/integration_generator.rb +53 -0
  55. data/lib/generators/draisine/templates/delta_migration.rb +24 -0
  56. data/lib/generators/draisine/templates/migration.rb +21 -0
  57. data/lib/generators/draisine/templates/model.rb +11 -0
  58. data/salesforce/sample_delete_trigger.apex +7 -0
  59. data/salesforce/sample_test_class_for_delete_trigger.apex +15 -0
  60. metadata +242 -0
data/lib/draisine/poller.rb
@@ -0,0 +1,101 @@
+ module Draisine
+   class Poller
+     Result = Struct.new(:created_count, :updated_count, :deleted_count)
+
+     class << self
+       def run(model_class:, mechanism: :default, start_date:, end_date: Time.current, **run_args)
+         partitions = partition(
+           model_class: model_class,
+           mechanism: mechanism,
+           start_date: start_date,
+           end_date: end_date,
+           partition_size: 10**12)
+         run_partition(partitions.first, **run_args)
+       end
+       alias_method :poll, :run
+
+       def partition(model_class:, mechanism: :default, start_date:, end_date: Time.current, partition_size: 100)
+         Partitioner.partition(
+           model_class: model_class,
+           mechanism: mechanism,
+           start_date: start_date,
+           end_date: end_date,
+           partition_size: partition_size)
+       end
+
+       def run_partition(partition, **run_args)
+         new(partition).run(**run_args)
+       end
+       alias_method :poll_partition, :run_partition
+     end
+
+     attr_reader :partition, :model_class, :start_date, :end_date
+
+     def initialize(partition)
+       @partition = partition
+       @model_class = partition.model_class
+       @start_date = partition.start_date
+       @end_date = partition.end_date
+     end
+
+     def run(import_created: true, import_updated: false, import_deleted: true)
+       created_count = updated_count = deleted_count = 0
+       if import_created || import_updated
+         created_count, updated_count = import_changes(import_created, import_updated)
+       end
+
+       deleted_count = import_deletes if import_deleted
+
+       Result.new(
+         created_count,
+         updated_count,
+         deleted_count)
+     end
+     alias_method :poll, :run
+
+     protected
+
+     def import_changes(import_created, import_updated)
+       updated_ids = partition.updated_ids
+       return [0, 0] unless updated_ids.present?
+
+       created_count = updated_count = 0
+       changed_objects = client.fetch_multiple(salesforce_object_name, updated_ids)
+
+       existing_models = model_class
+         .where(salesforce_id: updated_ids)
+         .each_with_object({}) { |model, rs| rs[model.salesforce_id] = model }
+
+       changed_objects.each do |object|
+         id = object.attributes.fetch('Id')
+         model = existing_models[id]
+         is_new = !model
+         attrs = object.attributes
+         if is_new && import_created
+           model_class.import_or_update_with_attrs(id, attrs)
+           created_count += 1
+         elsif !is_new && import_updated
+           if model.salesforce_update_without_sync(attrs, true)
+             updated_count += 1
+           end
+         end
+       end
+
+       [created_count, updated_count]
+     end
+
+     def import_deletes
+       deleted_ids = partition.deleted_ids
+       return 0 unless deleted_ids.present?
+       model_class.where(salesforce_id: deleted_ids).delete_all
+     end
+
+     def client
+       Draisine.salesforce_client
+     end
+
+     def salesforce_object_name
+       model_class.salesforce_object_name
+     end
+   end
+ end
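The poller is normally driven through the class-level interface above. A minimal usage sketch, assuming a synced `Lead` model and a one-hour window (both are placeholders for illustration):

    # Poll Salesforce for Lead changes from the last hour, importing new and
    # deleted records but skipping updates (the defaults of Poller#run).
    result = Draisine::Poller.run(
      model_class: Lead,
      mechanism: :system_modstamp,
      start_date: 1.hour.ago,
      end_date: Time.current
    )
    result.created_count  # => number of records imported locally
    result.deleted_count  # => number of local records removed

    # For large windows, build partitions explicitly and run them separately,
    # e.g. one background job per partition:
    Draisine::Poller.partition(model_class: Lead, start_date: 1.day.ago, partition_size: 100)
      .each { |part| Draisine::Poller.run_partition(part) }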
data/lib/draisine/query_mechanisms/base.rb
@@ -0,0 +1,15 @@
+ module Draisine
+   module QueryMechanisms
+     class Base
+       attr_reader :model_class, :client
+       def initialize(model_class, client = Draisine.salesforce_client)
+         @model_class = model_class
+         @client = client
+       end
+
+       def salesforce_object_name
+         model_class.salesforce_object_name
+       end
+     end
+   end
+ end
data/lib/draisine/query_mechanisms/default.rb
@@ -0,0 +1,13 @@
+ module Draisine
+   module QueryMechanisms
+     class Default < Base
+       def get_updated_ids(start_date, end_date)
+         client.get_updated_ids(salesforce_object_name, start_date, end_date)
+       end
+
+       def get_deleted_ids(start_date, end_date)
+         client.get_deleted_ids(salesforce_object_name, start_date, end_date)
+       end
+     end
+   end
+ end
data/lib/draisine/query_mechanisms/last_modified_date.rb
@@ -0,0 +1,18 @@
+ module Draisine
+   module QueryMechanisms
+     class LastModifiedDate < Base
+       def get_updated_ids(start_date, end_date)
+         response = client.query <<-EOQ
+           SELECT Id FROM #{salesforce_object_name}
+           WHERE LastModifiedDate >= #{start_date.iso8601}
+           AND LastModifiedDate <= #{end_date.iso8601}
+         EOQ
+         response.map(&:Id)
+       end
+
+       def get_deleted_ids(start_date, end_date)
+         []
+       end
+     end
+   end
+ end
data/lib/draisine/query_mechanisms/system_modstamp.rb
@@ -0,0 +1,18 @@
+ module Draisine
+   module QueryMechanisms
+     class SystemModstamp < Base
+       def get_updated_ids(start_date, end_date)
+         response = client.query <<-EOQ
+           SELECT Id FROM #{salesforce_object_name}
+           WHERE SystemModstamp >= #{start_date.iso8601}
+           AND SystemModstamp <= #{end_date.iso8601}
+         EOQ
+         response.map(&:Id)
+       end
+
+       def get_deleted_ids(start_date, end_date)
+         []
+       end
+     end
+   end
+ end
data/lib/draisine/query_mechanisms.rb
@@ -0,0 +1,18 @@
+ module Draisine
+   module QueryMechanisms
+     require "draisine/query_mechanisms/base"
+     require "draisine/query_mechanisms/default"
+     require "draisine/query_mechanisms/system_modstamp"
+     require "draisine/query_mechanisms/last_modified_date"
+
+     MAP = {
+       default: Default,
+       system_modstamp: SystemModstamp,
+       last_modified_date: LastModifiedDate
+     }
+
+     def self.fetch(name)
+       MAP.fetch(name)
+     end
+   end
+ end
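`QueryMechanisms.fetch` resolves a symbolic mechanism name to its class; each mechanism is built with a model class and answers `get_updated_ids`/`get_deleted_ids`. A sketch, again assuming a hypothetical synced `Lead` model:

    mechanism = Draisine::QueryMechanisms.fetch(:last_modified_date).new(Lead)
    mechanism.get_updated_ids(2.days.ago, Time.current)  # SOQL query on LastModifiedDate
    mechanism.get_deleted_ids(2.days.ago, Time.current)  # => [] (only :default can report deletions)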
data/lib/draisine/registry.rb
@@ -0,0 +1,22 @@
+ module Draisine
+   class Registry
+     attr_reader :models
+
+     def initialize
+       @models = {}
+     end
+
+     def find(name)
+       models.fetch(name)
+     end
+
+     def register(model, name)
+       models[name] = model
+       models[model.name] ||= model
+     end
+   end
+
+   def self.registry
+     @registry ||= Registry.new
+   end
+ end
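The registry stores each model under both the name it was registered with and its Ruby class name, so inbound messages can be dispatched by either. A sketch with a hypothetical `Lead` model:

    Draisine.registry.register(Lead, "Lead__c")
    Draisine.registry.find("Lead__c")   # => Lead (the registered name)
    Draisine.registry.find("Lead")      # => Lead (the class-name fallback)
    Draisine.registry.find("Missing")   # raises KeyError, since Hash#fetch is used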
data/lib/draisine/setup.rb
@@ -0,0 +1,97 @@
+ require "logger"
+
+ module Draisine
+   def self.salesforce_client=(client)
+     @salesforce_client = client
+   end
+
+   def self.salesforce_client
+     unless @salesforce_client
+       fail <<-EOM
+         DatabaseDotcom client was not properly set up. You can set it up as follows:
+           sf_client = Databasedotcom::Client.new("config/databasedotcom.yml")
+           sf_client.authenticate :username => <username>, :password => <password>
+           Draisine.salesforce_client = sf_client
+       EOM
+     end
+     @salesforce_client
+   end
+
+   def self.organization_id
+     unless @organization_id
+       fail <<-EOM
+         Draisine.organization_id was not properly set up.
+         You can use Draisine.organization_id= method to set it.
+         See https://cloudjedi.wordpress.com/no-fuss-salesforce-id-converter/ if
+         you need to convert your 15-char id into 18-char.
+       EOM
+     end
+     @organization_id
+   end
+
+   def self.organization_id=(id)
+     unless id.kind_of?(String) && id.length == 18
+       fail ArgumentError, "You should set organization id to an 18 character string"
+     end
+     @organization_id = id
+   end
+
+   def self.job_error_handler
+     @job_error_handler ||= proc {|error, job_instance, args| raise error }
+   end
+
+   def self.job_error_handler=(handler)
+     @job_error_handler = handler
+   end
+
+   def self.sync_callback
+     @sync_callback ||= proc {|type, salesforce_id, options| }
+   end
+
+   def self.sync_callback=(callback)
+     @sync_callback = callback
+   end
+
+   def self.job_retry_attempts
+     @job_retry_attempts ||= 0
+   end
+
+   def self.sync_soap_operations?
+     @sync_soap_operations = true if @sync_soap_operations.nil?
+     @sync_soap_operations
+   end
+
+   def self.sync_soap_operations=(value)
+     @sync_soap_operations = value
+   end
+
+   def self.job_retry_attempts=(count)
+     @job_retry_attempts = count
+   end
+
+   def self.invalid_organization_handler
+     @invalid_organization_handler ||= proc {|message| fail Draisine::SoapHandler::InvalidOrganizationError, "invalid organization id in the inbound message from salesforce" }
+   end
+
+   def self.invalid_organization_handler=(handler)
+     @invalid_organization_handler = handler
+   end
+
+   # https://help.salesforce.com/apex/HTViewSolution?language=en_US&id=000003652
+   def self.allowed_ip_ranges
+     @allowed_ip_ranges ||= [
+       '96.43.144.0/20',
+       '136.146.210.8/15',
+       '204.14.232.0/21',
+       '85.222.128.0/19',
+       '185.79.140.0/22',
+       '182.50.76.0/22',
+       '202.129.242.0/23',
+       '127.0.0.1'
+     ]
+   end
+
+   def self.allowed_ip_ranges=(ranges)
+     @allowed_ip_ranges = ranges
+   end
+ end
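Taken together, these accessors are the gem's configuration surface. A sketch of a typical initializer, mirroring the setup shown in the error message above (credentials, organization id, and the extra IP range are placeholders):

    # config/initializers/draisine.rb (hypothetical)
    sf_client = Databasedotcom::Client.new("config/databasedotcom.yml")
    sf_client.authenticate username: ENV["SF_USERNAME"], password: ENV["SF_PASSWORD"]

    Draisine.salesforce_client  = sf_client
    Draisine.organization_id    = "00Dxxxxxxxxxxxxxxx"  # 18-character id required
    Draisine.job_retry_attempts = 3
    Draisine.job_error_handler  = ->(error, job, args) { Rails.logger.error(error) }
    Draisine.allowed_ip_ranges += ["10.0.0.0/8"]        # e.g. an internal proxy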
data/lib/draisine/soap_handler.rb
@@ -0,0 +1,79 @@
+ require 'active_support/core_ext/hash/conversions'
+ require 'active_support/core_ext/hash/indifferent_access'
+
+ module Draisine
+   class SoapHandler
+     InvalidOrganizationError = Class.new(StandardError)
+
+     def initialize
+     end
+
+     def update(message_xml)
+       message = parse(message_xml)
+
+       assert_valid_message!(message)
+       extract_sobjects(message).each do |sobject|
+         type = sobject.fetch('xsi:type').sub('sf:', '')
+         klass = Draisine.registry.find(type)
+         klass.salesforce_on_inbound_update(sobject)
+       end
+     rescue InvalidOrganizationError => e
+       Draisine.invalid_organization_handler.call(message)
+     end
+
+     def delete(message_xml)
+       message = parse(message_xml)
+
+       assert_valid_message!(message)
+       extract_sobjects(message).each do |sobject|
+         type = sobject.fetch('Object_Type__c')
+         id = sobject.fetch('Object_Id__c')
+         klass = Draisine.registry.find(type)
+         klass.salesforce_on_inbound_delete(id)
+       end
+     rescue InvalidOrganizationError => e
+       Draisine.invalid_organization_handler.call(message)
+     end
+
+     protected
+
+     def parse(message_xml)
+       case message_xml
+       when Hash
+         message_xml
+       when String
+         Hash.from_xml(message_xml)
+       else
+         raise ArgumentError
+       end
+     end
+
+     def extract_sobjects(message)
+       Array.wrap(message['Envelope']['Body']['notifications']['Notification']).map do |sobject|
+         sobject.fetch('sObject')
+       end
+     end
+
+     def assert_valid_organization_id!(message)
+       unless diggable_to?(message, ['Envelope', 'Body', 'notifications', 'OrganizationId']) &&
+           message['Envelope']['Body']['notifications']['OrganizationId'] == Draisine.organization_id
+         fail InvalidOrganizationError, "a message from invalid organization id received"
+       end
+     end
+
+     def assert_valid_message!(message)
+       unless diggable_to?(message, ['Envelope', 'Body', 'notifications', 'Notification'])
+         fail ArgumentError, "malformed xml inbound message from salesforce"
+       end
+       assert_valid_organization_id!(message)
+     end
+
+     def diggable_to?(hash, path)
+       path.each do |key|
+         return false unless hash.respond_to?(:key?) && hash.key?(key)
+         hash = hash[key]
+       end
+       true
+     end
+   end
+ end
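Because `parse` accepts an already-parsed Hash as well as raw XML, the handler's expected message shape can be shown without a full SOAP envelope. A minimal sketch, assuming a `Lead` model registered under the name "Lead" and a configured organization id (the id values are placeholders):

    handler = Draisine::SoapHandler.new
    message = {
      "Envelope" => {
        "Body" => {
          "notifications" => {
            "OrganizationId" => Draisine.organization_id,
            "Notification" => [
              { "sObject" => { "xsi:type" => "sf:Lead", "Id" => "00Qxxxxxxxxxxxxxxx" } }
            ]
          }
        }
      }
    }
    handler.update(message)  # dispatches to Lead.salesforce_on_inbound_update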
data/lib/draisine/syncer.rb
@@ -0,0 +1,52 @@
+ module Draisine
+   # Wrapper around salesforce client implementation
+   # Might have pluggable adapters in the future.
+   class Syncer
+     attr_reader :salesforce_object_name
+
+     def initialize(salesforce_object_name, client = nil)
+       @salesforce_object_name ||= salesforce_object_name
+       @client = client
+     end
+
+     def get(id, options = {})
+       raise ArgumentError unless id.present?
+       response = client.http_get(build_sobject_url(id), options)
+       JSON.parse(response.body)
+     end
+
+     def create(attrs)
+       response = client.http_post(build_sobject_url(nil), attrs.to_json)
+       JSON.parse(response.body)
+     end
+
+     def update(id, attrs)
+       raise ArgumentError unless id.present?
+       return unless attrs.present?
+       client.http_patch(build_sobject_url(id), attrs.to_json)
+     end
+
+     def delete(id)
+       raise ArgumentError unless id.present?
+       client.http_delete(build_sobject_url(id))
+     end
+
+     def get_system_modstamp(id)
+       raise ArgumentError unless id.present?
+       time = get(id, fields: "SystemModstamp")['SystemModstamp']
+       time && Time.parse(time)
+     end
+
+     protected
+
+     def client
+       @client || Draisine.salesforce_client
+     end
+
+     def build_sobject_url(id)
+       url = "/services/data/v#{client.version}/sobjects/#{salesforce_object_name}"
+       url << "/#{id}" if id
+       url
+     end
+   end
+ end
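A usage sketch of the REST wrapper (the object name, record id, and field values are placeholders; the global client must already be configured):

    syncer = Draisine::Syncer.new("Contact")
    syncer.get("003xxxxxxxxxxxxxxx", fields: "FirstName,LastName")  # => parsed JSON attributes
    syncer.update("003xxxxxxxxxxxxxxx", "LastName" => "Smith")
    syncer.get_system_modstamp("003xxxxxxxxxxxxxxx")                # => Time of the last change in Salesforce
    syncer.create("LastName" => "New")                              # => parsed JSON response
    syncer.delete("003xxxxxxxxxxxxxxx")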
data/lib/draisine/type_mapper.rb
@@ -0,0 +1,105 @@
+ module Draisine
+   class TypeMapper
+     Type = Struct.new(:ar_type, :ar_options, :serialized, :array)
+
+     ActiveRecordColumnDef = Struct.new(:column_name, :column_type, :options) do
+       def self.from_ar_column(ar_col)
+         new(ar_col.name, ar_col.type, { limit: ar_col.limit }.compact)
+       end
+     end
+
+     MAX_ALLOWED_STRING_TYPE_LENGTH = 40
+
+     def self.type(ar_type, ar_options: {}, serialized: false, array: false)
+       Type.new(ar_type, ar_options, serialized, array)
+     end
+
+     def self.determine_type_for_float(name, sf_schema)
+       if (precision = sf_schema[:precision]) && (scale = sf_schema[:scale]) && scale == 0
+         type(:integer, ar_options: { limit: 8 })
+       else
+         type(:float, ar_options: { limit: 53 })
+       end
+     end
+
+     def self.determine_type_for_string(name, sf_schema)
+       if (length = sf_schema[:length]) && length > 0 && length <= MAX_ALLOWED_STRING_TYPE_LENGTH
+         type(:string, ar_options: { limit: length })
+       else
+         type(:text)
+       end
+     end
+
+     # Apparently, mysql has a hard limit of 64k per row.
+     # That's why we're using text types where we could also use strings.
+     TYPES_MAP = {
+       "boolean" => type(:boolean),
+       "string" => method(:determine_type_for_string),
+       "reference" => type(:string, ar_options: { limit: 20 }),
+       "picklist" => type(:binary, serialized: true),
+       "textarea" => method(:determine_type_for_string),
+       "phone" => method(:determine_type_for_string),
+       "email" => method(:determine_type_for_string),
+       "url" => method(:determine_type_for_string),
+       "int" => type(:integer),
+       "date" => type(:date),
+       "time" => type(:time),
+       "multipicklist" => type(:binary, serialized: true, array: true),
+       "double" => method(:determine_type_for_float),
+       "datetime" => type(:datetime),
+       "anyType" => type(:binary, serialized: true),
+       "combobox" => type(:text),
+       "currency" => type(:decimal, ar_options: { precision: 18, scale: 6 }),
+       "percent" => type(:decimal, ar_options: { precision: 18, scale: 6 })
+       # Leave this one for now
+       # "encrypted_string" => :string,
+     }
+
+     EXCLUDED_COLUMNS = ["Id"]
+
+     attr_reader :sf_type_map, :type_map
+     def initialize(sf_type_map)
+       @sf_type_map = sf_type_map
+       @type_map = sf_type_map.reject {|name, schema| ignored_column?(name, schema) }.
+         select {|name, schema| type_for(name, schema) }.
+         map {|name, schema| [name, type_for(name, schema)] }.
+         to_h
+     end
+
+     def active_record_column_defs
+       type_map.map {|name, type| active_record_column_def(name, type) }
+     end
+
+     def columns
+       @columns ||= type_map.keys
+     end
+
+     def updateable_columns
+       @updateable_columns ||= sf_type_map.select {|_, type| type[:updateable?] }.keys
+     end
+
+     def serialized_columns
+       @serialized_columns ||= type_map.select {|_, type| type.serialized }.keys
+     end
+
+     def array_columns
+       @array_columns ||= type_map.select {|_, type| type.array }.keys
+     end
+
+     protected
+
+     def type_for(sf_column_name, sf_column_schema)
+       sf_type = sf_column_schema.fetch(:type)
+       type = TYPES_MAP.fetch(sf_type) { warn "Unknown column type #{sf_type} for column #{sf_column_name}, ignoring it" }
+       type.respond_to?(:call) ? type.call(sf_column_name, sf_column_schema) : type
+     end
+
+     def ignored_column?(sf_column_name, sf_column_schema)
+       EXCLUDED_COLUMNS.include?(sf_column_name)
+     end
+
+     def active_record_column_def(column_name, type)
+       ActiveRecordColumnDef.new(column_name, type.ar_type, type.ar_options)
+     end
+   end
+ end
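The mapper turns a Salesforce field schema into Active Record column definitions. A sketch with a hand-written schema hash (field names and schema values are illustrative, not taken from a real describe call):

    sf_schema = {
      "Id"            => { type: "string", length: 18, updateable?: false },
      "LastName"      => { type: "string", length: 80, updateable?: true },
      "AnnualRevenue" => { type: "double", precision: 18, scale: 0, updateable?: true },
      "Interests__c"  => { type: "multipicklist", updateable?: true }
    }

    mapper = Draisine::TypeMapper.new(sf_schema)
    mapper.columns             # => ["LastName", "AnnualRevenue", "Interests__c"] ("Id" is excluded)
    mapper.serialized_columns  # => ["Interests__c"]
    mapper.array_columns       # => ["Interests__c"]
    mapper.active_record_column_defs.first
    # => column_name "LastName", column_type :text (long strings fall back to text), options {}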
data/lib/draisine/util/caching_client.rb
@@ -0,0 +1,73 @@
+ module Draisine
+   class CachingClient
+     class Cache
+       attr_reader :cache
+
+       def initialize
+         @cache = {}
+       end
+
+       def [](key)
+         cache[key]
+       end
+
+       def fetch(key, &block)
+         cache.fetch(key) { cache[key] = yield }
+       end
+
+       def []=(key, value)
+         cache[key] = value
+       end
+
+       def add(record)
+         self[record.attributes.fetch('Id')] = record
+       end
+
+       def add_multiple(records)
+         records.each {|record| add(record) }
+       end
+
+       def has_ids?(ids)
+         (ids - cache.keys).empty?
+       end
+
+       def fetch_multiple(ids, &block)
+         if has_ids?(ids)
+           cache.values_at(*ids)
+         else
+           yield.tap do |records|
+             add_multiple(records)
+           end
+         end
+       end
+     end
+
+     attr_reader :cache_map, :client
+
+     def initialize(client = Draisine.salesforce_client)
+       @cache_map = Hash.new {|h,k| h[k] = Cache.new }
+       @client = client
+     end
+
+     def find(salesforce_object_name, id)
+       cache_map[salesforce_object_name].fetch(id) do
+         client.find(salesforce_object_name, id)
+       end
+     end
+
+     def fetch_multiple(salesforce_object_name, ids)
+       cache_map[salesforce_object_name].fetch_multiple(ids) do
+         client.fetch_multiple(salesforce_object_name, ids)
+       end
+     end
+     alias_method :prefetch, :fetch_multiple
+
+     def method_missing(method, *args, &block)
+       if client.respond_to?(method)
+         client.__send__(method, *args, &block)
+       else
+         super
+       end
+     end
+   end
+ end
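A usage sketch of the caching wrapper (ids are placeholders): lookups go through the per-object-type cache, while unknown methods are proxied to the wrapped client via `method_missing`.

    client = Draisine::CachingClient.new(Draisine.salesforce_client)
    ids = ["003xxxxxxxxxxxxxx1", "003xxxxxxxxxxxxxx2"]

    client.fetch_multiple("Contact", ids)           # hits Salesforce, fills the cache
    client.find("Contact", ids.first)               # served from the cache, no extra request
    client.query("SELECT Id FROM Contact LIMIT 1")  # not cached; proxied to the underlying client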
data/lib/draisine/util/hash_diff.rb
@@ -0,0 +1,39 @@
+ module Draisine
+   HashDiff = Struct.new(:added, :removed, :changed, :unchanged) do
+     def self.diff(hash1, hash2, equality = -> (a, b) { a == b })
+       unless hash1.respond_to?(:key?) && hash2.respond_to?(:key?)
+         fail ArgumentError, "both arguments should be hashes"
+       end
+
+       added = []
+       removed = []
+       changed = []
+       unchanged = []
+
+       (hash1.keys | hash2.keys).each do |key|
+         if hash1.key?(key) && hash2.key?(key)
+           if equality.call(hash1[key], hash2[key])
+             unchanged << key
+           else
+             changed << key
+           end
+         elsif hash1.key?(key)
+           removed << key
+         else
+           added << key
+         end
+       end
+
+       new(added, removed, changed, unchanged)
+     end
+
+     def self.sf_diff(hash1, hash2)
+       diff(hash1, hash2, SalesforceComparisons.method(:salesforce_equals?))
+     end
+
+     def diff_keys
+       changed | added | removed
+     end
+   end
+
+ end
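`HashDiff.diff` classifies keys rather than producing value-level patches, and `sf_diff` swaps in `SalesforceComparisons.salesforce_equals?` for plain `==`. For example (illustrative values):

    local  = { "FirstName" => "Ada", "Phone" => "555-0100", "Title" => "CTO" }
    remote = { "FirstName" => "Ada", "Phone" => "555-0199", "Email" => "ada@example.com" }

    diff = Draisine::HashDiff.diff(local, remote)
    diff.changed    # => ["Phone"]
    diff.added      # => ["Email"]   (only in the second hash)
    diff.removed    # => ["Title"]   (only in the first hash)
    diff.unchanged  # => ["FirstName"]
    diff.diff_keys  # => ["Phone", "Email", "Title"]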
data/lib/draisine/util/parse_time.rb
@@ -0,0 +1,14 @@
+ module Draisine
+   def self.parse_time(time_or_string)
+     case time_or_string
+     when Time, DateTime
+       time_or_string
+     when String
+       Time.parse(time_or_string)
+     else
+       Time.parse(time_or_string.to_s)
+     end
+   rescue => e
+     nil
+   end
+ end
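The helper is deliberately forgiving: anything it cannot parse becomes `nil` instead of raising, which suits timestamps arriving from external payloads. For example (illustrative values):

    Draisine.parse_time("2015-06-01T12:30:00Z")  # => 2015-06-01 12:30:00 UTC
    Draisine.parse_time(Time.current)            # returned as-is
    Draisine.parse_time("not a timestamp")       # => nil (the ArgumentError is swallowed)
    Draisine.parse_time(nil)                     # => nil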