terrestrial 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. checksums.yaml +5 -5
  2. data/.ruby-version +1 -1
  3. data/Gemfile.lock +44 -53
  4. data/README.md +3 -6
  5. data/bin/test +1 -1
  6. data/features/env.rb +12 -2
  7. data/features/example.feature +23 -26
  8. data/lib/terrestrial.rb +31 -0
  9. data/lib/terrestrial/adapters/abstract_adapter.rb +6 -0
  10. data/lib/terrestrial/adapters/memory_adapter.rb +82 -6
  11. data/lib/terrestrial/adapters/sequel_postgres_adapter.rb +191 -0
  12. data/lib/terrestrial/configurations/conventional_association_configuration.rb +65 -35
  13. data/lib/terrestrial/configurations/conventional_configuration.rb +280 -124
  14. data/lib/terrestrial/configurations/mapping_config_options_proxy.rb +97 -0
  15. data/lib/terrestrial/deleted_record.rb +12 -8
  16. data/lib/terrestrial/dirty_map.rb +17 -9
  17. data/lib/terrestrial/functional_pipeline.rb +64 -0
  18. data/lib/terrestrial/inspection_string.rb +6 -1
  19. data/lib/terrestrial/lazy_object_proxy.rb +1 -0
  20. data/lib/terrestrial/many_to_many_association.rb +34 -20
  21. data/lib/terrestrial/many_to_one_association.rb +11 -3
  22. data/lib/terrestrial/one_to_many_association.rb +9 -0
  23. data/lib/terrestrial/public_conveniencies.rb +65 -82
  24. data/lib/terrestrial/record.rb +106 -0
  25. data/lib/terrestrial/relation_mapping.rb +43 -12
  26. data/lib/terrestrial/relational_store.rb +33 -11
  27. data/lib/terrestrial/upsert_record.rb +54 -0
  28. data/lib/terrestrial/version.rb +1 -1
  29. data/spec/automatic_timestamps_spec.rb +339 -0
  30. data/spec/changes_api_spec.rb +81 -0
  31. data/spec/config_override_spec.rb +28 -19
  32. data/spec/custom_serializers_spec.rb +3 -2
  33. data/spec/database_default_fields_spec.rb +213 -0
  34. data/spec/database_generated_id_spec.rb +291 -0
  35. data/spec/database_owned_fields_and_timestamps_spec.rb +200 -0
  36. data/spec/deletion_spec.rb +1 -1
  37. data/spec/error_handling/factory_error_handling_spec.rb +1 -4
  38. data/spec/error_handling/serialization_error_spec.rb +1 -4
  39. data/spec/error_handling/upsert_error_spec.rb +7 -11
  40. data/spec/graph_persistence_spec.rb +52 -18
  41. data/spec/ordered_association_spec.rb +10 -12
  42. data/spec/predefined_queries_spec.rb +14 -12
  43. data/spec/readme_examples_spec.rb +1 -1
  44. data/spec/sequel_query_efficiency_spec.rb +19 -16
  45. data/spec/spec_helper.rb +6 -1
  46. data/spec/support/blog_schema.rb +7 -3
  47. data/spec/support/object_graph_setup.rb +30 -39
  48. data/spec/support/object_store_setup.rb +16 -196
  49. data/spec/support/seed_data_setup.rb +15 -149
  50. data/spec/support/seed_records.rb +141 -0
  51. data/spec/support/sequel_test_support.rb +46 -13
  52. data/spec/terrestrial/abstract_record_spec.rb +138 -106
  53. data/spec/terrestrial/adapters/sequel_postgres_adapter_spec.rb +138 -0
  54. data/spec/terrestrial/deleted_record_spec.rb +0 -27
  55. data/spec/terrestrial/dirty_map_spec.rb +52 -77
  56. data/spec/terrestrial/functional_pipeline_spec.rb +153 -0
  57. data/spec/terrestrial/inspection_string_spec.rb +61 -0
  58. data/spec/terrestrial/upsert_record_spec.rb +29 -0
  59. data/terrestrial.gemspec +7 -8
  60. metadata +43 -40
  61. data/MissingFeatures.md +0 -64
  62. data/lib/terrestrial/abstract_record.rb +0 -99
  63. data/lib/terrestrial/association_loaders.rb +0 -52
  64. data/lib/terrestrial/upserted_record.rb +0 -15
  65. data/spec/terrestrial/public_conveniencies_spec.rb +0 -63
  66. data/spec/terrestrial/upserted_record_spec.rb +0 -59
data/lib/terrestrial/configurations/mapping_config_options_proxy.rb
@@ -0,0 +1,97 @@
+ module Terrestrial
+   module Configurations
+     class MappingConfigOptionsProxy
+       def initialize(configuration, mapping_name)
+         @configuration = configuration
+         @mapping_name = mapping_name
+       end
+
+       attr_reader :configuration, :mapping_name
+       private :configuration, :mapping_name
+
+       def relation_name(name)
+         add_override(relation_name: name)
+       end
+       alias_method :table_name, :relation_name
+
+       def subset(subset_name, &block)
+         configuration.add_subset(mapping_name, subset_name, block)
+       end
+
+       def has_many(*args)
+         add_association(:has_many, args)
+       end
+
+       def has_many_through(*args)
+         add_association(:has_many_through, args)
+       end
+
+       def belongs_to(*args)
+         add_association(:belongs_to, args)
+       end
+
+       def fields(field_names)
+         add_override(fields: field_names)
+       end
+
+       def primary_key(field_names)
+         add_override(primary_key: field_names)
+       end
+
+       def use_database_id(&block)
+         add_override(use_database_id: true)
+         block && add_override(database_id_setter: block)
+       end
+
+       def database_owned_field(field_name, &object_setter)
+         configuration.overrides.fetch(mapping_name)[:database_owned_fields_setter_map] ||= {}
+         db_owned_fields = configuration.overrides.fetch(mapping_name).fetch(:database_owned_fields_setter_map)
+
+         db_owned_fields.merge!({field_name => object_setter})
+       end
+
+       def database_default_field(field_name, &object_setter)
+         configuration.overrides.fetch(mapping_name)[:database_default_fields_setter_map] ||= {}
+         db_default_fields = configuration.overrides.fetch(mapping_name).fetch(:database_default_fields_setter_map)
+
+         db_default_fields.merge!({field_name => object_setter})
+       end
+
+       def created_at_timestamp(field_name = Default, &block)
+         add_override(created_at_field: field_name)
+         block && add_override(created_at_setter: block)
+       end
+
+       def updated_at_timestamp(field_name = Default, &block)
+         add_override(updated_at_field: field_name)
+         block && add_override(updated_at_setter: block)
+       end
+
+       def factory(callable)
+         add_override(factory: callable)
+       end
+
+       def class(entity_class)
+         add_override('class': entity_class)
+       end
+
+       def class_name(class_name)
+         add_override(class_name: class_name)
+       end
+
+       def serializer(serializer_func)
+         add_override(serializer: serializer_func)
+       end
+
+       private
+
+       def add_override(*args)
+         configuration.add_override(mapping_name, *args)
+       end
+
+       def add_association(*args)
+         configuration.add_association(mapping_name, *args)
+       end
+     end
+   end
+ end
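The class above is the new per-mapping configuration DSL: each method records an override or an association against the owning configuration under the mapping's name. As a rough sketch only, here is how a mapping block might exercise these methods, assuming a `setup_mapping`-style entry point that yields the proxy (the entry point name, the schema, and the `User` class are illustrative and not taken from this diff):

    config = Terrestrial.config(DB)                    # DB: a Sequel PostgreSQL connection (assumed)
    config.setup_mapping(:users) do |users|            # yields a MappingConfigOptionsProxy (entry point assumed)
      users.table_name(:users)                         # alias of relation_name
      users.primary_key([:id])
      users.use_database_id                            # id is generated by the database
      users.created_at_timestamp(:created_at)
      users.updated_at_timestamp(:updated_at)
      users.has_many(:posts, foreign_key: :author_id)  # forwarded to add_association(:has_many, ...)
      users.factory(->(attrs) { User.new(attrs) })
    end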
data/lib/terrestrial/deleted_record.rb
@@ -1,20 +1,24 @@
- require "terrestrial/abstract_record"
+ require "terrestrial/record"

  module Terrestrial
-   class DeletedRecord < AbstractRecord
+   class DeletedRecord < Record
+     def initialize(mapping, attributes, depth)
+       @mapping = mapping
+       @attributes = attributes
+       @depth = depth
+     end
+
+     attr_reader :mapping, :attributes, :depth
+
      def if_delete(&block)
        block.call(self)
        self
      end

-     def subset?(_other)
-       false
-     end
-
      protected

-     def operation
-       :delete
+     def new_with_attributes(new_attributes)
+       self.class.new(mapping, new_attributes, depth)
      end
    end
  end
data/lib/terrestrial/dirty_map.rb
@@ -13,15 +13,12 @@ module Terrestrial
      end

      def load(record)
-       storage.store(hash_key(record), deep_clone(record))
+       storage.store(hash_key(record), record.deep_clone)
        record
      end

      def dirty?(record)
-       record_as_loaded = storage.fetch(hash_key(record), NotFound)
-       return true if record_as_loaded == NotFound
-
-       !record.subset?(record_as_loaded)
+       !same_as_loaded?(record) || deleted?(record)
      end

      def reject_unchanged_fields(record)
@@ -36,12 +33,23 @@ module Terrestrial

      NotFound = Module.new

-     def hash_key(record)
-       deep_clone([record.namespace, record.identity])
+     def same_as_loaded?(record)
+       record_as_loaded = storage.fetch(hash_key(record), NotFound)
+
+       if record_as_loaded == NotFound
+         false
+       else
+         record.subset?(record_as_loaded)
+       end
+     end
+
+     def deleted?(record)
+       record.if_delete { return true }
+       return false
      end

-     def deep_clone(record)
-       Marshal.load(Marshal.dump(record))
+     def hash_key(record)
+       [record.namespace, record.identity]
      end
    end
  end
data/lib/terrestrial/functional_pipeline.rb
@@ -0,0 +1,64 @@
+ module Terrestrial
+   class FunctionalPipeline
+     def self.from_array(steps = [])
+       new(steps.map { |name, func| Step.new(name, func) })
+     end
+
+     def initialize(steps = [])
+       @steps = steps
+     end
+
+     def call(args, &block)
+       result = execution_result([[:input, args]], &block)
+
+       [result.last.last, result]
+     end
+
+     def describe
+       @steps.map(&:name)
+     end
+
+     def append(name, func)
+       self.class.new(@steps + [Step.new(name, func)])
+     end
+
+     def take_until(step_name)
+       step = @steps.detect { |step| step.name == step_name }
+       last_step_index = @steps.index(step)
+       steps = @steps.slice(0..last_step_index)
+
+       self.class.new(steps)
+     end
+
+     def drop_until(step_name)
+       step = @steps.detect { |step| step.name == step_name }
+       first_step_index = @steps.index(step) + 1
+       steps = @steps.slice(first_step_index..-1)
+
+       self.class.new(steps)
+     end
+
+     private
+
+     def execution_result(initial_state, &block)
+       @steps.reduce(initial_state) { |state, step|
+         new_value = step.call(state.last.last)
+         block && block.call(step.name, new_value)
+         state + [ [step.name, new_value] ]
+       }
+     end
+
+     class Step
+       def initialize(name, func)
+         @name = name
+         @func = func
+       end
+
+       attr_reader :name, :func
+
+       def call(*args, &block)
+         func.call(*args, &block)
+       end
+     end
+   end
+ end
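Because FunctionalPipeline is new in this release, here is a small standalone sketch of its observable behaviour (toy step lambdas, not code from the gem or its specs):

    pipeline = Terrestrial::FunctionalPipeline.from_array([
      [:double,  ->(x) { x * 2 }],
      [:add_one, ->(x) { x + 1 }],
    ])

    pipeline.describe                            # => [:double, :add_one]

    result, trace = pipeline.call(3)
    result                                       # => 7
    trace                                        # => [[:input, 3], [:double, 6], [:add_one, 7]]

    # take_until / drop_until return new, shortened pipelines
    pipeline.take_until(:double).call(3).first   # => 6

The dump pipeline in public_conveniencies.rb (later in this diff) is built from this class, so its steps can be listed with describe and executed partially with take_until.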
data/lib/terrestrial/inspection_string.rb
@@ -1,8 +1,13 @@
  module Terrestrial
    module InspectionString
      def inspect
+       original_inspect_string = super
+       # this is kind of a silly way of getting the object id hex string but
+       # multiple Ruby versions have changed how this is calculated.
+       hex_object_id = /#{self.class.to_s}:0x([0-9a-f]+)/.match(original_inspect_string)[1]
+
        (
-         ["\#<#{self.class.name}:0x#{sprintf("%014x", (object_id.<<(1)))}"] +
+         ["\#<#{self.class.to_s}:0x#{hex_object_id}"] +
          inspectable_properties.map { |name|
            [
              name,
data/lib/terrestrial/lazy_object_proxy.rb
@@ -1,4 +1,5 @@
  module Terrestrial
+   # TODO: This should do a better job of showing what will be loaded when it is inspected
    class LazyObjectProxy
      include InspectionString

@@ -3,9 +3,10 @@ require "terrestrial/dataset"
3
3
 
4
4
  module Terrestrial
5
5
  class ManyToManyAssociation
6
- def initialize(mapping_name:, join_mapping_name:, foreign_key:, key:, proxy_factory:, association_foreign_key:, association_key:, order:)
6
+ def initialize(mapping_name:, join_mapping_name:, join_dataset:, foreign_key:, key:, proxy_factory:, association_foreign_key:, association_key:, order:)
7
7
  @mapping_name = mapping_name
8
8
  @join_mapping_name = join_mapping_name
9
+ @join_dataset = join_dataset
9
10
  @foreign_key = foreign_key
10
11
  @key = key
11
12
  @proxy_factory = proxy_factory
@@ -18,10 +19,18 @@ module Terrestrial
        [mapping_name, join_mapping_name]
      end

+     def outgoing_foreign_keys
+       []
+     end
+
+     def local_foreign_keys
+       []
+     end
+
      attr_reader :mapping_name, :join_mapping_name

-     attr_reader :foreign_key, :key, :proxy_factory, :association_key, :association_foreign_key, :order
-     private :foreign_key, :key, :proxy_factory, :association_key, :association_foreign_key, :order
+     attr_reader :join_dataset, :foreign_key, :key, :proxy_factory, :association_key, :association_foreign_key, :order
+     private :join_dataset, :foreign_key, :key, :proxy_factory, :association_key, :association_foreign_key, :order

      def build_proxy(data_superset:, loader:, record:)
        proxy_factory.call(
@@ -37,36 +46,31 @@ module Terrestrial
      end

      def eager_superset((superset, join_superset), (associated_dataset))
-       join_data = Dataset.new(
+       subselect_data = Dataset.new(
          join_superset
            .where(foreign_key => associated_dataset.select(association_key))
            .to_a
        )

        eager_superset = Dataset.new(
-         superset.where(key => join_data.select(association_foreign_key)).to_a
+         superset.where(key => subselect_data.select(association_foreign_key)).to_a
        )

        [
          eager_superset,
-         join_data,
+         subselect_data,
        ]
      end

      def build_query((superset, join_superset), parent_record)
-       ids = join_superset
-         .where(foreign_key => foreign_key_value(parent_record))
-         .select(association_foreign_key)
+       subselect_ids = join_superset
+         .where(foreign_key => foreign_key_value(parent_record))
+         .select(association_foreign_key)

        order
-         .apply(
-           superset.where(
-             key => ids
-           )
-         )
-         .lazy.map { |record|
-           [record, [foreign_keys(parent_record, record)]]
-         }
+         .apply(superset.where(key => subselect_ids))
+         .lazy
+         .map { |record| [record, [foreign_keys(parent_record, record)]] }
      end

      def dump(parent_record, collection, depth, &block)
@@ -102,13 +106,23 @@ module Terrestrial
            depth + depth_modifier,
          )

-         fks = foreign_keys(parent_record, record)
+         join_foreign_keys = foreign_keys(parent_record, record)
          join_record_depth = depth + join_record_depth_modifier

+         # TODO: This is a bit hard to figure out
+         #
+         # The block is the one defined in GraphSerializer#updated_nodes_recursive (inspect the block to confirm)
+         # join_foreign_keys is the two foreign key values in a hash
+         # the hash is two of the arguments here
+         # first one is normally an object to be serialized, but serializing this hash will just return the same hash
+         # second one is the foreign keys that would need to accompany the object
+         #
+         # Passing it twice like this allows it to go through the GraphSerializer like a regular user-defined object
+
          join_records = block.call(
            join_mapping_name,
-           fks,
-           fks,
+           join_foreign_keys, # normally this is the object which gets serialized
+           join_foreign_keys, # normally this is the foreign key data the object doesn't know about
            join_record_depth
          ).flatten(1)

data/lib/terrestrial/many_to_one_association.rb
@@ -13,6 +13,14 @@ module Terrestrial
        [mapping_name]
      end

+     def outgoing_foreign_keys
+       []
+     end
+
+     def local_foreign_keys
+       [foreign_key]
+     end
+
      attr_reader :mapping_name

      attr_reader :foreign_key, :key, :proxy_factory
@@ -42,17 +50,17 @@ module Terrestrial

      def dump(parent_record, collection, depth, &block)
        collection
+         .reject(&:nil?)
          .flat_map { |object|
            block.call(mapping_name, object, _foreign_key_does_not_go_here = {}, depth + depth_modifier)
          }
-         .reject(&:nil?)
      end
      alias_method :delete, :dump

      def extract_foreign_key(record)
        {
-         foreign_key => (record && record.fetch(key)),
-       }
+         foreign_key => record.fetch(key),
+       }.reject { |_k, v| v.nil? }
      end

      private
data/lib/terrestrial/one_to_many_association.rb
@@ -13,6 +13,15 @@ module Terrestrial
      def mapping_names
        [mapping_name]
      end
+
+     def outgoing_foreign_keys
+       [foreign_key]
+     end
+
+     def local_foreign_keys
+       []
+     end
+
      attr_reader :mapping_name

      attr_reader :foreign_key, :key, :order, :proxy_factory
data/lib/terrestrial/public_conveniencies.rb
@@ -1,9 +1,11 @@
  require "terrestrial/identity_map"
  require "terrestrial/dirty_map"
- require "terrestrial/upserted_record"
+ require "terrestrial/upsert_record"
  require "terrestrial/relational_store"
  require "terrestrial/configurations/conventional_configuration"
  require "terrestrial/inspection_string"
+ require "terrestrial/functional_pipeline"
+ require "terrestrial/adapters/sequel_postgres_adapter"

  module Terrestrial
    class ObjectStore
@@ -31,23 +33,42 @@ module Terrestrial
    end

    module PublicConveniencies
-     def config(database_connection)
-       Configurations::ConventionalConfiguration.new(database_connection)
-     end
-
-     def object_store(mappings:, datastore:)
+     def config(database, clock: Time)
        dirty_map = Private.build_dirty_map
        identity_map = Private.build_identity_map

-       stores = Hash[mappings.map { |name, _mapping|
+       Configurations::ConventionalConfiguration.new(
+         datastore: Private.datastore_adapter(database),
+         clock: clock,
+         dirty_map: dirty_map,
+         identity_map: identity_map,
+       )
+     end
+
+     def object_store(config:)
+       load_pipeline = Private.build_load_pipeline(
+         dirty_map: config.dirty_map,
+         identity_map: config.identity_map,
+       )
+       dump_pipeline = Private.build_dump_pipeline(
+         dirty_map: config.dirty_map,
+         datastore: config.datastore,
+         clock: config.clock,
+       )
+
+       mappings = config.mappings
+       mapping_names = mappings.keys
+       stores = Hash[mapping_names.map { |mapping_name|
          [
-           name,
-           Private.single_type_store(
+           mapping_name,
+           Private.relational_store(
+             name: mapping_name,
              mappings: mappings ,
-             name: name,
-             datastore: datastore,
-             identity_map: identity_map,
-             dirty_map: dirty_map,
+             datastore: config.datastore,
+             identity_map: config.identity_map,
+             dirty_map: config.dirty_map,
+             load_pipeline: load_pipeline,
+             dump_pipeline: dump_pipeline,
            )
          ]
        }]
@@ -58,24 +79,13 @@ module Terrestrial
      module Private
        module_function

-       def single_type_store(mappings:, name:, datastore:, identity_map:, dirty_map:)
-         dataset = datastore[mappings.fetch(name).namespace]
-
+       def relational_store(mappings:, name:, datastore:, identity_map:, dirty_map:, load_pipeline:, dump_pipeline:)
          RelationalStore.new(
            mappings: mappings,
            mapping_name: name,
            datastore: datastore,
-           dataset: dataset,
-           load_pipeline: build_load_pipeline(
-             dirty_map: dirty_map,
-             identity_map: identity_map,
-           ),
-           dump_pipeline: build_dump_pipeline(
-             dirty_map: dirty_map,
-             transaction: datastore.method(:transaction),
-             upsert: method(:upsert_record).curry.call(datastore),
-             delete: method(:delete_record).curry.call(datastore),
-           )
+           load_pipeline: load_pipeline,
+           dump_pipeline: dump_pipeline,
          )
        end

@@ -87,10 +97,23 @@ module Terrestrial
          DirtyMap.new(storage)
        end

+       def datastore_adapter(datastore)
+         if datastore.is_a?(Terrestrial::Adapters::AbstractAdapter)
+           return datastore
+         end
+
+         case datastore.class.name
+         when "Sequel::Postgres::Database"
+           Adapters::SequelPostgresAdapter.new(datastore)
+         else
+           raise "No adapter found for #{datastore.inspect}"
+         end
+       end
+
        def build_load_pipeline(dirty_map:, identity_map:)
          ->(mapping, record, associated_fields = {}) {
            [
-             record_factory(mapping),
+             ->(record) { Record.new(mapping, record) },
              dirty_map.method(:load),
              ->(record) {
                attributes = record.to_h.select { |k,_v|
@@ -106,64 +129,24 @@ module Terrestrial
            }
          }
        end

-       def build_dump_pipeline(dirty_map:, transaction:, upsert:, delete:)
-         ->(records) {
-           [
-             :uniq.to_proc,
-             ->(rs) { rs.select { |r| dirty_map.dirty?(r) } },
-             ->(rs) { rs.map { |r| dirty_map.reject_unchanged_fields(r) } },
-             ->(rs) { rs.sort_by(&:depth) },
-             ->(rs) {
-               transaction.call {
+       def build_dump_pipeline(dirty_map:, datastore:, clock:)
+         Terrestrial::FunctionalPipeline.from_array([
+           [:dedup, :uniq.to_proc],
+           [:sort_by_depth, ->(rs) { rs.sort_by(&:depth) }],
+           [:select_changed, ->(rs) { rs.select { |r| dirty_map.dirty?(r) } }],
+           [:remove_unchanged_fields, ->(rs) { rs.map { |r| dirty_map.reject_unchanged_fields(r) } }],
+           [:save_records, ->(rs) {
+             datastore.transaction {
                rs.each { |r|
-                 r.if_upsert(&upsert)
-                   .if_delete(&delete)
+                 r.if_upsert(&datastore.method(:upsert))
+                 r.if_delete(&datastore.method(:delete))
                }
              }
-             },
-             ->(rs) { rs.map { |r| dirty_map.load_if_new(r) } },
-           ].reduce(records) { |agg, operation|
-             operation.call(agg)
-           }
-         }
-       end
-
-       def record_factory(mapping)
-         ->(record_hash) {
-           identity = Hash[
-             mapping.primary_key.map { |field|
-               [field, record_hash.fetch(field)]
            }
-           ]
-
-           UpsertedRecord.new(
-             mapping.namespace,
-             identity,
-             record_hash,
-           )
-         }
-       end
-
-       def upsert_record(datastore, record)
-         row_count = 0
-         unless record.non_identity_attributes.empty?
-           row_count = datastore[record.namespace].
-             where(record.identity).
-             update(record.non_identity_attributes)
-         end
-
-         if row_count < 1
-           row_count = datastore[record.namespace].insert(record.to_h)
-         end
-
-         row_count
-       rescue Object => e
-         raise UpsertError.new(record.namespace, record.to_h, e)
+           ],
+           [:add_new_records_to_dirty_map, ->(rs) { rs.map { |r| dirty_map.load_if_new(r) } }],
+         ])
        end
-
-       def delete_record(datastore, record)
-         datastore[record.namespace].where(record.identity).delete
-       end
-     end
+     end
    end
  end
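Taken together, these changes split configuration from store construction: config now builds the adapter, clock, dirty map and identity map, while object_store only assembles the per-mapping RelationalStore instances plus the shared load and dump pipelines. A rough sketch of the resulting wiring, with the connection string and mapping name purely illustrative and the mapping definitions elided:

    DB = Sequel.connect("postgres://localhost/my_app")   # wrapped by Private.datastore_adapter in a SequelPostgresAdapter

    config = Terrestrial.config(DB, clock: Time)
    # ... define mappings on config, e.g. via the MappingConfigOptionsProxy DSL shown earlier in this diff ...

    object_store = Terrestrial.object_store(config: config)
    user_store   = object_store[:users]                  # one RelationalStore per mapping name

    user_store.save(user)   # runs the named dump pipeline steps (:dedup, :sort_by_depth, :select_changed,
                            # :remove_unchanged_fields, :save_records, :add_new_records_to_dirty_map)
                            # inside a single datastore transaction

Note: `object_store[:users]` and `save` follow the gem's pre-existing public API; the exact calls here are assumptions for illustration, not part of this diff.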