terrestrial 0.1.1 → 0.3.0

Files changed (59)
  1. checksums.yaml +4 -4
  2. data/.ruby-version +1 -1
  3. data/Gemfile.lock +29 -24
  4. data/README.md +35 -17
  5. data/Rakefile +4 -9
  6. data/TODO.md +25 -18
  7. data/bin/test +31 -0
  8. data/docs/domain_object_contract.md +50 -0
  9. data/features/env.rb +4 -6
  10. data/features/example.feature +28 -28
  11. data/features/step_definitions/example_steps.rb +2 -2
  12. data/lib/terrestrial/adapters/memory_adapter.rb +241 -0
  13. data/lib/terrestrial/collection_mutability_proxy.rb +7 -2
  14. data/lib/terrestrial/dirty_map.rb +5 -0
  15. data/lib/terrestrial/error.rb +69 -0
  16. data/lib/terrestrial/graph_loader.rb +58 -35
  17. data/lib/terrestrial/graph_serializer.rb +37 -30
  18. data/lib/terrestrial/inspection_string.rb +19 -0
  19. data/lib/terrestrial/lazy_collection.rb +2 -2
  20. data/lib/terrestrial/lazy_object_proxy.rb +1 -1
  21. data/lib/terrestrial/many_to_one_association.rb +17 -11
  22. data/lib/terrestrial/public_conveniencies.rb +125 -95
  23. data/lib/terrestrial/relation_mapping.rb +30 -0
  24. data/lib/terrestrial/{mapper_facade.rb → relational_store.rb} +11 -1
  25. data/lib/terrestrial/version.rb +1 -1
  26. data/spec/config_override_spec.rb +10 -14
  27. data/spec/custom_serializers_spec.rb +4 -6
  28. data/spec/deletion_spec.rb +12 -14
  29. data/spec/error_handling/factory_error_handling_spec.rb +61 -0
  30. data/spec/error_handling/serialization_error_spec.rb +50 -0
  31. data/spec/error_handling/upsert_error_spec.rb +132 -0
  32. data/spec/graph_persistence_spec.rb +80 -24
  33. data/spec/graph_traversal_spec.rb +14 -6
  34. data/spec/new_graph_persistence_spec.rb +43 -9
  35. data/spec/object_identity_spec.rb +5 -7
  36. data/spec/ordered_association_spec.rb +4 -6
  37. data/spec/predefined_queries_spec.rb +4 -6
  38. data/spec/querying_spec.rb +4 -12
  39. data/spec/readme_examples_spec.rb +3 -6
  40. data/spec/{persistence_efficiency_spec.rb → sequel_query_efficiency_spec.rb} +101 -19
  41. data/spec/spec_helper.rb +24 -2
  42. data/spec/support/memory_adapter_test_support.rb +21 -0
  43. data/spec/support/{mapper_setup.rb → object_store_setup.rb} +5 -5
  44. data/spec/support/seed_data_setup.rb +3 -1
  45. data/spec/support/sequel_test_support.rb +58 -25
  46. data/spec/{sequel_mapper → terrestrial}/abstract_record_spec.rb +0 -0
  47. data/spec/{sequel_mapper → terrestrial}/collection_mutability_proxy_spec.rb +0 -0
  48. data/spec/{sequel_mapper → terrestrial}/deleted_record_spec.rb +0 -0
  49. data/spec/{sequel_mapper → terrestrial}/dirty_map_spec.rb +38 -6
  50. data/spec/{sequel_mapper → terrestrial}/lazy_collection_spec.rb +2 -3
  51. data/spec/{sequel_mapper → terrestrial}/lazy_object_proxy_spec.rb +0 -0
  52. data/spec/{sequel_mapper → terrestrial}/public_conveniencies_spec.rb +12 -7
  53. data/spec/{sequel_mapper → terrestrial}/upserted_record_spec.rb +0 -0
  54. data/{sequel_mapper.gemspec → terrestrial.gemspec} +3 -3
  55. metadata +47 -39
  56. data/lib/terrestrial/short_inspection_string.rb +0 -18
  57. data/spec/proxying_spec.rb +0 -88
  58. data/spec/support/mock_sequel.rb +0 -193
  59. data/spec/support/sequel_persistence_setup.rb +0 -19

data/lib/terrestrial/graph_serializer.rb

@@ -1,6 +1,3 @@
-require "terrestrial/upserted_record"
-require "terrestrial/deleted_record"
-
 module Terrestrial
   class GraphSerializer
     def initialize(mappings:)
@@ -34,53 +31,63 @@ module Terrestrial
     private
 
     def associated_records(mapping, current_record, association_fields, depth)
-      mapping.associations
+      mapping
+        .associations
         .map { |name, association|
-          [association_fields.fetch(name), association]
-        }
-        .map { |collection, association|
-          [nodes(collection), deleted_nodes(collection), association]
+          dump_association(
+            association,
+            current_record,
+            association_fields.fetch(name),
+            depth,
+          )
         }
-        .map { |nodes, deleted_nodes, association|
-          association.dump(current_record, nodes, depth) { |assoc_mapping_name, assoc_object, foreign_key, assoc_depth|
-            call(assoc_mapping_name, assoc_object, assoc_depth, foreign_key).tap { |associated_record, *_join_records|
-              # TODO: remove this mutation
-              current_record.merge!(association.extract_foreign_key(associated_record))
-            }
-          } +
-          association.delete(current_record, deleted_nodes, depth) { |assoc_mapping_name, assoc_object, foreign_key, assoc_depth|
-            delete(assoc_mapping_name, assoc_object, assoc_depth, foreign_key)
-          }
+    end
+
+    def dump_association(association, current_record, collection, depth)
+      updated_nodes_recursive(association, current_record, collection, depth) +
+        deleted_nodes(association, current_record, collection, depth)
+    end
+
+    def updated_nodes_recursive(association, current_record, collection, depth)
+      association.dump(current_record, get_loaded(collection), depth) { |assoc_mapping_name, assoc_object, pass_down_foreign_key, assoc_depth|
+        recurse(current_record, association, assoc_mapping_name, assoc_object, assoc_depth, pass_down_foreign_key)
+      }
+    end
+
+    def recurse(current_record, association, assoc_mapping_name, assoc_object, assoc_depth, foreign_key)
+      (assoc_object && call(assoc_mapping_name, assoc_object, assoc_depth, foreign_key))
+        .tap { |associated_record, *_join_records|
+          current_record.merge!(association.extract_foreign_key(associated_record))
        }
    end
 
+    def deleted_nodes(association, current_record, collection, depth)
+      nodes = get_deleted(collection)
+      association.delete(current_record, nodes, depth) { |assoc_mapping_name, assoc_object, foreign_key, assoc_depth|
+        delete(assoc_mapping_name, assoc_object, assoc_depth, foreign_key)
+      }
+    end
+
     def delete(mapping_name, object, depth, _foreign_key)
       mapping = mappings.fetch(mapping_name)
-      serialized_record = mapping.serializer.call(object)
-
-      [
-        DeletedRecord.new(
-          mapping.namespace,
-          mapping.primary_key,
-          serialized_record,
-          depth,
-        )
-      ]
+      mapping.delete(object, depth)
     end
 
-    def nodes(collection)
+    def get_loaded(collection)
       if collection.respond_to?(:each_loaded)
         collection.each_loaded
       elsif collection.is_a?(Struct)
         [collection]
       elsif collection.respond_to?(:each)
         collection.each
+      elsif collection.nil?
+        [nil]
       else
         [collection]
       end
     end
 
-    def deleted_nodes(collection)
+    def get_deleted(collection)
       if collection.respond_to?(:each_deleted)
         collection.each_deleted
       else

data/lib/terrestrial/inspection_string.rb (new file)

@@ -0,0 +1,19 @@
+module Terrestrial
+  module InspectionString
+    def inspect
+      (
+        ["#<#{self.class.name}:0x#{sprintf("%014x", (object_id.<<(1)))}"] +
+        inspectable_properties.map { |name|
+          [
+            name,
+            instance_variable_get("@#{name}").inspect
+          ].join("=")
+        }
+      ).join(" ") + ">"
+    end
+
+    private def inspectable_properties
+      []
+    end
+  end
+end
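
For context, InspectionString builds an object's #inspect output from whichever instance variables the including class lists in inspectable_properties. A minimal usage sketch, assuming terrestrial 0.3.0 is installed; the Widget class and its attributes are invented for illustration:

```ruby
require "terrestrial/inspection_string"

# Hypothetical class, purely to illustrate the mixin.
class Widget
  include Terrestrial::InspectionString

  def initialize(name:, size:)
    @name = name
    @size = size
  end

  private

  # Only the ivars listed here appear in #inspect; everything else stays hidden.
  def inspectable_properties
    [:name, :size]
  end
end

Widget.new(name: "sprocket", size: 3).inspect
# => "#<Widget:0x00007f8a3c40d2 name=\"sprocket\" size=3>" (object address varies)
```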

data/lib/terrestrial/lazy_collection.rb

@@ -1,8 +1,8 @@
-require "terrestrial/short_inspection_string"
+require "terrestrial/inspection_string"
 
 module Terrestrial
   class LazyCollection
-    include ShortInspectionString
+    include InspectionString
     include Enumerable
 
     def initialize(database_enum, loader, queries)

data/lib/terrestrial/lazy_object_proxy.rb

@@ -1,6 +1,6 @@
 module Terrestrial
   class LazyObjectProxy
-    include ShortInspectionString
+    include InspectionString
 
     def initialize(object_loader, key_fields)
       @object_loader = object_loader

data/lib/terrestrial/many_to_one_association.rb

@@ -19,13 +19,13 @@ module Terrestrial
     private :foreign_key, :key, :proxy_factory
 
     def build_proxy(data_superset:, loader:, record:)
-      proxy_factory.call(
-        query: build_query(data_superset, record),
-        loader: loader,
-        preloaded_data: {
-          key => foreign_key_value(record),
-        },
-      )
+      foreign_key_nil?(record) ? nil : proxy_factory.call(
+        query: build_query(data_superset, record),
+        loader: loader,
+        preloaded_data: {
+          key => foreign_key_value(record),
+        },
+      )
     end
 
     def eager_superset((superset), (associated_dataset))
@@ -41,20 +41,26 @@ module Terrestrial
     end
 
     def dump(parent_record, collection, depth, &block)
-      collection.flat_map { |object|
-        block.call(mapping_name, object, _foreign_key_does_not_go_here = {}, depth + depth_modifier)
-      }
+      collection
+        .flat_map { |object|
+          block.call(mapping_name, object, _foreign_key_does_not_go_here = {}, depth + depth_modifier)
+        }
+        .reject(&:nil?)
     end
     alias_method :delete, :dump
 
     def extract_foreign_key(record)
       {
-        foreign_key => record.fetch(key),
+        foreign_key => (record && record.fetch(key)),
       }
     end
 
     private
 
+    def foreign_key_nil?(record)
+      foreign_key_value(record).nil?
+    end
+
     def foreign_key_value(record)
       record.fetch(foreign_key)
     end
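
The net effect of this change is that a many-to-one association with a NULL foreign key now yields nil instead of raising. A standalone sketch of the record && record.fetch(key) guard used above, with a plain hash standing in for Terrestrial's record object and invented :post_id/:id field names:

```ruby
# Plain-Ruby illustration of the nil-tolerant lookup; not the gem's API.
def extract_foreign_key(record, foreign_key: :post_id, key: :id)
  { foreign_key => (record && record.fetch(key)) }
end

extract_foreign_key({ id: 1 })  # => {:post_id=>1}
extract_foreign_key(nil)        # => {:post_id=>nil} -- the 0.1.1 version raised NoMethodError here
```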

data/lib/terrestrial/public_conveniencies.rb

@@ -1,23 +1,48 @@
 require "terrestrial/identity_map"
 require "terrestrial/dirty_map"
 require "terrestrial/upserted_record"
-require "terrestrial/mapper_facade"
+require "terrestrial/relational_store"
 require "terrestrial/configurations/conventional_configuration"
+require "terrestrial/inspection_string"
 
 module Terrestrial
+  class ObjectStore
+    include Fetchable
+    include InspectionString
+
+    def initialize(stores)
+      @mappings = stores.keys
+      @stores = stores
+    end
+
+    def [](mapping_name)
+      @stores[mapping_name]
+    end
+
+    def from(mapping_name)
+      fetch(mapping_name)
+    end
+
+    private
+
+    def inspectable_properties
+      [ :mappings ]
+    end
+  end
+
   module PublicConveniencies
     def config(database_connection)
       Configurations::ConventionalConfiguration.new(database_connection)
     end
 
-    def mappers(mappings:, datastore:)
-      dirty_map = build_dirty_map
-      identity_map = build_identity_map
+    def object_store(mappings:, datastore:)
+      dirty_map = Private.build_dirty_map
+      identity_map = Private.build_identity_map
 
-      Hash[mappings.map { |name, _mapping|
+      stores = Hash[mappings.map { |name, _mapping|
         [
           name,
-          mapper(
+          Private.single_type_store(
            mappings: mappings ,
            name: name,
            datastore: datastore,
@@ -26,114 +51,119 @@ module Terrestrial
           )
         ]
       }]
-    end
-
-    private
 
-    def mapper(mappings:, name:, datastore:, identity_map:, dirty_map:)
-      dataset = datastore[mappings.fetch(name).namespace]
-
-      MapperFacade.new(
-        mappings: mappings,
-        mapping_name: name,
-        datastore: datastore,
-        dataset: dataset,
-        load_pipeline: build_load_pipeline(
-          dirty_map: dirty_map,
-          identity_map: identity_map,
-        ),
-        dump_pipeline: build_dump_pipeline(
-          dirty_map: dirty_map,
-          transaction: datastore.method(:transaction),
-          upsert: method(:upsert_record).curry.call(datastore),
-          delete: method(:delete_record).curry.call(datastore),
-        )
-      )
+      ObjectStore.new(stores)
     end
 
-    private
+    module Private
+      module_function
 
-    def build_identity_map(storage = {})
-      IdentityMap.new(storage)
-    end
+      def single_type_store(mappings:, name:, datastore:, identity_map:, dirty_map:)
+        dataset = datastore[mappings.fetch(name).namespace]
 
-    def build_dirty_map(storage = {})
-      DirtyMap.new(storage)
-    end
+        RelationalStore.new(
+          mappings: mappings,
+          mapping_name: name,
+          datastore: datastore,
+          dataset: dataset,
+          load_pipeline: build_load_pipeline(
+            dirty_map: dirty_map,
+            identity_map: identity_map,
+          ),
+          dump_pipeline: build_dump_pipeline(
+            dirty_map: dirty_map,
+            transaction: datastore.method(:transaction),
+            upsert: method(:upsert_record).curry.call(datastore),
+            delete: method(:delete_record).curry.call(datastore),
+          )
+        )
+      end
 
-    def build_load_pipeline(dirty_map:, identity_map:)
-      ->(mapping, record, associated_fields = {}) {
-        [
-          record_factory(mapping),
-          dirty_map.method(:load),
-          ->(record) {
-            attributes = record.to_h.select { |k,_v|
-              mapping.fields.include?(k)
-            }
+      def build_identity_map(storage = {})
+        IdentityMap.new(storage)
+      end
 
-            object = mapping.factory.call(attributes.merge(associated_fields))
-            identity_map.call(mapping, record, object)
-          },
-        ].reduce(record) { |agg, operation|
-          operation.call(agg)
-        }
-      }
-    end
+      def build_dirty_map(storage = {})
+        DirtyMap.new(storage)
+      end
 
-    def build_dump_pipeline(dirty_map:, transaction:, upsert:, delete:)
-      ->(records) {
-        [
-          :uniq.to_proc,
-          ->(rs) { rs.select { |r| dirty_map.dirty?(r) } },
-          ->(rs) { rs.map { |r| dirty_map.reject_unchanged_fields(r) } },
-          ->(rs) { rs.sort_by(&:depth) },
-          ->(rs) {
-            transaction.call {
-              rs.each { |r|
-                r.if_upsert(&upsert)
-                  .if_delete(&delete)
+      def build_load_pipeline(dirty_map:, identity_map:)
+        ->(mapping, record, associated_fields = {}) {
+          [
+            record_factory(mapping),
+            dirty_map.method(:load),
+            ->(record) {
+              attributes = record.to_h.select { |k,_v|
+                mapping.fields.include?(k)
              }
+
+              object = mapping.load(attributes.merge(associated_fields))
+              identity_map.call(mapping, record, object)
+            },
+          ].reduce(record) { |agg, operation|
+            operation.call(agg)
          }
-          },
-        ].reduce(records) { |agg, operation|
-          operation.call(agg)
        }
-      }
-    end
+      end
 
-    def record_factory(mapping)
-      ->(record_hash) {
-        identity = Hash[
-          mapping.primary_key.map { |field|
-            [field, record_hash.fetch(field)]
+      def build_dump_pipeline(dirty_map:, transaction:, upsert:, delete:)
+        ->(records) {
+          [
+            :uniq.to_proc,
+            ->(rs) { rs.select { |r| dirty_map.dirty?(r) } },
+            ->(rs) { rs.map { |r| dirty_map.reject_unchanged_fields(r) } },
+            ->(rs) { rs.sort_by(&:depth) },
+            ->(rs) {
+              transaction.call {
+                rs.each { |r|
+                  r.if_upsert(&upsert)
+                    .if_delete(&delete)
+                }
+              }
+            },
+            ->(rs) { rs.map { |r| dirty_map.load_if_new(r) } },
+          ].reduce(records) { |agg, operation|
+            operation.call(agg)
          }
-        ]
+        }
+      end
 
-        UpsertedRecord.new(
-          mapping.namespace,
-          identity,
-          record_hash,
-        )
-      }
-    end
+      def record_factory(mapping)
+        ->(record_hash) {
+          identity = Hash[
+            mapping.primary_key.map { |field|
+              [field, record_hash.fetch(field)]
+            }
+          ]
 
-    def upsert_record(datastore, record)
-      row_count = 0
-      unless record.non_identity_attributes.empty?
-        row_count = datastore[record.namespace].
-          where(record.identity).
-          update(record.non_identity_attributes)
+          UpsertedRecord.new(
+            mapping.namespace,
+            identity,
+            record_hash,
+          )
+        }
      end
 
-      if row_count < 1
-        row_count = datastore[record.namespace].insert(record.to_h)
+      def upsert_record(datastore, record)
+        row_count = 0
+        unless record.non_identity_attributes.empty?
+          row_count = datastore[record.namespace].
+            where(record.identity).
+            update(record.non_identity_attributes)
+        end
+
+        if row_count < 1
+          row_count = datastore[record.namespace].insert(record.to_h)
+        end
+
+        row_count
+      rescue Object => e
+        raise UpsertError.new(record.namespace, record.to_h, e)
      end
 
-      row_count
-    end
-
-    def delete_record(datastore, record)
-      datastore[record.namespace].where(record.identity).delete
+      def delete_record(datastore, record)
+        datastore[record.namespace].where(record.identity).delete
+      end
    end
  end
 end
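
To tie the rename together: mappers(mappings:, datastore:) becomes object_store(mappings:, datastore:) and now returns an ObjectStore wrapper rather than a bare Hash. A rough usage sketch, assuming Terrestrial is extended with PublicConveniencies as in the released gem; DB, MAPPINGS and the :users mapping name are placeholders, with the mapping configuration built per the README:

```ruby
require "terrestrial"

# Placeholders -- not part of the diff:
# DB       = Sequel.connect(ENV.fetch("DATABASE_URL"))
# MAPPINGS = Terrestrial.config(DB)... (mapping configuration, see the README)

store = Terrestrial.object_store(mappings: MAPPINGS, datastore: DB)

store[:users]       # per-mapping store, like the old Hash lookup
store.from(:users)  # new reader; delegates to #fetch via the Fetchable mixin
```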