hold 1.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/README.md +573 -0
- data/lib/hold.rb +14 -0
- data/lib/hold/file/hash_repository.rb +59 -0
- data/lib/hold/in_memory.rb +184 -0
- data/lib/hold/interfaces.rb +441 -0
- data/lib/hold/sequel.rb +41 -0
- data/lib/hold/sequel/dataset_lazy_array.rb +33 -0
- data/lib/hold/sequel/identity_set_repository.rb +565 -0
- data/lib/hold/sequel/polymorphic_repository.rb +121 -0
- data/lib/hold/sequel/property_mapper.rb +138 -0
- data/lib/hold/sequel/property_mapper/array.rb +62 -0
- data/lib/hold/sequel/property_mapper/column.rb +42 -0
- data/lib/hold/sequel/property_mapper/created_at.rb +14 -0
- data/lib/hold/sequel/property_mapper/custom_query.rb +34 -0
- data/lib/hold/sequel/property_mapper/custom_query_single_value.rb +36 -0
- data/lib/hold/sequel/property_mapper/foreign_key.rb +96 -0
- data/lib/hold/sequel/property_mapper/hash.rb +60 -0
- data/lib/hold/sequel/property_mapper/identity.rb +41 -0
- data/lib/hold/sequel/property_mapper/many_to_many.rb +158 -0
- data/lib/hold/sequel/property_mapper/one_to_many.rb +199 -0
- data/lib/hold/sequel/property_mapper/transformed_column.rb +38 -0
- data/lib/hold/sequel/property_mapper/updated_at.rb +17 -0
- data/lib/hold/sequel/query.rb +92 -0
- data/lib/hold/sequel/query_array_cell.rb +21 -0
- data/lib/hold/sequel/repository_observer.rb +28 -0
- data/lib/hold/sequel/with_polymorphic_type_column.rb +117 -0
- data/lib/hold/serialized.rb +104 -0
- data/lib/hold/serialized/json_serializer.rb +12 -0
- data/lib/hold/version.rb +3 -0
- metadata +199 -0
data/lib/hold/sequel.rb
ADDED
@@ -0,0 +1,41 @@
|
|
1
|
+
require 'hold/interfaces'
|
2
|
+
require 'hold/sequel/identity_set_repository'
|
3
|
+
require 'hold/sequel/polymorphic_repository'
|
4
|
+
require 'hold/sequel/with_polymorphic_type_column'
|
5
|
+
require 'hold/sequel/query'
|
6
|
+
require 'hold/sequel/dataset_lazy_array'
|
7
|
+
require 'hold/sequel/query_array_cell'
|
8
|
+
require 'hold/sequel/repository_observer'
|
9
|
+
require 'hold/sequel/property_mapper'
|
10
|
+
require 'hold/sequel/property_mapper/column'
|
11
|
+
require 'hold/sequel/property_mapper/identity'
|
12
|
+
require 'hold/sequel/property_mapper/updated_at'
|
13
|
+
require 'hold/sequel/property_mapper/created_at'
|
14
|
+
require 'hold/sequel/property_mapper/transformed_column'
|
15
|
+
require 'hold/sequel/property_mapper/foreign_key'
|
16
|
+
require 'hold/sequel/property_mapper/one_to_many'
|
17
|
+
require 'hold/sequel/property_mapper/many_to_many'
|
18
|
+
require 'hold/sequel/property_mapper/hash'
|
19
|
+
require 'hold/sequel/property_mapper/array'
|
20
|
+
require 'hold/sequel/property_mapper/custom_query'
|
21
|
+
require 'hold/sequel/property_mapper/custom_query_single_value'
|
22
|
+
require 'sequel'
|
23
|
+
|
24
|
+
module Hold
  # Module containing implementations of hold interfaces which persist in a relational database, using the Sequel
  # library, via some configurable mapping.
  module Sequel

    # Runs the given block, translating any ::Sequel::DatabaseError raised into
    # the corresponding Hold exception:
    #
    # * errors whose message mentions duplicate/unique (case-insensitively) are
    #   re-raised as Hold::IdentityConflict wrapping the original error;
    # * any other database error is re-raised as Hold::Error with the original
    #   class and message preserved in the new message.
    #
    # Returns the block's value when no error is raised.
    #
    # Note: uses a method-level rescue rather than a redundant begin/end block.
    def self.translate_exceptions
      yield
    rescue ::Sequel::DatabaseError => e
      case e.message
      when /duplicate|unique/i then raise Hold::IdentityConflict.new(e)
      else raise Hold::Error.new("#{e.class}: #{e.message}")
      end
    end

  end
end
|
@@ -0,0 +1,33 @@
|
|
1
|
+
module Hold::Sequel
  # For returning ThinModels::LazyArray instances based off a Sequel dataset.
  # Rows are only fetched when the array is iterated or sliced; the length is
  # computed with a COUNT against the count dataset (which defaults to the main
  # dataset). An optional block post-processes each batch of fetched rows
  # (e.g. turning raw rows into entities).
  class DatasetLazyArray < ThinModels::LazyArray::MemoizedLength
    def initialize(dataset, count_dataset=nil, &block)
      @dataset = dataset
      @count_dataset = count_dataset || dataset
      @block = block
    end

    # Fetches all rows (with database errors translated), applies the
    # row-mapping block when present, then yields each result in turn.
    def _each(&block)
      fetched = Hold::Sequel.translate_exceptions { @dataset.all }
      fetched = @block.call(fetched) if @block
      fetched.each(&block)
    end

    # COUNT query against the count dataset, with database errors translated.
    def _length
      Hold::Sequel.translate_exceptions { @count_dataset.count }
    end

    # Fetches one page of results via LIMIT/OFFSET; a non-positive limit
    # short-circuits to an empty page without touching the database.
    def slice_from_start_and_length(offset, limit)
      rows =
        if limit <= 0
          []
        else
          Hold::Sequel.translate_exceptions { @dataset.limit(limit, offset).all }
        end
      # we're supposed to return nil if offset > length of the array,
      # as per Array#slice:
      return nil if rows.empty? && offset > 0 && offset > length
      @block ? @block.call(rows) : rows
    end
  end
end
|
@@ -0,0 +1,565 @@
|
|
1
|
+
require 'wirer'

module Hold::Sequel
  # Convenience factory: builds an anonymous IdentitySetRepository subclass for
  # the given model class. When main_table is given it is registered as an
  # id-sequence table keyed by an :id column; the optional block is
  # class_eval'd in the subclass so the table/mapper class-level DSL can be
  # used inside it.
  def self.IdentitySetRepository(model_class, main_table=nil, &block)
    Class.new(IdentitySetRepository) do
      set_model_class model_class
      use_table(main_table, :id_column => :id, :id_sequence => true) if main_table
      class_eval(&block) if block
    end
  end

  # Persists instances of a single model class across one or more tables,
  # delegating per-property persistence to configurable PropertyMapper
  # instances. Identity is via an :id property. Subclass (or use the
  # Hold::Sequel.IdentitySetRepository factory above) and declare tables and
  # property mappings with the class-level DSL.
  class IdentitySetRepository
    include Hold::IdentitySetRepository

    class << self
      # The model class this repository persists; inherited from a repository
      # superclass where not set directly on this class.
      def model_class
        @model_class ||= (superclass.model_class if superclass < IdentitySetRepository)
      end

      # [name, options] pairs declared via use_table, starting from (a copy
      # of) those of any repository superclass.
      def tables
        @tables ||= (superclass < IdentitySetRepository ? superclass.tables.dup : [])
      end

      # [property_name, mapper_class, options, block] tuples declared via
      # map_property, starting from (a copy of) any repository superclass's.
      def property_mapper_args
        @property_mapper_args ||= (superclass < IdentitySetRepository ? superclass.property_mapper_args.dup : [])
      end

      # Wirer integration: the repository class itself acts as a factory for
      # its instances within a Wirer::Container.
      include Wirer::Factory::Interface

      # Constructing an instance requires a Sequel::Database from the container.
      def constructor_dependencies
        {:database => Wirer::Dependency.new_from_args(Sequel::Database)}
      end

      def new_from_dependencies(deps, *p)
        new(deps[:database], *p)
      end

      def provides_class; self; end

      def provides_features
        [[:get_class, model_class]]
      end

      # Optional post-construction dependencies: any RepositoryObservers
      # providing [:observes_repo_for_class, model_class], plus whatever
      # setter dependencies each property mapper class declares (namespaced
      # under :"property_name__dep_name" so inject_dependency can route them).
      def setter_dependencies(instance=nil)
        dependencies = {:observers => Wirer::Dependency.new(
          :module => Hold::Sequel::RepositoryObserver,
          :features => [[:observes_repo_for_class, model_class]],
          :multiple => true,
          :optional => true
        )}
        property_mapper_args.each do |property_name, mapper_class, options, block|
          mapper_class.setter_dependencies_for(options, &block).each do |dep_name, dep_args|
            mapper_dep_name = :"#{property_name}__#{dep_name}"
            dependencies[mapper_dep_name] = Wirer::Dependency.new_from_arg_or_args_list(dep_args)
          end
        end
        dependencies
      end

      # Routes an injected dependency either to add_observer (for :observers)
      # or to the appropriate property mapper's setter, undoing the
      # :"mapper__dep" namespacing applied in setter_dependencies.
      def inject_dependency(instance, dep_name, value)
        if dep_name == :observers
          value.each {|observer| instance.add_observer(observer)}
        else
          mapper_name, dep_name = dep_name.to_s.split('__', 2)
          instance.mapper(mapper_name.to_sym).send("#{dep_name}=", value)
        end
      end

      private

      def set_model_class(model_class)
        @model_class = model_class
      end

      # Declares a table this repo persists to. Options:
      #   :id_column   - identity column on this table (default :id)
      #   :id_sequence - true if this table's insert allocates the entity id
      #   :default     - true to make this the main table
      def use_table(name, options={})
        options[:id_column] ||= :id
        tables << [name.to_sym, options.freeze]
      end

      # Declares a property to be persisted via the given PropertyMapper
      # subclass; actual mapper instances are built per-repo in #initialize.
      def map_property(property_name, mapper_class, options={}, &block)
        raise unless mapper_class <= PropertyMapper
        property_mapper_args << [property_name, mapper_class, options, block]
      end

      # Some convenience mapper DSL methods for each of the mapper subclasses:
      { :column => 'Column', :foreign_key => 'ForeignKey',
        :one_to_many => 'OneToMany', :many_to_many => 'ManyToMany',
        :created_at => 'CreatedAt', :updated_at => 'UpdatedAt',
        :hash_property => 'Hash', :array_property => 'Array',
        :transformed_column => 'TransformedColumn',
        :custom_query => 'CustomQuery', :custom_query_single_value => 'CustomQuerySingleValue'
      }.each do |name, mapper_class|
        class_eval <<-EOS, __FILE__, __LINE__+1
          def map_#{name}(property_name, options={}, &block)
            map_property(property_name, PropertyMapper::#{mapper_class}, options, &block)
          end
        EOS
      end
    end

    def model_class; self.class.model_class; end

    attr_reader :db, :main_table, :property_mappers, :identity_property,
                :identity_mapper, :id_sequence_table, :default_properties

    # db is a Sequel::Database. Materialises the class-level table and mapper
    # declarations into per-instance state; only subclasses may be constructed.
    def initialize(db)
      raise "abstract superclass" if instance_of?(IdentitySetRepository)
      @db = db

      @tables = []
      @tables_id_columns = {}
      self.class.tables.each do |name,options|
        @tables << name
        @tables_id_columns[name] = options[:id_column]
        @id_sequence_table = name if options[:id_sequence]
        @main_table = name if options[:default]
      end
      # no table was flagged :default - fall back to the first declared table
      @main_table ||= @tables.first

      @property_mappers = {}
      @default_properties = {}

      # map the identity_property
      @identity_property = :id # todo make this configurable
      @identity_mapper = @property_mappers[@identity_property] = PropertyMapper::Identity.new(self, @identity_property)

      self.class.property_mapper_args.each do |property_name, mapper_class, options, block|
        @property_mappers[property_name] = mapper_class.new(self, property_name, options, &block)
        @default_properties[property_name] = true if mapper_class <= PropertyMapper::Column
        # for foreign key properties, by default we only load the ID (which is already present on the parent result row):
        @default_properties[property_name] = JUST_ID if mapper_class <= PropertyMapper::ForeignKey
      end

      @property_mappers.freeze
    end

    # Sentinel property list meaning "only the identity is wanted"; enables
    # constructing entities straight from ids without hitting the database.
    JUST_ID = [:id].freeze

    def inspect
      "<##{self.class}: #{model_class}>"
    end

    # True when one of the declared tables allocates ids on insert.
    def allocates_ids?
      !!@id_sequence_table
    end

    # is this repository capable of loading instances of the given model class?
    # repositories which support polymorhpic loading may override this.
    def can_get_class?(model_class)
      model_class == self.model_class
    end

    # is this repository capable of storing instances of the given model class?
    # repositories which support polymorhpic writes may override this.
    def can_set_class?(model_class)
      model_class == self.model_class
    end

    # see Hold::Sequel::RepositoryObserver for the interface you need to expose to be an observer here.
    #
    # If you're using Wirer to construct the repository, a better way to hook the repo up with observers is to
    # add RepositoryObservers to the Wirer::Container and have them provide feature [:observes_repo_for_class, model_class].
    # They'll then get picked up by our multiple setter_dependency and added as an observer just after construction.
    def add_observer(observer)
      @observers ||= []
      @observers << observer
    end

    # convenience to get a particular property mapper of this repo:
    def mapper(name)
      raise ArgumentError unless name.is_a?(Symbol)
      @property_mappers[name] or raise "#{self.class}: no such property mapper #{name.inspect}"
    end

    # if you want to avoid the need to manually pass in target_repo parameters for each property
    # mapped by a foreign key mapper etc - this will have the mappers go find the dependency themselves.
    def get_repo_dependencies_from(repo_set)
      @property_mappers.each_value {|mapper| mapper.get_repo_dependencies_from(repo_set)}
    end

    # The identity column configured for the given table (see use_table).
    def table_id_column(table)
      @tables_id_columns[table]
    end

    private

    # mini DSL for use in mapper_config block passed to constructor, which is instance_evalled:

    def map_property(property_name, mapper_class=PropertyMapper, *p, &b)
      raise unless mapper_class <= PropertyMapper
      @property_mappers[property_name] = mapper_class.new(self, property_name, *p, &b)
    end

    # Some convenience mapper DSL methods for each of the mapper subclasses
    # (:transformed_column added here for consistency with the class-level DSL
    # above, which already included it):
    { :column => 'Column', :foreign_key => 'ForeignKey',
      :one_to_many => 'OneToMany', :many_to_many => 'ManyToMany',
      :created_at => 'CreatedAt', :updated_at => 'UpdatedAt',
      :hash_property => 'Hash', :array_property => 'Array',
      :transformed_column => 'TransformedColumn',
      :custom_query => 'CustomQuery', :custom_query_single_value => 'CustomQuerySingleValue'
    }.each do |name, mapper_class|
      class_eval <<-EOS, __FILE__, __LINE__+1
        def map_#{name}(property_name, options={}, &block)
          map_property(property_name, PropertyMapper::#{mapper_class}, options, &block)
        end
      EOS
    end

    # Instance-level counterpart of the class-level use_table, for use in an
    # instance_evalled config block.
    def use_table(name, options={})
      @tables << name
      @tables_id_columns[name] = options[:id_column] || :id
      @id_sequence_table = name if options[:id_sequence]
    end

    # Some helpers

    def translate_exceptions(&b)
      Hold::Sequel.translate_exceptions(&b)
    end

    # Builds the row hash to INSERT into the given table for an entity, by
    # letting every property mapper contribute its columns. id, when given,
    # is an id already allocated by the id-sequence table's insert.
    def insert_row_for_entity(entity, table, id=nil)
      row = {}
      @property_mappers.each_value do |mapper|
        mapper.build_insert_row(entity, table, row, id)
      end
      row
    end

    # Builds the row hash to UPDATE on the given table; mappers only add
    # columns for properties present on update_entity, so this may be empty.
    def update_row_for_entity(id, update_entity, table)
      row = {}
      @property_mappers.each_value do |mapper|
        mapper.build_update_row(update_entity, table, row)
      end
      row
    end

    public

    # Builds a model instance from a property hash; any property not supplied
    # is lazily loaded via get_property when first accessed.
    def construct_entity(property_hash, row=nil)
      # new_skipping_checks is supported by ThinModels::Struct(::Typed) and skips any type checks or
      # attribute name checks on the supplied attributes.
      @model_class_new_method ||= model_class.respond_to?(:new_skipping_checks) ? :new_skipping_checks : :new
      model_class.send(@model_class_new_method, property_hash) do |model, property|
        get_property(model, property)
      end
    end

    # Builds a lazily-loaded model instance carrying only its identity.
    def construct_entity_from_id(id)
      model_class.new(@identity_property => id) do |model, property|
        get_property(model, property)
      end
    end

    # this determines if an optimisation can be done whereby if only the ID property is
    # requested to be loaded, the object(s) can be constructed directly from their ids
    # without needing to be fetched from the database.
    def can_construct_from_id_alone?(properties)
      properties == JUST_ID
    end

    # Builds a dataset selecting from the first table joined (on matching
    # identity columns) to each of the remaining tables.
    def dataset_to_select_tables(*tables)
      main_table, *other_tables = tables
      main_id = @identity_mapper.qualified_column_name(main_table)
      other_tables.inject(@db[main_table]) do |dataset, table|
        dataset.join(table, @identity_mapper.qualified_column_name(table) => main_id)
      end
    end

    # For a list of property names, collects (via each property's mapper) the
    # columns, select aliases and tables needed to SELECT them. Returns
    # [columns_by_property, aliased_columns, tables].
    def columns_aliases_and_tables_for_properties(properties)
      columns_by_property = {}; aliased_columns = []; tables = []
      properties.each do |p|
        next if p == @identity_property # this gets special handling
        cs, as, ts = mapper(p).columns_aliases_and_tables_for_select
        columns_by_property[p] = cs
        aliased_columns.concat(as)
        tables.concat(ts)
      end
      tables.unshift(@main_table) if tables.delete(@main_table)

      # the identity mapper gets called last, so that it can get a hint about what
      # tables are already required for the other columns. (seeing as how an identity column
      # needs to be present on every table used for a given repo, it should never need to
      # add an extra table just in order to select the ID)
      id_cols, id_aliases, id_tables = @identity_mapper.columns_aliases_and_tables_for_select(tables.first || @main_table)
      columns_by_property[@identity_property] = id_cols
      aliased_columns.concat(id_aliases)
      tables.concat(id_tables)
      aliased_columns.uniq!; tables.uniq!
      return columns_by_property, aliased_columns, tables
    end

    def transaction(*p, &b)
      @db.transaction(*p, &b)
    end

    # This is the main mechanism to retrieve stuff from the repo via custom
    # queries.
    #
    # properties: nil or true selects @default_properties; otherwise the
    # given property spec is used. The block can customise the dataset.
    def query(properties=nil, &b)
      properties = @default_properties if properties == true || properties.nil?
      Query.new(self, properties, &b)
    end

    # Can take a block which may add extra conditions, joins, order etc onto
    # the relevant query.
    def get_many_with_dataset(options={}, &b)
      query(options[:properties], &b).to_a(options[:lazy])
    end

    def get_all(options={})
      query(options[:properties]).to_a(options[:lazy])
    end

    # like get_many_with_dataset but just gets a single row, or nil if not
    # found. adds limit(1) to the dataset for you.
    def get_with_dataset(options={}, &b)
      query(options[:properties], &b).single_result
    end

    # Loads a single property of the given entity (plus any extra properties
    # named in options[:properties]); returns nil when the entity is absent.
    def get_property(entity, property, options={})
      unless property.is_a? Symbol
        fail ArgumentError, 'get_property must supply a symbol'
      end
      begin
        result = query(property => options[:properties]) do |dataset, property_columns|
          filter = @identity_mapper.make_filter(entity.id, property_columns[@identity_property])
          dataset.filter(filter)
        end.single_result
      rescue TypeError
        # catches test errors caught by []ing a string post 1.8
        raise ArgumentError, 'get_property caught a type error, check options'
      end
      result && result[property]
    end

    def get_by_id(id, options={})
      properties = options[:properties]
      # fast path: no database round-trip needed when only the id is wanted
      return construct_entity_from_id(id) if can_construct_from_id_alone?(properties)

      query(properties) do |dataset, property_columns|
        filter = @identity_mapper.make_filter(id, property_columns[@identity_property])
        dataset.filter(filter)
      end.single_result
    end

    # multi-get via a single SELECT... WHERE id IN (1,2,3,4)
    # Preserves the order of ids; missing entities come back as nil.
    def get_many_by_ids(ids, options={})
      properties = options[:properties]
      return ids.map {|id| construct_entity_from_id(id)} if can_construct_from_id_alone?(properties)

      results_by_id = {}
      results = query(options[:properties]) do |ds,mapping|
        id_filter = @identity_mapper.make_multi_filter(ids.uniq, mapping[@identity_property])
        ds.filter(id_filter)
      end.to_a(options[:lazy])
      results.each {|object| results_by_id[object.id] = object}
      ids.map {|id| results_by_id[id]}
    end

    # Gets all entities whose +property+ matches +value+.
    # NOTE(review): a previous version built a properties_to_fetch hash here
    # (@default_properties plus the filter property) but never passed it to
    # query; that dead code has been removed. options[:properties] continues
    # to determine which properties are fetched, exactly as before.
    def get_many_by_property(property, value, options={})
      query(options[:properties]) do |dataset, property_columns|
        filter = mapper(property).make_filter(value, property_columns[property])
        dataset.filter(filter)
      end.to_a(options[:lazy])
    end

    # Gets the single entity whose +property+ matches +value+, or nil.
    # NOTE(review): unused properties_to_fetch dead code removed here too;
    # behavior is unchanged.
    def get_by_property(property, value, options={})
      query(options[:properties]) do |dataset, property_columns|
        filter = mapper(property).make_filter(value, property_columns[property])
        dataset.filter(filter)
      end.single_result
    end

    # Existence check via SELECT 1 ... LIMIT 1 on the main table.
    def contains_id?(id)
      dataset = dataset_to_select_tables(@main_table)
      id_filter = @identity_mapper.make_filter(id, [@tables_id_columns[@main_table]])
      dataset.filter(id_filter).select(1).limit(1).single_value ? true : false
    end

    def contains?(entity)
      # `and` is deliberate: short-circuits to a falsy value when the entity
      # has no id, otherwise defers to contains_id?
      id = entity.id and contains_id?(id)
    end


    # CUD

    # Calls one of store_new (insert) or update as appropriate.
    #
    # Where the repo allocates_ids, you can supply an entity without an ID and store_new will be called.
    #
    # If the entity has an ID, it will check whether it's currently contained in the repository
    # before calling store_new or update as appropriate.
    def store(entity)
      id = entity.id
      if id
        transaction do
          if contains_id?(id)
            update(entity)
          else
            store_new(entity)
          end
        end
      else
        if allocates_ids?
          store_new(entity)
        else
          raise Hold::MissingIdentity
        end
      end
      entity
    end

    # inserts rows into all relevant tables for the given entity.
    # ensures that where one of the tables is used for an id sequence,
    # that this row is inserted first and the resulting insert_id
    # obtained is passed when building subsequent rows.
    #
    # note: order of inserts is important here if you have foreign key dependencies between
    # the ID columns of the different tables; if so you'll need to order your use_table
    # declarations accordingly.
    def store_new(entity)
      transaction do
        rows = {}; insert_id = nil
        pre_insert(entity)
        @property_mappers.each_value {|mapper| mapper.pre_insert(entity)}
        if @id_sequence_table
          row = insert_row_for_entity(entity, @id_sequence_table)
          insert_id = translate_exceptions {@db[@id_sequence_table].insert(row)}
          rows[@id_sequence_table] = row
        end
        # note: order is important here if you have foreign key dependencies, order
        # your use_table declarations appropriately:
        @tables.each do |table|
          next if table == @id_sequence_table # done that already
          row = insert_row_for_entity(entity, table, insert_id)
          translate_exceptions {@db[table].insert(row)}
          rows[table] = row
        end
        # identity_mapper should be called first, so that other mappers have the new ID
        # available on the entity when called.
        @identity_mapper.post_insert(entity, rows, insert_id)
        @property_mappers.each_value do |mapper|
          next if mapper == @identity_mapper
          mapper.post_insert(entity, rows, insert_id)
        end
        post_insert(entity, rows, insert_id)
        entity
      end
    end

    # Remember to call super if you override this.
    # If you do any extra inserting in an overridden pre_insert, call super beforehand
    def pre_insert(entity)
      @observers.each {|observer| observer.pre_insert(self, entity)} if @observers
    end

    # Remember to call super if you override this.
    # If you do any extra inserting in an overridden post_insert, call super afterwards
    def post_insert(entity, rows, insert_id)
      @observers.each {|observer| observer.post_insert(self, entity, rows, insert_id)} if @observers
    end

    # Updates the stored entity identified by entity.id with the properties
    # present on update_entity (which defaults to entity itself). Tables for
    # which no columns changed are skipped.
    def update(entity, update_entity=entity)
      id = entity.id or raise Hold::MissingIdentity
      transaction do
        rows = {}; data_from_mappers = {}
        pre_update(entity, update_entity)
        @property_mappers.each do |name, mapper|
          data_from_mappers[name] = mapper.pre_update(entity, update_entity)
        end
        @tables.each do |table|
          row = update_row_for_entity(id, update_entity, table)
          unless row.empty?
            id_filter = @identity_mapper.make_filter(id, [@tables_id_columns[table]])
            translate_exceptions {@db[table].filter(id_filter).update(row)}
          end
          rows[table] = row
        end
        @property_mappers.each do |name, mapper|
          mapper.post_update(entity, update_entity, rows, data_from_mappers[name])
        end
        post_update(entity, update_entity, rows)
        # keep the in-memory entity in sync where the model supports it:
        entity.merge!(update_entity) if entity.respond_to?(:merge!)
        entity
      end
    end

    # Remember to call super if you override this.
    # If you do any extra updating in an overridden pre_update, call super beforehand
    def pre_update(entity, update_entity)
      @observers.each {|observer| observer.pre_update(self, entity, update_entity)} if @observers
    end

    # Remember to call super if you override this.
    # If you do any extra updating in an overridden post_update, call super afterwards
    def post_update(entity, update_entity, rows)
      @observers.each {|observer| observer.post_update(self, entity, update_entity, rows)} if @observers
    end

    def update_by_id(id, update_entity)
      entity = construct_entity(@identity_property => id)
      update(entity, update_entity)
    end

    # deletes rows for this id in all tables of the repo.
    #
    # note: order of
    # deletes is important here if you have foreign key dependencies between
    # the ID columns of the different tables; this goes in the reverse order
    # to that used for inserts by store_new, which in turn is determined by the
    # order of your use_table declarations
    def delete(entity)
      id = entity.id or raise Hold::MissingIdentity
      transaction do
        pre_delete(entity)
        @property_mappers.each do |name, mapper|
          mapper.pre_delete(entity)
        end
        @tables.reverse_each do |table|
          id_filter = @identity_mapper.make_filter(id, [@tables_id_columns[table]])
          @db[table].filter(id_filter).delete
        end
        @property_mappers.each do |name, mapper|
          mapper.post_delete(entity)
        end
        post_delete(entity)
      end
    end

    # Remember to call super if you override this.
    # If you do any extra deleting in an overridden pre_delete, call super beforehand
    def pre_delete(entity)
      @observers.each {|observer| observer.pre_delete(self, entity)} if @observers
    end

    # Remember to call super if you override this.
    # If you do any extra deleting in an overridden post_delete, call super afterwards
    def post_delete(entity)
      @observers.each {|observer| observer.post_delete(self, entity)} if @observers
    end

    def delete_id(id)
      entity = construct_entity(@identity_property => id)
      delete(entity)
    end

    # ArrayCells for top-level collections

    def array_cell_for_dataset(&b)
      QueryArrayCell.new(self, &b)
    end

    # COUNT over the main table; an optional block may refine the dataset
    # before counting.
    def count_dataset
      dataset = dataset_to_select_tables(@main_table)
      dataset = yield dataset if block_given?
      dataset.count
    end
  end
end
|