hold 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +573 -0
- data/lib/hold.rb +14 -0
- data/lib/hold/file/hash_repository.rb +59 -0
- data/lib/hold/in_memory.rb +184 -0
- data/lib/hold/interfaces.rb +441 -0
- data/lib/hold/sequel.rb +41 -0
- data/lib/hold/sequel/dataset_lazy_array.rb +33 -0
- data/lib/hold/sequel/identity_set_repository.rb +565 -0
- data/lib/hold/sequel/polymorphic_repository.rb +121 -0
- data/lib/hold/sequel/property_mapper.rb +138 -0
- data/lib/hold/sequel/property_mapper/array.rb +62 -0
- data/lib/hold/sequel/property_mapper/column.rb +42 -0
- data/lib/hold/sequel/property_mapper/created_at.rb +14 -0
- data/lib/hold/sequel/property_mapper/custom_query.rb +34 -0
- data/lib/hold/sequel/property_mapper/custom_query_single_value.rb +36 -0
- data/lib/hold/sequel/property_mapper/foreign_key.rb +96 -0
- data/lib/hold/sequel/property_mapper/hash.rb +60 -0
- data/lib/hold/sequel/property_mapper/identity.rb +41 -0
- data/lib/hold/sequel/property_mapper/many_to_many.rb +158 -0
- data/lib/hold/sequel/property_mapper/one_to_many.rb +199 -0
- data/lib/hold/sequel/property_mapper/transformed_column.rb +38 -0
- data/lib/hold/sequel/property_mapper/updated_at.rb +17 -0
- data/lib/hold/sequel/query.rb +92 -0
- data/lib/hold/sequel/query_array_cell.rb +21 -0
- data/lib/hold/sequel/repository_observer.rb +28 -0
- data/lib/hold/sequel/with_polymorphic_type_column.rb +117 -0
- data/lib/hold/serialized.rb +104 -0
- data/lib/hold/serialized/json_serializer.rb +12 -0
- data/lib/hold/version.rb +3 -0
- metadata +199 -0
data/lib/hold/sequel/polymorphic_repository.rb
@@ -0,0 +1,121 @@
+module Hold::Sequel
+  class PolymorphicRepository
+    include Hold::IdentitySetRepository
+
+    attr_reader :db, :table, :type_column, :id_column, :type_to_model_class_mapping,
+                :repos_for_model_classes, :model_class_to_type_mapping
+
+    def initialize(db, options={})
+      @db = db
+      @table = options[:table] || :base
+      @type_column = options[:type_column] || :type
+      @id_column = options[:id_column] || :id
+      @type_to_model_class_mapping = options[:mapping]
+      @model_class_to_type_mapping = @type_to_model_class_mapping.invert
+
+      @repos_for_model_classes = options[:repos] || {}
+      @dataset = @db[@table].select(Sequel.as(@type_column, :_type), Sequel.as(@id_column, :_id))
+    end
+
+    def can_get_class?(model_class)
+      @model_class_to_type_mapping.has_key?(model_class)
+    end
+
+    def can_set_class?(model_class)
+      @model_class_to_type_mapping.has_key?(model_class)
+    end
+
+    def get_repo_dependencies_from(repo_set)
+      @type_to_model_class_mapping.each do |type, model_class|
+        @repos_for_model_classes[model_class] ||= repo_set.repo_for_model_class(model_class)
+      end
+    end
+
+    def type_to_repo_mapping
+      @type_to_repo_mapping ||= begin
+        result = {}
+        @type_to_model_class_mapping.each {|t, m| result[t] = @repos_for_model_classes[m]}
+        result
+      end
+    end
+
+    def construct_entity(property_hash, row=nil)
+      type = property_hash[:_type] or raise "missing _type in result row"
+      @type_to_model_class_mapping[type].new(property_hash)
+    end
+
+    def transaction(*p, &b)
+      @db.transaction(*p, &b)
+    end
+
+    # - Takes multiple result rows with type and id columns
+    # - Groups the IDs by type and does a separate get_many_by_ids query on the relevant repo
+    # - Combines the results from the separate queries, putting them into the order of the IDs from
+    #   the original rows (or in the order of the ids given, where they are given)
+    def load_from_rows(rows, options={}, ids=nil)
+      ids ||= rows.map {|row| row[:_id]}
+      ids_by_type = Hash.new {|h, k| h[k] = []}
+      rows.each {|row| ids_by_type[row[:_type]] << row[:_id]}
+      results_by_id = {}
+      ids_by_type.each do |type, type_ids|
+        repo = type_to_repo_mapping[type] or raise "PolymorphicRepository: no repo found for type value #{type}"
+        repo.get_many_by_ids(type_ids, options).each_with_index do |result, index|
+          results_by_id[type_ids[index]] = result
+        end
+      end
+      results_by_id.values_at(*ids)
+    end
+
+    def load_from_row(row, options={})
+      repo = type_to_repo_mapping[row[:_type]] or raise "PolymorphicRepository: no repo found for type value #{row[:_type]}"
+      repo.get_by_id(row[:_id], options)
+    end
+
+    def get_with_dataset(options={}, &b)
+      dataset = @dataset
+      dataset = yield @dataset if block_given?
+      row = dataset.limit(1).first and load_from_row(row, options)
+    end
+
+    def get_by_id(id, options={})
+      get_with_dataset(options) {|ds| ds.filter(@id_column => id)}
+    end
+
+    def get_many_by_ids(ids, options={})
+      rows = @dataset.filter(@id_column => ids).all
+      load_from_rows(rows, options, ids)
+    end
+
+    def contains_id?(id)
+      @dataset.filter(@id_column => id).select(1).limit(1).single_value ? true : false
+    end
+
+    def store(object)
+      repo = @repos_for_model_classes[object.class] or raise Error
+      repo.store(object)
+    end
+
+    def store_new(object)
+      repo = @repos_for_model_classes[object.class] or raise Error
+      repo.store_new(object)
+    end
+
+    def update(entity, update_entity)
+      repo = @repos_for_model_classes[entity.class] or raise Error
+      repo.update(entity, update_entity)
+    end
+
+    def update_by_id(id, update_entity)
+      repo = @repos_for_model_classes[update_entity.class] or raise Error
+      repo.update_by_id(id, update_entity)
+    end
+
+    def delete(object)
+      repo = @repos_for_model_classes[object.class] or raise Error
+      repo.delete(object)
+    end
+  end
+end
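To make the polymorphic dispatch above concrete, here is a rough usage sketch. It is not part of the package: the Cat/Dog model classes, the cat_repo/dog_repo per-class repositories and the :animals table are hypothetical stand-ins, and the per-class repos are assumed to implement the IdentitySetRepository interface from data/lib/hold/interfaces.rb.

# Hypothetical setup: an :animals table carrying :type and :id columns, plus one
# concrete repository per model class.
db = Sequel.connect('sqlite://example.db')

animals = Hold::Sequel::PolymorphicRepository.new(db,
  :table       => :animals,
  :type_column => :type,
  :id_column   => :id,
  :mapping     => {'cat' => Cat, 'dog' => Dog},       # type value => model class
  :repos       => {Cat => cat_repo, Dog => dog_repo}  # model class => concrete repo
)

# get_by_id reads the row's type value, then delegates to the matching repo:
pet = animals.get_by_id(42)

# get_many_by_ids groups the IDs by type, queries each repo once, and returns
# the results in the order the IDs were given:
pets = animals.get_many_by_ids([3, 1, 2])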
data/lib/hold/sequel/property_mapper.rb
@@ -0,0 +1,138 @@
+module Hold::Sequel
+  # Abstract superclass.
+  # The responsibility of a PropertyMapper is to map data for a particular property of a model class
+  # between instances of that model class and the database.
+  class PropertyMapper
+    def self.setter_dependencies_for(options={}); {}; end
+
+    attr_reader :repository, :property_name, :property
+
+    # If you pass a block, it will be instance_evalled, allowing you to create one-off custom property
+    # mappers by overriding bits of this implementation in the block.
+    def initialize(repo, property_name, options=nil, &block)
+      @repository = repo
+      @property_name = property_name
+      instance_eval(&block) if block
+    end
+
+    # columns: column names to include in a SELECT in order to select this property. These should be
+    # qualified with the relevant table name but not aliased.
+    #
+    # aliases: the above columns, aliased for use in the SELECT clause. Each alias should be something
+    # unique which the mapper can later use to retrieve the value from a result row.
+    #
+    # tables: any tables which need to be present in the FROM clause in order to select the columns.
+    # The relevant joins will be constructed by the parent repo.
+    #
+    # A 'preferred_table' hint may be passed by the repo to indicate that it'd prefer you load the
+    # column off a particular table; at present this is only used by the Identity mapper.
+    def columns_aliases_and_tables_for_select(preferred_table=nil)
+      return [], [], []
+    end
+
+    # Obtains the value of this property from a Sequel result row and/or identity value.
+    #
+    # Where the mapper has columns_aliases_and_tables_for_select, it will be passed a result row object
+    # here which contains the SQL values for these columns (amongst others, potentially).
+    #
+    # Where the identity value is available it will also be passed.
+    #
+    # One or other of id, row must always be passed.
+    def load_value(row=nil, id=nil, properties=nil)
+    end
+
+    # Called inside the INSERT transaction for insertion of the given entity.
+    #
+    # This is called first thing before insert rows are built (via build_insert_row) for each table of
+    # the repo.
+    def pre_insert(entity)
+    end
+
+    # Called inside the UPDATE transaction for update of the given entity.
+    #
+    # This is called first thing before update rows are built (via build_update_row) for each table of
+    # the repo.
+    #
+    # Anything returned from pre_update will be passed to post_update's data_from_pre_update arg if the
+    # update succeeds.
+    def pre_update(entity, update_entity)
+    end
+
+    # Called inside the DELETE transaction for a given entity.
+    #
+    # This is called first thing before rows are deleted for each table of the repo.
+    def pre_delete(entity)
+    end
+
+    # Called inside the DELETE transaction for a given entity.
+    #
+    # This is called last thing after rows are deleted for each table of the repo.
+    def post_delete(entity)
+    end
+
+    # Gets this property off the entity, and sets associated keys on a Sequel row hash for insertion
+    # into the given table. May be passed an ID if a last_insert_id value for the entity was previously
+    # obtained from an ID sequence on insertion into another table as part of the same combined entity
+    # store_new.
+    #
+    # This is called inside the transaction which wraps the insert, so it is effectively your pre-insert
+    # hook and you can safely do other things inside it in the knowledge they'll be rolled back in the
+    # event of a subsequent problem.
+    def build_insert_row(entity, table, row, id=nil)
+    end
+
+    # Gets this property off the update_entity, and sets associated keys on a Sequel row hash for update
+    # of the given table for the given entity.
+    #
+    # As with build_insert_row, this is done inside the update transaction; it's effectively your
+    # pre-update hook.
+    def build_update_row(update_entity, table, row)
+    end
+
+    # Used to make a Sequel filter condition setting the relevant columns equal to values equivalent
+    # to the given property value. May raise if the mapper doesn't support this.
+    def make_filter(value, columns_mapped_to)
+      raise Hold::UnsupportedOperation
+    end
+
+    # As for make_filter, but takes multiple possible values and does a column IN (1,2,3,4) type thing.
+    def make_multi_filter(values, columns_mapped_to)
+      raise Hold::UnsupportedOperation
+    end
+
+    # Like load_value, but works in a batched fashion, allowing a batched loading strategy to be used
+    # for associated objects.
+    # Takes a block and yields the loaded values to it one at a time, together with their index.
+    def load_values(rows=nil, ids=nil, properties=nil)
+      if rows
+        rows.each_with_index {|row, i| yield load_value(row, ids && ids[i], properties), i}
+      else
+        ids.each_with_index {|id, i| yield load_value(nil, id, properties), i}
+      end
+    end
+
+    # Called after rows built via build_insert_row have successfully been used in an INSERT for the
+    # entity passed. Should update the entity property, where appropriate, with any default values
+    # which were supplied by the repository (via default_for) on insert, and should do any additional
+    # work needed to save any values which are not mapped to columns on one of the repo's own :tables.
+    #
+    # Is also passed the last_insert_id resulting from any insert, to help fill out any autoincrement
+    # primary key column.
+    #
+    # Is executed inside the same transaction as the INSERT.
+    def post_insert(entity, rows, last_insert_id=nil)
+    end
+
+    # Called after rows built via build_update_row have successfully been used in an UPDATE for the id
+    # and update_entity passed. Should update the entity property, where appropriate, with any default
+    # values which were supplied by the repository (via default_for) on update, and should do any
+    # additional work needed to save any values which are not mapped to columns on one of the repo's
+    # own :tables.
+    #
+    # Is executed inside the same transaction as the UPDATE.
+    def post_update(entity, update_entity, rows, data_from_pre_update)
+    end
+  end
+end
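The contract documented above is easiest to see in a concrete subclass. The following is a minimal sketch and not from the package: a hypothetical mapper that persists a Symbol property in a string column, using only the hooks described in the comments. Like PropertyMapper::Column below, it assumes the repo responds to main_table and that entities support [] and has_key?.

# Hypothetical one-off mapper: stores a Symbol property as a string column on
# the repo's main table.
class SymbolColumnMapper < Hold::Sequel::PropertyMapper
  def initialize(repo, property_name, options={})
    super(repo, property_name)
    @table            = options[:table] || repo.main_table
    @column_name      = (options[:column_name] || property_name).to_sym
    @column_alias     = :"#{@table}_#{@column_name}"
    @column_qualified = Sequel::SQL::QualifiedIdentifier.new(@table, @column_name)
  end

  # Tell the parent repo which columns, aliases and tables this property needs in its SELECT.
  def columns_aliases_and_tables_for_select(preferred_table=nil)
    return [@column_qualified],
           [Sequel::SQL::AliasedExpression.new(@column_qualified, @column_alias)],
           [@table]
  end

  # Read the aliased SQL value back off the result row and convert it.
  def load_value(row=nil, id=nil, properties=nil)
    value = row && row[@column_alias]
    value && value.to_sym
  end

  # Write the property into the row hash being built for our table.
  def build_insert_row(entity, table, row, id=nil)
    row[@column_name] = entity[@property_name].to_s if table == @table && entity.has_key?(@property_name)
  end
  alias :build_update_row :build_insert_row
end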
data/lib/hold/sequel/property_mapper/array.rb
@@ -0,0 +1,62 @@
+module Hold::Sequel
+  # A property which is an array of primitive values. Persisted 'all in one go' in a separate table.
+  class PropertyMapper::Array < PropertyMapper
+    attr_reader :table, :foreign_key, :value_column
+
+    def initialize(repo, property_name, options)
+      super(repo, property_name)
+
+      @table = options[:table] || :"#{repo.main_table}_#{property_name}"
+      @foreign_key = options[:foreign_key] || :"#{repo.main_table.to_s.singularize}_id"
+      @value_column = options[:value_column] || :value
+      @order_column = options[:order_column]
+
+      @dataset = @repository.db[@table]
+      @select_v = @repository.db[@table].select(Sequel.as(@value_column, :value))
+      @select_v = @select_v.order(@order_column) if @order_column
+      @select_all = @repository.db[@table].select(
+        Sequel.as(@value_column, :value),
+        Sequel.as(@foreign_key, :id))
+      @select_all = @select_all.order(@order_column) if @order_column
+    end
+
+    def load_value(row=nil, id=nil, properties=nil)
+      @select_v.filter(@foreign_key => id).map {|row| row[:value]}
+    end
+
+    def load_values(rows=nil, ids=nil, properties=nil, &block)
+      results = Hash.new {|h, k| h[k] = []}
+      @select_all.filter(@foreign_key => ids).each do |row|
+        results[row[:id]] << row[:value]
+      end
+      results.values_at(*ids).each_with_index(&block)
+    end
+
+    def pre_delete(entity)
+      @dataset.filter(@foreign_key => entity.id).delete
+    end
+
+    def post_insert(entity, rows, last_insert_id=nil)
+      array = entity[@property_name] or return
+      insert_rows = []
+      array.each_with_index do |v, i|
+        row = {@foreign_key => entity.id || last_insert_id, @value_column => v}
+        row[@order_column] = i if @order_column
+        insert_rows << row
+      end
+      @dataset.multi_insert(insert_rows)
+    end
+
+    def post_update(entity, update_entity, rows, data_from_pre_update)
+      array = update_entity[@property_name] or return
+      @dataset.filter(@foreign_key => entity.id).delete
+      insert_rows = []
+      array.each_with_index do |v, i|
+        row = {@foreign_key => entity.id, @value_column => v}
+        row[@order_column] = i if @order_column
+        insert_rows << row
+      end
+      @dataset.multi_insert(insert_rows)
+    end
+  end
+end
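As a rough illustration of the side table this mapper works against (not from the package: post_repo, the :posts main table and the :tags property are hypothetical), the defaults computed in initialize amount to a posts_tags(post_id, value[, position]) table, and the mapper can be constructed directly like this:

# Hypothetical construction; every option shown is optional, and each comment
# gives the default that initialize would compute from the repo's main table.
tags_mapper = Hold::Sequel::PropertyMapper::Array.new(post_repo, :tags,
  :table        => :posts_tags,  # default: :"#{main_table}_#{property_name}"
  :foreign_key  => :post_id,     # default: :"#{main_table.singularize}_id"
  :value_column => :value,       # default: :value
  :order_column => :position     # optional; preserves the array ordering
)

# load_value issues one SELECT against posts_tags filtered by post_id, ordered
# by :position when an order column is configured:
tags = tags_mapper.load_value(nil, some_post_id)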
data/lib/hold/sequel/property_mapper/column.rb
@@ -0,0 +1,42 @@
+module Hold::Sequel
+  # Simplest case: maps the property directly to a column on the corresponding table.
+  class PropertyMapper::Column < PropertyMapper
+    attr_reader :column_name, :table, :column_alias, :column_qualified, :columns_aliases_and_tables_for_select
+
+    def initialize(repo, property_name, options)
+      super(repo, property_name)
+
+      @table = options[:table] || @repository.main_table
+
+      @column_name = (options[:column_name] || property_name).to_sym
+      @column_alias = :"#{@table}_#{@column_name}"
+      @column_qualified = Sequel::SQL::QualifiedIdentifier.new(@table, @column_name)
+      @columns_aliases_and_tables_for_select = [
+        [@column_qualified],
+        [Sequel::SQL::AliasedExpression.new(@column_qualified, @column_alias)],
+        [@table]
+      ]
+    end
+
+    def load_value(row, id=nil, version=nil)
+      row[@column_alias]
+    end
+
+    def build_insert_row(entity, table, row, id=nil)
+      row[@column_name] = entity[@property_name] if @table == table && entity.has_key?(@property_name)
+    end
+
+    alias :build_update_row :build_insert_row
+
+    # for now ignoring the columns_mapped_to, since Identity mapper is the only one
+    # for which this matters at present
+    def make_filter(value, columns_mapped_to=nil)
+      {@column_qualified => value}
+    end
+
+    def make_multi_filter(values, columns_mapped_to=nil)
+      {@column_qualified => values}
+    end
+  end
+end
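A quick sketch of what this mapper precomputes, assuming a hypothetical post_repo whose main table is :posts and a :title property (none of these names come from the package):

title_mapper = Hold::Sequel::PropertyMapper::Column.new(post_repo, :title, {})

columns, aliases, tables = title_mapper.columns_aliases_and_tables_for_select
# columns => [posts.title]                  (qualified, not aliased)
# aliases => [posts.title AS posts_title]   (the unique alias read back off result rows)
# tables  => [:posts]

title_mapper.load_value(:posts_title => 'Hello')  # => "Hello"
title_mapper.make_filter('Hello')                 # => {posts.title => 'Hello'} (a Sequel filter hash)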
data/lib/hold/sequel/property_mapper/created_at.rb
@@ -0,0 +1,14 @@
+module Hold::Sequel
+  class PropertyMapper::CreatedAt < PropertyMapper::Column
+    def build_insert_row(entity, table, row, id=nil)
+      row[@column_name] = Time.now if table == @table
+    end
+
+    def build_update_row(entity, table, row)
+    end
+
+    def post_insert(entity, rows, last_insert_id=nil)
+      entity[@property_name] = rows[@table][@column_name]
+    end
+  end
+end
data/lib/hold/sequel/property_mapper/custom_query.rb
@@ -0,0 +1,34 @@
+module Hold::Sequel
+  # A read-only mapper for array properties, which allows you to fetch the items via an arbitrary custom
+  # query against a target repository. You supply a block which takes the dataset and mapper arguments
+  # supplied by the repository's query_for_version method, but also an additional ID argument for the ID
+  # of the object for which the property is being fetched.
+  #
+  # example:
+  #   map_custom_query('foos') do |id, dataset, mapping|
+  #     dataset.join(:bar, ...).
+  #       ...
+  #       .filter(:boz_id => id)
+  #   end
+  class PropertyMapper::CustomQuery < PropertyMapper
+    def self.setter_dependencies_for(options={})
+      features = [*options[:model_class]].map {|klass| [:get_class, klass]}
+      {:target_repo => [IdentitySetRepository, *features]}
+    end
+
+    attr_reader :model_class
+    attr_accessor :target_repo
+
+    def initialize(repo, property_name, options={}, &block)
+      super(repo, property_name, &nil) # re &nil: our &block is otherwise implicitly passed on to super it seems, bit odd
+      @model_class = options[:model_class] or raise ArgumentError
+      @query_block = options[:query] || block
+    end
+
+    def load_value(row=nil, id=nil, version=nil)
+      target_repo.query(version) do |dataset, mapping|
+        @query_block.call(id, dataset, mapping)
+      end.to_a
+    end
+  end
+end
data/lib/hold/sequel/property_mapper/custom_query_single_value.rb
@@ -0,0 +1,36 @@
+module Hold::Sequel
+  # A read-only mapper for properties which are a single instance of a model class loaded from another repo.
+  #
+  # It allows you to fetch the item via an arbitrary custom query against the target repository.
+  #
+  # You supply a block which takes the dataset and mapper arguments supplied by the repository's
+  # query_for_version method, but also an additional ID argument for the ID of the object for which the
+  # property is being fetched.
+  #
+  # example:
+  #   map_custom_query_single_value('foo') do |id, dataset, mapping|
+  #     dataset.join(:bar, ...).
+  #       ...
+  #       .filter(:boz_id => id)
+  #   end
+  class PropertyMapper::CustomQuerySingleValue < PropertyMapper
+    def self.setter_dependencies_for(options={})
+      features = [*options[:model_class]].map {|klass| [:get_class, klass]}
+      {:target_repo => [IdentitySetRepository, *features]}
+    end
+
+    attr_reader :model_class
+    attr_accessor :target_repo
+
+    def initialize(repo, property_name, options={}, &block)
+      super(repo, property_name, &nil) # re &nil: our &block is otherwise implicitly passed on to super it seems, bit odd
+      @model_class = options[:model_class] or raise ArgumentError
+      @query_block = block
+    end
+
+    def load_value(row=nil, id=nil, version=nil)
+      target_repo.query(version) do |dataset, mapping|
+        @query_block.call(id, dataset, mapping)
+      end.single_result
+    end
+  end
+end