hold 1.0.2 → 1.0.3

@@ -367,16 +367,9 @@ module Hold::Sequel
  end
 
  def get_by_property(property, value, options={})
- properties_to_fetch ||= @default_properties.dup
- properties_to_fetch[property] = true
- query(options[:properties]) do |dataset, property_columns|
- filter = mapper(property).make_filter(value, property_columns[property])
- dataset.filter(filter)
- end.single_result
+ get_many_by_property(property, value, options).first
  end
 
-
-
  def contains_id?(id)
  dataset = dataset_to_select_tables(@main_table)
  id_filter = @identity_mapper.make_filter(id, [@tables_id_columns[@main_table]])
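The first hunk above collapses get_by_property into a delegation to get_many_by_property. A minimal, self-contained sketch of the same pattern (ToyRepository and its in-memory records are invented for illustration and are not part of the hold gem):

    # Single-result lookup expressed as "first result of the multi-result
    # lookup", mirroring the 1.0.3 change above.
    class ToyRepository
      def initialize(records)
        @records = records # e.g. [{ name: 'a' }, { name: 'b' }]
      end

      # Multi-result lookup: every record whose property equals the value.
      def get_many_by_property(property, value, _options = {})
        @records.select { |record| record[property] == value }
      end

      # Single-result lookup now just delegates and takes the first match.
      def get_by_property(property, value, options = {})
        get_many_by_property(property, value, options).first
      end
    end

    repo = ToyRepository.new([{ name: 'a' }, { name: 'b' }, { name: 'a' }])
    repo.get_by_property(:name, 'a') # => { name: 'a' }

One thing to keep an eye on with this style of delegation: unless get_many_by_property limits its query, the single-result call may fetch every matching row and discard all but the first, whereas the removed code filtered and pulled back a single result directly.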
@@ -1,121 +1,130 @@
- module Hold::Sequel
- class PolymorphicRepository
- include Hold::IdentitySetRepository
-
- attr_reader :db, :table, :type_column, :id_column, :type_to_model_class_mapping,
- :repos_for_model_classes, :model_class_to_type_mapping
-
- def initialize(db, options={})
- @db = db
- @table = options[:table] || :base
- @type_column = options[:type_column] || :type
- @id_column = options[:id_column] || :id
- @type_to_model_class_mapping = options[:mapping]
- @model_class_to_type_mapping = @type_to_model_class_mapping.invert
-
- @repos_for_model_classes = options[:repos] || {}
- @dataset = @db[@table].select(Sequel.as(@type_column,:_type), Sequel.as(@id_column,:_id))
- end
+ module Hold
+ module Sequel
+ # Polymorphic repository
+ class PolymorphicRepository
+ include Hold::IdentitySetRepository
+
+ attr_reader :db, :table, :type_column, :id_column,
+ :type_to_model_class_mapping, :repos_for_model_classes,
+ :model_class_to_type_mapping
+
+ def initialize(db, options = {})
+ @db = db
+ @table = options[:table] || :base
+ @type_column = options[:type_column] || :type
+ @id_column = options[:id_column] || :id
+ @type_to_model_class_mapping = options[:mapping]
+ @model_class_to_type_mapping = @type_to_model_class_mapping.invert
+
+ @repos_for_model_classes = options[:repos] || {}
+ @dataset = @db[@table].select(Sequel.as(@type_column, :_type),
+ Sequel.as(@id_column, :_id))
+ end
 
- def can_get_class?(model_class)
- @model_class_to_type_mapping.has_key?(model_class)
- end
+ def can_get_class?(model_class)
+ @model_class_to_type_mapping.key?(model_class)
+ end
 
- def can_set_class?(model_class)
- @model_class_to_type_mapping.has_key?(model_class)
- end
+ def can_set_class?(model_class)
+ @model_class_to_type_mapping.key?(model_class)
+ end
 
- def get_repo_dependencies_from(repo_set)
- @type_to_model_class_mapping.each do |type,model_class|
- @repos_for_model_classes[model_class] ||= repo_set.repo_for_model_class(model_class)
+ def get_repo_dependencies_from(repo_set)
+ @type_to_model_class_mapping.each do |_, model_class|
+ @repos_for_model_classes[model_class] ||=
+ repo_set.repo_for_model_class(model_class)
+ end
  end
- end
 
- def type_to_repo_mapping
- @type_to_repo_mapping ||= begin
- result = {}
- @type_to_model_class_mapping.each {|t,m| result[t] = @repos_for_model_classes[m]}
- result
+ def type_to_repo_mapping
+ @type_to_repo_mapping ||=
+ begin
+ @type_to_model_class_mapping.each_with_object({}) do |(t, m), hash|
+ hash[t] = @repos_for_model_classes[m]
+ end
+ end
  end
- end
 
- def construct_entity(property_hash, row=nil)
- type = property_hash[:_type] or raise "missing _type in result row"
- @type_to_model_class_mapping[type].new(property_hash)
- end
+ def construct_entity(property_hash, _row = nil)
+ type = property_hash[:_type] || (fail 'missing _type in result row')
+ @type_to_model_class_mapping[type].new(property_hash)
+ end
 
- def transaction(*p, &b)
- @db.transaction(*p, &b)
- end
+ def transaction(*p, &b)
+ @db.transaction(*p, &b)
+ end
 
- # - Takes multiple result rows with type and id column
- # - Groups the IDs by type and does a separate get_many_by_ids query on the relevant repo
- # - Combines the results from the separate queries putting them into the order of the IDs from
- # the original rows (or in the order of the ids given, where they are given)
- def load_from_rows(rows, options={}, ids=nil)
- ids ||= rows.map {|row| row[:_id]}
- ids_by_type = Hash.new {|h,k| h[k]=[]}
- rows.each {|row| ids_by_type[row[:_type]] << row[:_id]}
- results_by_id = {}
- ids_by_type.each do |type, type_ids|
- repo = type_to_repo_mapping[type] or raise "PolymorphicRepository: no repo found for type value #{type}"
- repo.get_many_by_ids(type_ids, options).each_with_index do |result, index|
- results_by_id[type_ids[index]] = result
+ # - Takes multiple result rows with type and id column
+ # - Groups the IDs by type and does a separate get_many_by_ids query on
+ # the relevant repo
+ # - Combines the results from the separate queries putting them into the
+ # order of the IDs from the original rows (or in the order of the ids
+ # given, where they are given)
+ def load_from_rows(rows, options = {}, ids = nil)
+ ids ||= rows.map { |row| row[:_id] }
+ ids_by_type = Hash.new { |h, k| h[k] = [] }
+ rows.each { |row| ids_by_type[row[:_type]] << row[:_id] }
+ results_by_id = {}
+ ids_by_type.each do |type, type_ids|
+ repo = type_to_repo_mapping[type] ||
+ (fail "PolymorphicRepository: no repo found for type #{type}")
+ repo.get_many_by_ids(type_ids, options)
+ .each_with_index { |res, i| results_by_id[type_ids[i]] = res }
  end
+ results_by_id.values_at(*ids)
  end
- results_by_id.values_at(*ids)
- end
 
- def load_from_row(row, options={})
- repo = type_to_repo_mapping[row[:_type]] or raise "PolymorphicRepository: no repo found for type value #{row[:_type]}"
- repo.get_by_id(row[:_id], options)
- end
-
- def get_with_dataset(options={}, &b)
- dataset = @dataset
- dataset = yield @dataset if block_given?
- row = dataset.limit(1).first and load_from_row(row, options)
- end
+ def load_from_row(row, options = {})
+ repo =
+ type_to_repo_mapping[row[:_type]] ||
+ (fail "PolymorphicRepository: no repo found for type #{row[:_type]}")
 
- def get_by_id(id, options={})
- get_with_dataset(options) {|ds| ds.filter(@id_column => id)}
- end
-
- def get_many_by_ids(ids, options={})
- rows = @dataset.filter(@id_column => ids).all
- load_from_rows(rows, options, ids)
- end
+ repo.get_by_id(row[:_id], options)
+ end
 
- def contains_id?(id)
- @dataset.filter(@id_column => id).select(1).limit(1).single_value ? true : false
- end
+ def get_with_dataset(options = {})
+ dataset = @dataset
+ dataset = yield @dataset if block_given?
+ (row = dataset.limit(1).first) && load_from_row(row, options)
+ end
 
+ def get_by_id(id, options = {})
+ get_with_dataset(options) { |ds| ds.filter(@id_column => id) }
+ end
 
+ def get_many_by_ids(ids, options = {})
+ rows = @dataset.filter(@id_column => ids).all
+ load_from_rows(rows, options, ids)
+ end
 
+ def contains_id?(id)
+ !@dataset.filter(@id_column => id).select(1).limit(1).single_value.nil?
+ end
 
- def store(object)
- repo = @repos_for_model_classes[object.class] or raise Error
- repo.store(id, object)
- end
+ def store(object)
+ repo = @repos_for_model_classes[object.class] || (fail StandardError)
+ repo.store(id, object)
+ end
 
- def store_new(object)
- repo = @repos_for_model_classes[object.class] or raise Error
- repo.store_new(id, object)
- end
+ def store_new(object)
+ repo = @repos_for_model_classes[object.class] || (fail StandardError)
+ repo.store_new(id, object)
+ end
 
- def update(entity, update_entity)
- repo = @repos_for_model_classes[entity.class] or raise Error
- repo.update(entity, update_entity)
- end
+ def update(entity, update_entity)
+ repo = @repos_for_model_classes[entity.class] || (fail StandardError)
+ repo.update(entity, update_entity)
+ end
 
- def update_by_id(id, update_entity)
- repo = @repos_for_model_classes[update_entity.class] or raise Error
- repo.update_by_id(id, update_entity)
- end
+ def update_by_id(id, update_entity)
+ repo = @repos_for_model_classes[update_entity.class] || (fail StandardError)
+ repo.update_by_id(id, update_entity)
+ end
 
- def delete(object)
- repo = @repos_for_model_classes[object.class] or raise Error
- repo.delete(object)
+ def delete(object)
+ repo = @repos_for_model_classes[object.class] || (fail StandardError)
+ repo.delete(object)
+ end
  end
  end
  end
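The second hunk nests PolymorphicRepository under separate Hold and Sequel modules and tidies the batched lookup. The interesting part is the strategy documented above load_from_rows: group the incoming (type, id) rows by type, do one get_many_by_ids call per concrete repository, then put the results back into the original row order. A self-contained toy version of that strategy (ToyPolymorphicLookup and FakeRepo are invented stand-ins, not the gem's API):

    # Group ids by type, batch-fetch per type, then restore the original order.
    class ToyPolymorphicLookup
      def initialize(repos_by_type)
        @repos_by_type = repos_by_type # e.g. { 'article' => repo, 'photo' => repo }
      end

      def load_from_rows(rows)
        ids = rows.map { |row| row[:_id] }
        ids_by_type = Hash.new { |hash, key| hash[key] = [] }
        rows.each { |row| ids_by_type[row[:_type]] << row[:_id] }

        results_by_id = {}
        ids_by_type.each do |type, type_ids|
          repo = @repos_by_type.fetch(type) # one batched fetch per type
          repo.get_many_by_ids(type_ids).each_with_index do |result, i|
            results_by_id[type_ids[i]] = result
          end
        end
        results_by_id.values_at(*ids) # back into the caller's row order
      end
    end

    # Minimal fake repository so the sketch runs on its own.
    FakeRepo = Struct.new(:prefix) do
      def get_many_by_ids(ids)
        ids.map { |id| "#{prefix}##{id}" }
      end
    end

    lookup = ToyPolymorphicLookup.new('article' => FakeRepo.new('Article'),
                                      'photo'   => FakeRepo.new('Photo'))
    rows = [{ _type: 'article', _id: 1 }, { _type: 'photo', _id: 9 },
            { _type: 'article', _id: 2 }]
    lookup.load_from_rows(rows) # => ["Article#1", "Photo#9", "Article#2"]

This keeps the number of queries proportional to the number of distinct types rather than the number of rows, which is the point of batching by type.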
@@ -1,138 +1,163 @@
- module Hold::Sequel
- # Abstract superclass.
- # Responsibility of a PropertyMapper is to map data for a particular property of a model class, between the
- # instances of that model class, and the database
- class PropertyMapper
- def self.setter_dependencies_for(options={}); {}; end
+ module Hold
+ module Sequel
+ # Abstract superclass.
+ #
+ # Responsibility of a PropertyMapper is to map data for a particular
+ # property of a model class, between the instances of that model class, and
+ # the database
+ class PropertyMapper
+ def self.setter_dependencies_for(_options = {})
+ {}
+ end
 
- attr_reader :repository, :property_name, :property
+ attr_reader :repository, :property_name, :property
 
- # If you pass a block, it will be instance_evalled, allowing you to create one-off custom property mappers
- # by overriding bits of this implementation in the block.
- def initialize(repo, property_name, options=nil, &block)
- @repository = repo
- @property_name = property_name
- instance_eval(&block) if block
- end
+ # If you pass a block, it will be instance_evalled, allowing you to create
+ # one-off custom property mappers by overriding bits of this
+ # implementation in the block.
+ def initialize(repo, property_name, _options = nil, &block)
+ @repository = repo
+ @property_name = property_name
+ instance_eval(&block) if block
+ end
 
- # columns: column names to include in a SELECT in order to select this property. these should be
- # qualified with the relevant table name but not aliased
- #
- # aliases: the above columns, aliased for use in the SELECT clause. be alias should something unique
- # which the mapper can later use to retreive from a result row.
- #
- # Any tables which need to be present in the FROM clause in order to select the columns.
- # relevant joins will be constructed by the parent repo.
- #
- # a 'preferred_table' hint may be passed by the repo to indicate that it'd prefer you load the
- # column off a particular table; at present this is only used by the IdentityMapper
- def columns_aliases_and_tables_for_select(preferred_table=nil)
- return [], [], []
- end
+ # columns: column names to include in a SELECT in order to select this
+ # property. These should be qualified with the relevant table name but not
+ # aliased.
+ #
+ # aliases: the above columns, aliased for use in the SELECT clause. Each
+ # alias should be something unique which the mapper can later use to
+ # retrieve it from a result row.
+ #
+ # Any tables which need to be present in the FROM clause in order to
+ # select the columns. Relevant joins will be constructed by the parent
+ # repo.
+ #
+ # A 'preferred_table' hint may be passed by the repo to indicate that it'd
+ # prefer you load the column off a particular table; at present this is
+ # only used by the IdentityMapper
+ def columns_aliases_and_tables_for_select(_preferred_table = nil)
+ [[], [], []]
+ end
 
- # Obtains the value of this property from a sequel result row and/or identity value.
- #
- # where the mapper has columns_aliases_and_tables_for_select, it will get passed a result row object here
- # which contains the sql values for these columns (amongst others potentially)
- #
- # Where the identity value is available it will also be passed.
- #
- # One or other of id, row must always be passed.
- def load_value(row=nil, id=nil, properties=nil)
- end
+ # Obtains the value of this property from a sequel result row and/or
+ # identity value.
+ #
+ # where the mapper has columns_aliases_and_tables_for_select, it will get
+ # passed a result row object here which contains the sql values for these
+ # columns (amongst others potentially)
+ #
+ # Where the identity value is available it will also be passed.
+ #
+ # One or other of id, row must always be passed.
+ def load_value(_row = nil, _id = nil, _properties = nil)
+ end
 
- # called inside the INSERT transaction for insertion of the given entity.
- #
- # this is called first thing before insert rows are built (via build_insert_row) for each table of the
- # repo.
- def pre_insert(entity)
- end
+ # called inside the INSERT transaction for insertion of the given entity.
+ #
+ # this is called first thing before insert rows are built (via
+ # build_insert_row) for each table of the repo.
+ def pre_insert(_entity)
+ end
 
- # called inside the UPDATE transaction for insertion of the given entity.
- #
- # this is called first thing before update rows are built (via build_update_row) for each table of the
- # repo.
- #
- # anything returned from pre_update will be passed to post_update's data_from_pre_update arg if the
- # update succeeds.
- def pre_update(entity, update_entity)
- end
+ # called inside the UPDATE transaction for insertion of the given entity.
+ #
+ # this is called first thing before update rows are built (via
+ # build_update_row) for each table of the repo.
+ #
+ # anything returned from pre_update will be passed to post_update's
+ # data_from_pre_update arg if the update succeeds.
+ def pre_update(_entity, _update_entity)
+ end
 
- # called inside the DELETE transaction for a given entity.
- #
- # this is called first thing before rows are deleted for each table of the repo.
- def pre_delete(entity)
- end
+ # called inside the DELETE transaction for a given entity.
+ #
+ # this is called first thing before rows are deleted for each table of the
+ # repo.
+ def pre_delete(_entity)
+ end
 
- # called inside the DELETE transaction for a given entity.
- #
- # this is called last thing after rows are deleted for each table of the repo.
- def post_delete(entity)
- end
+ # called inside the DELETE transaction for a given entity.
+ #
+ # this is called last thing after rows are deleted for each table of the
+ # repo.
+ def post_delete(_entity)
+ end
 
- # gets this property off the entity, and sets associated keys on a sequel row hash for insertion
- # into the given table. May be passed an ID if an last_insert_id id value for the entity was previously
- # obtained from an ID sequence on insertion into another table as part of the same combined entity
- # store_new.
- #
- # this is called inside the transaction which wraps the insert, so this is effectively your pre-insert
- # hook and you can safely do other things inside it in the knowledge they'll be rolled back in the
- # event of a subsequent problem.
- def build_insert_row(entity, table, row, id=nil)
- end
+ # gets this property off the entity, and sets associated keys on a sequel
+ # row hash for insertion into the given table. May be passed an ID if a
+ # last_insert_id value for the entity was previously obtained from an
+ # ID sequence on insertion into another table as part of the same combined
+ # entity store_new.
+ #
+ # this is called inside the transaction which wraps the insert, so this is
+ # effectively your pre-insert hook and you can safely do other things
+ # inside it in the knowledge they'll be rolled back in the event of a
+ # subsequent problem.
+ def build_insert_row(_entity, _table, _row, _id = nil)
+ end
 
- # gets this property off the update_entity, and sets associated keys on a sequel row hash for update
- # of the given table for the given entity.
- #
- # as with build_update_row, this is done inside the update transaction, it's effectively your
- # pre-update hook.
- def build_update_row(update_entity, table, row)
- end
+ # gets this property off the update_entity, and sets associated keys on a
+ # sequel row hash for update of the given table for the given entity.
+ #
+ # as with build_insert_row, this is done inside the update transaction,
+ # it's effectively your pre-update hook.
+ def build_update_row(_update_entity, _table, _row)
+ end
 
- # used to make a sequel filter condition setting relevant columns equal to values equivalent
- # to the given property value. May raise if mapper doesn't support this
- def make_filter(value, columns_mapped_to)
- raise Hold::UnsupportedOperation
- end
+ # used to make a sequel filter condition setting relevant columns equal to
+ # values equivalent to the given property value. May raise if mapper
+ # doesn't support this
+ def make_filter(_value, _columns_mapped_to)
+ fail Hold::UnsupportedOperation
+ end
 
- # As for make_filter but takes multiple possible values and does a column IN (1,2,3,4) type thing.
- def make_multi_filter(values, columns_mapped_to)
- raise Hold::UnsupportedOperation
- end
+ # As for make_filter but takes multiple possible values and does a column
+ # IN (1,2,3,4) type thing.
+ def make_multi_filter(_values, _columns_mapped_to)
+ fail Hold::UnsupportedOperation
+ end
 
- # like load_value, but works in a batched fashion, allowing a batched loading strategy to
- # be used for associated objects.
- # takes a block and yields the loaded values one at a time to it together with their index
- def load_values(rows=nil, ids=nil, properties=nil)
- if rows
- rows.each_with_index {|row, i| yield load_value(row, ids && ids[i], properties), i}
- else
- ids.each_with_index {|id, i| yield load_value(nil, id, properties), i}
+ # like load_value, but works in a batched fashion, allowing a batched
+ # loading strategy to be used for associated objects.
+ # takes a block and yields the loaded values one at a time to it together
+ # with their index
+ def load_values(rows = nil, ids = nil, properties = nil)
+ if rows
+ rows.each_with_index do |row, i|
+ yield load_value(row, ids && ids[i], properties), i
+ end
+ else
+ ids.each_with_index do |id, i|
+ yield load_value(nil, id, properties), i
+ end
+ end
  end
- end
 
- # called after rows built via build_insert_row have successfully been used in a INSERT
- # for the entity passed. Should update the entity property, where appropriate, with any default
- # values which were supplied by the repository (via default_for) on insert, and should do
- # any additional work in order to save any values which are not mapped to columns on one of the repo's
- # own :tables
- #
- # Is also passed the last_insert_id resulting from any insert, to help fill out any autoincrement
- # primary key column.
- #
- # is executed inside the same transaction as the INSERT
- def post_insert(entity, rows, last_insert_id=nil)
- end
+ # called after rows built via build_insert_row have successfully been used
+ # in an INSERT for the entity passed. Should update the entity property,
+ # where appropriate, with any default values which were supplied by the
+ # repository (via default_for) on insert, and should do any additional
+ # work in order to save any values which are not mapped to columns on one
+ # of the repo's own :tables
+ #
+ # Is also passed the last_insert_id resulting from any insert, to help
+ # fill out any autoincrement primary key column.
+ #
+ # is executed inside the same transaction as the INSERT
+ def post_insert(_entity, _rows, _last_insert_id = nil)
+ end
 
- # called after rows built via build_update_row have successfully been used in a UPDATE
- # for the id and update_entity passed. Should update the entity property, where appropriate, with any default
- # values which were supplied by the repository (via default_for) on update, and should do
- # any additional work in order to save any values which are not mapped to columns on one of the repo's
- # own :tables
- #
- # is executed inside the same transaction as the UPDATE
- def post_update(entity, update_entity, rows, data_from_pre_update)
+ # called after rows built via build_update_row have successfully been used
+ # in an UPDATE for the id and update_entity passed. Should update the
+ # entity property, where appropriate, with any default values which were
+ # supplied by the repository (via default_for) on update, and should do
+ # any additional work in order to save any values which are not mapped to
+ # columns on one of the repo's own :tables
+ #
+ # is executed inside the same transaction as the UPDATE
+ def post_update(_entity, _update_entity, _rows, _data_from_pre_update)
+ end
  end
-
  end
  end
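Since PropertyMapper is an abstract superclass whose subclasses override the hooks documented above, it may help to see what a minimal concrete mapper could look like. The sketch below is purely illustrative and is not code from the gem: it assumes hold and sequel are already loaded, the :articles table and the ToySingleColumnMapper name are invented, and hold ships its own real column mappers.

    module Hold
      module Sequel
        # Illustrative single-column mapper built on the PropertyMapper hooks.
        class ToySingleColumnMapper < PropertyMapper
          def initialize(repo, property_name, options = nil, &block)
            super
            @table  = (options && options[:table]) || :articles # invented default
            @column = property_name
          end

          # One qualified column, one aliased column, one table.
          def columns_aliases_and_tables_for_select(_preferred_table = nil)
            qualified = ::Sequel.qualify(@table, @column)
            [[qualified], [::Sequel.as(qualified, @column)], [@table]]
          end

          # Read the value straight off the aliased result row.
          def load_value(row = nil, _id = nil, _properties = nil)
            row && row[@column]
          end

          # Copy the property into the row being written for our table.
          def build_insert_row(entity, table, row, _id = nil)
            row[@column] = entity.send(@property_name) if table == @table
          end
          alias build_update_row build_insert_row

          # Equality condition usable by get_by_property-style lookups.
          def make_filter(value, columns_mapped_to)
            { columns_mapped_to.first => value }
          end
        end
      end
    end

Anything not overridden here (pre_insert, post_insert, make_multi_filter, and so on) falls back to the no-op or Hold::UnsupportedOperation defaults shown in the diff above.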