hold 1.0.0

@@ -0,0 +1,96 @@
+ module Hold::Sequel
+   # Maps to an associated object which is fetched by id from a target repository using a foreign key column
+   class PropertyMapper::ForeignKey < PropertyMapper
+     def self.setter_dependencies_for(options={})
+       features = [*options[:model_class]].map {|klass| [:get_class, klass]}
+       {:target_repo => [Hold::IdentitySetRepository, *features]}
+     end
+
+     attr_accessor :target_repo
+
+     attr_reader :columns_aliases_and_tables_for_select, :column_alias, :column_name, :table,
+       :column_qualified, :auto_store_new, :model_class
+
+     # auto_store_new: where the value for this property is an object without an ID,
+     # automatically store_new the object in the target_repo before trying to store
+     # the object in question with this foreign key property. In the absence of this
+     # setting, values without an ID will cause an exception
+     def initialize(repo, property_name, options)
+       super(repo, property_name)
+
+       @table = options[:table] || @repository.main_table
+       @column_name = options[:column_name] || :"#{property_name}_id"
+       @column_alias = :"#{@table}_#{@column_name}"
+       @column_qualified = Sequel::SQL::QualifiedIdentifier.new(@table, @column_name)
+       @columns_aliases_and_tables_for_select = [
+         [@column_qualified],
+         [Sequel::SQL::AliasedExpression.new(@column_qualified, @column_alias)],
+         [@table]
+       ]
+
+       @auto_store_new = options[:auto_store_new] || false
+       @model_class = options[:model_class] or raise ArgumentError
+     end
+
+     def load_value(row, id=nil, properties=nil)
+       fkey = row[@column_alias] and target_repo.get_by_id(fkey, :properties => properties)
+     end
+
+     def ensure_value_has_id_where_present(value)
+       if value && !value.id
+         if @auto_store_new
+           target_repo.store_new(value)
+         else
+           raise "value for ForeignKey mapped property #{@property_name} has no id, and :auto_store_new not specified"
+         end
+       end
+     end
+
+     def pre_insert(entity)
+       ensure_value_has_id_where_present(entity[@property_name])
+     end
+
+     def pre_update(entity, update_entity)
+       ensure_value_has_id_where_present(update_entity[@property_name])
+     end
+
+     def build_insert_row(entity, table, row, id=nil)
+       if @table == table && entity.has_key?(@property_name)
+         value = entity[@property_name]
+         row[@column_name] = value && value.id
+       end
+     end
+     alias :build_update_row :build_insert_row
+
+     # for now ignoring the columns_mapped_to, since the Identity mapper is the only one
+     # for which this matters at present
+
+     def make_filter(value, columns_mapped_to=nil)
+       {@column_qualified => value && value.id}
+     end
+
+     def make_multi_filter(values, columns_mapped_to=nil)
+       {@column_qualified => values.map {|v| v.id}}
+     end
+
+     def make_filter_by_id(id, columns_mapped_to=nil)
+       {@column_qualified => id}
+     end
+
+     def make_filter_by_ids(ids, columns_mapped_to=nil)
+       {@column_qualified => ids}
+     end
+
+     # efficient batch load which takes advantage of get_many_by_ids on the target repo
+     def load_values(rows, ids=nil, properties=nil, &b)
+       fkeys = rows.map {|row| row[@column_alias]}
+       non_nil_fkeys = fkeys.compact
+       non_nil_fkey_results = if non_nil_fkeys.empty? then [] else
+         target_repo.get_many_by_ids(non_nil_fkeys, :properties => properties)
+       end
+       fkeys.each_with_index do |fkey, index|
+         yield(fkey ? non_nil_fkey_results.shift : nil, index)
+       end
+     end
+   end
+ end
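
For orientation, a minimal sketch of how the options accepted by the constructor above fit together. The repository objects and the Author class below are hypothetical, and in practice a mapper like this is normally wired up by the owning repository's property-mapping configuration rather than instantiated by hand:

    # Hypothetical: a posts repository whose :author property lives in a
    # posts.author_id column and resolves against an authors repository.
    mapper = Hold::Sequel::PropertyMapper::ForeignKey.new(
      posts_repo, :author,
      :model_class    => Author,       # required - raises ArgumentError if omitted
      :column_name    => :author_id,   # the default would be :"#{property_name}_id" anyway
      :auto_store_new => true          # store_new unsaved values instead of raising
    )
    mapper.target_repo = authors_repo  # normally injected via setter_dependencies_for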
@@ -0,0 +1,60 @@
+ module Hold::Sequel
+   # A property which is a hash of strings to other primitive values. Persisted 'all in one go'
+   # in a separate table.
+   class PropertyMapper::Hash < PropertyMapper
+     attr_reader :table, :foreign_key, :key_column, :value_column
+
+     def initialize(repo, property_name, options)
+       super(repo, property_name)
+
+       @table = options[:table] || :"#{repo.main_table}_#{property_name}"
+       @foreign_key = options[:foreign_key] || :"#{repo.main_table.to_s.singularize}_id"
+       @key_column = options[:key_column] || :key
+       @value_column = options[:value_column] || :value
+
+       @dataset = @repository.db[@table]
+       @select_kv = @repository.db[@table].select(
+         Sequel.as(@key_column, :key),
+         Sequel.as(@value_column, :value))
+       @select_all = @repository.db[@table].select(
+         Sequel.as(@key_column, :key),
+         Sequel.as(@value_column, :value),
+         Sequel.as(@foreign_key, :id))
+     end
+
+     def load_value(row=nil, id=nil, properties=nil)
+       result = {}
+       @select_kv.filter(@foreign_key => id).each do |row|
+         result[row[:key]] = row[:value]
+       end
+       result
+     end
+
+     def load_values(rows=nil, ids=nil, properties=nil, &block)
+       results = Hash.new {|h,k| h[k] = {}}
+       @select_all.filter(@foreign_key => ids).each do |row|
+         results[row[:id]][row[:key]] = row[:value]
+       end
+       results.values_at(*ids).each_with_index(&block)
+     end
+
+     def pre_delete(entity)
+       @dataset.filter(@foreign_key => entity.id).delete
+     end
+
+     def post_insert(entity, rows, last_insert_id=nil)
+       hash = entity[@property_name] or return
+       @dataset.multi_insert(hash.map do |k,v|
+         {@foreign_key => last_insert_id, @key_column => k, @value_column => v}
+       end)
+     end
+
+     def post_update(entity, update_entity, rows, data_from_pre_update)
+       hash = update_entity[@property_name] or return
+       @dataset.filter(@foreign_key => entity.id).delete
+       @dataset.multi_insert(hash.map do |k,v|
+         {@foreign_key => entity.id, @key_column => k, @value_column => v}
+       end)
+     end
+   end
+ end
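
A rough sketch of the defaults above, using invented table and repository names: for a :settings property on a repository whose main_table is :users, the mapper reads and writes a users_settings(user_id, key, value) table:

    # Hypothetical wiring; passing these options is equivalent to relying on
    # the defaults computed in initialize.
    mapper = Hold::Sequel::PropertyMapper::Hash.new(
      users_repo, :settings,
      :table        => :users_settings,  # default: :"#{main_table}_#{property_name}"
      :foreign_key  => :user_id,         # default: :"#{main_table.to_s.singularize}_id"
      :key_column   => :key,
      :value_column => :value
    )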
@@ -0,0 +1,41 @@
+ module Hold::Sequel
+   class PropertyMapper::Identity < PropertyMapper
+     def columns_aliases_and_tables_for_select(preferred_table=@repository.main_table)
+       qualified = qualified_column_name(preferred_table)
+       return [qualified], [qualified.as(:id)], [preferred_table]
+     end
+
+     def qualified_column_name(preferred_table=@repository.main_table)
+       id_column = @repository.table_id_column(preferred_table)
+       Sequel::SQL::QualifiedIdentifier.new(preferred_table, id_column)
+     end
+
+     # the ID needs to go into insert rows for /all/ tables of the repo
+     def build_insert_row(entity, table, row, id=nil)
+       id ||= entity[@property_name] or return
+       id_column = @repository.table_id_column(table)
+       row[id_column] = id
+     end
+
+     # we don't update the ID - considered immutable
+     def build_update_row(entity, table, row)
+     end
+
+     # After a successful insert, we assign the last_insert_id back onto the entity's id property:
+     def post_insert(entity, rows, last_insert_id=nil)
+       entity[@property_name] = last_insert_id if last_insert_id
+     end
+
+     def load_value(row, id=nil, version=nil)
+       row[:id]
+     end
+
+     def make_filter(value, columns_mapped_to)
+       {columns_mapped_to.first => value}
+     end
+
+     def make_multi_filter(values, columns_mapped_to)
+       {columns_mapped_to.first => values}
+     end
+   end
+ end
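
To make the insert flow above concrete, a small illustrative sketch (the mapper, entity, table name and last_insert_id are placeholders): build_insert_row copies any existing id into the row for each table, and post_insert writes the database-generated id back onto the entity:

    row = {}
    identity_mapper.build_insert_row(entity, :users, row)       # sets the table's id column in row only if the entity already has an id
    # ... the repository performs the INSERT and obtains last_insert_id ...
    identity_mapper.post_insert(entity, [row], last_insert_id)  # the entity's id property now holds last_insert_id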
@@ -0,0 +1,158 @@
+ module Hold::Sequel
+   # Maps to an array of associated objects stored in another repo, where a :join_table exists with columns for:
+   # - our id property (:left_key)
+   # - other repo's id property (:right_key)
+   # - order position within the list, starting from 0 (:order_column)
+   #
+   # By default these properties aren't writeable - when they are writeable:
+   #
+   # (for now at least) the rows of the join table are owned and managed solely by the parent objects via this
+   # mapper. The associated objects themselves, however, are free-floating and are not touched during
+   # create/update/delete (except optionally to store_new any new ones on create of the parent object,
+   # when :auto_store_new => true).
+   #
+   # If you supply a hash as :filter, this will be used to filter the join table, and will also be merged
+   # into any rows inserted into the join table. So if you use it on a writeable property, it needs to
+   # map columns just to values rather than to other SQL conditions.
+   #
+   # NB: for now this does not assume (or do anything special with respect to) the presence of a reciprocal
+   # many-to-many property on the target repo. This functionality will need adding later to help figure out
+   # the side-effects of changes to a many-to-many property when it comes to cache invalidation, and to
+   # ensure that the order given by the order_column is not upset by updates to the corresponding reciprocal
+   # property.
+   #
+   # So:
+   # - Rows are inserted into the join table after the parent object is created
+   # - Rows in the join table are nuked and re-inserted after this property on the parent object is updated
+   # - Rows in the join table are deleted before the parent object is deleted (unless :manual_cascade_delete
+   #   => false is specified, hinting that ON DELETE CASCADE is set on the foreign key so we needn't bother)
+   class PropertyMapper::ManyToMany < PropertyMapper
+     def self.setter_dependencies_for(options={})
+       features = [*options[:model_class]].map {|klass| [:get_class, klass]}
+       {:target_repo => [IdentitySetRepository, *features]}
+     end
+
+     attr_accessor :target_repo
+
+     attr_reader :join_table, :left_key, :right_key, :order_column, :writeable,
+       :manual_cascade_delete, :auto_store_new, :distinct, :filter, :model_class
+
+     def initialize(repo, property_name, options, &block)
+       super(repo, property_name, &nil)
+
+       @join_table = options[:join_table] || :"#{repo.main_table}_#{property_name}"
+       @left_key = options[:left_key] || :"#{repo.main_table.to_s.singularize}_id"
+       @right_key = options[:right_key] || :"#{property_name.to_s.singularize}_id"
+       @qualified_left_key = Sequel::SQL::QualifiedIdentifier.new(@join_table, @left_key)
+       @qualified_right_key = Sequel::SQL::QualifiedIdentifier.new(@join_table, @right_key)
+
+       @filter = options[:filter]
+       @join_table_dataset = @repository.db[@join_table]
+       @distinct = options[:distinct] || false
+
+       @order_column = options[:order_column] and
+         @qualified_order_column = Sequel::SQL::QualifiedIdentifier.new(@join_table, @order_column)
+
+       @writeable = options[:writeable] || false
+       @manual_cascade_delete = options[:manual_cascade_delete] != false
+       @auto_store_new = options[:auto_store_new] || false
+
+       @model_class = options[:model_class] or raise ArgumentError
+
+       # in case you want to override anything on the instance:
+       instance_eval(&block) if block
+     end
+
+     def load_value(row, id, properties=nil)
+       target_repo.query(properties) do |dataset, property_columns|
+         id_column = property_columns[target_repo.identity_property].first
+         dataset = dataset.
+           join(@join_table, @qualified_right_key => id_column).
+           filter(@qualified_left_key => id)
+         dataset = dataset.filter(@filter) if @filter
+         dataset = dataset.distinct if @distinct
+         @qualified_order_column ? dataset.order(@qualified_order_column) : dataset
+       end.to_a
+     end
+
+     # efficient batch load for the non-lazy case
+     def load_values(rows, ids=nil, properties=nil, &b)
+       query = target_repo.query(properties) do |dataset, mapping|
+         id_column = mapping[target_repo.identity_property]
+         dataset = dataset
+           .join(@join_table, @qualified_right_key => id_column)
+           .filter(@qualified_left_key => ids)
+           .select(Sequel.as(@qualified_left_key, :_many_to_many_id))
+         dataset = dataset.filter(@filter) if @filter
+         dataset = dataset.distinct if @distinct
+         dataset = dataset.order(:_many_to_many_id, @qualified_order_column) if @qualified_order_column
+         dataset
+       end
+
+       groups = []; id_to_group = {}
+       ids.each_with_index {|id,index| id_to_group[id] = groups[index] = []}
+       query.results_with_rows.each do |entity, row|
+         id_to_group[row[:_many_to_many_id]] << entity
+       end
+       groups.each_with_index(&b)
+     end
+
+     # find all instances in this repo whose value for this property contains the given member instance
+     def get_many_by_member(member)
+       @repository.query do |dataset, property_columns|
+         id_column = property_columns[@repository.identity_property].first
+         dataset = dataset.
+           join(@join_table, @qualified_left_key => id_column).
+           filter(@qualified_right_key => member.id)
+         dataset = dataset.filter(@filter) if @filter
+         dataset = dataset.distinct if @distinct
+         dataset
+       end.to_a
+     end
+
+
+
+     def insert_join_table_rows(entity, id, values)
+       rows = []
+       values.each_with_index do |value, index|
+         value_id = value.id || if @auto_store_new
+           target_repo.store_new(value); value.id
+         else
+           raise "value for ManyToMany mapped property #{@property_name} has no id, and :auto_store_new not specified"
+         end
+         row = {@left_key => id, @right_key => value_id}
+         row[@order_column] = index if @order_column
+         row.merge!(@filter) if @filter
+         add_denormalized_columns_to_join_table_row(entity, value, row)
+         rows << row
+       end
+       @join_table_dataset.multi_insert(rows)
+     end
+
+     # this is a hook for you to override
+     def add_denormalized_columns_to_join_table_row(entity, value, row)
+     end
+
+     def delete_join_table_rows(id)
+       filters = {@left_key => id}
+       filters.merge!(@filter) if @filter
+       @join_table_dataset.filter(filters).delete
+     end
+
+     def post_insert(entity, rows, insert_id)
+       return unless @writeable
+       values = entity[@property_name] and insert_join_table_rows(entity, insert_id, values)
+     end
+
+     def post_update(entity, update_entity, rows, result_from_pre_update=nil)
+       return unless @writeable
+       update_values = update_entity[@property_name] or return
+       delete_join_table_rows(entity.id)
+       insert_join_table_rows(entity, entity.id, update_values)
+     end
+
+     def pre_delete(entity)
+       delete_join_table_rows(entity.id) if @manual_cascade_delete
+     end
+   end
+ end
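
As a hedged illustration of how the join-table options above combine (repository, model class and table names here are invented): a posts-to-tags association kept in a posts_tags(post_id, tag_id, position) join table might be configured as:

    mapper = Hold::Sequel::PropertyMapper::ManyToMany.new(
      posts_repo, :tags,
      :model_class    => Tag,
      :join_table     => :posts_tags,  # default: :"#{main_table}_#{property_name}"
      :left_key       => :post_id,     # default: :"#{main_table.to_s.singularize}_id"
      :right_key      => :tag_id,      # default: :"#{property_name.to_s.singularize}_id"
      :order_column   => :position,    # optional; preserves the array order
      :writeable      => true,         # let the mapper manage join rows on insert/update
      :auto_store_new => true          # store_new any unsaved Tag values first
    )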
@@ -0,0 +1,199 @@
+ module Hold::Sequel
+   # Maps to an array of associated objects stored in another repo, which has a foreign_key-mapped
+   # property pointing at instances of our model class.
+   #
+   # By default these properties aren't writeable - when they are writeable, the values are treated
+   # like wholly-owned sub-components of the parent object.
+   #
+   # So, objects which are values of this property are:
+   # - Created after the parent object is created
+   # - Created/updated/deleted as appropriate after this property on the parent object is updated
+   # - Deleted before the parent object is deleted (unless :manual_cascade_delete => false is
+   #   specified, hinting that ON DELETE CASCADE is set on the foreign key so we needn't bother)
+   #
+   # On update:
+   # We allow you to re-order and/or update the existing values while maintaining their
+   # identities, remove some objects which were in the collection before (which get deleted) and possibly
+   # throw in new objects too (which get created), but you can't throw something in there which was
+   # previously attached to some other object, for the same reason that this doesn't fly on insert.
+   #
+   # If you specify a denormalized_count_column, this will be used to store the count of associated
+   # objects in a column on the main table of the parent object.
+   class PropertyMapper::OneToMany < PropertyMapper
+     def self.setter_dependencies_for(options={})
+       features = [*options[:model_class]].map {|klass| [:get_class, klass]}
+       {:target_repo => [IdentitySetRepository, *features]}
+     end
+
+     attr_accessor :target_repo
+
+     attr_reader :writeable, :manual_cascade_delete, :order_property, :order_direction,
+       :foreign_key_property_name, :denormalized_count_column, :model_class
+
+     def initialize(repo, property_name, options)
+       super(repo, property_name)
+
+       @foreign_key_property_name = options[:property] or raise "missing :property arg"
+       @order_property = options[:order_property]
+       @order_direction = options[:order_direction] || :asc
+
+       @extra_properties = {@foreign_key_property_name => true}
+       @extra_properties[@order_property] = true if @order_property
+
+       @writeable = options[:writeable] || false
+       @manual_cascade_delete = options[:manual_cascade_delete] != false
+
+       @denormalized_count_column = options[:denormalized_count_column]
+
+       @model_class = options[:model_class] or raise ArgumentError
+     end
+
+     def foreign_key_mapper
+       @foreign_key_mapper ||= begin
+         mapper = target_repo.mapper(@foreign_key_property_name)
+         unless mapper.is_a?(PropertyMapper::ForeignKey)
+           raise "OneToManyMapper: Expected ForeignKey mapper with name #{@foreign_key_property_name}"
+         end
+         unless mapper.target_repo.can_get_class?(@repository.model_class)
+           raise "OneToManyMapper: ForeignKey mapper's target repo #{mapper.target_repo.inspect} can't get our repository's model_class #{@repository.model_class}"
+         end
+         mapper
+       end
+     end
+
+     def load_value(row, id, properties=nil)
+       properties = (properties || target_repo.default_properties).merge(@extra_properties)
+       target_repo.query(properties) do |dataset, mapping|
+         filter = foreign_key_mapper.make_filter_by_id(id, mapping[@foreign_key_property_name])
+         dataset = dataset.filter(filter)
+         dataset = dataset.order(Sequel.send(@order_direction, @order_property)) if @order_property
+         dataset
+       end.to_a
+     end
+
+     def load_values(rows, ids=nil, properties=nil, &b)
+       properties = (properties || target_repo.default_properties).merge(@extra_properties)
+       query = target_repo.query(properties) do |dataset, mapping|
+         filter = foreign_key_mapper.make_filter_by_ids(ids, mapping[@foreign_key_property_name])
+         dataset = dataset
+           .filter(filter)
+           .select(foreign_key_mapper.column_qualified.as(:_one_to_many_id))
+
+         if @order_property
+           dataset = dataset.order(:_one_to_many_id, target_repo.mapper(@order_property).column_qualified.send(@order_direction))
+         end
+
+         dataset
+       end
+
+       groups = []; id_to_group = {}
+       ids.each_with_index {|id,index| id_to_group[id] = groups[index] = []}
+       query.results_with_rows.each do |entity, row|
+         id_to_group[row[:_one_to_many_id]] << entity
+       end
+       groups.each_with_index(&b)
+     end
+
+
+     # adds a join to the target_repo's table, onto a dataset from the mapper's repository.
+     def add_join(dataset)
+       # FIXME: doesn't take any care to pick a unique alias for the table when joining to it
+       # FIXME: doesn't use mapping to determine id_column
+       dataset.join(target_repo.table_name, foreign_key_mapper.column_name => @repository.identity_mapper.column_name)
+     end
+
+     # help the parent repo find instances whose value for this property contains a particular member.
+     # since we're one-to-many rather than many-to-many, this is relatively simple. we just get the foreign key
+     # property on the proposed member, see if it's set to anything, and if so if that thing exists within our repo.
+     # if it does then it's the only such object, because the member's foreign key can only point at one thing.
+     def get_many_by_member(member)
+       if member.has_key?(@foreign_key_property_name)
+         object = member[@foreign_key_property_name]
+         [object] if object && @repository.contains?(object) # we might not actually contain it
+       else
+         object = target_repo.get_property(member, @foreign_key_property_name)
+         [object] if object # we know we contain it since the target_repo's foreign_key_mapper has us as its target_repo
+       end
+     end
+
+
+     def build_insert_row(entity, table, row, id=nil)
+       return unless @denormalized_count_column && table == @repository.main_table
+       values = entity[@property_name]
+       row[@denormalized_count_column] = (values ? values.length : 0)
+     end
+
+     def post_insert(entity, rows, insert_id)
+       return unless @writeable
+
+       values = entity[@property_name] or return
+
+       # save the associated objects!
+       values.each_with_index do |value, index|
+         # if we allowed this you would potentially be detaching the object from its previous parent,
+         # but then we'd have to apply hooks etc to that object too, so rather avoid:
+         raise "OneToMany mapper for #{@property_name}: already-persisted values are not supported on insert" if value.id
+         set_foreign_key_and_order_properties_on_value(entity, value, index)
+         target_repo.store_new(value)
+       end
+     end
+
+     def set_foreign_key_and_order_properties_on_value(entity, value, index)
+       # ensure their corresponding foreign key mapped property points back at us
+       if (existing_value = value[@foreign_key_property_name])
+         # the associated object has a foreign key mapped property pointing at something else.
+         #
+         # we could have config to allow it to go and update the foreign key in cases like this, but could
+         # be messy in the presence of order columns etc.
+         raise "OneToMany mapper: one of the values for mapped property #{@property_name} has an existing \
+           value for the corresponding #{@foreign_key_property_name} property which is not equal \
+           to our good selves" unless existing_value == entity
+       else
+         value[@foreign_key_property_name] = entity
+       end
+
+       # ensure their order_property corresponds to their order in the array, at least for new members.
+       # (in an update, existing members may change order)
+       if @order_property == :position
+         if !value.id && (existing_index = value[@order_property])
+           raise "OneToMany mapper: one of the new values for mapped property #{@property_name} has an existing \
+             value for the order property #{@order_property} property which is not equal to its index in \
+             the array" unless existing_index == index
+         else
+           value[@order_property] = index
+         end
+       end
+     end
+
+     def pre_update(entity, update_entity)
+       # if an update is specified for this property, find out what the existing values are first:
+       load_value(nil, entity.id) if @writeable && update_entity[@property_name]
+     end
+
+     def build_update_row(entity, table, row, id=nil)
+       return unless @denormalized_count_column && table == @repository.main_table
+       values = entity[@property_name] and row[@denormalized_count_column] = values.length
+     end
+
+     def post_update(entity, update_entity, rows, values_before)
+       return unless @writeable
+       update_values = update_entity[@property_name] or return
+       # delete any values which are no longer around:
+       (values_before - update_values).each {|value| target_repo.delete(value)}
+       # insert any new ones / update any existing ones which remain:
+       update_values.each_with_index do |value, index|
+         raise "OneToMany mapper: already-persisted values are only allowed for property update where they \
+           were already a value of the property beforehand" if value.id && !values_before.include?(value)
+
+         set_foreign_key_and_order_properties_on_value(entity, value, index)
+         # this will insert any new values, or update any existing ones.
+         target_repo.store(value)
+       end
+     end
+
+     def pre_delete(entity)
+       return unless @manual_cascade_delete
+       load_value(nil, entity.id).each {|value| target_repo.delete(value)}
+     end
+   end
+ end
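
A final hedged sketch of the options this mapper takes (names are invented; the comments repository is assumed to have a ForeignKey-mapped :post property pointing back at posts, as foreign_key_mapper above requires):

    mapper = Hold::Sequel::PropertyMapper::OneToMany.new(
      posts_repo, :comments,
      :model_class     => Comment,
      :property        => :post,       # required: name of the ForeignKey property on the target repo
      :order_property  => :position,   # optional ordering property on the associated objects
      :order_direction => :asc,
      :writeable       => true,        # treat comments as wholly-owned sub-components
      :denormalized_count_column => :comments_count  # optional count column on the posts main table
    )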