dm-core 0.9.11 → 0.10.0
- data/.autotest +17 -14
- data/.gitignore +3 -1
- data/FAQ +6 -5
- data/History.txt +5 -50
- data/Manifest.txt +66 -76
- data/QUICKLINKS +1 -1
- data/README.txt +21 -15
- data/Rakefile +6 -7
- data/SPECS +2 -29
- data/TODO +1 -1
- data/deps.rip +2 -0
- data/dm-core.gemspec +11 -15
- data/lib/dm-core.rb +105 -110
- data/lib/dm-core/adapters.rb +135 -16
- data/lib/dm-core/adapters/abstract_adapter.rb +251 -181
- data/lib/dm-core/adapters/data_objects_adapter.rb +482 -534
- data/lib/dm-core/adapters/in_memory_adapter.rb +90 -69
- data/lib/dm-core/adapters/mysql_adapter.rb +22 -115
- data/lib/dm-core/adapters/oracle_adapter.rb +249 -0
- data/lib/dm-core/adapters/postgres_adapter.rb +7 -173
- data/lib/dm-core/adapters/sqlite3_adapter.rb +4 -97
- data/lib/dm-core/adapters/yaml_adapter.rb +116 -0
- data/lib/dm-core/associations/many_to_many.rb +372 -90
- data/lib/dm-core/associations/many_to_one.rb +220 -73
- data/lib/dm-core/associations/one_to_many.rb +319 -255
- data/lib/dm-core/associations/one_to_one.rb +66 -53
- data/lib/dm-core/associations/relationship.rb +561 -156
- data/lib/dm-core/collection.rb +1101 -379
- data/lib/dm-core/core_ext/kernel.rb +12 -0
- data/lib/dm-core/core_ext/symbol.rb +10 -0
- data/lib/dm-core/identity_map.rb +4 -34
- data/lib/dm-core/migrations.rb +1283 -0
- data/lib/dm-core/model.rb +570 -369
- data/lib/dm-core/model/descendant_set.rb +81 -0
- data/lib/dm-core/model/hook.rb +45 -0
- data/lib/dm-core/model/is.rb +32 -0
- data/lib/dm-core/model/property.rb +247 -0
- data/lib/dm-core/model/relationship.rb +335 -0
- data/lib/dm-core/model/scope.rb +90 -0
- data/lib/dm-core/property.rb +808 -273
- data/lib/dm-core/property_set.rb +141 -98
- data/lib/dm-core/query.rb +1037 -483
- data/lib/dm-core/query/conditions/comparison.rb +872 -0
- data/lib/dm-core/query/conditions/operation.rb +221 -0
- data/lib/dm-core/query/direction.rb +43 -0
- data/lib/dm-core/query/operator.rb +84 -0
- data/lib/dm-core/query/path.rb +138 -0
- data/lib/dm-core/query/sort.rb +45 -0
- data/lib/dm-core/repository.rb +210 -94
- data/lib/dm-core/resource.rb +641 -421
- data/lib/dm-core/spec/adapter_shared_spec.rb +294 -0
- data/lib/dm-core/spec/data_objects_adapter_shared_spec.rb +106 -0
- data/lib/dm-core/support/chainable.rb +22 -0
- data/lib/dm-core/support/deprecate.rb +12 -0
- data/lib/dm-core/support/logger.rb +13 -0
- data/lib/dm-core/{naming_conventions.rb → support/naming_conventions.rb} +6 -6
- data/lib/dm-core/transaction.rb +333 -92
- data/lib/dm-core/type.rb +98 -60
- data/lib/dm-core/types/boolean.rb +1 -1
- data/lib/dm-core/types/discriminator.rb +34 -20
- data/lib/dm-core/types/object.rb +7 -4
- data/lib/dm-core/types/paranoid_boolean.rb +11 -9
- data/lib/dm-core/types/paranoid_datetime.rb +11 -9
- data/lib/dm-core/types/serial.rb +3 -3
- data/lib/dm-core/types/text.rb +3 -4
- data/lib/dm-core/version.rb +1 -1
- data/script/performance.rb +102 -109
- data/script/profile.rb +169 -38
- data/spec/lib/adapter_helpers.rb +105 -0
- data/spec/lib/collection_helpers.rb +18 -0
- data/spec/lib/counter_adapter.rb +34 -0
- data/spec/lib/pending_helpers.rb +27 -0
- data/spec/lib/rspec_immediate_feedback_formatter.rb +53 -0
- data/spec/public/associations/many_to_many_spec.rb +193 -0
- data/spec/public/associations/many_to_one_spec.rb +73 -0
- data/spec/public/associations/one_to_many_spec.rb +77 -0
- data/spec/public/associations/one_to_one_spec.rb +156 -0
- data/spec/public/collection_spec.rb +65 -0
- data/spec/public/migrations_spec.rb +359 -0
- data/spec/public/model/relationship_spec.rb +924 -0
- data/spec/public/model_spec.rb +159 -0
- data/spec/public/property_spec.rb +829 -0
- data/spec/public/resource_spec.rb +71 -0
- data/spec/public/sel_spec.rb +44 -0
- data/spec/public/setup_spec.rb +145 -0
- data/spec/public/shared/association_collection_shared_spec.rb +317 -0
- data/spec/public/shared/collection_shared_spec.rb +1670 -0
- data/spec/public/shared/finder_shared_spec.rb +1619 -0
- data/spec/public/shared/resource_shared_spec.rb +924 -0
- data/spec/public/shared/sel_shared_spec.rb +112 -0
- data/spec/public/transaction_spec.rb +129 -0
- data/spec/public/types/discriminator_spec.rb +130 -0
- data/spec/semipublic/adapters/abstract_adapter_spec.rb +30 -0
- data/spec/semipublic/adapters/in_memory_adapter_spec.rb +12 -0
- data/spec/semipublic/adapters/mysql_adapter_spec.rb +17 -0
- data/spec/semipublic/adapters/oracle_adapter_spec.rb +194 -0
- data/spec/semipublic/adapters/postgres_adapter_spec.rb +17 -0
- data/spec/semipublic/adapters/sqlite3_adapter_spec.rb +17 -0
- data/spec/semipublic/adapters/yaml_adapter_spec.rb +12 -0
- data/spec/semipublic/associations/many_to_one_spec.rb +53 -0
- data/spec/semipublic/associations/relationship_spec.rb +194 -0
- data/spec/semipublic/associations_spec.rb +177 -0
- data/spec/semipublic/collection_spec.rb +142 -0
- data/spec/semipublic/property_spec.rb +61 -0
- data/spec/semipublic/query/conditions_spec.rb +528 -0
- data/spec/semipublic/query/path_spec.rb +443 -0
- data/spec/semipublic/query_spec.rb +2626 -0
- data/spec/semipublic/resource_spec.rb +47 -0
- data/spec/semipublic/shared/condition_shared_spec.rb +9 -0
- data/spec/semipublic/shared/resource_shared_spec.rb +126 -0
- data/spec/spec.opts +3 -1
- data/spec/spec_helper.rb +80 -57
- data/tasks/ci.rb +19 -31
- data/tasks/dm.rb +43 -48
- data/tasks/doc.rb +8 -11
- data/tasks/gemspec.rb +5 -5
- data/tasks/hoe.rb +15 -16
- data/tasks/install.rb +8 -10
- metadata +74 -111
- data/lib/dm-core/associations.rb +0 -207
- data/lib/dm-core/associations/relationship_chain.rb +0 -81
- data/lib/dm-core/auto_migrations.rb +0 -105
- data/lib/dm-core/dependency_queue.rb +0 -32
- data/lib/dm-core/hook.rb +0 -11
- data/lib/dm-core/is.rb +0 -16
- data/lib/dm-core/logger.rb +0 -232
- data/lib/dm-core/migrations/destructive_migrations.rb +0 -17
- data/lib/dm-core/migrator.rb +0 -29
- data/lib/dm-core/scope.rb +0 -58
- data/lib/dm-core/support.rb +0 -7
- data/lib/dm-core/support/array.rb +0 -13
- data/lib/dm-core/support/assertions.rb +0 -8
- data/lib/dm-core/support/errors.rb +0 -23
- data/lib/dm-core/support/kernel.rb +0 -11
- data/lib/dm-core/support/symbol.rb +0 -41
- data/lib/dm-core/type_map.rb +0 -80
- data/lib/dm-core/types.rb +0 -19
- data/script/all +0 -4
- data/spec/integration/association_spec.rb +0 -1382
- data/spec/integration/association_through_spec.rb +0 -203
- data/spec/integration/associations/many_to_many_spec.rb +0 -449
- data/spec/integration/associations/many_to_one_spec.rb +0 -163
- data/spec/integration/associations/one_to_many_spec.rb +0 -188
- data/spec/integration/auto_migrations_spec.rb +0 -413
- data/spec/integration/collection_spec.rb +0 -1073
- data/spec/integration/data_objects_adapter_spec.rb +0 -32
- data/spec/integration/dependency_queue_spec.rb +0 -46
- data/spec/integration/model_spec.rb +0 -197
- data/spec/integration/mysql_adapter_spec.rb +0 -85
- data/spec/integration/postgres_adapter_spec.rb +0 -731
- data/spec/integration/property_spec.rb +0 -253
- data/spec/integration/query_spec.rb +0 -514
- data/spec/integration/repository_spec.rb +0 -61
- data/spec/integration/resource_spec.rb +0 -513
- data/spec/integration/sqlite3_adapter_spec.rb +0 -352
- data/spec/integration/sti_spec.rb +0 -273
- data/spec/integration/strategic_eager_loading_spec.rb +0 -156
- data/spec/integration/transaction_spec.rb +0 -75
- data/spec/integration/type_spec.rb +0 -275
- data/spec/lib/logging_helper.rb +0 -18
- data/spec/lib/mock_adapter.rb +0 -27
- data/spec/lib/model_loader.rb +0 -100
- data/spec/lib/publicize_methods.rb +0 -28
- data/spec/models/content.rb +0 -16
- data/spec/models/vehicles.rb +0 -34
- data/spec/models/zoo.rb +0 -48
- data/spec/unit/adapters/abstract_adapter_spec.rb +0 -133
- data/spec/unit/adapters/adapter_shared_spec.rb +0 -15
- data/spec/unit/adapters/data_objects_adapter_spec.rb +0 -632
- data/spec/unit/adapters/in_memory_adapter_spec.rb +0 -98
- data/spec/unit/adapters/postgres_adapter_spec.rb +0 -133
- data/spec/unit/associations/many_to_many_spec.rb +0 -32
- data/spec/unit/associations/many_to_one_spec.rb +0 -159
- data/spec/unit/associations/one_to_many_spec.rb +0 -393
- data/spec/unit/associations/one_to_one_spec.rb +0 -7
- data/spec/unit/associations/relationship_spec.rb +0 -71
- data/spec/unit/associations_spec.rb +0 -242
- data/spec/unit/auto_migrations_spec.rb +0 -111
- data/spec/unit/collection_spec.rb +0 -182
- data/spec/unit/data_mapper_spec.rb +0 -35
- data/spec/unit/identity_map_spec.rb +0 -126
- data/spec/unit/is_spec.rb +0 -80
- data/spec/unit/migrator_spec.rb +0 -33
- data/spec/unit/model_spec.rb +0 -321
- data/spec/unit/naming_conventions_spec.rb +0 -36
- data/spec/unit/property_set_spec.rb +0 -90
- data/spec/unit/property_spec.rb +0 -753
- data/spec/unit/query_spec.rb +0 -571
- data/spec/unit/repository_spec.rb +0 -93
- data/spec/unit/resource_spec.rb +0 -649
- data/spec/unit/scope_spec.rb +0 -142
- data/spec/unit/transaction_spec.rb +0 -493
- data/spec/unit/type_map_spec.rb +0 -114
- data/spec/unit/type_spec.rb +0 -119
data/lib/dm-core/core_ext/symbol.rb
ADDED
@@ -0,0 +1,10 @@
+class Symbol
+  (DataMapper::Query::Conditions::Comparison.slugs | [ :not, :asc, :desc ]).each do |sym|
+    class_eval <<-RUBY, __FILE__, __LINE__ + 1
+      def #{sym}
+        #{"warn \"explicit use of '#{sym}' operator is deprecated (#{caller[0]})\"" if sym == :eql || sym == :in}
+        DataMapper::Query::Operator.new(self, #{sym.inspect})
+      end
+    RUBY
+  end
+end # class Symbol
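
The block above is the core of the symbol-based query operator syntax: each generated method returns a DataMapper::Query::Operator, and explicit :eql/:in now emit a deprecation warning. A minimal usage sketch against a hypothetical Person model (the model, connection URI, and data below are illustrative, not part of this diff):

  require 'dm-core'

  DataMapper.setup(:default, 'sqlite3::memory:')  # illustrative connection URI

  class Person
    include DataMapper::Resource
    property :id,   Serial
    property :name, String
    property :age,  Integer
  end

  # Each operator method wraps the symbol in a Query::Operator that the
  # query layer turns into a condition or sort direction.
  adults  = Person.all(:age.gt => 18)
  not_dan = Person.all(:name.not => 'Dan')
  by_age  = Person.all(:order => [ :age.desc ])

  # :eql and :in still work, but trigger the warning added above;
  # the plain Hash form (:age => 21) is the preferred spelling.
  legacy  = Person.all(:age.eql => 21)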
data/lib/dm-core/identity_map.rb
CHANGED
@@ -2,41 +2,11 @@ module DataMapper
 
   # Tracks objects to help ensure that each object gets loaded only once.
   # See: http://www.martinfowler.com/eaaCatalog/identityMap.html
-  class IdentityMap
-
-    def get(key)
-      @cache[key] || (@second_level_cache && @second_level_cache.get(key))
-    end
+  class IdentityMap < Hash
+    extend Deprecate
 
-
+    deprecate :get, :[]
+    deprecate :set, :[]=
 
-    # Add a resource to the IdentityMap
-    def set(key, resource)
-      @second_level_cache.set(key, resource) if @second_level_cache
-      @cache[key] = resource
-    end
-
-    alias []= set
-
-    # Remove a resource from the IdentityMap
-    def delete(key)
-      @second_level_cache.delete(key) if @second_level_cache
-      @cache.delete(key)
-    end
-
-    private
-
-    def initialize(second_level_cache = nil)
-      @cache = {}
-      @second_level_cache = second_level_cache
-    end
-
-    def cache
-      @cache
-    end
-
-    def method_missing(method, *args, &block)
-      cache.__send__(method, *args, &block)
-    end
   end # class IdentityMap
 end # module DataMapper
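
In practical terms the change above makes an identity map a plain Hash (keyed by a resource's key values) with the old accessors kept only as deprecated shims, and drops the second-level cache support. A minimal sketch of the API difference (the key and stored value are stand-ins for a real resource key and resource):

  require 'dm-core'

  map      = DataMapper::IdentityMap.new
  resource = Object.new  # stand-in for a loaded DataMapper::Resource

  # Hash-style access is the supported API in 0.10.0.
  map[[1]] = resource
  map[[1]]                # => resource

  # The old API still responds, but warns and forwards via Deprecate.
  map.set([1], resource)
  map.get([1])            # => resource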
data/lib/dm-core/migrations.rb
ADDED
@@ -0,0 +1,1283 @@
+# TODO: move to dm-more/dm-migrations
+
+module DataMapper
+  module Migrations
+    module SingletonMethods
+      # destructively migrates the repository upwards to match model definitions
+      #
+      # @param [Symbol] name repository to act on, :default is the default
+      #
+      # @api public
+      def migrate!(repository_name = nil)
+        repository(repository_name).migrate!
+      end
+
+      # drops and recreates the repository upwards to match model definitions
+      #
+      # @param [Symbol] name repository to act on, :default is the default
+      #
+      # @api public
+      def auto_migrate!(repository_name = nil)
+        auto_migrate_down!(repository_name)
+        auto_migrate_up!(repository_name)
+      end
+
+      # TODO: document
+      # @api public
+      def auto_upgrade!(repository_name = nil)
+        repository_execute(:auto_upgrade!, repository_name)
+      end
+
+      private
+
+      # TODO: document
+      # @api private
+      def auto_migrate_down!(repository_name)
+        repository_execute(:auto_migrate_down!, repository_name)
+      end
+
+      # TODO: document
+      # @api private
+      def auto_migrate_up!(repository_name)
+        repository_execute(:auto_migrate_up!, repository_name)
+      end
+
+      # TODO: document
+      # @api private
+      def repository_execute(method, repository_name)
+        DataMapper::Model.descendants.each do |model|
+          model.send(method, repository_name || model.default_repository_name)
+        end
+      end
+    end
+
+    module DataObjectsAdapter
+      # TODO: document
+      # @api private
+      def self.included(base)
+        base.extend ClassMethods
+
+        DataMapper.extend(Migrations::SingletonMethods)
+
+        [ :Repository, :Model ].each do |name|
+          DataMapper.const_get(name).send(:include, Migrations.const_get(name))
+        end
+      end
+
+      # Returns whether the storage_name exists.
+      #
+      # @param [String] storage_name
+      #   a String defining the name of a storage, for example a table name.
+      #
+      # @return [Boolean]
+      #   true if the storage exists
+      #
+      # @api semipublic
+      def storage_exists?(storage_name)
+        statement = <<-SQL.compress_lines
+          SELECT COUNT(*)
+          FROM "information_schema"."tables"
+          WHERE "table_type" = 'BASE TABLE'
+          AND "table_schema" = ?
+          AND "table_name" = ?
+        SQL
+
+        query(statement, schema_name, storage_name).first > 0
+      end
+
+      # Returns whether the field exists.
+      #
+      # @param [String] storage_name
+      #   a String defining the name of a storage, for example a table name.
+      # @param [String] field
+      #   a String defining the name of a field, for example a column name.
+      #
+      # @return [Boolean]
+      #   true if the field exists.
+      #
+      # @api semipublic
+      def field_exists?(storage_name, column_name)
+        statement = <<-SQL.compress_lines
+          SELECT COUNT(*)
+          FROM "information_schema"."columns"
+          WHERE "table_schema" = ?
+          AND "table_name" = ?
+          AND "column_name" = ?
+        SQL
+
+        query(statement, schema_name, storage_name, column_name).first > 0
+      end
+
+      # TODO: document
+      # @api semipublic
+      def upgrade_model_storage(model)
+        properties = model.properties_with_subclasses(name)
+
+        if success = create_model_storage(model)
+          return properties
+        end
+
+        table_name = model.storage_name(name)
+
+        with_connection do |connection|
+          properties.map do |property|
+            schema_hash = property_schema_hash(property)
+            next if field_exists?(table_name, schema_hash[:name])
+
+            statement = alter_table_add_column_statement(connection, table_name, schema_hash)
+            command = connection.create_command(statement)
+            command.execute_non_query
+
+            property
+          end.compact
+        end
+      end
+
+      # TODO: document
+      # @api semipublic
+      def create_model_storage(model)
+        properties = model.properties_with_subclasses(name)
+
+        return false if storage_exists?(model.storage_name(name))
+        return false if properties.empty?
+
+        with_connection do |connection|
+          statement = create_table_statement(connection, model, properties)
+          command = connection.create_command(statement)
+          command.execute_non_query
+
+          (create_index_statements(model) + create_unique_index_statements(model)).each do |statement|
+            command = connection.create_command(statement)
+            command.execute_non_query
+          end
+        end
+
+        true
+      end
+
+      # TODO: document
+      # @api semipublic
+      def destroy_model_storage(model)
+        return true unless supports_drop_table_if_exists? || storage_exists?(model.storage_name(name))
+        execute(drop_table_statement(model))
+        true
+      end
+
+      module SQL #:nodoc:
+        # private ## This cannot be private for current migrations
+
+        # Adapters that support AUTO INCREMENT fields for CREATE TABLE
+        # statements should overwrite this to return true
+        #
+        # @api private
+        def supports_serial?
+          false
+        end
+
+        # TODO: document
+        # @api private
+        def supports_drop_table_if_exists?
+          false
+        end
+
+        # TODO: document
+        # @api private
+        def schema_name
+          raise NotImplementedError, "#{self.class}#schema_name not implemented"
+        end
+
+        # TODO: document
+        # @api private
+        def alter_table_add_column_statement(connection, table_name, schema_hash)
+          "ALTER TABLE #{quote_name(table_name)} ADD COLUMN #{property_schema_statement(connection, schema_hash)}"
+        end
+
+        # TODO: document
+        # @api private
+        def create_table_statement(connection, model, properties)
+          statement = <<-SQL.compress_lines
+            CREATE TABLE #{quote_name(model.storage_name(name))}
+            (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')},
+            PRIMARY KEY(#{ properties.key.map { |property| quote_name(property.field) }.join(', ')}))
+          SQL
+
+          statement
+        end
+
+        # TODO: document
+        # @api private
+        def drop_table_statement(model)
+          if supports_drop_table_if_exists?
+            "DROP TABLE IF EXISTS #{quote_name(model.storage_name(name))}"
+          else
+            "DROP TABLE #{quote_name(model.storage_name(name))}"
+          end
+        end
+
+        # TODO: document
+        # @api private
+        def create_index_statements(model)
+          table_name = model.storage_name(name)
+          model.properties(name).indexes.map do |index_name, fields|
+            <<-SQL.compress_lines
+              CREATE INDEX #{quote_name("index_#{table_name}_#{index_name}")} ON
+              #{quote_name(table_name)} (#{fields.map { |field| quote_name(field) }.join(', ')})
+            SQL
+          end
+        end
+
+        # TODO: document
+        # @api private
+        def create_unique_index_statements(model)
+          table_name = model.storage_name(name)
+          model.properties(name).unique_indexes.map do |index_name, fields|
+            <<-SQL.compress_lines
+              CREATE UNIQUE INDEX #{quote_name("unique_#{table_name}_#{index_name}")} ON
+              #{quote_name(table_name)} (#{fields.map { |field| quote_name(field) }.join(', ')})
+            SQL
+          end
+        end
+
+        # TODO: document
+        # @api private
+        def property_schema_hash(property)
+          schema = (self.class.type_map[property.type] || self.class.type_map[property.primitive]).merge(:name => property.field)
+
+          if property.primitive == String && schema[:primitive] != 'TEXT' && schema[:primitive] != 'CLOB'
+            schema[:length] = property.length
+          elsif property.primitive == BigDecimal || property.primitive == Float
+            schema[:precision] = property.precision
+            schema[:scale] = property.scale
+          end
+
+          schema[:nullable] = property.nullable?
+          schema[:serial] = property.serial?
+
+          if property.default.nil? || property.default.respond_to?(:call)
+            # remove the default if the property is not nullable
+            schema.delete(:default) unless property.nullable?
+          else
+            if property.type.respond_to?(:dump)
+              schema[:default] = property.type.dump(property.default, property)
+            else
+              schema[:default] = property.default
+            end
+          end
+
+          schema
+        end
+
+        # TODO: document
+        # @api private
+        def property_schema_statement(connection, schema)
+          statement = quote_name(schema[:name])
+          statement << " #{schema[:primitive]}"
+
+          if schema[:precision] && schema[:scale]
+            statement << "(#{[ :precision, :scale ].map { |key| connection.quote_value(schema[key]) }.join(', ')})"
+          elsif schema[:length]
+            statement << "(#{connection.quote_value(schema[:length])})"
+          end
+
+          statement << " DEFAULT #{connection.quote_value(schema[:default])}" if schema.key?(:default)
+          statement << ' NOT NULL' unless schema[:nullable]
+          statement
+        end
+      end # module SQL
+
+      include SQL
+
+      module ClassMethods
+        # Default types for all data object based adapters.
+        #
+        # @return [Hash] default types for data objects adapters.
+        #
+        # @api private
+        def type_map
+          length = Property::DEFAULT_LENGTH
+          precision = Property::DEFAULT_PRECISION
+          scale = Property::DEFAULT_SCALE_BIGDECIMAL
+
+          @type_map ||= {
+            Integer => { :primitive => 'INTEGER' },
+            String => { :primitive => 'VARCHAR', :length => length },
+            Class => { :primitive => 'VARCHAR', :length => length },
+            BigDecimal => { :primitive => 'DECIMAL', :precision => precision, :scale => scale },
+            Float => { :primitive => 'FLOAT', :precision => precision },
+            DateTime => { :primitive => 'TIMESTAMP' },
+            Date => { :primitive => 'DATE' },
+            Time => { :primitive => 'TIMESTAMP' },
+            TrueClass => { :primitive => 'BOOLEAN' },
+            Types::Object => { :primitive => 'TEXT' },
+            Types::Text => { :primitive => 'TEXT' },
+          }.freeze
+        end
+      end # module ClassMethods
+    end # module DataObjectsAdapter
+
+    module MysqlAdapter
+      DEFAULT_ENGINE = 'InnoDB'.freeze
+      DEFAULT_CHARACTER_SET = 'utf8'.freeze
+      DEFAULT_COLLATION = 'utf8_unicode_ci'.freeze
+
+      # TODO: document
+      # @api private
+      def self.included(base)
+        base.extend ClassMethods
+      end
+
+      # TODO: document
+      # @api semipublic
+      def storage_exists?(storage_name)
+        query('SHOW TABLES LIKE ?', storage_name).first == storage_name
+      end
+
+      # TODO: document
+      # @api semipublic
+      def field_exists?(storage_name, field)
+        result = query("SHOW COLUMNS FROM #{quote_name(storage_name)} LIKE ?", field).first
+        result ? result.field == field : false
+      end
+
+      module SQL #:nodoc:
+        # private ## This cannot be private for current migrations
+
+        # TODO: document
+        # @api private
+        def supports_serial?
+          true
+        end
+
+        # TODO: document
+        # @api private
+        def supports_drop_table_if_exists?
+          true
+        end
+
+        # TODO: document
+        # @api private
+        def schema_name
+          # TODO: is there a cleaner way to find out the current DB we are connected to?
+          normalized_uri.path.split('/').last
+        end
+
+        # TODO: update dkubb/dm-more/dm-migrations to use schema_name and remove this
+        alias db_name schema_name
+
+        # TODO: document
+        # @api private
+        def create_table_statement(connection, model, properties)
+          "#{super} ENGINE = #{DEFAULT_ENGINE} CHARACTER SET #{character_set} COLLATE #{collation}"
+        end
+
+        # TODO: document
+        # @api private
+        def property_schema_hash(property)
+          schema = super
+
+          if schema[:primitive] == 'TEXT'
+            schema[:primitive] = text_column_statement(property.length)
+            schema.delete(:default)
+          end
+
+          if property.primitive == Integer && property.min && property.max
+            schema[:primitive] = integer_column_statement(property.min..property.max)
+          end
+
+          schema
+        end
+
+        # TODO: document
+        # @api private
+        def property_schema_statement(connection, schema)
+          statement = super
+
+          if supports_serial? && schema[:serial]
+            statement << ' AUTO_INCREMENT'
+          end
+
+          statement
+        end
+
+        # TODO: document
+        # @api private
+        def character_set
+          @character_set ||= show_variable('character_set_connection') || DEFAULT_CHARACTER_SET
+        end
+
+        # TODO: document
+        # @api private
+        def collation
+          @collation ||= show_variable('collation_connection') || DEFAULT_COLLATION
+        end
+
+        # TODO: document
+        # @api private
+        def show_variable(name)
+          result = query('SHOW VARIABLES LIKE ?', name).first
+          result ? result.value.freeze : nil
+        end
+
+        private
+
+        # Return SQL statement for the text column
+        #
+        # @param [Integer] length
+        #   the max allowed length
+        #
+        # @return [String]
+        #   the statement to create the text column
+        #
+        # @api private
+        def text_column_statement(length)
+          if length < 2**8 then 'TINYTEXT'
+          elsif length < 2**16 then 'TEXT'
+          elsif length < 2**24 then 'MEDIUMTEXT'
+          elsif length < 2**32 then 'LONGTEXT'
+
+          # http://www.postgresql.org/files/documentation/books/aw_pgsql/node90.html
+          # Implies that PostgreSQL doesn't have a size limit on text
+          # fields, so this param validation happens here instead of
+          # DM::Property#initialize.
+          else
+            raise ArgumentError, "length of #{length} exceeds maximum size supported"
+          end
+        end
+
+        # Return SQL statement for the integer column
+        #
+        # @param [Range] range
+        #   the min/max allowed integers
+        #
+        # @return [String]
+        #   the statement to create the integer column
+        #
+        # @api private
+        def integer_column_statement(range)
+          '%s(%d)%s' % [
+            integer_column_type(range),
+            integer_display_size(range),
+            integer_statement_sign(range),
+          ]
+        end
+
+        # Return the integer column type
+        #
+        # Use the smallest available column type that will satisfy the
+        # allowable range of numbers
+        #
+        # @param [Range] range
+        #   the min/max allowed integers
+        #
+        # @return [String]
+        #   the column type
+        #
+        # @api private
+        def integer_column_type(range)
+          if range.first < 0
+            signed_integer_column_type(range)
+          else
+            unsigned_integer_column_type(range)
+          end
+        end
+
+        # Return the signed integer column type
+        #
+        # @param [Range] range
+        #   the min/max allowed integers
+        #
+        # @return [String]
+        #
+        # @api private
+        def signed_integer_column_type(range)
+          min = range.first
+          max = range.last
+
+          if min >= -2**7 && max < 2**7 then 'TINYINT'
+          elsif min >= -2**15 && max < 2**15 then 'SMALLINT'
+          elsif min >= -2**23 && max < 2**23 then 'MEDIUMINT'
+          elsif min >= -2**31 && max < 2**31 then 'INT'
+          elsif min >= -2**63 && max < 2**63 then 'BIGINT'
+          else
+            raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
+          end
+        end
+
+        # Return the unsigned integer column type
+        #
+        # @param [Range] range
+        #   the min/max allowed integers
+        #
+        # @return [String]
+        #
+        # @api private
+        def unsigned_integer_column_type(range)
+          max = range.last
+
+          if max < 2**8 then 'TINYINT'
+          elsif max < 2**16 then 'SMALLINT'
+          elsif max < 2**24 then 'MEDIUMINT'
+          elsif max < 2**32 then 'INT'
+          elsif max < 2**64 then 'BIGINT'
+          else
+            raise ArgumentError, "min #{range.first} and max #{max} exceeds supported range"
+          end
+        end
+
+        # Return the integer column display size
+        #
+        # Adjust the display size to match the maximum number of
+        # expected digits. This is more for documentation purposes
+        # and does not affect what can actually be stored in a
+        # specific column
+        #
+        # @param [Range] range
+        #   the min/max allowed integers
+        #
+        # @return [Integer]
+        #   the display size for the integer
+        #
+        # @api private
+        def integer_display_size(range)
+          [ range.first.to_s.length, range.last.to_s.length ].max
+        end
+
+        # Return the integer sign statement
+        #
+        # @param [Range] range
+        #   the min/max allowed integers
+        #
+        # @return [String, nil]
+        #   statement if unsigned, nil if signed
+        #
+        # @api private
+        def integer_statement_sign(range)
+          ' UNSIGNED' unless range.first < 0
+        end
+      end # module SQL
+
+      include SQL
+
+      module ClassMethods
+        # Types for MySQL databases.
+        #
+        # @return [Hash] types for MySQL databases.
+        #
+        # @api private
+        def type_map
+          @type_map ||= super.merge(
+            DateTime => { :primitive => 'DATETIME' },
+            Time => { :primitive => 'DATETIME' }
+          ).freeze
+        end
+      end # module ClassMethods
+    end # module MysqlAdapter
+
+    module PostgresAdapter
+      # TODO: document
+      # @api private
+      def self.included(base)
+        base.extend ClassMethods
+      end
+
+      # TODO: document
+      # @api semipublic
+      def upgrade_model_storage(model)
+        without_notices { super }
+      end
+
+      # TODO: document
+      # @api semipublic
+      def create_model_storage(model)
+        without_notices { super }
+      end
+
+      # TODO: document
+      # @api semipublic
+      def destroy_model_storage(model)
+        if supports_drop_table_if_exists?
+          without_notices { super }
+        else
+          super
+        end
+      end
+
+      module SQL #:nodoc:
+        # private ## This cannot be private for current migrations
+
+        # TODO: document
+        # @api private
+        def supports_drop_table_if_exists?
+          @supports_drop_table_if_exists ||= postgres_version >= '8.2'
+        end
+
+        # TODO: document
+        # @api private
+        def schema_name
+          @schema_name ||= query('SELECT current_schema()').first.freeze
+        end
+
+        # TODO: document
+        # @api private
+        def postgres_version
+          @postgres_version ||= query('SELECT version()').first.split[1].freeze
+        end
+
+        # TODO: document
+        # @api private
+        def without_notices
+          # execute the block with NOTICE messages disabled
+          begin
+            execute('SET client_min_messages = warning')
+            yield
+          ensure
+            execute('RESET client_min_messages')
+          end
+        end
+
+        # TODO: document
+        # @api private
+        def property_schema_hash(property)
+          schema = super
+
+          # Postgres does not support precision and scale for Float
+          if property.primitive == Float
+            schema.delete(:precision)
+            schema.delete(:scale)
+          end
+
+          if property.primitive == Integer && property.min && property.max
+            schema[:primitive] = integer_column_statement(property.min..property.max)
+          end
+
+          if schema[:serial]
+            schema[:primitive] = serial_column_statement(property.min..property.max)
+          end
+
+          schema
+        end
+
+        private
+
+        # Return SQL statement for the integer column
+        #
+        # @param [Range] range
+        #   the min/max allowed integers
+        #
+        # @return [String]
+        #   the statement to create the integer column
+        #
+        # @api private
+        def integer_column_statement(range)
+          min = range.first
+          max = range.last
+
+          if min >= -2**15 && max < 2**15 then 'SMALLINT'
+          elsif min >= -2**31 && max < 2**31 then 'INTEGER'
+          elsif min >= -2**63 && max < 2**63 then 'BIGINT'
+          else
+            raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
+          end
+        end
+
+        # Return SQL statement for the serial column
+        #
+        # @param [Integer] max
+        #   the max allowed integer
+        #
+        # @return [String]
+        #   the statement to create the serial column
+        #
+        # @api private
+        def serial_column_statement(range)
+          max = range.last
+
+          if max.nil? || max < 2**31 then 'SERIAL'
+          elsif max < 2**63 then 'BIGSERIAL'
+          else
+            raise ArgumentError, "min #{range.first} and max #{max} exceeds supported range"
+          end
+        end
+      end # module SQL
+
+      include SQL
+
+      module ClassMethods
+        # Types for PostgreSQL databases.
+        #
+        # @return [Hash] types for PostgreSQL databases.
+        #
+        # @api private
+        def type_map
+          precision = Property::DEFAULT_PRECISION
+          scale = Property::DEFAULT_SCALE_BIGDECIMAL
+
+          @type_map ||= super.merge(
+            BigDecimal => { :primitive => 'NUMERIC', :precision => precision, :scale => scale },
+            Float => { :primitive => 'DOUBLE PRECISION' }
+          ).freeze
+        end
+      end # module ClassMethods
+    end # module PostgresAdapter
+
+    module Sqlite3Adapter
+      # TODO: document
+      # @api private
+      def self.included(base)
+        base.extend ClassMethods
+      end
+
+      # TODO: document
+      # @api semipublic
+      def storage_exists?(storage_name)
+        query_table(storage_name).size > 0
+      end
+
+      # TODO: document
+      # @api semipublic
+      def field_exists?(storage_name, column_name)
+        query_table(storage_name).any? do |row|
+          row.name == column_name
+        end
+      end
+
+      module SQL #:nodoc:
+        # private ## This cannot be private for current migrations
+
+        # TODO: document
+        # @api private
+        def supports_serial?
+          @supports_serial ||= sqlite_version >= '3.1.0'
+        end
+
+        # TODO: document
+        # @api private
+        def supports_drop_table_if_exists?
+          @supports_drop_table_if_exists ||= sqlite_version >= '3.3.0'
+        end
+
+        # TODO: document
+        # @api private
+        def query_table(table_name)
+          query("PRAGMA table_info(#{quote_name(table_name)})")
+        end
+
+        # TODO: document
+        # @api private
+        def create_table_statement(connection, model, properties)
+          statement = <<-SQL.compress_lines
+            CREATE TABLE #{quote_name(model.storage_name(name))}
+            (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')}
+          SQL
+
+          # skip adding the primary key if one of the columns is serial. In
+          # SQLite the serial column must be the primary key, so it has already
+          # been defined
+          unless properties.any? { |property| property.serial? }
+            statement << ", PRIMARY KEY(#{properties.key.map { |property| quote_name(property.field) }.join(', ')})"
+          end
+
+          statement << ')'
+          statement
+        end
+
+        # TODO: document
+        # @api private
+        def property_schema_statement(connection, schema)
+          statement = super
+
+          if supports_serial? && schema[:serial]
+            statement << ' PRIMARY KEY AUTOINCREMENT'
+          end
+
+          statement
+        end
+
+        # TODO: document
+        # @api private
+        def sqlite_version
+          @sqlite_version ||= query('SELECT sqlite_version(*)').first.freeze
+        end
+      end # module SQL
+
+      include SQL
+
+      module ClassMethods
+        # Types for SQLite 3 databases.
+        #
+        # @return [Hash] types for SQLite 3 databases.
+        #
+        # @api private
+        def type_map
+          @type_map ||= super.merge(Class => { :primitive => 'VARCHAR' }).freeze
+        end
+      end # module ClassMethods
+    end # module Sqlite3Adapter
+
+    module OracleAdapter
+      # TODO: document
+      # @api private
+      def self.included(base)
+        base.extend ClassMethods
+      end
+
+      # TODO: document
+      # @api semipublic
+      def storage_exists?(storage_name)
+        statement = <<-SQL.compress_lines
+          SELECT COUNT(*)
+          FROM all_tables
+          WHERE owner = ?
+          AND table_name = ?
+        SQL
+
+        query(statement, schema_name, oracle_upcase(storage_name)).first > 0
+      end
+
+      # TODO: document
+      # @api semipublic
+      def sequence_exists?(sequence_name)
+        return false unless sequence_name
+        statement = <<-SQL.compress_lines
+          SELECT COUNT(*)
+          FROM all_sequences
+          WHERE sequence_owner = ?
+          AND sequence_name = ?
+        SQL
+
+        query(statement, schema_name, oracle_upcase(sequence_name)).first > 0
+      end
+
+      # TODO: document
+      # @api semipublic
+      def field_exists?(storage_name, field_name)
+        statement = <<-SQL.compress_lines
+          SELECT COUNT(*)
+          FROM all_tab_columns
+          WHERE owner = ?
+          AND table_name = ?
+          AND column_name = ?
+        SQL
+
+        query(statement, schema_name, oracle_upcase(storage_name), oracle_upcase(field_name)).first > 0
+      end
+
+      # TODO: document
+      # @api semipublic
+      def storage_fields(storage_name)
+        statement = <<-SQL.compress_lines
+          SELECT column_name
+          FROM all_tab_columns
+          WHERE owner = ?
+          AND table_name = ?
+        SQL
+
+        query(statement, schema_name, oracle_upcase(storage_name))
+      end
+
+      # TODO: document
+      # @api semipublic
+      def create_model_storage(model)
+        properties = model.properties_with_subclasses(name)
+        table_name = model.storage_name(name)
+        truncate_or_delete = self.class.auto_migrate_with
+        table_is_truncated = truncate_or_delete && @truncated_tables && @truncated_tables[table_name]
+
+        return false if storage_exists?(table_name) && !table_is_truncated
+        return false if properties.empty?
+
+        with_connection do |connection|
+          # if table was truncated then check if all columns for properties are present
+          # TODO: check all other column definition options
+          if table_is_truncated && storage_has_all_fields?(table_name, properties)
+            @truncated_tables[table_name] = nil
+          else
+            # forced drop of table if properties are different
+            if truncate_or_delete
+              destroy_model_storage(model, true)
+            end
+
+            statement = create_table_statement(connection, model, properties)
+            command = connection.create_command(statement)
+            command.execute_non_query
+
+            (create_index_statements(model) + create_unique_index_statements(model)).each do |statement|
+              command = connection.create_command(statement)
+              command.execute_non_query
+            end
+
+            # added creation of sequence
+            create_sequence_statements(model).each do |statement|
+              command = connection.create_command(statement)
+              command.execute_non_query
+            end
+          end
+
+        end
+
+        true
+      end
+
+      # TODO: document
+      # @api semipublic
+      def destroy_model_storage(model, forced = false)
+        table_name = model.storage_name(name)
+        truncate_or_delete = self.class.auto_migrate_with
+        if storage_exists?(table_name)
+          if truncate_or_delete && !forced
+            statement = case truncate_or_delete
+                        when :truncate
+                          truncate_table_statement(model)
+                        when :delete
+                          delete_table_statement(model)
+                        else
+                          raise ArgumentError, "Unsupported auto_migrate_with option"
+                        end
+            execute(statement)
+            @truncated_tables ||= {}
+            @truncated_tables[table_name] = true
+          else
+            execute(drop_table_statement(model))
+            @truncated_tables[table_name] = nil if @truncated_tables
+          end
+        end
+        # added destroy of sequences
+        reset_sequences = self.class.auto_migrate_reset_sequences
+        table_is_truncated = @truncated_tables && @truncated_tables[table_name]
+        unless truncate_or_delete && !reset_sequences && !forced
+          if sequence_exists?(model_sequence_name(model))
+            if table_is_truncated && !forced
+              statement = reset_sequence_statement(model)
+            else
+              statement = drop_sequence_statement(model)
+            end
+            execute(statement) if statement
+          end
+        end
+        true
+      end
+
+      private
+
+      def storage_has_all_fields?(table_name, properties)
+        properties.map{|p| oracle_upcase(p.field)}.sort == storage_fields(table_name).sort
+      end
+
+      # If table or column name contains just lowercase characters then do uppercase
+      # as uppercase version will be used in Oracle data dictionary tables
+      def oracle_upcase(name)
+        name =~ /[A-Z]/ ? name : name.upcase
+      end
+
+      module SQL #:nodoc:
+        # private ## This cannot be private for current migrations
+
+        # TODO: document
+        # @api private
+        def schema_name
+          @schema_name ||= query("SELECT SYS_CONTEXT('userenv','current_schema') FROM dual").first.freeze
+        end
+
+        # TODO: document
+        # @api private
+        def create_sequence_statements(model)
+          table_name = model.storage_name(name)
+          serial = model.serial(name)
+
+          statements = []
+          if sequence_name = model_sequence_name(model)
+            statements << <<-SQL.compress_lines
+              CREATE SEQUENCE #{quote_name(sequence_name)} NOCACHE
+            SQL
+
+            # create trigger only if custom sequence name was not specified
+            unless serial.options[:sequence]
+              statements << <<-SQL.compress_lines
+                CREATE OR REPLACE TRIGGER #{quote_name(default_trigger_name(table_name))}
+                BEFORE INSERT ON #{quote_name(table_name)} FOR EACH ROW
+                BEGIN
+                  IF inserting THEN
+                    IF :new.#{quote_name(serial.field)} IS NULL THEN
+                      SELECT #{quote_name(sequence_name)}.NEXTVAL INTO :new.#{quote_name(serial.field)} FROM dual;
+                    END IF;
+                  END IF;
+                END;
+              SQL
+            end
+          end
+
+          statements
+        end
+
+        # TODO: document
+        # @api private
+        def drop_sequence_statement(model)
+          if sequence_name = model_sequence_name(model)
+            "DROP SEQUENCE #{quote_name(sequence_name)}"
+          else
+            nil
+          end
+        end
+
+        # TODO: document
+        # @api private
+        def reset_sequence_statement(model)
+          if sequence_name = model_sequence_name(model)
+            <<-SQL.compress_lines
+              DECLARE
+                cval INTEGER;
+              BEGIN
+                SELECT #{quote_name(sequence_name)}.NEXTVAL INTO cval FROM dual;
+                EXECUTE IMMEDIATE 'ALTER SEQUENCE #{quote_name(sequence_name)} INCREMENT BY -' || cval || ' MINVALUE 0';
+                SELECT #{quote_name(sequence_name)}.NEXTVAL INTO cval FROM dual;
+                EXECUTE IMMEDIATE 'ALTER SEQUENCE #{quote_name(sequence_name)} INCREMENT BY 1';
+              END;
+            SQL
+          else
+            nil
+          end
+        end
+
+        # TODO: document
+        # @api private
+        def truncate_table_statement(model)
+          "TRUNCATE TABLE #{quote_name(model.storage_name(name))}"
+        end
+
+        # TODO: document
+        # @api private
+        def delete_table_statement(model)
+          "DELETE FROM #{quote_name(model.storage_name(name))}"
+        end
+
+        private
+
+        def model_sequence_name(model)
+          table_name = model.storage_name(name)
+          serial = model.serial(name)
+
+          if serial
+            serial.options[:sequence] || default_sequence_name(table_name)
+          else
+            nil
+          end
+        end
+
+        def default_sequence_name(table_name)
+          # truncate table name if necessary to fit in max length of identifier
+          "#{table_name[0,self.class::IDENTIFIER_MAX_LENGTH-4]}_seq"
+        end
+
+        def default_trigger_name(table_name)
+          # truncate table name if necessary to fit in max length of identifier
+          "#{table_name[0,self.class::IDENTIFIER_MAX_LENGTH-4]}_pkt"
+        end
+
+      end # module SQL
+
+      include SQL
+
+      module ClassMethods
+        # Types for Oracle databases.
+        #
+        # @return [Hash] types for Oracle databases.
+        #
+        # @api private
+        def type_map
+          length = Property::DEFAULT_LENGTH
+          precision = Property::DEFAULT_PRECISION
+          scale = Property::DEFAULT_SCALE_BIGDECIMAL
+
+          @type_map ||= {
+            Integer => { :primitive => 'NUMBER', :precision => precision, :scale => 0 },
+            String => { :primitive => 'VARCHAR2', :length => length },
+            Class => { :primitive => 'VARCHAR2', :length => length },
+            BigDecimal => { :primitive => 'NUMBER', :precision => precision, :scale => nil },
+            Float => { :primitive => 'BINARY_FLOAT', },
+            DateTime => { :primitive => 'DATE' },
+            Date => { :primitive => 'DATE' },
+            Time => { :primitive => 'DATE' },
+            TrueClass => { :primitive => 'NUMBER', :precision => 1, :scale => 0 },
+            Types::Object => { :primitive => 'CLOB' },
+            Types::Text => { :primitive => 'CLOB' },
+          }.freeze
+        end
+
+        # Use table truncate or delete for auto_migrate! to speed up test execution
+        #
+        # @param [Symbol] :truncate, :delete or :drop_and_create (or nil)
+        #   do not specify parameter to return current value
+        #
+        # @return [Symbol] current value of auto_migrate_with option (nil returned for :drop_and_create)
+        #
+        # @api semipublic
+        def auto_migrate_with(value = :not_specified)
+          return @auto_migrate_with if value == :not_specified
+          value = nil if value == :drop_and_create
+          raise ArgumentError unless [nil, :truncate, :delete].include?(value)
+          @auto_migrate_with = value
+        end
+
+        # Set if sequences will or will not be reset during auto_migrate!
+        #
+        # @param [TrueClass, FalseClass] reset sequences?
+        #   do not specify parameter to return current value
+        #
+        # @return [Symbol] current value of auto_migrate_reset_sequences option (default value is true)
+        #
+        # @api semipublic
+        def auto_migrate_reset_sequences(value = :not_specified)
+          return @auto_migrate_reset_sequences.nil? ? true : @auto_migrate_reset_sequences if value == :not_specified
+          raise ArgumentError unless [true, false].include?(value)
+          @auto_migrate_reset_sequences = value
+        end
+
+      end # module ClassMethods
+    end # module PostgresAdapter
+
+    module Repository
+      # Determine whether a particular named storage exists in this repository
+      #
+      # @param [String]
+      #   storage_name name of the storage to test for
+      #
+      # @return [Boolean]
+      #   true if the data-store +storage_name+ exists
+      #
+      # @api semipublic
+      def storage_exists?(storage_name)
+        if adapter.respond_to?(:storage_exists?)
+          adapter.storage_exists?(storage_name)
+        end
+      end
+
+      # TODO: document
+      # @api semipublic
+      def upgrade_model_storage(model)
+        if adapter.respond_to?(:upgrade_model_storage)
+          adapter.upgrade_model_storage(model)
+        end
+      end
+
+      # TODO: document
+      # @api semipublic
+      def create_model_storage(model)
+        if adapter.respond_to?(:create_model_storage)
+          adapter.create_model_storage(model)
+        end
+      end
+
+      # TODO: document
+      # @api semipublic
+      def destroy_model_storage(model)
+        if adapter.respond_to?(:destroy_model_storage)
+          adapter.destroy_model_storage(model)
+        end
+      end
+
+      # Destructively automigrates the data-store to match the model.
+      # First migrates all models down and then up.
+      # REPEAT: THIS IS DESTRUCTIVE
+      #
+      # @api public
+      def auto_migrate!
+        DataMapper.auto_migrate!(name)
+      end
+
+      # Safely migrates the data-store to match the model
+      # preserving data already in the data-store
+      #
+      # @api public
+      def auto_upgrade!
+        DataMapper.auto_upgrade!(name)
+      end
+    end # module Repository
+
+    module Model
+      # TODO: document
+      # @api private
+      def self.included(mod)
+        mod.descendants.each { |model| model.extend self }
+      end
+
+      # TODO: document
+      # @api semipublic
+      def storage_exists?(repository_name = default_repository_name)
+        repository(repository_name).storage_exists?(storage_name(repository_name))
+      end
+
+      # Destructively automigrates the data-store to match the model
+      # REPEAT: THIS IS DESTRUCTIVE
+      #
+      # @param Symbol repository_name the repository to be migrated
+      #
+      # @api public
+      def auto_migrate!(repository_name = self.repository_name)
+        assert_valid
+        auto_migrate_down!(repository_name)
+        auto_migrate_up!(repository_name)
+      end
+
+      # Safely migrates the data-store to match the model
+      # preserving data already in the data-store
+      #
+      # @param Symbol repository_name the repository to be migrated
+      #
+      # @api public
+      def auto_upgrade!(repository_name = self.repository_name)
+        assert_valid
+        if base_model == self
+          repository(repository_name).upgrade_model_storage(self)
+        else
+          base_model.auto_upgrade!(repository_name)
+        end
+      end
+
+      # Destructively migrates the data-store down, which basically
+      # deletes all the models.
+      # REPEAT: THIS IS DESTRUCTIVE
+      #
+      # @param Symbol repository_name the repository to be migrated
+      #
+      # @api private
+      def auto_migrate_down!(repository_name = self.repository_name)
+        assert_valid
+        if base_model == self
+          repository(repository_name).destroy_model_storage(self)
+        else
+          base_model.auto_migrate_down!(repository_name)
+        end
+      end
+
+      # Auto migrates the data-store to match the model
+      #
+      # @param Symbol repository_name the repository to be migrated
+      #
+      # @api private
+      def auto_migrate_up!(repository_name = self.repository_name)
+        assert_valid
+        if base_model == self
+          repository(repository_name).create_model_storage(self)
+        else
+          base_model.auto_migrate_up!(repository_name)
+        end
+      end
+    end # module Model
+  end
+
+  module Adapters
+    extendable do
+
+      # TODO: document
+      # @api private
+      def const_added(const_name)
+        if DataMapper::Migrations.const_defined?(const_name)
+          adapter = const_get(const_name)
+          adapter.send(:include, DataMapper::Migrations.const_get(const_name))
+        end
+
+        super
+      end
+    end
+  end # module Adapters
+end # module DataMapper
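
Taken together, the code above replaces the old auto_migrations.rb and gives DataMapper, Repository, and Model the auto-migration API directly in dm-core. A minimal usage sketch (the connection URI and model are illustrative; note that auto_migrate! is destructive):

  require 'dm-core'

  DataMapper.setup(:default, 'sqlite3::memory:')  # illustrative connection URI

  class Article
    include DataMapper::Resource
    property :id,    Serial
    property :title, String, :length => 200
  end

  # Destructive: drops and recreates storage for every model
  # (auto_migrate_down! followed by auto_migrate_up!).
  DataMapper.auto_migrate!

  # Non-destructive: adds missing tables and columns via
  # upgrade_model_storage, preserving existing rows.
  Article.auto_upgrade!

  Article.storage_exists?  # => true once the table has been created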