sbf-dm-migrations 1.3.0.beta
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- checksums.yaml +7 -0
- data/.gitignore +38 -0
- data/.rspec +1 -0
- data/.rubocop.yml +468 -0
- data/.travis.yml +52 -0
- data/Gemfile +61 -0
- data/LICENSE +20 -0
- data/README.rdoc +39 -0
- data/Rakefile +4 -0
- data/db/migrations/1_create_people_table.rb +12 -0
- data/db/migrations/2_add_dob_to_people.rb +13 -0
- data/db/migrations/config.rb +4 -0
- data/dm-migrations.gemspec +20 -0
- data/examples/Rakefile +149 -0
- data/examples/sample_migration.rb +58 -0
- data/examples/sample_migration_spec.rb +46 -0
- data/lib/dm-migrations/adapters/dm-do-adapter.rb +304 -0
- data/lib/dm-migrations/adapters/dm-mysql-adapter.rb +306 -0
- data/lib/dm-migrations/adapters/dm-oracle-adapter.rb +339 -0
- data/lib/dm-migrations/adapters/dm-postgres-adapter.rb +152 -0
- data/lib/dm-migrations/adapters/dm-sqlite-adapter.rb +88 -0
- data/lib/dm-migrations/adapters/dm-sqlserver-adapter.rb +184 -0
- data/lib/dm-migrations/adapters/dm-yaml-adapter.rb +21 -0
- data/lib/dm-migrations/auto_migration.rb +227 -0
- data/lib/dm-migrations/exceptions/duplicate_migration.rb +6 -0
- data/lib/dm-migrations/migration.rb +323 -0
- data/lib/dm-migrations/migration_runner.rb +76 -0
- data/lib/dm-migrations/sql/column.rb +5 -0
- data/lib/dm-migrations/sql/mysql.rb +84 -0
- data/lib/dm-migrations/sql/oracle.rb +9 -0
- data/lib/dm-migrations/sql/postgres.rb +89 -0
- data/lib/dm-migrations/sql/sqlite.rb +59 -0
- data/lib/dm-migrations/sql/sqlserver.rb +9 -0
- data/lib/dm-migrations/sql/table.rb +15 -0
- data/lib/dm-migrations/sql/table_creator.rb +105 -0
- data/lib/dm-migrations/sql/table_modifier.rb +57 -0
- data/lib/dm-migrations/sql.rb +7 -0
- data/lib/dm-migrations/version.rb +5 -0
- data/lib/dm-migrations.rb +3 -0
- data/lib/spec/example/migration_example_group.rb +69 -0
- data/lib/spec/matchers/migration_matchers.rb +96 -0
- data/spec/integration/auto_migration_spec.rb +590 -0
- data/spec/integration/auto_upgrade_spec.rb +41 -0
- data/spec/integration/migration_runner_spec.rb +84 -0
- data/spec/integration/migration_spec.rb +156 -0
- data/spec/integration/sql_spec.rb +290 -0
- data/spec/isolated/require_after_setup_spec.rb +24 -0
- data/spec/isolated/require_before_setup_spec.rb +24 -0
- data/spec/isolated/require_spec.rb +23 -0
- data/spec/spec_helper.rb +16 -0
- data/spec/unit/migration_spec.rb +501 -0
- data/spec/unit/sql/column_spec.rb +14 -0
- data/spec/unit/sql/postgres_spec.rb +90 -0
- data/spec/unit/sql/sqlite_extensions_spec.rb +103 -0
- data/spec/unit/sql/table_creator_spec.rb +91 -0
- data/spec/unit/sql/table_modifier_spec.rb +47 -0
- data/spec/unit/sql/table_spec.rb +26 -0
- data/spec/unit/sql_spec.rb +7 -0
- data/tasks/spec.rake +21 -0
- data/tasks/yard.rake +9 -0
- data/tasks/yardstick.rake +19 -0
- metadata +120 -0
data/lib/dm-migrations/adapters/dm-postgres-adapter.rb
@@ -0,0 +1,152 @@
require 'dm-migrations/auto_migration'
require 'dm-migrations/adapters/dm-do-adapter'

module DataMapper
  module Migrations
    module PostgresAdapter
      include SQL, DataObjectsAdapter

      # @api private
      def self.included(base)
        base.extend DataObjectsAdapter::ClassMethods
        base.extend ClassMethods
      end

      # @api semipublic
      def upgrade_model_storage(model)
        without_notices { super }
      end

      # @api semipublic
      def create_model_storage(model)
        without_notices { super }
      end

      # @api semipublic
      def destroy_model_storage(model)
        if supports_drop_table_if_exists?
          without_notices { super }
        else
          super
        end
      end

      module SQL # :nodoc:
        # private ## This cannot be private for current migrations

        # @api private
        def supports_drop_table_if_exists?
          @supports_drop_table_if_exists ||= postgres_version >= '8.2'
        end

        # @api private
        def schema_name
          @schema_name ||= select('SELECT current_schema()').first.freeze
        end

        # @api private
        def postgres_version
          @postgres_version ||= select('SELECT version()').first.split[1].freeze
        end

        # @api private
        def without_notices
          # execute the block with NOTICE messages disabled
          execute('SET client_min_messages = warning')
          yield
        ensure
          execute('RESET client_min_messages')
        end

        # @api private
        def property_schema_hash(property)
          schema = super

          # Postgres does not support precision and scale for Float
          if property.is_a?(Property::Float)
            schema.delete(:precision)
            schema.delete(:scale)
          end

          if property.is_a?(Property::Integer)
            min = property.min
            max = property.max

            schema[:primitive] = integer_column_statement(min..max) if min && max
          end

          schema[:primitive] = serial_column_statement(min..max) if schema[:serial]

          schema
        end

        # Return SQL statement for the integer column
        #
        # @param [Range] range
        #   the min/max allowed integers
        #
        # @return [String]
        #   the statement to create the integer column
        #
        # @api private
        private def integer_column_statement(range)
          min = range.first
          max = range.last

          smallint = 2**15
          integer  = 2**31
          bigint   = 2**63

          if min >= -smallint && max < smallint
            'SMALLINT'
          elsif min >= -integer && max < integer
            'INTEGER'
          elsif min >= -bigint && max < bigint
            'BIGINT'
          else
            raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
          end
        end

        # Return SQL statement for the serial column
        #
        # @param [Range] range
        #   the range of allowed integers
        #
        # @return [String]
        #   the statement to create the serial column
        #
        # @api private
        private def serial_column_statement(range)
          max = range.last

          if max.nil? || max < 2**31
            'SERIAL'
          elsif max < 2**63
            'BIGSERIAL'
          else
            raise ArgumentError, "min #{range.first} and max #{max} exceeds supported range"
          end
        end
      end

      module ClassMethods
        # Types for PostgreSQL databases.
        #
        # @return [Hash] types for PostgreSQL databases.
        #
        # @api private
        def type_map
          precision = Property::Numeric.precision
          scale     = Property::Decimal.scale

          super.merge(
            Property::Binary => {primitive: 'BYTEA'},
            BigDecimal       => {primitive: 'NUMERIC', precision: precision, scale: scale},
            Float            => {primitive: 'DOUBLE PRECISION'}
          ).freeze
        end
      end
    end
  end
end
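The integer_column_statement and serial_column_statement helpers above pick the narrowest PostgreSQL column type that covers a property's declared min/max range. A minimal standalone sketch of that mapping (a hypothetical pg_integer_column helper, not part of the gem, taking plain integer bounds instead of a DataMapper property):

# Hypothetical helper mirroring integer_column_statement above; illustration only.
def pg_integer_column(min, max)
  smallint = 2**15
  integer  = 2**31
  bigint   = 2**63

  if    min >= -smallint && max < smallint then 'SMALLINT'
  elsif min >= -integer  && max < integer  then 'INTEGER'
  elsif min >= -bigint   && max < bigint   then 'BIGINT'
  else
    raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
  end
end

pg_integer_column(0, 255)      # => "SMALLINT"
pg_integer_column(0, 100_000)  # => "INTEGER"
pg_integer_column(0, 2**40)    # => "BIGINT"

Serial (auto-increment) keys follow the same idea: ranges that fit in 32 bits become SERIAL, larger ones BIGSERIAL.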
data/lib/dm-migrations/adapters/dm-sqlite-adapter.rb
@@ -0,0 +1,88 @@
require 'dm-migrations/auto_migration'
require 'dm-migrations/adapters/dm-do-adapter'

module DataMapper
  module Migrations
    module SqliteAdapter
      include SQL, DataObjectsAdapter

      # @api private
      def self.included(base)
        base.extend DataObjectsAdapter::ClassMethods
        base.extend ClassMethods
      end

      # @api semipublic
      def storage_exists?(storage_name)
        table_info(storage_name).any?
      end

      # @api semipublic
      def field_exists?(storage_name, column_name)
        table_info(storage_name).any? do |row|
          row.name == column_name
        end
      end

      module SQL # :nodoc:
        # private ## This cannot be private for current migrations

        # @api private
        def supports_serial?
          @supports_serial ||= sqlite_version >= '3.1.0'
        end

        # @api private
        def supports_drop_table_if_exists?
          @supports_drop_table_if_exists ||= sqlite_version >= '3.3.0'
        end

        # @api private
        def table_info(table_name)
          select("PRAGMA table_info(#{quote_name(table_name)})")
        end

        # @api private
        def create_table_statement(connection, model, properties)
          statement = DataMapper::Ext::String.compress_lines(<<-SQL)
            CREATE TABLE #{quote_name(model.storage_name(name))}
            (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')}
          SQL

          # skip adding the primary key if one of the columns is serial. In
          # SQLite the serial column must be the primary key, so it has already
          # been defined
          statement << ", PRIMARY KEY(#{properties.key.map { |property| quote_name(property.field) }.join(', ')})" unless properties.any?(&:serial?)

          statement << ')'
          statement
        end

        # @api private
        def property_schema_statement(connection, schema)
          statement = super

          statement << ' PRIMARY KEY AUTOINCREMENT' if supports_serial? && schema[:serial]

          statement
        end

        # @api private
        def sqlite_version
          @sqlite_version ||= select('SELECT sqlite_version(*)').first.freeze
        end
      end

      module ClassMethods
        # Types for SQLite 3 databases.
        #
        # @return [Hash] types for SQLite 3 databases.
        #
        # @api private
        def type_map
          super.merge(Class => {primitive: 'VARCHAR'}).freeze
        end
      end
    end
  end
end
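The SQLite existence checks above lean on PRAGMA table_info, which returns one row per column and an empty result for a missing table. A rough illustration using the sqlite3 gem directly instead of the adapter's #select (the gem and the people table here are assumptions for the example):

# Illustration only: the adapter issues the same PRAGMA through its own #select.
require 'sqlite3'

db = SQLite3::Database.new(':memory:')
db.execute('CREATE TABLE people (id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR)')

info = db.execute('PRAGMA table_info(people)')        # one row per column
table_exists  = info.any?                             # storage_exists? analogue
column_exists = info.any? { |row| row[1] == 'name' }  # field_exists? analogue (row[1] is the column name)

puts table_exists   # => true
puts column_exists  # => true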
data/lib/dm-migrations/adapters/dm-sqlserver-adapter.rb
@@ -0,0 +1,184 @@
require 'dm-migrations/auto_migration'
require 'dm-migrations/adapters/dm-do-adapter'

module DataMapper
  module Migrations
    module SqlserverAdapter
      DEFAULT_CHARACTER_SET = 'utf8'.freeze

      include SQL, DataObjectsAdapter

      # @api private
      def self.included(base)
        base.extend DataObjectsAdapter::ClassMethods
        base.extend ClassMethods
      end

      # @api semipublic
      def storage_exists?(storage_name)
        select('SELECT name FROM sysobjects WHERE name LIKE ?', storage_name).first == storage_name
      end

      # @api semipublic
      def field_exists?(storage_name, field_name)
        result = select(
          'SELECT c.name FROM sysobjects as o JOIN syscolumns AS c ON o.id = c.id WHERE o.name = ? AND c.name LIKE ?',
          storage_name,
          field_name
        ).first
        result ? result.to_s == field_name.to_s : false
      end

      module SQL # :nodoc:
        # private ## This cannot be private for current migrations

        # @api private
        def supports_serial?
          true
        end

        # @api private
        def supports_drop_table_if_exists?
          false
        end

        # @api private
        def schema_name
          select('SELECT DB_NAME()')
        end

        # TODO: update dkubb/dm-more/dm-migrations to use schema_name and remove this

        alias_method :db_name, :schema_name

        # @api private
        def create_table_statement(connection, model, properties)
          statement = DataMapper::Ext::String.compress_lines(<<-SQL)
            CREATE TABLE #{quote_name(model.storage_name(name))}
            (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')}
          SQL

          # specific the primary keys.
          # don't have to filter out Serial (aka IDENTITY) type
          statement << ", PRIMARY KEY(#{properties.key.map { |property| quote_name(property.field) }.join(', ')})"

          statement << ')'
          statement
        end

        # @api private
        def property_schema_hash(property)
          schema = super

          if property.is_a?(Property::Integer)
            min = property.min
            max = property.max

            schema[:primitive] = integer_column_statement(min..max) if min && max
          end

          schema_primitive = schema[:primitive]

          if schema_primitive == 'NVARCHAR'
            schema[:length] = if property.length <= 4000
                                property.length
                              else
                                'max'
                              end
          end

          schema.delete(:default) if schema[:primitive] == 'TEXT'

          schema
        end

        # @api private
        def property_schema_statement(connection, schema)
          if supports_serial? && schema[:serial]
            statement = quote_name(schema[:name])
            statement << " #{schema[:primitive]}"

            length = schema[:length]

            if schema[:precision] && schema[:scale]
              statement << "(#{%i(precision scale).map { |key| connection.quote_value(schema[key]) }.join(', ')})"
            elsif length
              statement << "(#{connection.quote_value(length)})"
            end

            statement << ' IDENTITY'
          else
            statement = super
          end

          statement
        end

        # @api private
        def character_set
          @character_set ||= show_variable('character_set_connection') || DEFAULT_CHARACTER_SET
        end

        # @api private
        def collation
          @collation ||= show_variable('collation_connection') || DEFAULT_COLLATION
        end

        # @api private
        def show_variable(_name)
          raise 'SqlserverAdapter#show_variable: Not implemented'
        end

        # Return SQL statement for the integer column
        #
        # @param [Range] range
        #   the min/max allowed integers
        #
        # @return [String]
        #   the statement to create the integer column
        #
        # @api private
        private def integer_column_statement(range)
          min = range.first
          max = range.last

          smallint = 2**15
          integer  = 2**31
          bigint   = 2**63

          if min >= 0 && max < 2**8
            'TINYINT'
          elsif min >= -smallint && max < smallint then 'SMALLINT'
          elsif min >= -integer && max < integer then 'INT'
          elsif min >= -bigint && max < bigint then 'BIGINT'
          else
            raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
          end
        end
      end

      module ClassMethods
        # Types for Sqlserver databases.
        #
        # @return [Hash] types for Sqlserver databases.
        #
        # @api private
        def type_map
          length     = Property::String.length
          _precision = Property::Numeric.precision
          _scale     = Property::Decimal.scale

          super.merge(
            DateTime         => {primitive: 'DATETIME'},
            Date             => {primitive: 'SMALLDATETIME'},
            Time             => {primitive: 'SMALLDATETIME'},
            TrueClass        => {primitive: 'BIT'},
            String           => {primitive: 'NVARCHAR', length: length},
            Property::Text   => {primitive: 'NVARCHAR', length: 'max'},
            Property::Binary => {primitive: 'VARBINARY', length: 'max'}
          ).freeze
        end
      end
    end
  end
end
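Two SQL Server specifics stand out above: unsigned 0..255 ranges map to TINYINT (and 32-bit ranges to INT rather than INTEGER), and serial keys are emitted as IDENTITY columns. A small sketch of the IDENTITY branch of property_schema_statement, with a stand-in quote_name helper (an assumption for the example, not the adapter's real quoting):

# Illustration only; the real adapter delegates quoting to the connection.
def quote_name(name)
  "[#{name}]"
end

schema    = {name: 'id', primitive: 'INT', serial: true}
statement = quote_name(schema[:name])
statement << " #{schema[:primitive]}"
statement << ' IDENTITY'

puts statement  # => "[id] INT IDENTITY"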
data/lib/dm-migrations/adapters/dm-yaml-adapter.rb
@@ -0,0 +1,21 @@
require 'dm-migrations/auto_migration'
require 'dm-migrations/adapters/dm-do-adapter'

module DataMapper
  module Migrations
    module YamlAdapter
      def self.included(_base)
        DataMapper.extend(Migrations::SingletonMethods)
        %i(Repository Model).each do |name|
          DataMapper.const_get(name).send(:include, Migrations.const_get(name))
        end
      end

      # @api semipublic
      def destroy_model_storage(model)
        yaml_file(model).unlink if yaml_file(model).file?
        true
      end
    end
  end
end
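For the YAML adapter, destroy_model_storage simply deletes the per-model YAML file when it exists; yaml_file(model) returns a Pathname. A minimal illustration, with a made-up path standing in for yaml_file(model):

require 'pathname'

yaml_file = Pathname.new('tmp/people.yml')  # hypothetical path, illustration only
yaml_file.unlink if yaml_file.file?         # no-op when the file is absent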
data/lib/dm-migrations/auto_migration.rb
@@ -0,0 +1,227 @@
require 'dm-core'

module DataMapper
  module Migrations
    module SingletonMethods
      # destructively migrates the repository upwards to match model definitions
      #
      # @param [Symbol] repository_name repository to act on, :default is the default
      #
      # @api public
      def migrate!(repository_name = nil)
        repository(repository_name).migrate!
      end

      # drops and recreates the repository upwards to match model definitions
      #
      # @param [Symbol] repository_name repository to act on, :default is the default
      #
      # @api public
      def auto_migrate!(repository_name = nil)
        repository_execute(:auto_migrate!, repository_name)
      end

      # @api public
      def auto_upgrade!(repository_name = nil)
        repository_execute(:auto_upgrade!, repository_name)
      end

      # @api semipublic
      private def auto_migrate_down!(repository_name)
        repository_execute(:auto_migrate_down!, repository_name)
      end

      # @api semipublic
      private def auto_migrate_up!(repository_name)
        repository_execute(:auto_migrate_up!, repository_name)
      end

      # @api private
      private def repository_execute(method, repository_name)
        models = DataMapper::Model.descendants
        models = models.select { |m| m.repository_name == repository_name } if repository_name
        models.each do |model|
          model.send(method, model.repository_name)
        end
      end
    end

    module Repository
      # Determine whether a particular named storage exists in this repository
      #
      # @param [String]
      #   storage_name name of the storage to test for
      #
      # @return [Boolean]
      #   true if the data-store +storage_name+ exists
      #
      # @api semipublic
      def storage_exists?(storage_name)
        adapter = self.adapter
        return false unless adapter.respond_to?(:storage_exists?)

        adapter.storage_exists?(storage_name)
      end

      # @api semipublic
      def upgrade_model_storage(model)
        adapter = self.adapter
        return unless adapter.respond_to?(:upgrade_model_storage)

        adapter.upgrade_model_storage(model)
      end

      # @api semipublic
      def create_model_storage(model)
        adapter = self.adapter
        return unless adapter.respond_to?(:create_model_storage)

        adapter.create_model_storage(model)
      end

      # @api semipublic
      def destroy_model_storage(model)
        adapter = self.adapter
        return unless adapter.respond_to?(:destroy_model_storage)

        adapter.destroy_model_storage(model)
      end

      # Destructively automigrates the data-store to match the model.
      # First migrates all models down and then up.
      # REPEAT: THIS IS DESTRUCTIVE
      #
      # @api public
      def auto_migrate!
        DataMapper.auto_migrate!(name)
      end

      # Safely migrates the data-store to match the model
      # preserving data already in the data-store
      #
      # @api public
      def auto_upgrade!
        DataMapper.auto_upgrade!(name)
      end
    end

    module Model
      # @api private
      def self.included(mod)
        mod.descendants.each { |model| model.extend self }
      end

      # @api semipublic
      def storage_exists?(repository_name = default_repository_name)
        repository(repository_name).storage_exists?(storage_name(repository_name))
      end

      # Destructively automigrates the data-store to match the model
      # REPEAT: THIS IS DESTRUCTIVE
      #
      # @param [Symbol] repository_name the repository to be migrated
      #
      # @api public
      def auto_migrate!(repository_name = self.repository_name)
        assert_valid(true)
        auto_migrate_down!(repository_name)
        auto_migrate_up!(repository_name)
      end

      # Safely migrates the data-store to match the model
      # preserving data already in the data-store
      #
      # @param [Symbol] repository_name the repository to be migrated
      #
      # @api public
      def auto_upgrade!(repository_name = self.repository_name)
        assert_valid(true)
        base_model = self.base_model
        if base_model == self
          repository(repository_name).upgrade_model_storage(self)
        else
          base_model.auto_upgrade!(repository_name)
        end
      end

      # Destructively migrates the data-store down, which basically
      # deletes all the models.
      # REPEAT: THIS IS DESTRUCTIVE
      #
      # @param [Symbol] repository_name the repository to be migrated
      #
      # @api private
      def auto_migrate_down!(repository_name = self.repository_name)
        assert_valid(true)
        base_model = self.base_model
        if base_model == self
          repository(repository_name).destroy_model_storage(self)
        else
          base_model.auto_migrate_down!(repository_name)
        end
      end

      # Auto migrates the data-store to match the model
      #
      # @param [Symbol] repository_name the repository to be migrated
      #
      # @api private
      def auto_migrate_up!(repository_name = self.repository_name)
        assert_valid(true)
        base_model = self.base_model
        if base_model == self
          repository(repository_name).create_model_storage(self)
        else
          base_model.auto_migrate_up!(repository_name)
        end
      end
    end

    def self.include_migration_api
      DataMapper.extend(SingletonMethods)
      %i(Repository Model).each do |name|
        DataMapper.const_get(name).send(:include, const_get(name))
      end
      DataMapper::Model.append_extensions(Model)
      Adapters::AbstractAdapter.descendants.each do |adapter_class|
        Adapters.include_migration_api(DataMapper::Inflector.demodulize(adapter_class.name))
      end
    end
  end

  module Adapters
    def self.include_migration_api(const_name)
      require auto_migration_extensions(const_name)
      if Migrations.const_defined?(const_name)
        adapter = const_get(const_name)
        adapter.send(:include, migration_module(const_name))
      end
    rescue LoadError
      # Silently ignore the fact that no adapter extensions could be required
      # This means that the adapter in use doesn't support migrations
    end

    def self.migration_module(const_name)
      Migrations.const_get(const_name)
    end

    class << self
      # @api private
      private def auto_migration_extensions(const_name)
        name = adapter_name(const_name)
        name = 'do' if name == 'dataobjects'
        "dm-migrations/adapters/dm-#{name}-adapter"
      end
    end

    extendable do
      # @api private
      def const_added(const_name)
        include_migration_api(const_name)
        super
      end
    end
  end

  Migrations.include_migration_api
end
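This file is what wires the migration API into DataMapper itself (SingletonMethods), into repositories and models, and into every loaded adapter. Typical usage looks like the sketch below; it assumes dm-core plus a DataObjects SQLite adapter are installed and configured the usual DataMapper 1.x way, so treat it as a hedged example rather than documentation specific to this fork:

require 'dm-core'
require 'dm-migrations'

DataMapper.setup(:default, 'sqlite::memory:')

class Person
  include DataMapper::Resource

  property :id,   Serial
  property :name, String
end

DataMapper.finalize

DataMapper.auto_migrate!  # destructive: drops and recreates storage for every model
Person.auto_upgrade!      # additive: only creates missing tables/columns, keeps data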