ardm-migrations 1.2.0

Files changed (61)
  1. checksums.yaml +7 -0
  2. data/.gitignore +35 -0
  3. data/.travis.yml +11 -0
  4. data/Gemfile +53 -0
  5. data/LICENSE +20 -0
  6. data/README.rdoc +39 -0
  7. data/Rakefile +4 -0
  8. data/ardm-migrations.gemspec +27 -0
  9. data/db/migrations/1_create_people_table.rb +12 -0
  10. data/db/migrations/2_add_dob_to_people.rb +13 -0
  11. data/db/migrations/config.rb +4 -0
  12. data/examples/Rakefile +144 -0
  13. data/examples/sample_migration.rb +58 -0
  14. data/examples/sample_migration_spec.rb +50 -0
  15. data/lib/ardm-migrations.rb +1 -0
  16. data/lib/dm-migrations/adapters/dm-do-adapter.rb +295 -0
  17. data/lib/dm-migrations/adapters/dm-mysql-adapter.rb +299 -0
  18. data/lib/dm-migrations/adapters/dm-oracle-adapter.rb +332 -0
  19. data/lib/dm-migrations/adapters/dm-postgres-adapter.rb +159 -0
  20. data/lib/dm-migrations/adapters/dm-sqlite-adapter.rb +96 -0
  21. data/lib/dm-migrations/adapters/dm-sqlserver-adapter.rb +177 -0
  22. data/lib/dm-migrations/adapters/dm-yaml-adapter.rb +23 -0
  23. data/lib/dm-migrations/auto_migration.rb +239 -0
  24. data/lib/dm-migrations/exceptions/duplicate_migration.rb +6 -0
  25. data/lib/dm-migrations/migration.rb +300 -0
  26. data/lib/dm-migrations/migration_runner.rb +85 -0
  27. data/lib/dm-migrations/sql/column.rb +5 -0
  28. data/lib/dm-migrations/sql/mysql.rb +61 -0
  29. data/lib/dm-migrations/sql/postgres.rb +82 -0
  30. data/lib/dm-migrations/sql/sqlite.rb +51 -0
  31. data/lib/dm-migrations/sql/table.rb +15 -0
  32. data/lib/dm-migrations/sql/table_creator.rb +109 -0
  33. data/lib/dm-migrations/sql/table_modifier.rb +57 -0
  34. data/lib/dm-migrations/sql.rb +5 -0
  35. data/lib/dm-migrations/version.rb +5 -0
  36. data/lib/dm-migrations.rb +3 -0
  37. data/lib/spec/example/migration_example_group.rb +73 -0
  38. data/lib/spec/matchers/migration_matchers.rb +106 -0
  39. data/spec/integration/auto_migration_spec.rb +553 -0
  40. data/spec/integration/auto_upgrade_spec.rb +40 -0
  41. data/spec/integration/migration_runner_spec.rb +89 -0
  42. data/spec/integration/migration_spec.rb +157 -0
  43. data/spec/integration/sql_spec.rb +250 -0
  44. data/spec/isolated/require_after_setup_spec.rb +30 -0
  45. data/spec/isolated/require_before_setup_spec.rb +30 -0
  46. data/spec/isolated/require_spec.rb +25 -0
  47. data/spec/rcov.opts +6 -0
  48. data/spec/spec.opts +4 -0
  49. data/spec/spec_helper.rb +18 -0
  50. data/spec/unit/migration_spec.rb +453 -0
  51. data/spec/unit/sql/column_spec.rb +14 -0
  52. data/spec/unit/sql/postgres_spec.rb +97 -0
  53. data/spec/unit/sql/sqlite_extensions_spec.rb +108 -0
  54. data/spec/unit/sql/table_creator_spec.rb +94 -0
  55. data/spec/unit/sql/table_modifier_spec.rb +49 -0
  56. data/spec/unit/sql/table_spec.rb +28 -0
  57. data/spec/unit/sql_spec.rb +7 -0
  58. data/tasks/spec.rake +38 -0
  59. data/tasks/yard.rake +9 -0
  60. data/tasks/yardstick.rake +19 -0
  61. metadata +150 -0
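
The adapter diffs below make up the bulk of this release. The gem is a packaging of dm-migrations (data/lib/ardm-migrations.rb is a one-line entry point, presumably just requiring dm-migrations), so it is driven through the usual DataMapper auto-migration calls. A minimal, illustrative sketch — the connection URI and the Person model are assumptions for the example, not part of this release:

    require 'dm-core'
    require 'ardm-migrations'   # one-line entry point shipped in this gem

    DataMapper.setup(:default, 'mysql://localhost/foo')

    class Person
      include DataMapper::Resource

      property :id,   Serial
      property :name, String, :index => true
    end

    DataMapper.finalize

    DataMapper.auto_migrate!   # drop and recreate storage for all models
    DataMapper.auto_upgrade!   # add missing tables/columns without dropping data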
data/lib/dm-migrations/adapters/dm-do-adapter.rb
@@ -0,0 +1,295 @@
+ require 'dm-migrations/auto_migration'
+
+ module DataMapper
+   module Migrations
+
+     module DataObjectsAdapter
+
+       # Returns whether the storage_name exists.
+       #
+       # @param [String] storage_name
+       #   a String defining the name of a storage, for example a table name.
+       #
+       # @return [Boolean]
+       #   true if the storage exists
+       #
+       # @api semipublic
+       def storage_exists?(storage_name)
+         statement = DataMapper::Ext::String.compress_lines(<<-SQL)
+           SELECT COUNT(*)
+           FROM "information_schema"."tables"
+           WHERE "table_type" = 'BASE TABLE'
+           AND "table_schema" = ?
+           AND "table_name" = ?
+         SQL
+
+         select(statement, schema_name, storage_name).first > 0
+       end
+
+       # Returns whether the field exists.
+       #
+       # @param [String] storage_name
+       #   a String defining the name of a storage, for example a table name.
+       # @param [String] field
+       #   a String defining the name of a field, for example a column name.
+       #
+       # @return [Boolean]
+       #   true if the field exists.
+       #
+       # @api semipublic
+       def field_exists?(storage_name, column_name)
+         statement = DataMapper::Ext::String.compress_lines(<<-SQL)
+           SELECT COUNT(*)
+           FROM "information_schema"."columns"
+           WHERE "table_schema" = ?
+           AND "table_name" = ?
+           AND "column_name" = ?
+         SQL
+
+         select(statement, schema_name, storage_name, column_name).first > 0
+       end
+
+       # @api semipublic
+       def upgrade_model_storage(model)
+         name       = self.name
+         properties = model.properties_with_subclasses(name)
+
+         if success = create_model_storage(model)
+           return properties
+         end
+
+         table_name = model.storage_name(name)
+
+         with_connection do |connection|
+           properties.map do |property|
+             schema_hash = property_schema_hash(property)
+             next if field_exists?(table_name, schema_hash[:name])
+
+             statement = alter_table_add_column_statement(connection, table_name, schema_hash)
+             command   = connection.create_command(statement)
+             command.execute_non_query
+
+             # For simple :index => true columns, add an appropriate index.
+             # Upgrading doesn't know how to deal with complex indexes yet.
+             if property.options[:index] === true
+               statement = create_index_statement(model, property.name, [ property.field ])
+               command   = connection.create_command(statement)
+               command.execute_non_query
+             end
+
+             property
+           end.compact
+         end
+       end
+
+       # @api semipublic
+       def create_model_storage(model)
+         name       = self.name
+         properties = model.properties_with_subclasses(name)
+
+         return false if storage_exists?(model.storage_name(name))
+         return false if properties.empty?
+
+         with_connection do |connection|
+           statements = [ create_table_statement(connection, model, properties) ]
+           statements.concat(create_index_statements(model))
+           statements.concat(create_unique_index_statements(model))
+
+           statements.each do |statement|
+             command = connection.create_command(statement)
+             command.execute_non_query
+           end
+         end
+
+         true
+       end
+
+       # @api semipublic
+       def destroy_model_storage(model)
+         return true unless supports_drop_table_if_exists? || storage_exists?(model.storage_name(name))
+         execute(drop_table_statement(model))
+         true
+       end
+
+       module SQL #:nodoc:
+         # private ## This cannot be private for current migrations
+
+         # Adapters that support AUTO INCREMENT fields for CREATE TABLE
+         # statements should overwrite this to return true
+         #
+         # @api private
+         def supports_serial?
+           false
+         end
+
+         # @api private
+         def supports_drop_table_if_exists?
+           false
+         end
+
+         # @api private
+         def schema_name
+           raise NotImplementedError, "#{self.class}#schema_name not implemented"
+         end
+
+         # @api private
+         def alter_table_add_column_statement(connection, table_name, schema_hash)
+           "ALTER TABLE #{quote_name(table_name)} #{add_column_statement} #{property_schema_statement(connection, schema_hash)}"
+         end
+
+         # @api private
+         def create_table_statement(connection, model, properties)
+           statement = DataMapper::Ext::String.compress_lines(<<-SQL)
+             CREATE TABLE #{quote_name(model.storage_name(name))}
+             (#{properties.map { |property| property_schema_statement(connection, property_schema_hash(property)) }.join(', ')},
+             PRIMARY KEY(#{ properties.key.map { |property| quote_name(property.field) }.join(', ')}))
+           SQL
+
+           statement
+         end
+
+         # @api private
+         def drop_table_statement(model)
+           table_name = quote_name(model.storage_name(name))
+           if supports_drop_table_if_exists?
+             "DROP TABLE IF EXISTS #{table_name}"
+           else
+             "DROP TABLE #{table_name}"
+           end
+         end
+
+         # @api private
+         def create_index_statements(model)
+           name       = self.name
+           table_name = model.storage_name(name)
+
+           indexes(model).map do |index_name, fields|
+             create_index_statement(model, index_name, fields)
+           end
+         end
+
+         # @api private
+         def create_index_statement(model, index_name, fields)
+           table_name = model.storage_name(name)
+
+           DataMapper::Ext::String.compress_lines(<<-SQL)
+             CREATE INDEX #{quote_name("index_#{table_name}_#{index_name}")} ON
+             #{quote_name(table_name)} (#{fields.map { |field| quote_name(field) }.join(', ')})
+           SQL
+         end
+
+         # @api private
+         def create_unique_index_statements(model)
+           name           = self.name
+           table_name     = model.storage_name(name)
+           key            = model.key(name).map { |property| property.field }
+           unique_indexes = unique_indexes(model).reject { |index_name, fields| fields == key }
+
+           unique_indexes.map do |index_name, fields|
+             DataMapper::Ext::String.compress_lines(<<-SQL)
+               CREATE UNIQUE INDEX #{quote_name("unique_#{table_name}_#{index_name}")} ON
+               #{quote_name(table_name)} (#{fields.map { |field| quote_name(field) }.join(', ')})
+             SQL
+           end
+         end
+
+         # @api private
+         def property_schema_hash(property)
+           primitive = property.primitive
+           type_map  = self.class.type_map
+
+           schema = (type_map[property.class] || type_map[property.class.superclass] || type_map[primitive]).merge(:name => property.field)
+
+           schema_primitive = schema[:primitive]
+
+           if primitive == String && schema_primitive != 'TEXT' && schema_primitive != 'CLOB' && schema_primitive != 'NVARCHAR'
+             schema[:length] = property.length
+           elsif primitive == BigDecimal || primitive == Float
+             schema[:precision] = property.precision
+             schema[:scale]     = property.scale
+           end
+
+           schema[:allow_nil] = property.allow_nil?
+           schema[:serial]    = property.serial?
+
+           default = property.default
+
+           if default.nil? || default.respond_to?(:call)
+             # remove the default if the property does not allow nil
+             schema.delete(:default) unless schema[:allow_nil]
+           else
+             schema[:default] = property.dump(default)
+           end
+
+           schema
+         end
+
+         # @api private
+         def property_schema_statement(connection, schema)
+           statement = quote_name(schema[:name])
+           statement << " #{schema[:primitive]}"
+
+           length = schema[:length]
+
+           if schema[:precision] && schema[:scale]
+             statement << "(#{[ :precision, :scale ].map { |key| connection.quote_value(schema[key]) }.join(', ')})"
+           elsif length == 'max'
+             statement << '(max)'
+           elsif length
+             statement << "(#{connection.quote_value(length)})"
+           end
+
+           statement << " DEFAULT #{connection.quote_value(schema[:default])}" if schema.key?(:default)
+           statement << ' NOT NULL' unless schema[:allow_nil]
+           statement
+         end
+
+         # @api private
+         def indexes(model)
+           model.properties(name).indexes
+         end
+
+         # @api private
+         def unique_indexes(model)
+           model.properties(name).unique_indexes
+         end
+
+         # @api private
+         def add_column_statement
+           'ADD COLUMN'
+         end
+       end # module SQL
+
+       include SQL
+
+       module ClassMethods
+         # Default types for all data object based adapters.
+         #
+         # @return [Hash] default types for data objects adapters.
+         #
+         # @api private
+         def type_map
+           length    = Property::String.length
+           precision = Property::Numeric.precision
+           scale     = Property::Decimal.scale
+
+           {
+             Property::Binary => { :primitive => 'BLOB' },
+             Object           => { :primitive => 'TEXT' },
+             Integer          => { :primitive => 'INTEGER' },
+             String           => { :primitive => 'VARCHAR', :length => length },
+             Class            => { :primitive => 'VARCHAR', :length => length },
+             BigDecimal       => { :primitive => 'DECIMAL', :precision => precision, :scale => scale },
+             Float            => { :primitive => 'FLOAT', :precision => precision },
+             DateTime         => { :primitive => 'TIMESTAMP' },
+             Date             => { :primitive => 'DATE' },
+             Time             => { :primitive => 'TIMESTAMP' },
+             TrueClass        => { :primitive => 'BOOLEAN' },
+             Property::Text   => { :primitive => 'TEXT' },
+           }.freeze
+         end
+       end
+     end
+
+   end
+ end
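
The methods above are the adapter-level surface that model auto-migration calls into; they can also be exercised directly on the adapter. A hedged sketch, reusing the illustrative Person model from the earlier example:

    adapter = DataMapper.repository(:default).adapter

    adapter.storage_exists?('people')         # => true once the table exists
    adapter.field_exists?('people', 'name')   # => true

    # Adds any columns that are missing (creating the table first if needed);
    # simple :index => true columns also get an index, per upgrade_model_storage.
    adapter.upgrade_model_storage(Person)

    # Issues DROP TABLE (IF EXISTS where the adapter supports it) and returns true.
    adapter.destroy_model_storage(Person)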
data/lib/dm-migrations/adapters/dm-mysql-adapter.rb
@@ -0,0 +1,299 @@
+ require 'dm-migrations/auto_migration'
+ require 'dm-migrations/adapters/dm-do-adapter'
+
+ module DataMapper
+   module Migrations
+     module MysqlAdapter
+
+       DEFAULT_ENGINE        = 'InnoDB'.freeze
+       DEFAULT_CHARACTER_SET = 'utf8'.freeze
+       DEFAULT_COLLATION     = 'utf8_unicode_ci'.freeze
+
+       include DataObjectsAdapter
+
+       # @api private
+       def self.included(base)
+         base.extend DataObjectsAdapter::ClassMethods
+         base.extend ClassMethods
+       end
+
+       # @api semipublic
+       def storage_exists?(storage_name)
+         select('SHOW TABLES LIKE ?', storage_name).first == storage_name
+       end
+
+       # @api semipublic
+       def field_exists?(storage_name, field)
+         result = select("SHOW COLUMNS FROM #{quote_name(storage_name)} LIKE ?", field).first
+         result ? result.field == field : false
+       end
+
+       module SQL #:nodoc:
+         # private ## This cannot be private for current migrations
+
+         # Allows for specification of the default storage engine to use when creating tables via
+         # migrations. Defaults to DEFAULT_ENGINE.
+         #
+         #   adapter = DataMapper.setup(:default, 'mysql://localhost/foo')
+         #   adapter.storage_engine = 'MyISAM'
+         #
+         # @api public
+         attr_accessor :storage_engine
+
+         # @api private
+         def supports_serial?
+           true
+         end
+
+         # @api private
+         def supports_drop_table_if_exists?
+           true
+         end
+
+         # @api private
+         def schema_name
+           # TODO: is there a cleaner way to find out the current DB we are connected to?
+           normalized_uri.path.split('/').last
+         end
+
+         # @api private
+         def create_table_statement(connection, model, properties)
+           "#{super} ENGINE = #{storage_engine} CHARACTER SET #{character_set} COLLATE #{collation}"
+         end
+
+         # @api private
+         def property_schema_hash(property)
+           schema = super
+
+           if property.kind_of?(Property::Text)
+             schema[:primitive] = text_column_statement(property.length)
+             schema.delete(:default)
+           end
+
+           if property.kind_of?(Property::Integer)
+             min = property.min
+             max = property.max
+
+             schema[:primitive] = integer_column_statement(min..max) if min && max
+           end
+
+           schema
+         end
+
+         # @api private
+         def property_schema_statement(connection, schema)
+           statement = super
+
+           if supports_serial? && schema[:serial]
+             statement << ' AUTO_INCREMENT'
+           end
+
+           statement
+         end
+
+         # @api private
+         def storage_engine
+           # Don't pull the default engine via show_variable for backwards compat where it was hard
+           # coded to InnoDB
+           @storage_engine ||= DEFAULT_ENGINE
+         end
+
+         # @api private
+         def character_set
+           @character_set ||= show_variable('character_set_connection') || DEFAULT_CHARACTER_SET
+         end
+
+         # @api private
+         def collation
+           @collation ||= show_variable('collation_connection') || DEFAULT_COLLATION
+         end
+
+         # @api private
+         def show_variable(name)
+           result = select('SHOW VARIABLES LIKE ?', name).first
+           result ? result.value.freeze : nil
+         end
+
+         private
+
+         # Return SQL statement for the text column
+         #
+         # @param [Integer] length
+         #   the max allowed length
+         #
+         # @return [String]
+         #   the statement to create the text column
+         #
+         # @api private
+         def text_column_statement(length)
+           if    length < 2**8  then 'TINYTEXT'
+           elsif length < 2**16 then 'TEXT'
+           elsif length < 2**24 then 'MEDIUMTEXT'
+           elsif length < 2**32 then 'LONGTEXT'
+
+           # http://www.postgresql.org/files/documentation/books/aw_pgsql/node90.html
+           # Implies that PostgreSQL doesn't have a size limit on text
+           # fields, so this param validation happens here instead of
+           # DM::Property#initialize.
+           else
+             raise ArgumentError, "length of #{length} exceeds maximum size supported"
+           end
+         end
+
+         # Return SQL statement for the integer column
+         #
+         # @param [Range] range
+         #   the min/max allowed integers
+         #
+         # @return [String]
+         #   the statement to create the integer column
+         #
+         # @api private
+         def integer_column_statement(range)
+           '%s(%d)%s' % [
+             integer_column_type(range),
+             integer_display_size(range),
+             integer_statement_sign(range),
+           ]
+         end
+
+         # Return the integer column type
+         #
+         # Use the smallest available column type that will satisfy the
+         # allowable range of numbers
+         #
+         # @param [Range] range
+         #   the min/max allowed integers
+         #
+         # @return [String]
+         #   the column type
+         #
+         # @api private
+         def integer_column_type(range)
+           if range.first < 0
+             signed_integer_column_type(range)
+           else
+             unsigned_integer_column_type(range)
+           end
+         end
+
+         # Return the signed integer column type
+         #
+         # @param [Range] range
+         #   the min/max allowed integers
+         #
+         # @return [String]
+         #
+         # @api private
+         def signed_integer_column_type(range)
+           min = range.first
+           max = range.last
+
+           tinyint   = 2**7
+           smallint  = 2**15
+           integer   = 2**31
+           mediumint = 2**23
+           bigint    = 2**63
+
+           if    min >= -tinyint   && max < tinyint   then 'TINYINT'
+           elsif min >= -smallint  && max < smallint  then 'SMALLINT'
+           elsif min >= -mediumint && max < mediumint then 'MEDIUMINT'
+           elsif min >= -integer   && max < integer   then 'INT'
+           elsif min >= -bigint    && max < bigint    then 'BIGINT'
+           else
+             raise ArgumentError, "min #{min} and max #{max} exceeds supported range"
+           end
+         end
+
+         # Return the unsigned integer column type
+         #
+         # @param [Range] range
+         #   the min/max allowed integers
+         #
+         # @return [String]
+         #
+         # @api private
+         def unsigned_integer_column_type(range)
+           max = range.last
+
+           if    max < 2**8  then 'TINYINT'
+           elsif max < 2**16 then 'SMALLINT'
+           elsif max < 2**24 then 'MEDIUMINT'
+           elsif max < 2**32 then 'INT'
+           elsif max < 2**64 then 'BIGINT'
+           else
+             raise ArgumentError, "min #{range.first} and max #{max} exceeds supported range"
+           end
+         end
+
+         # Return the integer column display size
+         #
+         # Adjust the display size to match the maximum number of
+         # expected digits. This is more for documentation purposes
+         # and does not affect what can actually be stored in a
+         # specific column
+         #
+         # @param [Range] range
+         #   the min/max allowed integers
+         #
+         # @return [Integer]
+         #   the display size for the integer
+         #
+         # @api private
+         def integer_display_size(range)
+           [ range.first.to_s.length, range.last.to_s.length ].max
+         end
+
+         # Return the integer sign statement
+         #
+         # @param [Range] range
+         #   the min/max allowed integers
+         #
+         # @return [String, nil]
+         #   statement if unsigned, nil if signed
+         #
+         # @api private
+         def integer_statement_sign(range)
+           ' UNSIGNED' unless range.first < 0
+         end
+
+         # @api private
+         def indexes(model)
+           filter_indexes(model, super)
+         end
+
+         # @api private
+         def unique_indexes(model)
+           filter_indexes(model, super)
+         end
+
+         # Filter out any indexes with an unindexable column in MySQL
+         #
+         # @api private
+         def filter_indexes(model, indexes)
+           field_map = model.properties(name).field_map
+           indexes.select do |index_name, fields|
+             fields.all? { |field| !field_map[field].kind_of?(Property::Text) }
+           end
+         end
+       end # module SQL
+
+       include SQL
+
+       module ClassMethods
+         # Types for MySQL databases.
+         #
+         # @return [Hash] types for MySQL databases.
+         #
+         # @api private
+         def type_map
+           super.merge(
+             DateTime => { :primitive => 'DATETIME' },
+             Time     => { :primitive => 'DATETIME' }
+           ).freeze
+         end
+       end
+
+     end
+   end
+ end
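
Two MySQL-specific behaviours in this file are worth calling out: the storage_engine accessor (documented in the adapter's own comment) and the :min/:max-driven integer sizing. An illustrative sketch; the URI, model, and option values are assumptions for the example, not part of this release:

    adapter = DataMapper.setup(:default, 'mysql://localhost/foo')
    adapter.storage_engine = 'MyISAM'   # tables created by migrations use MyISAM instead of the InnoDB default

    class Counter
      include DataMapper::Resource

      property :id,    Serial
      # 0..255 fits an unsigned TINYINT, so integer_column_statement renders
      # this column as TINYINT(3) UNSIGNED (display size 3 = digits in 255).
      property :value, Integer, :min => 0, :max => 255
    end

    DataMapper.finalize
    DataMapper.auto_migrate!   # CREATE TABLE ... ENGINE = MyISAM CHARACTER SET ... COLLATE ...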