brick 1.0.156 → 1.0.158

@@ -0,0 +1,341 @@
+module Brick
+  module MigrationBuilder
+    # Many SQL types are the same as their migration data type name:
+    # text, integer, bigint, date, boolean, decimal, float
+    # These however are not:
+    SQL_TYPES = { 'character varying' => 'string',
+                  'character' => 'string', # %%% Need to put in "limit: 1"
+                  'xml' => 'text',
+                  'bytea' => 'binary',
+                  'timestamp without time zone' => 'timestamp',
+                  'timestamp with time zone' => 'timestamp',
+                  'time without time zone' => 'time',
+                  'time with time zone' => 'time',
+                  'double precision' => 'float',
+                  'smallint' => 'integer', # %%% Need to put in "limit: 2"
+                  'ARRAY' => 'string', # Note that we'll also add ", array: true"
+                  # Oracle data types
+                  'VARCHAR2' => 'string',
+                  'CHAR' => 'string',
+                  ['NUMBER', 22] => 'integer',
+                  /^INTERVAL / => 'string', # Time interval stuff like INTERVAL YEAR(2) TO MONTH, INTERVAL '999' DAY(3), etc
+                  'XMLTYPE' => 'xml',
+                  'RAW' => 'binary',
+                  'SDO_GEOMETRY' => 'geometry',
+                  # MSSQL data types
+                  'int' => 'integer',
+                  'nvarchar' => 'string',
+                  'nchar' => 'string',
+                  'datetime2' => 'timestamp',
+                  'bit' => 'boolean',
+                  'varbinary' => 'binary',
+                  # Sqlite data types
+                  'TEXT' => 'text',
+                  '' => 'string',
+                  'INTEGER' => 'integer',
+                  'REAL' => 'float',
+                  'BLOB' => 'binary',
+                  'TIMESTAMP' => 'timestamp',
+                  'DATETIME' => 'timestamp'
+                }
+    # (Still need to find what "inet" and "json" data types map to.)
+
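SQL_TYPES mixes three kinds of keys: plain strings ('character varying'), [name, length] pairs (['NUMBER', 22]) and regexps (/^INTERVAL /). The lookup used further down in generate_migrations tries them in that order and falls back to the raw SQL type name. A minimal sketch of that chain, not part of the diff; the sample column tuples are hypothetical and assume the [type, length, ...] shape used below:

    def resolve_sql_type(col_type, sql_types = Brick::MigrationBuilder::SQL_TYPES)
      sql_types[col_type.first] ||       # exact string key, e.g. 'character varying'
        sql_types[col_type[0..1]] ||     # [name, length] key, e.g. ['NUMBER', 22]
        sql_types.find { |k, _v| k.is_a?(Regexp) && col_type.first =~ k }&.last ||
        col_type.first                   # fall back to the raw SQL type name
    end

    resolve_sql_type(['character varying', 255])    # => "string"
    resolve_sql_type(['NUMBER', 22])                # => "integer"
    resolve_sql_type(['INTERVAL DAY(3) TO SECOND']) # => "string" (matched by the regexp key)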
+    class << self
+      def check_folder(is_insert_versions = true, is_delete_versions = false)
+        versions_to_delete_or_append = nil
+        if Dir.exist?(mig_path = ActiveRecord::Migrator.migrations_paths.first || "#{::Rails.root}/db/migrate")
+          if Dir["#{mig_path}/**/*.rb"].present?
+            puts "WARNING: migrations folder #{mig_path} appears to already have ruby files present."
+            mig_path2 = "#{::Rails.root}/tmp/brick_migrations"
+            is_insert_versions = false unless mig_path == mig_path2
+            if Dir.exist?(mig_path2)
+              if Dir["#{mig_path2}/**/*.rb"].present?
+                puts "As well, temporary folder #{mig_path2} also has ruby files present."
+                puts "Choose a destination -- all existing .rb files will be removed:"
+                mig_path2 = gets_list(list: ['Cancel operation!', "Append migration files into #{mig_path} anyway", mig_path, mig_path2])
+                return if mig_path2.start_with?('Cancel')
+
+                existing_mig_files = Dir["#{mig_path2}/**/*.rb"]
+                if (is_insert_versions = mig_path == mig_path2)
+                  versions_to_delete_or_append = existing_mig_files.map { |ver| ver.split('/').last.split('_').first }
+                end
+                if mig_path2.start_with?('Append migration files into ')
+                  mig_path2 = mig_path
+                else
+                  is_delete_versions = true
+                  existing_mig_files.each { |rb| File.delete(rb) }
+                end
+              else
+                puts "Using temporary folder #{mig_path2} for created migration files.\n\n"
+              end
+            else
+              puts "Creating the temporary folder #{mig_path2} for created migration files.\n\n"
+              Dir.mkdir(mig_path2)
+            end
+            mig_path = mig_path2
+          else
+            puts "Using standard migration folder #{mig_path} for created migration files.\n\n"
+          end
+        else
+          puts "Creating standard ActiveRecord migration folder #{mig_path} to hold new migration files.\n\n"
+          Dir.mkdir(mig_path)
+        end
+        [mig_path, is_insert_versions, is_delete_versions]
+      end
+
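check_folder picks (and if necessary creates) the destination folder, then returns [mig_path, is_insert_versions, is_delete_versions], or nil if the user cancels at the prompt. A hedged sketch of a caller; the table list is hypothetical and the gem's own generators drive this differently:

    mig_path, insert_versions, delete_versions = Brick::MigrationBuilder.check_folder
    if mig_path # nil means the user chose 'Cancel operation!'
      Brick::MigrationBuilder.generate_migrations(%w[public.customers public.orders], # hypothetical table list
                                                  mig_path, insert_versions, delete_versions)
    end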
+      def generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions, relations = ::Brick.relations)
+        is_sqlite = ActiveRecord::Base.connection.adapter_name == 'SQLite'
+        key_type = ((is_sqlite || ActiveRecord.version < ::Gem::Version.new('5.1')) ? 'integer' : 'bigint')
+        is_4x_rails = ActiveRecord.version < ::Gem::Version.new('5.0')
+        ar_version = "[#{ActiveRecord.version.segments[0..1].join('.')}]" unless is_4x_rails
+
+        schemas = chosen.each_with_object({}) do |v, s|
+          if (v_parts = v.split('.')).length > 1
+            s[v_parts.first] = nil unless [::Brick.default_schema, 'public'].include?(v_parts.first)
+          end
+        end
+        # Start the timestamps back the same number of minutes from now as expected number of migrations to create
+        current_mig_time = Time.now - (schemas.length + chosen.length).minutes
+        done = []
+        fks = {}
+        stuck = {}
+        indexes = {} # Track index names to make sure things are unique
+        built_schemas = {} # Track all built schemas so we can place an appropriate drop_schema command only in the first
+                           # migration in which that schema is referenced, thereby allowing rollbacks to function properly.
+        versions_to_create = [] # Resulting versions to be used when updating the schema_migrations table
+        ar_base = Object.const_defined?(:ApplicationRecord) ? ApplicationRecord : Class.new(ActiveRecord::Base)
+        # Start by making migrations for fringe tables (those with no foreign keys).
+        # Continue layer by layer, creating migrations for tables that reference ones already done, until
+        # no more migrations can be created. (At that point hopefully all tables are accounted for.)
+        while (fringe = chosen.reject do |tbl|
+          snag_fks = []
+          snags = relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
+            v[:is_bt] && !v[:polymorphic] &&
+            tbl != v[:inverse_table] && # Ignore self-referencing associations (stuff like "parent_id")
+            !done.include?(v[:inverse_table]) &&
+            ::Brick.config.ignore_migration_fks.exclude?(snag_fk = "#{tbl}.#{v[:fk]}") &&
+            snag_fks << snag_fk
+          end
+          if snags&.present?
+            # puts snag_fks.inspect
+            stuck[tbl] = snags
+          end
+        end).present?
+          fringe.each do |tbl|
+            next unless (relation = relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present?
+
+            pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [ar_base.primary_key].flatten.sort)
+            # In case things aren't as standard
+            if pkey_cols.empty?
+              pkey_cols = if rpk.empty? && relation[:cols][arpk.first]&.first == key_type
+                            arpk
+                          elsif rpk.first
+                            rpk
+                          end
+            end
+            schema = if (tbl_parts = tbl.split('.')).length > 1
+                       if tbl_parts.first == (::Brick.default_schema || 'public')
+                         tbl_parts.shift
+                         nil
+                       else
+                         tbl_parts.first
+                       end
+                     end
+            unless schema.blank? || built_schemas.key?(schema)
+              mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
+              migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
+              built_schemas[schema] = nil
+            end
+
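For a schema outside the default, the block above writes a one-off migration before any of that schema's tables. Roughly what that file would contain for a hypothetical "sales" schema under Rails 7.0 (timestamp prefix and exact whitespace will vary):

    # e.g. 20240101120100_create_db_schema_sales.rb
    class CreateDbSchemaSales < ActiveRecord::Migration[7.0]
      def change
        create_schema(:sales) unless schema_exists?(:sales)
      end
    end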
+            # %%% For the moment we're skipping polymorphics
+            fkey_cols = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
+            # If the primary key is also used as a foreign key, will need to do id: false and then build out
+            # a column definition which includes :primary_key -- %%% also using a data type of bigserial or serial
+            # if this one has come in as bigint or integer.
+            pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
+            # Support missing primary key (by adding: , id: false)
+            id_option = if pk_is_also_fk || !pkey_cols&.present?
+                          needs_serial_col = true
+                          +', id: false'
+                        elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
+                               (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
+                                                 SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
+                                                 pkey_col_first
+                               ) != key_type
+                              )
+                          case pkey_col_first
+                          when 'integer'
+                            +', id: :serial'
+                          when 'bigint'
+                            +', id: :bigserial'
+                          else
+                            +", id: :#{pkey_col_first}" # Something like: id: :integer, primary_key: :businessentityid
+                          end +
+                            (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
+                        end
+            if !id_option && pkey_cols.sort != arpk
+              id_option = +", primary_key: :#{pkey_cols.first}"
+            end
+            if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
+              (id_option ||= +'') << ", comment: #{comment.inspect}"
+            end
+            # Find the ActiveRecord class in order to see if the columns have comments
+            unless is_4x_rails
+              klass = begin
+                        tbl.tr('.', '/').singularize.camelize.constantize
+                      rescue StandardError
+                      end
+              if klass
+                unless ActiveRecord::Migration.table_exists?(klass.table_name)
+                  puts "WARNING: Unable to locate table #{klass.table_name} (for #{klass.name})."
+                  klass = nil
+                end
+              end
+            end
+            # Refer to this table name as a symbol or dotted string as appropriate
+            tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
+            mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
+            mig << " create_table #{tbl_code}#{id_option} do |t|\n"
+            possible_ts = [] # Track possible generic timestamps
+            add_fks = [] # Track foreign keys to add after table creation
+            relation[:cols].each do |col, col_type|
+              sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
+                         SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
+                         col_type.first
+              suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
+              suffix << ', array: true' if (col_type.first == 'ARRAY')
+              if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
+                suffix << ", comment: #{comment.inspect}"
+              end
+              # Determine if this column is used as part of a foreign key
+              if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
+                to_table = fk[:inverse_table].split('.')
+                to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
+                if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
+                  sql_type = new_serial_type
+                  needs_serial_col = false
+                end
+                if fk[:fk] != "#{fk[:assoc_name].singularize}_id" # Need to do our own foreign_key tricks, not use references?
+                  column = fk[:fk]
+                  mig << emit_column(sql_type, column, suffix)
+                  add_fks << [to_table, column, relations[fk[:inverse_table]]]
+                else
+                  suffix << ", type: :#{sql_type}" unless sql_type == key_type
+                  # Will the resulting default index name be longer than what Postgres allows? (63 characters)
+                  if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
+                    # Try to find a shorter name that hasn't been used yet
+                    unless indexes.key?(shorter = idx_name[0..62]) ||
+                           indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
+                           indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
+                      puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
+                      puts "so have resorted to this GUID-based identifier: #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
+                    end
+                    suffix << ", index: { name: '#{shorter || idx_name}' }"
+                    indexes[shorter || idx_name] = nil
+                  end
+                  primary_key = nil
+                  begin
+                    primary_key = relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
+                  rescue NameError => e
+                    primary_key = ar_base.primary_key
+                  end
+                  mig << " t.references :#{fk[:assoc_name]}#{suffix}, foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ar_base.primary_key} }\n"
+                end
+              else
+                next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
+
+                # See if there are generic timestamps
+                if sql_type == 'timestamp' && ['created_at','updated_at'].include?(col)
+                  possible_ts << [col, !col_type[3]]
+                else
+                  mig << emit_column(sql_type, col, suffix)
+                end
+              end
+            end
+            if possible_ts.length == 2 && # Both created_at and updated_at
+               # Rails 5 and later timestamps default to NOT NULL
+               (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
+              mig << "\n t.timestamps\n"
+            else # Just one or the other, or a nullability mismatch
+              possible_ts.each { |ts| mig << emit_column('timestamp', ts.first, nil) }
+            end
+            mig << " end\n"
+            if pk_is_also_fk
+              mig << " reversible do |dir|\n"
+              mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
+              mig << " end\n"
+            end
+            add_fks.each do |add_fk|
+              is_commented = false
+              # add_fk[2] holds the inverse relation
+              unless (pk = add_fk[2][:pkey].values.flatten&.first)
+                is_commented = true
+                mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
+                # No official PK, but if coincidentally there's a column of the same name, take a chance on it
+                pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
+              end
+              # to_table column
+              mig << " #{'# ' if is_commented}add_foreign_key #{tbl_code}, #{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
+            end
+            mig << " end\n"
+            versions_to_create << migration_file_write(mig_path, "create_#{tbl_parts.map(&:underscore).join('_')}", current_mig_time += 1.minute, ar_version, mig)
+          end
+          done.concat(fringe)
+          chosen -= done
+        end
+
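Putting the pieces of the loop together, a generated table migration ends up looking roughly like the following, shown here for a hypothetical customers table with one string column, a belongs-to reference to orders, and standard timestamps (stamp, Rails version tag, and whitespace will vary):

    # e.g. 20240101120200_create_customers.rb
    class CreateCustomers < ActiveRecord::Migration[7.0]
      def change
        return unless reverting? || !table_exists?(:customers)

        create_table :customers do |t|
          t.string :name, null: false
          t.references :order, foreign_key: { to_table: :orders }
          t.timestamps
        end
      end
    end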
+        stuck_counts = Hash.new { |h, k| h[k] = 0 }
+        chosen.each do |leftover|
+          puts "Can't do #{leftover} because:\n #{stuck[leftover].map do |snag|
+            stuck_counts[snag.last[:inverse_table]] += 1
+            snag.last[:assoc_name]
+          end.join(', ')}"
+        end
+        if mig_path.start_with?(cur_path = ::Rails.root.to_s)
+          pretty_mig_path = mig_path[cur_path.length..-1]
+        end
+        puts "\n*** Created #{done.length} migration files under #{pretty_mig_path || mig_path} ***"
+        if (stuck_sorted = stuck_counts.to_a.sort { |a, b| b.last <=> a.last }).length.positive?
+          puts "-----------------------------------------"
+          puts "Unable to create migrations for #{stuck_sorted.length} tables#{
+            ". Here's the top 5 blockers" if stuck_sorted.length > 5
+          }:"
+          pp stuck_sorted[0..4]
+        else # Successful, and now we can update the schema_migrations table accordingly
+          unless ActiveRecord::Migration.table_exists?(ActiveRecord::Base.schema_migrations_table_name)
+            ActiveRecord::SchemaMigration.create_table
+          end
+          # Remove to_delete - to_create
+          if ((versions_to_delete_or_append ||= []) - versions_to_create).present? && is_delete_versions
+            ActiveRecord::Base.execute_sql("DELETE FROM #{
+              ActiveRecord::Base.schema_migrations_table_name} WHERE version IN (#{
+              (versions_to_delete_or_append - versions_to_create).map { |vtd| "'#{vtd}'" }.join(', ')}
+            )")
+          end
+          # Add to_create - to_delete
+          if is_insert_versions && ((versions_to_create ||= []) - versions_to_delete_or_append).present?
+            ActiveRecord::Base.execute_sql("INSERT INTO #{
+              ActiveRecord::Base.schema_migrations_table_name} (version) VALUES #{
+              (versions_to_create - versions_to_delete_or_append).map { |vtc| "('#{vtc}')" }.join(', ')
+            }")
+          end
+        end
+      end
+
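The two execute_sql branches above are plain set arithmetic on version stamps. With hypothetical version lists:

    versions_to_delete_or_append = %w[20240101120100 20240101120200]
    versions_to_create           = %w[20240101120200 20240101120300]

    versions_to_delete_or_append - versions_to_create # => ["20240101120100"]  rows to DELETE
    versions_to_create - versions_to_delete_or_append # => ["20240101120300"]  rows to INSERT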
+      private
+
+      def emit_column(type, name, suffix)
+        " t.#{type.start_with?('numeric') ? 'decimal' : type} :#{name}#{suffix}\n"
+      end
+
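emit_column (private, so only called from within the module) renders a single column line and folds any numeric(...) type into t.decimal; with hypothetical arguments it returns strings like:

    emit_column('string', 'name', ', null: false')  # => " t.string :name, null: false\n"
    emit_column('numeric(12,2)', 'price', nil)      # => " t.decimal :price\n"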
+      def migration_file_write(mig_path, name, current_mig_time, ar_version, mig)
+        File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}.rb", "w") do |f|
+          f.write "class #{name.camelize} < ActiveRecord::Migration#{ar_version}\n"
+          f.write mig
+          f.write "end\n"
+        end
+        version
+      end
+    end
+  end
+end
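migration_file_write stamps each file with a minute-spaced version whose seconds are always zeroed, and that stamp is what generate_migrations later records in schema_migrations. A quick sketch with a hypothetical time:

    t = Time.new(2024, 1, 1, 12, 34, 56)
    t.strftime('%Y%m%d%H%M00')  # => "20240101123400"
    # => the file would be written as <mig_path>/20240101123400_create_customers.rb
    #    for a name of "create_customers", and "20240101123400" is the returned version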