brick 1.0.190 → 1.0.192

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -32,7 +32,7 @@ module Brick
  'bit' => 'boolean',
  'varbinary' => 'binary',
  'tinyint' => 'integer', # %%% Need to put in "limit: 2"
- 'year' => 'date',
+ 'year' => 'integer',
  'set' => 'string',
  # Sqlite data types
  'TEXT' => 'text',
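
Note: with this remapping, MySQL YEAR columns now come through as integer columns in generated migrations instead of date columns. A minimal sketch of the effect (the model_year column name is illustrative; the helper mirrors the shape of the emit_column method shown later in this diff):

    # Sketch: how the 'year' remapping changes an emitted column line
    SQL_TYPES = { 'year' => 'integer' } # was 'year' => 'date' in 1.0.190

    def emit_column(type, name, suffix)
      "    t.#{type.start_with?('numeric') ? 'decimal' : type} :#{name}#{suffix}\n"
    end

    puts emit_column(SQL_TYPES['year'], :model_year, ', null: false')
    # 1.0.192 emits:  t.integer :model_year, null: false   (1.0.190 would have emitted t.date)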
@@ -89,7 +89,13 @@ module Brick
  [mig_path, is_insert_versions, is_delete_versions]
  end

- def generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions, relations = ::Brick.relations)
+ def generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions,
+ relations = ::Brick.relations, do_fks_last: nil, do_schema_migrations: true)
+ if do_fks_last.nil?
+ puts 'Would you like for the foreign keys to be built inline inside of each migration file, or as a final migration?'
+ do_fks_last = (gets_list(list: ['Inline', 'Separate final migration for all FKs']).start_with?('Separate'))
+ end
+
  is_sqlite = ActiveRecord::Base.connection.adapter_name == 'SQLite'
  key_type = ((is_sqlite || ActiveRecord.version < ::Gem::Version.new('5.1')) ? 'integer' : 'bigint')
  is_4x_rails = ActiveRecord.version < ::Gem::Version.new('5.0')
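
Note: the two new keyword arguments make the method usable non-interactively. Leaving do_fks_last as nil keeps the prompt above, while passing explicit values skips it. The call below mirrors the one the new Salesforce generator (later in this diff) makes:

    ::Brick::MigrationBuilder.generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions,
                                                  relations,
                                                  do_fks_last: true,            # defer all FKs to one final migration
                                                  do_schema_migrations: false)  # skip inserting rows into schema_migrations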
@@ -112,6 +118,7 @@ module Brick
  # Start by making migrations for fringe tables (those with no foreign keys).
  # Continue layer by layer, creating migrations for tables that reference ones already done, until
  # no more migrations can be created. (At that point hopefully all tables are accounted for.)
+ after_fks = [] # Track foreign keys to add after table creation
  while (fringe = chosen.reject do |tbl|
  snag_fks = []
  snags = relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
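
Note: after_fks collects, across every processed table, the same tuples that gen_migration_columns pushes into add_fks. Per the code later in this diff, each entry now carries four elements (the table code in position 3 is new in this release). A sketch with illustrative table and column names:

    # [to_table_code, fk_column, inverse_relation_hash, tbl_code]
    after_fks << [':customers', 'customer_id', relations['customers'], ':orders']
    # ...which the final FK migration later renders roughly as:
    #   add_foreign_key :orders, :customers, column: :customer_id, primary_key: :id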
@@ -131,166 +138,58 @@ module Brick
  end
  end).present?
  fringe.each do |tbl|
- next unless (relation = relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present?
+ mig = gen_migration_columns(relations, tbl, (tbl_parts = tbl.split('.')), (add_fks = []),
+ key_type, is_4x_rails, ar_version, do_fks_last)
+ after_fks.concat(add_fks) if do_fks_last
+ versions_to_create << migration_file_write(mig_path, ::Brick._brick_index("create_#{tbl}", nil, 'x'), current_mig_time += 1.minute, ar_version, mig)
+ end
+ done.concat(fringe)
+ chosen -= done
+ end

- pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [::Brick.ar_base.primary_key].flatten.sort)
- # In case things aren't as standard
- if pkey_cols.empty?
- pkey_cols = if rpk.empty? && relation[:cols][arpk.first]&.first == key_type
- arpk
- elsif rpk.first
- rpk
- end
- end
- schema = if (tbl_parts = tbl.split('.')).length > 1
- if tbl_parts.first == (::Brick.default_schema || 'public')
- tbl_parts.shift
- nil
- else
- tbl_parts.first
- end
- end
- unless schema.blank? || built_schemas.key?(schema)
- mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
- migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
- built_schemas[schema] = nil
- end
+ if do_fks_last
+ # Write out any more tables that haven't been done yet
+ chosen.each do |tbl|
+ mig = gen_migration_columns(relations, tbl, (tbl_parts = tbl.split('.')), (add_fks = []),
+ key_type, is_4x_rails, ar_version, do_fks_last)
+ after_fks.concat(add_fks)
+ migration_file_write(mig_path, ::Brick._brick_index("create_#{tbl}", nil, 'x'), current_mig_time += 1.minute, ar_version, mig)
+ end
+ done.concat(chosen)
+ chosen.clear

- # %%% For the moment we're skipping polymorphics
- fkey_cols = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
- # If the primary key is also used as a foreign key, will need to do id: false and then build out
- # a column definition which includes :primary_key -- %%% also using a data type of bigserial or serial
- # if this one has come in as bigint or integer.
- pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
- # Support missing primary key (by adding: , id: false)
- id_option = if pk_is_also_fk || !pkey_cols&.present?
- needs_serial_col = true
- +', id: false'
- elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
- (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
- SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
- pkey_col_first
- ) != key_type
- )
- case pkey_col_first
- when 'integer'
- +', id: :serial'
- when 'bigint'
- +', id: :bigserial'
- else
- +", id: :#{pkey_col_first}" # Something like: id: :integer, primary_key: :businessentityid
- end +
- (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
- end
- if !id_option && pkey_cols.sort != arpk
- id_option = +", primary_key: :#{pkey_cols.first}"
- end
- if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
- (id_option ||= +'') << ", comment: #{comment.inspect}"
- end
- # Find the ActiveRecord class in order to see if the columns have comments
- unless is_4x_rails
- klass = begin
- tbl.tr('.', '/').singularize.camelize.constantize
- rescue StandardError
- end
- if klass
- unless ActiveRecord::Migration.table_exists?(klass.table_name)
- puts "WARNING: Unable to locate table #{klass.table_name} (for #{klass.name})."
- klass = nil
- end
- end
- end
- # Refer to this table name as a symbol or dotted string as appropriate
- tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
- mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
- mig << " create_table #{tbl_code}#{id_option} do |t|\n"
- possible_ts = [] # Track possible generic timestamps
- add_fks = [] # Track foreign keys to add after table creation
- relation[:cols].each do |col, col_type|
- sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
- SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
- col_type.first
- suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
- suffix << ', array: true' if (col_type.first == 'ARRAY')
- if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
- suffix << ", comment: #{comment.inspect}"
- end
- # Determine if this column is used as part of a foreign key
- if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
- to_table = fk[:inverse_table].split('.')
- to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
- if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
- sql_type = new_serial_type
- needs_serial_col = false
- end
- if fk[:fk] != "#{fk[:assoc_name].singularize}_id" # Need to do our own foreign_key tricks, not use references?
- column = fk[:fk]
- mig << emit_column(sql_type, column, suffix)
- add_fks << [to_table, column, relations[fk[:inverse_table]]]
- else
- suffix << ", type: :#{sql_type}" unless sql_type == key_type
- # Will the resulting default index name be longer than what Postgres allows? (63 characters)
- if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
- # Try to find a shorter name that hasn't been used yet
- unless indexes.key?(shorter = idx_name[0..62]) ||
- indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
- indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
- puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
- puts "so have resorted to this GUID-based identifier: #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
- end
- suffix << ", index: { name: '#{shorter || idx_name}' }"
- indexes[shorter || idx_name] = nil
- end
- primary_key = nil
- begin
- primary_key = relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
- rescue NameError => e
- primary_key = ::Brick.ar_base.primary_key
- end
- mig << " t.references :#{fk[:assoc_name]}#{suffix}, foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ::Brick.ar_base.primary_key} }\n"
- end
- else
- next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
+ # Add a final migration to create all the foreign keys
+ mig = +" def change\n"
+ after_fks.each do |add_fk|
+ next unless add_fk[2] # add_fk[2] holds the inverse relation

- # See if there are generic timestamps
- if sql_type == 'timestamp' && ['created_at','updated_at'].include?(col)
- possible_ts << [col, !col_type[3]]
- else
- mig << emit_column(sql_type, col, suffix)
- end
- end
- end
- if possible_ts.length == 2 && # Both created_at and updated_at
- # Rails 5 and later timestamps default to NOT NULL
- (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
- mig << "\n t.timestamps\n"
- else # Just one or the other, or a nullability mismatch
- possible_ts.each { |ts| emit_column('timestamp', ts.first, nil) }
+ unless (pk = add_fk[2][:pkey].values.flatten&.first)
+ # No official PK, but if coincidentally there's a column of the same name, take a chance on it
+ pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
  end
- mig << " end\n"
- if pk_is_also_fk
- mig << " reversible do |dir|\n"
- mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
- mig << " end\n"
- end
- add_fks.each do |add_fk|
- is_commented = false
- # add_fk[2] holds the inverse relation
- unless (pk = add_fk[2][:pkey].values.flatten&.first)
- is_commented = true
- mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
- # No official PK, but if coincidentally there's a column of the same name, take a chance on it
- pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
- end
- # to_table column
- mig << " #{'# ' if is_commented}add_foreign_key #{tbl_code}, #{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
- end
- mig << " end\n"
- versions_to_create << migration_file_write(mig_path, "create_#{tbl_parts.map(&:underscore).join('_')}", current_mig_time += 1.minute, ar_version, mig)
+ mig << " add_foreign_key #{add_fk[3]}, " # The tbl_code
+ # to_table column
+ mig << "#{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
  end
- done.concat(fringe)
- chosen -= done
+ if after_fks.length > 500
+ minutes = (after_fks.length + 1000) / 1500
+ mig << " if ActiveRecord::Base.connection.adapter_name == 'PostgreSQL'\n"
+ mig << " puts 'NOTE: It could take around #{minutes} #{'minute'.pluralize(minutes)} on a FAST machine for Postgres to do all the final processing for these foreign keys. Please be patient!'\n"
+
+ mig << " # Vacuum takes only about ten seconds when all the tables are empty,
+ # and about 2 minutes when the tables are fairly full.
+ execute('COMMIT')
+ execute('VACUUM FULL')
+ execute('BEGIN TRANSACTION')
+ end\n"
+ end
+
+ mig << +" end\n"
+ migration_file_write(mig_path, 'create_brick_fks.rbx', current_mig_time += 1.minute, ar_version, mig)
+ puts "Have written out a final migration called 'create_brick_fks.rbx' which creates #{after_fks.length} foreign keys.
+ This file extension (.rbx) will cause it not to run yet when you do a 'rails db:migrate'.
+ The idea here is to do all data loading first, and then rename that migration file back
+ into having a .rb extension, and run a final db:migrate to put the foreign keys in place."
  end

  stuck_counts = Hash.new { |h, k| h[k] = 0 }
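
Note: the deferred-FK file is written with a .rbx extension precisely so that 'rails db:migrate' ignores it until data loading is done. A small sketch of the rename step described in the message above (paths assume Rails defaults):

    require 'fileutils'

    # After all data is loaded, re-enable the deferred FK migration...
    Dir['db/migrate/*_create_brick_fks.rbx'].each do |f|
      FileUtils.mv(f, f.sub(/\.rbx\z/, '.rb'))
    end
    # ...and then run:  bin/rails db:migrate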
@@ -310,7 +209,7 @@ module Brick
  ". Here's the top 5 blockers" if stuck_sorted.length > 5
  }:"
  pp stuck_sorted[0..4]
- else # Successful, and now we can update the schema_migrations table accordingly
+ elsif do_schema_migrations # Successful, and now we can update the schema_migrations table accordingly
  unless ActiveRecord::Migration.table_exists?(ActiveRecord::Base.schema_migrations_table_name)
  ActiveRecord::SchemaMigration.create_table
  end
@@ -333,13 +232,178 @@ module Brick

  private

+ def gen_migration_columns(relations, tbl, tbl_parts, add_fks,
+ key_type, is_4x_rails, ar_version, do_fks_last)
+ return unless (relation = relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present?
+
+ mig = +''
+ pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [::Brick.ar_base.primary_key].flatten.sort)
+ # In case things aren't as standard
+ if pkey_cols.empty?
+ pkey_cols = if rpk.empty? && relation[:cols][arpk.first]&.first == key_type
+ arpk
+ elsif rpk.first
+ rpk
+ end
+ end
+ schema = if tbl_parts.length > 1
+ if tbl_parts.first == (::Brick.default_schema || 'public')
+ tbl_parts.shift
+ nil
+ else
+ tbl_parts.first
+ end
+ end
+ unless schema.blank? || built_schemas.key?(schema)
+ mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
+ migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
+ built_schemas[schema] = nil
+ end
+
+ # %%% For the moment we're skipping polymorphics
+ fkey_cols = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
+ # If the primary key is also used as a foreign key, will need to do id: false and then build out
+ # a column definition which includes :primary_key -- %%% also using a data type of bigserial or serial
+ # if this one has come in as bigint or integer.
+ pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
+ id_option = if pk_is_also_fk || !pkey_cols&.present?
+ needs_serial_col = true
+ +', id: false' # Support missing primary key (by adding: , id: false)
+ elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
+ (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
+ SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
+ pkey_col_first
+ ) != key_type
+ )
+ case pkey_col_first
+ when 'integer'
+ +', id: :serial'
+ when 'bigint'
+ +', id: :bigserial'
+ else
+ +", id: :#{pkey_col_first}" # Something like: id: :integer, primary_key: :businessentityid
+ end +
+ (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
+ end
+ if !id_option && pkey_cols.sort != arpk
+ id_option = +", primary_key: :#{pkey_cols.first}"
+ end
+ if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
+ (id_option ||= +'') << ", comment: #{comment.inspect}"
+ end
+ # Find the ActiveRecord class in order to see if the columns have comments
+ unless is_4x_rails
+ klass = begin
+ tbl.tr('.', '/').singularize.camelize.constantize
+ rescue StandardError
+ end
+ if klass
+ unless ActiveRecord::Migration.table_exists?(klass.table_name)
+ puts "WARNING: Unable to locate table #{klass.table_name} (for #{klass.name})."
+ klass = nil
+ end
+ end
+ end
+ # Refer to this table name as a symbol or dotted string as appropriate
+ tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
+ mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
+ mig << " create_table #{tbl_code}#{id_option} do |t|\n"
+ possible_ts = [] # Track possible generic timestamps
+ relation[:cols].each do |col, col_type|
+ sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
+ SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
+ col_type.first
+ suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
+ suffix << ', array: true' if (col_type.first == 'ARRAY')
+ if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
+ suffix << ", comment: #{comment.inspect}"
+ end
+ # Determine if this column is used as part of a foreign key
+ if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
+ to_table = fk[:inverse_table].split('.')
+ to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
+ if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
+ sql_type = new_serial_type
+ needs_serial_col = false
+ end
+ if do_fks_last || (fk[:fk] != "#{fk[:assoc_name].singularize}_id") # Need to do our own foreign_key tricks, not use references?
+ column = fk[:fk]
+ mig << emit_column(sql_type, column, suffix)
+ add_fks << [to_table, column, relations[fk[:inverse_table]], tbl_code]
+ else
+ suffix << ", type: :#{sql_type}" unless sql_type == key_type
+ # Will the resulting default index name be longer than what Postgres allows? (63 characters)
+ if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
+ # Try to find a shorter name that hasn't been used yet
+ unless indexes.key?(shorter = idx_name[0..62]) ||
+ indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
+ indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
+ puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
+ puts "so have resorted to this GUID-based identifier: #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
+ end
+ suffix << ", index: { name: '#{shorter || idx_name}' }"
+ indexes[shorter || idx_name] = nil
+ end
+ next if do_fks_last
+
+ primary_key = nil
+ begin
+ primary_key = relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
+ rescue NameError => e
+ primary_key = ::Brick.ar_base.primary_key
+ end
+ fk_stuff = ", foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ::Brick.ar_base.primary_key} }"
+ mig << " t.references :#{fk[:assoc_name]}#{suffix}#{fk_stuff}\n"
+ end
+ else
+ next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
+
+ # See if there are generic timestamps
+ if sql_type == 'timestamp' && ['created_at','updated_at'].include?(col)
+ possible_ts << [col, !col_type[3]]
+ else
+ mig << emit_column(sql_type, col, suffix)
+ end
+ end
+ end
+ if possible_ts.length == 2 && # Both created_at and updated_at
+ # Rails 5 and later timestamps default to NOT NULL
+ (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
+ mig << "\n t.timestamps\n"
+ else # Just one or the other, or a nullability mismatch
+ possible_ts.each { |ts| emit_column('timestamp', ts.first, nil) }
+ end
+ mig << " end\n"
+ if pk_is_also_fk
+ mig << " reversible do |dir|\n"
+ mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
+ mig << " end\n"
+ end
+ add_fks.each do |add_fk|
+ next unless add_fk[2]
+
+ is_commented = false
+ # add_fk[2] holds the inverse relation
+ unless (pk = add_fk[2][:pkey]&.values&.flatten&.first)
+ is_commented = true
+ mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
+ # No official PK, but if coincidentally there's a column of the same name, take a chance on it
+ pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
+ end
+ mig << " #{'# ' if do_fks_last}#{'# ' if is_commented}add_foreign_key #{tbl_code}, "
+ # to_table column
+ mig << "#{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
+ end
+ mig << " end\n"
+ end
+
  def emit_column(type, name, suffix)
  " t.#{type.start_with?('numeric') ? 'decimal' : type} :#{name}#{suffix}\n"
  end

  def migration_file_write(mig_path, name, current_mig_time, ar_version, mig)
- File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}.rb", "w") do |f|
- f.write "class #{name.camelize} < ActiveRecord::Migration#{ar_version}\n"
+ File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}#{'.rb' unless name.index('.')}", "w") do |f|
+ f.write "class #{name.split('.').first.camelize} < ActiveRecord::Migration#{ar_version}\n"
  f.write mig
  f.write "end\n"
  end
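
Note: the migration_file_write change is what lets the FK migration keep its .rbx name: an extension is appended only when the name has no dot, and the class name is derived from the part before the first dot. The timestamps and the create_products name below are illustrative:

    # Name without a dot -> '.rb' appended; runs with db:migrate as usual
    #   migration_file_write(mig_path, 'create_products', t, ar_version, mig)
    #   => db/migrate/20990101120000_create_products.rb    (class CreateProducts)
    # Name with a dot -> extension kept verbatim, so the file stays inert for now
    #   migration_file_write(mig_path, 'create_brick_fks.rbx', t, ar_version, mig)
    #   => db/migrate/20990101120100_create_brick_fks.rbx   (class CreateBrickFks)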
@@ -26,6 +26,7 @@ module Brick
  end

  mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationBuilder.check_folder
+ return unless mig_path

  # Generate a list of tables that can be chosen
  chosen = gets_list(list: tables, chosen: tables.dup)
@@ -6,12 +6,12 @@ require 'rails/generators/active_record'
  require 'fancy_gets'

  module Brick
- # Auto-generates models, controllers, or views
+ # Auto-generates models
  class ModelsGenerator < ::Rails::Generators::Base
  include FancyGets
  # include ::Rails::Generators::Migration

- desc 'Auto-generates models, controllers, or views.'
+ desc 'Auto-generates models.'

  def brick_models
  # %%% If Apartment is active and there's no schema_to_analyse, ask which schema they want
@@ -0,0 +1,101 @@
+ # frozen_string_literal: true
+
+ require 'brick'
+ require 'rails/generators'
+ require 'fancy_gets'
+ require 'generators/brick/migration_builder'
+ require 'generators/brick/salesforce_schema'
+
+ module Brick
+ # Auto-generates migration files
+ class SalesforceMigrationsGenerator < ::Rails::Generators::Base
+ include FancyGets
+ desc 'Auto-generates migration files for a set of Salesforce tables and columns.'
+
+ argument :wsdl_file, type: :string, default: ''
+
+ def brick_salesforce_migrations
+ ::Brick.apply_double_underscore_patch
+ # ::Brick.mode = :on
+ # ActiveRecord::Base.establish_connection
+
+ # Runs at the end of parsing Salesforce WSDL, and uses the discovered tables and columns to create migrations
+ relations = nil
+ end_document_proc = lambda do |salesforce_tables|
+ # p [:end_document]
+ mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationBuilder.check_folder
+ return unless mig_path
+
+ # Generate a list of tables that can be chosen
+ table_names = salesforce_tables.keys
+ chosen = gets_list(list: table_names, chosen: table_names.dup)
+
+ soap_data_types = {
+ 'tns:ID' => 'string',
+ 'xsd:string' => 'string',
+ 'xsd:dateTime' => 'datetime',
+ 'xsd:boolean' => 'boolean',
+ 'xsd:double' => 'float',
+ 'xsd:int' => 'integer',
+ 'xsd:date' => 'date',
+ 'xsd:anyType' => 'string', # Don't fully know on this
+ 'xsd:long' => 'bigint',
+ 'xsd:base64Binary' => 'bytea',
+ 'xsd:time' => 'time'
+ }
+ fk_idx = 0
+ # Build out a '::Brick.relations' hash that represents this Salesforce schema
+ relations = chosen.each_with_object({}) do |tbl_name, s|
+ tbl = salesforce_tables[tbl_name]
+ # Build out columns and foreign keys
+ cols = { 'id'=>['string', nil, false, true] }
+ fks = {}
+ tbl[:cols].each do |col|
+ next if col[:name] == 'Id'
+
+ dt = soap_data_types[col[:data_type]] || 'string'
+ cols[col[:name]] = [dt, nil, col[:nillable], false]
+ if (ref_to = col[:fk_reference_to])
+ fk_hash = {
+ is_bt: true,
+ fk: col[:name],
+ assoc_name: "#{col[:name]}_bt",
+ inverse_table: ref_to
+ }
+ fks["fk_salesforce_#{fk_idx += 1}"] = fk_hash
+ end
+ end
+ # Put it all into a relation entry, named the same as the table
+ s[tbl_name] = {
+ pkey: { "#{tbl_name}_pkey" => ['id'] },
+ cols: cols,
+ fks: fks
+ }
+ end
+ # Build but do not have foreign keys established yet, and do not put version entries info the schema_migrations table
+ ::Brick::MigrationBuilder.generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions, relations,
+ do_fks_last: true, do_schema_migrations: false)
+ end
+ parser = Nokogiri::XML::SAX::Parser.new(::Brick::SalesforceSchema.new(end_document_proc))
+ # The WSDL file must have a .xml extension, and can be in any folder in the project
+ # Alternatively the user can supply this option on the command line
+ @wsdl_file = nil if @wsdl_file == ''
+ loop do
+ break if (@wsdl_file ||= gets_list(Dir['**/*.xml'] + ['* Cancel *'])) == '* Cancel *'
+
+ parser.parse(File.read(@wsdl_file))
+
+ if relations.length > 300
+ puts "A Salesforce installation generally has hundreds to a few thousand tables, and many are empty.
+ In order to more easily navigate just those tables that have content, you might want to add this
+ to brick.rb:
+ ::Brick.omit_empty_tables_in_dropdown = true"
+ end
+ break
+ rescue Errno::ENOENT
+ puts "File \"#{@wsdl_file}\" is not found."
+ @wsdl_file = nil
+ end
+ end
+ end
+ end
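
Note: a sketch of how this new generator is likely driven and of the relations entry it builds per table. The command follows standard Rails generator naming for Brick::SalesforceMigrationsGenerator, and the 'Account'/'Name'/'OwnerId' names are illustrative:

    # Probable invocation (the WSDL path is an example; with no argument the generator
    # prompts with a list of *.xml files found in the project):
    #   bin/rails generate brick:salesforce_migrations path/to/enterprise_wsdl.xml

    # Shape of one entry in the relations hash handed to generate_migrations,
    # mirroring the assignments in the code above:
    relations['Account'] = {
      pkey: { 'Account_pkey' => ['id'] },
      cols: { 'id'      => ['string', nil, false, true],
              'Name'    => ['string', nil, true, false],   # [data_type, nil, nillable, false]
              'OwnerId' => ['string', nil, true, false] },
      fks:  { 'fk_salesforce_1' => { is_bt: true, fk: 'OwnerId',
                                     assoc_name: 'OwnerId_bt', inverse_table: 'User' } }
    }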