brick 1.0.157 → 1.0.158
This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/brick/extensions.rb +17 -10
- data/lib/brick/frameworks/rails/engine.rb +19 -12
- data/lib/brick/frameworks/rails/form_builder.rb +9 -2
- data/lib/brick/frameworks/rails/form_tags.rb +1 -1
- data/lib/brick/version_number.rb +1 -1
- data/lib/brick.rb +2 -2
- data/lib/generators/brick/install_generator.rb +1 -1
- data/lib/generators/brick/migration_builder.rb +341 -0
- data/lib/generators/brick/migrations_generator.rb +4 -325
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 326ec09b1c28cf7c54e54bae9ea6ceed7d7e476a177be0a41af14e04f7cc3d11
+  data.tar.gz: 0507c22a2acf86c58737ecfedeb1a1f5ed6979b5263b28c6d6e77b0e9500ab7c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d6803593a5c3e17ef40005c405017ed4e7519024ca172a077ad377c201a7b2e2caf2a8b4bc5a0ef63f091124a44199ffd3b386db7d6b5d51352f2e3327537d81
+  data.tar.gz: 486b0a15422b0d3150b575c8d19c24cc3434e3fd8f35335d2e490e1e69e423995a2032de913def73b4f4df0e2789c6f4af7d61930db5cc144d0a0cb76be3ebdc
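For anyone who wants to confirm these values, the two hashed members live inside the .gem archive itself. A minimal Ruby sketch, assuming a locally downloaded brick-1.0.158.gem (the file name is an assumption):

# Illustration only: recompute the SHA256 checksums recorded in checksums.yaml.
# A .gem file is a tar archive whose members include metadata.gz and data.tar.gz.
require 'digest'
require 'rubygems/package'

reader = Gem::Package::TarReader.new(File.open('brick-1.0.158.gem', 'rb'))
reader.each do |entry|
  next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)

  puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
end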
data/lib/brick/extensions.rb
CHANGED
@@ -89,6 +89,13 @@ module ActiveRecord
   def brick_foreign_type(assoc)
     reflect_on_association(assoc).foreign_type || "#{assoc}_type"
   end
+
+  def _brick_all_fields
+    rtans = if respond_to?(:rich_text_association_names)
+              rich_text_association_names&.map { |rtan| rtan.to_s.start_with?('rich_text_') ? rtan[10..-1] : rtan }
+            end
+    columns_hash.keys.map(&:to_sym) + (rtans || [])
+  end
 end

 def self._brick_primary_key(relation = nil)
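The new _brick_all_fields helper is what the strong-parameter code later in this diff calls instead of enumerating columns by hand: it returns every physical column as a symbol plus any Action Text rich-text attribute names with the rich_text_ prefix stripped. A small sketch with a hypothetical model (the class and column names below are made up):

# Illustration only -- hypothetical model.
class Article < ActiveRecord::Base   # columns: id, title, created_at
  has_rich_text :body                # defines the rich_text_body association
end

# Roughly [:id, :title, :created_at] plus the "body" rich-text name,
# so generated permit lists pick up rich text alongside real columns.
Article._brick_all_fields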
@@ -357,7 +364,7 @@ module ActiveRecord
   # Support nested attributes which use the friendly_id gem
   assoc.klass._brick_nested_friendly_id if Object.const_defined?('FriendlyId') &&
                                            assoc.klass.instance_variable_get(:@friendly_id_config)
-  new_attrib_text = assoc.klass._brick_find_permits(assoc, (new_permits = assoc.klass.
+  new_attrib_text = assoc.klass._brick_find_permits(assoc, (new_permits = assoc.klass._brick_all_fields), done_permits)
   new_permits << :_destroy
   current_permits << { "#{assoc.name}_attributes".to_sym => new_permits }
   s << "#{assoc.name}_attributes: #{new_attrib_text}"
@@ -2152,13 +2159,7 @@ class Object
   code << " end\n"
   self.define_method :new do
     _schema, @_is_show_schema_list = ::Brick.set_db_schema(params)
-
-    # Convert any Filename objects with nil into an empty string so that #encode can be called on them
-    new_obj.serializable_hash.each do |k, v|
-      new_obj.send("#{k}=", ActiveStorage::Filename.new('')) if v.is_a?(ActiveStorage::Filename) && !v.instance_variable_get(:@filename)
-    end if Object.const_defined?('ActiveStorage')
-    end
-    new_obj.attribute_names.each do |a|
+    new_params = model.attribute_names.each_with_object({}) do |a, s|
       if (val = params["__#{a}"])
         # val = case new_obj.class.column_for_attribute(a).type
         #       when :datetime, :date, :time, :timestamp
@@ -2166,9 +2167,15 @@ class Object
         #       else
         #         val
         #       end
-
+        s[a] = val
       end
     end
+    if (new_obj = model.new(new_params)).respond_to?(:serializable_hash)
+      # Convert any Filename objects with nil into an empty string so that #encode can be called on them
+      new_obj.serializable_hash.each do |k, v|
+        new_obj.send("#{k}=", ActiveStorage::Filename.new('')) if v.is_a?(ActiveStorage::Filename) && !v.instance_variable_get(:@filename)
+      end if Object.const_defined?('ActiveStorage')
+    end
     instance_variable_set("@#{singular_table_name}".to_sym, new_obj)
     add_csp_hash
   end
@@ -2310,7 +2317,7 @@ class Object

   if is_need_params
     code << " def #{params_name}\n"
-    permits_txt = model._brick_find_permits(model, permits = model.
+    permits_txt = model._brick_find_permits(model, permits = model._brick_all_fields)
     code << " params.require(:#{require_name = model.name.underscore.tr('/', '_')
              }).permit(#{permits_txt.map(&:inspect).join(', ')})\n"
     code << " end\n"
data/lib/brick/frameworks/rails/engine.rb
CHANGED
@@ -713,12 +713,9 @@ window.addEventListener(\"popstate\", linkSchemas);
   next unless @_brick_model.instance_methods.include?(through) &&
               (associative = @_brick_model._br_associatives.fetch(hm.first, nil))

-
-
-
-             hm_assoc.through_reflection&.name # for standard HMT, which is HM -> BT
-           end
-  # If there is no inverse available for the source belongs_to association, make one based on the class name
+  # Should handle standard HMT, which is HM -> BT, as well as HM -> HM style HMT
+  tbl_nm = hm_assoc.source_reflection&.inverse_of&.name
+  # If there is no inverse available for the source belongs_to association, infer one based on the class name
   unless tbl_nm
     tbl_nm = associative.class_name.underscore
     tbl_nm.slice!(0) if tbl_nm[0] == '/'
@@ -1679,12 +1676,22 @@ end
         @#{obj_name}.send(\"#\{model.brick_foreign_type(v.first)}=\", v[1].first&.first&.name)
       end
     end if @#{obj_name}.new_record?
-
-
-
-            ::Brick.config.metadata_columns.include?(k)
+    rtans = #{model_name}.rich_text_association_names if #{model_name}.respond_to?(:rich_text_association_names)
+    (#{model_name}.column_names + (rtans || [])).each do |k|
+      next if (#{(pk.map(&:to_s) || []).inspect}.include?(k) && !bts.key?(k)) ||
+              ::Brick.config.metadata_columns.include?(k)
+
+      col = #{model_name}.columns_hash[k]
+      if !col && rtans&.include?(k)
+        k = k[10..-1] if k.start_with?('rich_text_')
+        col = (rt_col ||= ActiveRecord::ConnectionAdapters::Column.new(
+                 '', nil, ActiveRecord::ConnectionAdapters::SqlTypeMetadata.new(sql_type: 'varchar', type: :text)
+               )
+              )
+      end
+      val = @#{obj_name}.attributes[k] %>
     <tr>
-    <th class=\"show-field\"<%= \" title=\\\"#\{col
+    <th class=\"show-field\"<%= \" title=\\\"#\{col&.comment}\\\"\".html_safe if col&.respond_to?(:comment) && !col&.comment.blank? %>>
    <% has_fields = true
       if (bt = bts[k])
         # Add a final member in this array with descriptive options to be used in <select> drop-downs
@@ -1716,7 +1723,7 @@ end
       collection&.brick_(:each) do |obj|
         option_detail << [
           obj.brick_descrip(
-            descrip_cols&.first&.map { |
+            descrip_cols&.first&.map { |col2| obj.send(col2.last) },
            obj_pk
           ), obj.send(obj_pk)
         ]
data/lib/brick/frameworks/rails/form_builder.rb
CHANGED
@@ -20,7 +20,14 @@ module Brick::Rails::FormBuilder

   html_options[:prompt] = "Select #{bt_name}"
   out << self.select(method.to_sym, bt[3], { value: val || '^^^brick_NULL^^^' }, html_options)
-
+  bt_obj = nil
+  begin
+    bt_obj = bt_class&.find_by(bt_pair[1] => val)
+  rescue ActiveRecord::SubclassNotFound => e
+    # %%% Would be cool to indicate to the user that a subclass is missing.
+    # Its name starts at: e.message.index('failed to locate the subclass: ') + 31
+  end
+  bt_link = if bt_obj
               bt_path = template.send(
                 "#{bt_class.base_class._brick_index(:singular)}_path".to_sym,
                 bt_obj.send(bt_class.primary_key.to_sym)
@@ -100,7 +107,7 @@ module Brick::Rails::FormBuilder
   end
   # Because there are so danged many quotes in JSON, escape them specially by converting to backticks.
   # (and previous to this, escape backticks with our own goofy code of ^^br_btick__ )
-  out << (json_field = self.hidden_field(method.to_sym, { class: 'jsonpicker', value: val_str
+  out << (json_field = self.hidden_field(method.to_sym, { class: 'jsonpicker', value: val_str&.gsub('`', '^^br_btick__')&.tr('\"', '`')&.html_safe }))
   out << "<div id=\"_br_json_#{self.field_id(method)}\"></div>"
 else
   is_revert = false
data/lib/brick/frameworks/rails/form_tags.rb
CHANGED
@@ -157,7 +157,7 @@ module Brick::Rails::FormTags
     out << link_to(ho_txt, send("#{hm_klass.base_class._brick_index(:singular)}_path".to_sym, ho_id))
   end
 elsif obj.respond_to?(ct_col = hms_col[1].to_sym) && (ct = obj.send(ct_col)&.to_i)&.positive?
-  predicates = hms_col[2].each_with_object({}) { |v, s| s[v.first] = v.last.is_a?(String) ? v.last : obj.send(v.last) }
+  predicates = hms_col[2].each_with_object({}) { |v, s| s["__#{v.first}"] = v.last.is_a?(String) ? v.last : obj.send(v.last) }
   predicates.each { |k, v| predicates[k] = klass.name if v == '[sti_type]' }
   out << "#{link_to("#{ct || 'View'} #{hms_col.first}",
                     send("#{hm_klass._brick_index}_path".to_sym, predicates))}\n"
data/lib/brick/version_number.rb
CHANGED
data/lib/brick.rb
CHANGED
@@ -221,8 +221,8 @@ module Brick
   hm_models = ActiveRecord::Base.descendants.select do |m|
     m.reflect_on_all_associations.any? { |assoc| !assoc.belongs_to? && assoc.options[:as]&.to_sym == a.name }
   end
-  # No need to include subclassed models if their parent is already in the list
-  hm_models.reject! { |m| hm_models.any? { |parent| parent != m && m < parent } }
+  # No need to include models with no table, or subclassed models if their parent is already in the list
+  hm_models.reject! { |m| !m.table_exists? || hm_models.any? { |parent| parent != m && m < parent } }
   if hm_models.empty?
     puts "Missing any real indication as to which models \"has_many\" this polymorphic BT in model #{a.active_record.name}:"
     puts " belongs_to :#{a.name}, polymorphic: true"
data/lib/generators/brick/install_generator.rb
CHANGED
@@ -28,7 +28,7 @@ module Brick
   relations = ::Brick.relations
   if is_brick_file
     # Need to remove any currently-existing additional_references so that it doesn't cloud the discovery process:
-    ::Brick.config.additional_references
+    ::Brick.config.additional_references&.each do |ar|
       if (fks = relations.fetch(ar[0], nil)&.fetch(:fks, nil))
         fks.delete(fks.find { |k, v| v[:is_bt] && k.start_with?('(brick) ') && v[:fk] == ar[1] }&.first)
       end
data/lib/generators/brick/migration_builder.rb
ADDED
@@ -0,0 +1,341 @@
+module Brick
+  module MigrationBuilder
+    # Many SQL types are the same as their migration data type name:
+    # text, integer, bigint, date, boolean, decimal, float
+    # These however are not:
+    SQL_TYPES = { 'character varying' => 'string',
+                  'character' => 'string', # %%% Need to put in "limit: 1"
+                  'xml' => 'text',
+                  'bytea' => 'binary',
+                  'timestamp without time zone' => 'timestamp',
+                  'timestamp with time zone' => 'timestamp',
+                  'time without time zone' => 'time',
+                  'time with time zone' => 'time',
+                  'double precision' => 'float',
+                  'smallint' => 'integer', # %%% Need to put in "limit: 2"
+                  'ARRAY' => 'string', # Note that we'll also add ", array: true"
+                  # Oracle data types
+                  'VARCHAR2' => 'string',
+                  'CHAR' => 'string',
+                  ['NUMBER', 22] => 'integer',
+                  /^INTERVAL / => 'string', # Time interval stuff like INTERVAL YEAR(2) TO MONTH, INTERVAL '999' DAY(3), etc
+                  'XMLTYPE' => 'xml',
+                  'RAW' => 'binary',
+                  'SDO_GEOMETRY' => 'geometry',
+                  # MSSQL data types
+                  'int' => 'integer',
+                  'nvarchar' => 'string',
+                  'nchar' => 'string',
+                  'datetime2' => 'timestamp',
+                  'bit' => 'boolean',
+                  'varbinary' => 'binary',
+                  # Sqlite data types
+                  'TEXT' => 'text',
+                  '' => 'string',
+                  'INTEGER' => 'integer',
+                  'REAL' => 'float',
+                  'BLOB' => 'binary',
+                  'TIMESTAMP' => 'timestamp',
+                  'DATETIME' => 'timestamp'
+                }
+    # (Still need to find what "inet" and "json" data types map to.)
+
+    class << self
+      def check_folder(is_insert_versions = true, is_delete_versions = false)
+        versions_to_delete_or_append = nil
+        if Dir.exist?(mig_path = ActiveRecord::Migrator.migrations_paths.first || "#{::Rails.root}/db/migrate")
+          if Dir["#{mig_path}/**/*.rb"].present?
+            puts "WARNING: migrations folder #{mig_path} appears to already have ruby files present."
+            mig_path2 = "#{::Rails.root}/tmp/brick_migrations"
+            is_insert_versions = false unless mig_path == mig_path2
+            if Dir.exist?(mig_path2)
+              if Dir["#{mig_path2}/**/*.rb"].present?
+                puts "As well, temporary folder #{mig_path2} also has ruby files present."
+                puts "Choose a destination -- all existing .rb files will be removed:"
+                mig_path2 = gets_list(list: ['Cancel operation!', "Append migration files into #{mig_path} anyway", mig_path, mig_path2])
+                return if mig_path2.start_with?('Cancel')
+
+                existing_mig_files = Dir["#{mig_path2}/**/*.rb"]
+                if (is_insert_versions = mig_path == mig_path2)
+                  versions_to_delete_or_append = existing_mig_files.map { |ver| ver.split('/').last.split('_').first }
+                end
+                if mig_path2.start_with?('Append migration files into ')
+                  mig_path2 = mig_path
+                else
+                  is_delete_versions = true
+                  existing_mig_files.each { |rb| File.delete(rb) }
+                end
+              else
+                puts "Using temporary folder #{mig_path2} for created migration files.\n\n"
+              end
+            else
+              puts "Creating the temporary folder #{mig_path2} for created migration files.\n\n"
+              Dir.mkdir(mig_path2)
+            end
+            mig_path = mig_path2
+          else
+            puts "Using standard migration folder #{mig_path} for created migration files.\n\n"
+          end
+        else
+          puts "Creating standard ActiveRecord migration folder #{mig_path} to hold new migration files.\n\n"
+          Dir.mkdir(mig_path)
+        end
+        [mig_path, is_insert_versions, is_delete_versions]
+      end
+
+      def generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions, relations = ::Brick.relations)
+        is_sqlite = ActiveRecord::Base.connection.adapter_name == 'SQLite'
+        key_type = ((is_sqlite || ActiveRecord.version < ::Gem::Version.new('5.1')) ? 'integer' : 'bigint')
+        is_4x_rails = ActiveRecord.version < ::Gem::Version.new('5.0')
+        ar_version = "[#{ActiveRecord.version.segments[0..1].join('.')}]" unless is_4x_rails
+
+        schemas = chosen.each_with_object({}) do |v, s|
+          if (v_parts = v.split('.')).length > 1
+            s[v_parts.first] = nil unless [::Brick.default_schema, 'public'].include?(v_parts.first)
+          end
+        end
+        # Start the timestamps back the same number of minutes from now as expected number of migrations to create
+        current_mig_time = Time.now - (schemas.length + chosen.length).minutes
+        done = []
+        fks = {}
+        stuck = {}
+        indexes = {} # Track index names to make sure things are unique
+        built_schemas = {} # Track all built schemas so we can place an appropriate drop_schema command only in the first
+                           # migration in which that schema is referenced, thereby allowing rollbacks to function properly.
+        versions_to_create = [] # Resulting versions to be used when updating the schema_migrations table
+        ar_base = Object.const_defined?(:ApplicationRecord) ? ApplicationRecord : Class.new(ActiveRecord::Base)
+        # Start by making migrations for fringe tables (those with no foreign keys).
+        # Continue layer by layer, creating migrations for tables that reference ones already done, until
+        # no more migrations can be created. (At that point hopefully all tables are accounted for.)
+        while (fringe = chosen.reject do |tbl|
+                          snag_fks = []
+                          snags = relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
+                            v[:is_bt] && !v[:polymorphic] &&
+                            tbl != v[:inverse_table] && # Ignore self-referencing associations (stuff like "parent_id")
+                            !done.include?(v[:inverse_table]) &&
+                            ::Brick.config.ignore_migration_fks.exclude?(snag_fk = "#{tbl}.#{v[:fk]}") &&
+                            snag_fks << snag_fk
+                          end
+                          if snags&.present?
+                            # puts snag_fks.inspect
+                            stuck[tbl] = snags
+                          end
+                        end).present?
+          fringe.each do |tbl|
+            next unless (relation = relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present?
+
+            pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [ar_base.primary_key].flatten.sort)
+            # In case things aren't as standard
+            if pkey_cols.empty?
+              pkey_cols = if rpk.empty? && relation[:cols][arpk.first]&.first == key_type
+                            arpk
+                          elsif rpk.first
+                            rpk
+                          end
+            end
+            schema = if (tbl_parts = tbl.split('.')).length > 1
+                       if tbl_parts.first == (::Brick.default_schema || 'public')
+                         tbl_parts.shift
+                         nil
+                       else
+                         tbl_parts.first
+                       end
+                     end
+            unless schema.blank? || built_schemas.key?(schema)
+              mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
+              migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
+              built_schemas[schema] = nil
+            end
+
+            # %%% For the moment we're skipping polymorphics
+            fkey_cols = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
+            # If the primary key is also used as a foreign key, will need to do id: false and then build out
+            # a column definition which includes :primary_key -- %%% also using a data type of bigserial or serial
+            # if this one has come in as bigint or integer.
+            pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
+            # Support missing primary key (by adding: , id: false)
+            id_option = if pk_is_also_fk || !pkey_cols&.present?
+                          needs_serial_col = true
+                          +', id: false'
+                        elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
+                               (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
+                                                 SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
+                                                 pkey_col_first
+                               ) != key_type
+                              )
+                          case pkey_col_first
+                          when 'integer'
+                            +', id: :serial'
+                          when 'bigint'
+                            +', id: :bigserial'
+                          else
+                            +", id: :#{pkey_col_first}" # Something like: id: :integer, primary_key: :businessentityid
+                          end +
+                          (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
+                        end
+            if !id_option && pkey_cols.sort != arpk
+              id_option = +", primary_key: :#{pkey_cols.first}"
+            end
+            if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
+              (id_option ||= +'') << ", comment: #{comment.inspect}"
+            end
+            # Find the ActiveRecord class in order to see if the columns have comments
+            unless is_4x_rails
+              klass = begin
+                        tbl.tr('.', '/').singularize.camelize.constantize
+                      rescue StandardError
+                      end
+              if klass
+                unless ActiveRecord::Migration.table_exists?(klass.table_name)
+                  puts "WARNING: Unable to locate table #{klass.table_name} (for #{klass.name})."
+                  klass = nil
+                end
+              end
+            end
+            # Refer to this table name as a symbol or dotted string as appropriate
+            tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
+            mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
+            mig << " create_table #{tbl_code}#{id_option} do |t|\n"
+            possible_ts = [] # Track possible generic timestamps
+            add_fks = [] # Track foreign keys to add after table creation
+            relation[:cols].each do |col, col_type|
+              sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
+                         SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
+                         col_type.first
+              suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
+              suffix << ', array: true' if (col_type.first == 'ARRAY')
+              if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
+                suffix << ", comment: #{comment.inspect}"
+              end
+              # Determine if this column is used as part of a foreign key
+              if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
+                to_table = fk[:inverse_table].split('.')
+                to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
+                if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
+                  sql_type = new_serial_type
+                  needs_serial_col = false
+                end
+                if fk[:fk] != "#{fk[:assoc_name].singularize}_id" # Need to do our own foreign_key tricks, not use references?
+                  column = fk[:fk]
+                  mig << emit_column(sql_type, column, suffix)
+                  add_fks << [to_table, column, relations[fk[:inverse_table]]]
+                else
+                  suffix << ", type: :#{sql_type}" unless sql_type == key_type
+                  # Will the resulting default index name be longer than what Postgres allows? (63 characters)
+                  if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
+                    # Try to find a shorter name that hasn't been used yet
+                    unless indexes.key?(shorter = idx_name[0..62]) ||
+                           indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
+                           indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
+                      puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
+                      puts "so have resorted to this GUID-based identifier: #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
+                    end
+                    suffix << ", index: { name: '#{shorter || idx_name}' }"
+                    indexes[shorter || idx_name] = nil
+                  end
+                  primary_key = nil
+                  begin
+                    primary_key = relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
+                  rescue NameError => e
+                    primary_key = ar_base.primary_key
+                  end
+                  mig << " t.references :#{fk[:assoc_name]}#{suffix}, foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ar_base.primary_key} }\n"
+                end
+              else
+                next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
+
+                # See if there are generic timestamps
+                if sql_type == 'timestamp' && ['created_at','updated_at'].include?(col)
+                  possible_ts << [col, !col_type[3]]
+                else
+                  mig << emit_column(sql_type, col, suffix)
+                end
+              end
+            end
+            if possible_ts.length == 2 && # Both created_at and updated_at
+               # Rails 5 and later timestamps default to NOT NULL
+               (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
+              mig << "\n t.timestamps\n"
+            else # Just one or the other, or a nullability mismatch
+              possible_ts.each { |ts| emit_column('timestamp', ts.first, nil) }
+            end
+            mig << " end\n"
+            if pk_is_also_fk
+              mig << " reversible do |dir|\n"
+              mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
+              mig << " end\n"
+            end
+            add_fks.each do |add_fk|
+              is_commented = false
+              # add_fk[2] holds the inverse relation
+              unless (pk = add_fk[2][:pkey].values.flatten&.first)
+                is_commented = true
+                mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
+                # No official PK, but if coincidentally there's a column of the same name, take a chance on it
+                pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
+              end
+              # to_table column
+              mig << " #{'# ' if is_commented}add_foreign_key #{tbl_code}, #{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
+            end
+            mig << " end\n"
+            versions_to_create << migration_file_write(mig_path, "create_#{tbl_parts.map(&:underscore).join('_')}", current_mig_time += 1.minute, ar_version, mig)
+          end
+          done.concat(fringe)
+          chosen -= done
+        end
+
+        stuck_counts = Hash.new { |h, k| h[k] = 0 }
+        chosen.each do |leftover|
+          puts "Can't do #{leftover} because:\n #{stuck[leftover].map do |snag|
+            stuck_counts[snag.last[:inverse_table]] += 1
+            snag.last[:assoc_name]
+          end.join(', ')}"
+        end
+        if mig_path.start_with?(cur_path = ::Rails.root.to_s)
+          pretty_mig_path = mig_path[cur_path.length..-1]
+        end
+        puts "\n*** Created #{done.length} migration files under #{pretty_mig_path || mig_path} ***"
+        if (stuck_sorted = stuck_counts.to_a.sort { |a, b| b.last <=> a.last }).length.positive?
+          puts "-----------------------------------------"
+          puts "Unable to create migrations for #{stuck_sorted.length} tables#{
+            ". Here's the top 5 blockers" if stuck_sorted.length > 5
+          }:"
+          pp stuck_sorted[0..4]
+        else # Successful, and now we can update the schema_migrations table accordingly
+          unless ActiveRecord::Migration.table_exists?(ActiveRecord::Base.schema_migrations_table_name)
+            ActiveRecord::SchemaMigration.create_table
+          end
+          # Remove to_delete - to_create
+          if ((versions_to_delete_or_append ||= []) - versions_to_create).present? && is_delete_versions
+            ActiveRecord::Base.execute_sql("DELETE FROM #{
+              ActiveRecord::Base.schema_migrations_table_name} WHERE version IN (#{
+              (versions_to_delete_or_append - versions_to_create).map { |vtd| "'#{vtd}'" }.join(', ')}
+            )")
+          end
+          # Add to_create - to_delete
+          if is_insert_versions && ((versions_to_create ||= []) - versions_to_delete_or_append).present?
+            ActiveRecord::Base.execute_sql("INSERT INTO #{
+              ActiveRecord::Base.schema_migrations_table_name} (version) VALUES #{
+              (versions_to_create - versions_to_delete_or_append).map { |vtc| "('#{vtc}')" }.join(', ')
+            }")
+          end
+        end
+      end
+
+      private
+
+      def emit_column(type, name, suffix)
+        " t.#{type.start_with?('numeric') ? 'decimal' : type} :#{name}#{suffix}\n"
+      end
+
+      def migration_file_write(mig_path, name, current_mig_time, ar_version, mig)
+        File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}.rb", "w") do |f|
+          f.write "class #{name.camelize} < ActiveRecord::Migration#{ar_version}\n"
+          f.write mig
+          f.write "end\n"
+        end
+        version
+      end
+    end
+  end
+end
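As the generator diff below shows, the refactor leaves the generator doing little more than a folder check followed by one call into the new module. The same two entry points could in principle be driven outside the generator as well; a rough sketch only (the Rake task name and the table-selection line are assumptions, not part of the gem):

# Illustration only -- hypothetical Rake task driving the extracted module directly.
namespace :brick do
  task build_migrations: :environment do
    # check_folder picks (or creates) the destination folder and reports whether
    # schema_migrations rows should be inserted and/or deleted afterwards.
    mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationBuilder.check_folder
    tables = ::Brick.relations.keys   # assumption: take every discovered table
    ::Brick::MigrationBuilder.generate_migrations(tables, mig_path, is_insert_versions, is_delete_versions)
  end
end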
data/lib/generators/brick/migrations_generator.rb
CHANGED
@@ -3,52 +3,13 @@
 require 'rails/generators'
 require 'rails/generators/active_record'
 require 'fancy_gets'
+require 'generators/brick/migration_builder'

 module Brick
   # Auto-generates migration files
   class MigrationsGenerator < ::Rails::Generators::Base
     include FancyGets
-
-
-    # Many SQL types are the same as their migration data type name:
-    # text, integer, bigint, date, boolean, decimal, float
-    # These however are not:
-    SQL_TYPES = { 'character varying' => 'string',
-                  'character' => 'string', # %%% Need to put in "limit: 1"
-                  'xml' => 'text',
-                  'bytea' => 'binary',
-                  'timestamp without time zone' => 'timestamp',
-                  'timestamp with time zone' => 'timestamp',
-                  'time without time zone' => 'time',
-                  'time with time zone' => 'time',
-                  'double precision' => 'float',
-                  'smallint' => 'integer', # %%% Need to put in "limit: 2"
-                  'ARRAY' => 'string', # Note that we'll also add ", array: true"
-                  # Oracle data types
-                  'VARCHAR2' => 'string',
-                  'CHAR' => 'string',
-                  ['NUMBER', 22] => 'integer',
-                  /^INTERVAL / => 'string', # Time interval stuff like INTERVAL YEAR(2) TO MONTH, INTERVAL '999' DAY(3), etc
-                  'XMLTYPE' => 'xml',
-                  'RAW' => 'binary',
-                  'SDO_GEOMETRY' => 'geometry',
-                  # MSSQL data types
-                  'int' => 'integer',
-                  'nvarchar' => 'string',
-                  'nchar' => 'string',
-                  'datetime2' => 'timestamp',
-                  'bit' => 'boolean',
-                  'varbinary' => 'binary',
-                  # Sqlite data types
-                  'TEXT' => 'text',
-                  '' => 'string',
-                  'INTEGER' => 'integer',
-                  'REAL' => 'float',
-                  'BLOB' => 'binary',
-                  'TIMESTAMP' => 'timestamp',
-                  'DATETIME' => 'timestamp'
-                }
-    # (Still need to find what "inet" and "json" data types map to.)
+    include ::Brick::MigrationBuilder

     desc 'Auto-generates migration files for an existing database.'

@@ -63,294 +24,12 @@ module Brick
        return
      end

-
-      key_type = ((is_sqlite || ActiveRecord.version < ::Gem::Version.new('5.1')) ? 'integer' : 'bigint')
-      is_4x_rails = ActiveRecord.version < ::Gem::Version.new('5.0')
-      ar_version = "[#{ActiveRecord.version.segments[0..1].join('.')}]" unless is_4x_rails
-      is_insert_versions = true
-      is_delete_versions = false
-      versions_to_delete_or_append = nil
-      if Dir.exist?(mig_path = ActiveRecord::Migrator.migrations_paths.first || "#{::Rails.root}/db/migrate")
-        if Dir["#{mig_path}/**/*.rb"].present?
-          puts "WARNING: migrations folder #{mig_path} appears to already have ruby files present."
-          mig_path2 = "#{::Rails.root}/tmp/brick_migrations"
-          is_insert_versions = false unless mig_path == mig_path2
-          if Dir.exist?(mig_path2)
-            if Dir["#{mig_path2}/**/*.rb"].present?
-              puts "As well, temporary folder #{mig_path2} also has ruby files present."
-              puts "Choose a destination -- all existing .rb files will be removed:"
-              mig_path2 = gets_list(list: ['Cancel operation!', "Append migration files into #{mig_path} anyway", mig_path, mig_path2])
-              return if mig_path2.start_with?('Cancel')
-
-              existing_mig_files = Dir["#{mig_path2}/**/*.rb"]
-              if (is_insert_versions = mig_path == mig_path2)
-                versions_to_delete_or_append = existing_mig_files.map { |ver| ver.split('/').last.split('_').first }
-              end
-              if mig_path2.start_with?('Append migration files into ')
-                mig_path2 = mig_path
-              else
-                is_delete_versions = true
-                existing_mig_files.each { |rb| File.delete(rb) }
-              end
-            else
-              puts "Using temporary folder #{mig_path2} for created migration files.\n\n"
-            end
-          else
-            puts "Creating the temporary folder #{mig_path2} for created migration files.\n\n"
-            Dir.mkdir(mig_path2)
-          end
-          mig_path = mig_path2
-        else
-          puts "Using standard migration folder #{mig_path} for created migration files.\n\n"
-        end
-      else
-        puts "Creating standard ActiveRecord migration folder #{mig_path} to hold new migration files.\n\n"
-        Dir.mkdir(mig_path)
-      end
+      mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationBuilder.check_folder

       # Generate a list of tables that can be chosen
       chosen = gets_list(list: tables, chosen: tables.dup)
-      schemas = chosen.each_with_object({}) do |v, s|
-        if (v_parts = v.split('.')).length > 1
-          s[v_parts.first] = nil unless [::Brick.default_schema, 'public'].include?(v_parts.first)
-        end
-      end
-      # Start the timestamps back the same number of minutes from now as expected number of migrations to create
-      current_mig_time = Time.now - (schemas.length + chosen.length).minutes
-      done = []
-      fks = {}
-      stuck = {}
-      indexes = {} # Track index names to make sure things are unique
-      built_schemas = {} # Track all built schemas so we can place an appropriate drop_schema command only in the first
-                         # migration in which that schema is referenced, thereby allowing rollbacks to function properly.
-      versions_to_create = [] # Resulting versions to be used when updating the schema_migrations table
-      ar_base = Object.const_defined?(:ApplicationRecord) ? ApplicationRecord : Class.new(ActiveRecord::Base)
-      # Start by making migrations for fringe tables (those with no foreign keys).
-      # Continue layer by layer, creating migrations for tables that reference ones already done, until
-      # no more migrations can be created. (At that point hopefully all tables are accounted for.)
-      while (fringe = chosen.reject do |tbl|
-                        snag_fks = []
-                        snags = ::Brick.relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
-                          v[:is_bt] && !v[:polymorphic] &&
-                          tbl != v[:inverse_table] && # Ignore self-referencing associations (stuff like "parent_id")
-                          !done.include?(v[:inverse_table]) &&
-                          ::Brick.config.ignore_migration_fks.exclude?(snag_fk = "#{tbl}.#{v[:fk]}") &&
-                          snag_fks << snag_fk
-                        end
-                        if snags&.present?
-                          # puts snag_fks.inspect
-                          stuck[tbl] = snags
-                        end
-                      end).present?
-        fringe.each do |tbl|
-          next unless (relation = ::Brick.relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present?
-
-          pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [ar_base.primary_key].flatten.sort)
-          # In case things aren't as standard
-          if pkey_cols.empty?
-            pkey_cols = if rpk.empty? && relation[:cols][arpk.first]&.first == key_type
-                          arpk
-                        elsif rpk.first
-                          rpk
-                        end
-          end
-          schema = if (tbl_parts = tbl.split('.')).length > 1
-                     if tbl_parts.first == (::Brick.default_schema || 'public')
-                       tbl_parts.shift
-                       nil
-                     else
-                       tbl_parts.first
-                     end
-                   end
-          unless schema.blank? || built_schemas.key?(schema)
-            mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
-            migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
-            built_schemas[schema] = nil
-          end

-
-          fkey_cols = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
-          # If the primary key is also used as a foreign key, will need to do id: false and then build out
-          # a column definition which includes :primary_key -- %%% also using a data type of bigserial or serial
-          # if this one has come in as bigint or integer.
-          pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
-          # Support missing primary key (by adding: , id: false)
-          id_option = if pk_is_also_fk || !pkey_cols&.present?
-                        needs_serial_col = true
-                        +', id: false'
-                      elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
-                             (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
-                                               SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
-                                               pkey_col_first
-                             ) != key_type
-                            )
-                        case pkey_col_first
-                        when 'integer'
-                          +', id: :serial'
-                        when 'bigint'
-                          +', id: :bigserial'
-                        else
-                          +", id: :#{pkey_col_first}" # Something like: id: :integer, primary_key: :businessentityid
-                        end +
-                        (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
-                      end
-          if !id_option && pkey_cols.sort != arpk
-            id_option = +", primary_key: :#{pkey_cols.first}"
-          end
-          if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
-            (id_option ||= +'') << ", comment: #{comment.inspect}"
-          end
-          # Find the ActiveRecord class in order to see if the columns have comments
-          unless is_4x_rails
-            klass = begin
-                      tbl.tr('.', '/').singularize.camelize.constantize
-                    rescue StandardError
-                    end
-            if klass
-              unless ActiveRecord::Migration.table_exists?(klass.table_name)
-                puts "WARNING: Unable to locate table #{klass.table_name} (for #{klass.name})."
-                klass = nil
-              end
-            end
-          end
-          # Refer to this table name as a symbol or dotted string as appropriate
-          tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
-          mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
-          mig << " create_table #{tbl_code}#{id_option} do |t|\n"
-          possible_ts = [] # Track possible generic timestamps
-          add_fks = [] # Track foreign keys to add after table creation
-          relation[:cols].each do |col, col_type|
-            sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
-                       SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
-                       col_type.first
-            suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
-            suffix << ', array: true' if (col_type.first == 'ARRAY')
-            if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
-              suffix << ", comment: #{comment.inspect}"
-            end
-            # Determine if this column is used as part of a foreign key
-            if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
-              to_table = fk[:inverse_table].split('.')
-              to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
-              if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
-                sql_type = new_serial_type
-                needs_serial_col = false
-              end
-              if fk[:fk] != "#{fk[:assoc_name].singularize}_id" # Need to do our own foreign_key tricks, not use references?
-                column = fk[:fk]
-                mig << emit_column(sql_type, column, suffix)
-                add_fks << [to_table, column, ::Brick.relations[fk[:inverse_table]]]
-              else
-                suffix << ", type: :#{sql_type}" unless sql_type == key_type
-                # Will the resulting default index name be longer than what Postgres allows? (63 characters)
-                if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
-                  # Try to find a shorter name that hasn't been used yet
-                  unless indexes.key?(shorter = idx_name[0..62]) ||
-                         indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
-                         indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
-                    puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
-                    puts "so have resorted to this GUID-based identifier: #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
-                  end
-                  suffix << ", index: { name: '#{shorter || idx_name}' }"
-                  indexes[shorter || idx_name] = nil
-                end
-                primary_key = ::Brick.relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
-                mig << " t.references :#{fk[:assoc_name]}#{suffix}, foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ar_base.primary_key} }\n"
-              end
-            else
-              next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
-
-              # See if there are generic timestamps
-              if sql_type == 'timestamp' && ['created_at','updated_at'].include?(col)
-                possible_ts << [col, !col_type[3]]
-              else
-                mig << emit_column(sql_type, col, suffix)
-              end
-            end
-          end
-          if possible_ts.length == 2 && # Both created_at and updated_at
-             # Rails 5 and later timestamps default to NOT NULL
-             (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
-            mig << "\n t.timestamps\n"
-          else # Just one or the other, or a nullability mismatch
-            possible_ts.each { |ts| emit_column('timestamp', ts.first, nil) }
-          end
-          mig << " end\n"
-          if pk_is_also_fk
-            mig << " reversible do |dir|\n"
-            mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
-            mig << " end\n"
-          end
-          add_fks.each do |add_fk|
-            is_commented = false
-            # add_fk[2] holds the inverse relation
-            unless (pk = add_fk[2][:pkey].values.flatten&.first)
-              is_commented = true
-              mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
-              # No official PK, but if coincidentally there's a column of the same name, take a chance on it
-              pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
-            end
-            # to_table column
-            mig << " #{'# ' if is_commented}add_foreign_key #{tbl_code}, #{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
-          end
-          mig << " end\n"
-          versions_to_create << migration_file_write(mig_path, "create_#{tbl_parts.map(&:underscore).join('_')}", current_mig_time += 1.minute, ar_version, mig)
-        end
-        done.concat(fringe)
-        chosen -= done
-      end
-
-      stuck_counts = Hash.new { |h, k| h[k] = 0 }
-      chosen.each do |leftover|
-        puts "Can't do #{leftover} because:\n #{stuck[leftover].map do |snag|
-          stuck_counts[snag.last[:inverse_table]] += 1
-          snag.last[:assoc_name]
-        end.join(', ')}"
-      end
-      if mig_path.start_with?(cur_path = ::Rails.root.to_s)
-        pretty_mig_path = mig_path[cur_path.length..-1]
-      end
-      puts "\n*** Created #{done.length} migration files under #{pretty_mig_path || mig_path} ***"
-      if (stuck_sorted = stuck_counts.to_a.sort { |a, b| b.last <=> a.last }).length.positive?
-        puts "-----------------------------------------"
-        puts "Unable to create migrations for #{stuck_sorted.length} tables#{
-          ". Here's the top 5 blockers" if stuck_sorted.length > 5
-        }:"
-        pp stuck_sorted[0..4]
-      else # Successful, and now we can update the schema_migrations table accordingly
-        unless ActiveRecord::Migration.table_exists?(ActiveRecord::Base.schema_migrations_table_name)
-          ActiveRecord::SchemaMigration.create_table
-        end
-        # Remove to_delete - to_create
-        if ((versions_to_delete_or_append ||= []) - versions_to_create).present? && is_delete_versions
-          ActiveRecord::Base.execute_sql("DELETE FROM #{
-            ActiveRecord::Base.schema_migrations_table_name} WHERE version IN (#{
-            (versions_to_delete_or_append - versions_to_create).map { |vtd| "'#{vtd}'" }.join(', ')}
-          )")
-        end
-        # Add to_create - to_delete
-        if is_insert_versions && ((versions_to_create ||= []) - versions_to_delete_or_append).present?
-          ActiveRecord::Base.execute_sql("INSERT INTO #{
-            ActiveRecord::Base.schema_migrations_table_name} (version) VALUES #{
-            (versions_to_create - versions_to_delete_or_append).map { |vtc| "('#{vtc}')" }.join(', ')
-          }")
-        end
-      end
-    end
-
-    private
-
-    def emit_column(type, name, suffix)
-      " t.#{type.start_with?('numeric') ? 'decimal' : type} :#{name}#{suffix}\n"
-    end
-
-    def migration_file_write(mig_path, name, current_mig_time, ar_version, mig)
-      File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}.rb", "w") do |f|
-        f.write "class #{name.camelize} < ActiveRecord::Migration#{ar_version}\n"
-        f.write mig
-        f.write "end\n"
-      end
-      version
+      ::Brick::MigrationBuilder.generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions)
     end
   end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: brick
 version: !ruby/object:Gem::Version
-  version: 1.0.157
+  version: 1.0.158
 platform: ruby
 authors:
 - Lorin Thwaits
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-07-
+date: 2023-07-29 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord
@@ -255,6 +255,7 @@ files:
 - lib/brick/version_number.rb
 - lib/generators/brick/USAGE
 - lib/generators/brick/install_generator.rb
+- lib/generators/brick/migration_builder.rb
 - lib/generators/brick/migrations_generator.rb
 - lib/generators/brick/models_generator.rb
 - lib/generators/brick/seeds_generator.rb