brick 1.0.157 → 1.0.160
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/brick/compatibility.rb +7 -5
- data/lib/brick/extensions.rb +23 -15
- data/lib/brick/frameworks/rails/engine.rb +6 -79
- data/lib/brick/frameworks/rails/form_builder.rb +17 -5
- data/lib/brick/frameworks/rails/form_tags.rb +95 -1
- data/lib/brick/version_number.rb +1 -1
- data/lib/brick.rb +16 -14
- data/lib/generators/brick/install_generator.rb +1 -1
- data/lib/generators/brick/migration_builder.rb +343 -0
- data/lib/generators/brick/migrations_generator.rb +4 -326
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 735ad34b99ba52ae2575c786fdee6c359c5c53e6d2edf53b19bbb5d1f02869ec
+  data.tar.gz: c603e9adebe13192e4be8724b51d563a6b39f89b15c61fab253debb9fc959a66
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 14b5a64e366d218ac78bb3ec26370cddcdb506ee292974321a76331a40d272a33ce49b9cb4911314a7e33835bf44255dda73c04449062e159b499e2e0d88c61f
+  data.tar.gz: cfa821c073296681b4b49e0d58538e076ac50ae1ea65b2df6ea8af1c92477842ee737c3d5950d803ce45f52daf48fca065f28bf6cc6a5e6518c408a64305ada6
data/lib/brick/compatibility.rb
CHANGED
@@ -97,11 +97,13 @@ if Object.const_defined?('ActionPack') && !ActionPack.respond_to?(:version)
     end
   end
 end
-
-
-
-
-
+if Bundler.locked_gems&.dependencies.key?('action_view')
+  require 'action_view' # Needed for Rails <= 4.0
+  if Object.const_defined?('ActionView') && !ActionView.respond_to?(:version)
+    module ActionView
+      def self.version
+        ActionPack.version
+      end
     end
   end
 end
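The shim added above only matters on very old Rails, where the ActionView constant can exist without a `.version` method; elsewhere Brick compares `ActionView.version` against `Gem::Version` thresholds (see the `# Was: confirm_are_you_sure = ActionView.version < ::Gem::Version.new('7.0') ...` line in engine.rb below). A minimal sketch of the effect, assuming such an old Rails with ActionPack loaded:

  # After the shim is defined, version checks behave uniformly -- ActionView simply
  # reports ActionPack's version on Rails versions that predate ActionView.version
  # (a sketch only; on modern Rails the guard means the shim never activates):
  ActionView.version                              # => e.g. Gem::Version.new('4.0.13')
  ActionView.version < ::Gem::Version.new('7.0')  # => true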
data/lib/brick/extensions.rb
CHANGED
@@ -30,14 +30,14 @@
 
 # Drag something like HierModel#name onto the rows and have it automatically add five columns -- where type=zone / where type = section / etc
 
-# Support for Postgres / MySQL enums (add enum to model, use model enums to make a drop-down in the UI)
-
-# Currently quadrupling up routes
-
 # Modal pop-up things for editing large text / date ranges / hierarchies of data
 
 # For recognised self-references, have the show page display all related objects up to the parent (or the start of a circular reference)
 
+# When creating or updating an object through an auto-generated controller, it always goes to an auto-generated view template even if the user has supplied their own index.html.erb (or similar) view template
+
+# Upon creation of a new object, when going to the index page, highlight this new object and scroll it into view (likely to the very bottom of everything, although might be sorted differently)
+
 # ==========================================================
 # Dynamically create model or controller classes when needed
 # ==========================================================
@@ -89,6 +89,13 @@ module ActiveRecord
     def brick_foreign_type(assoc)
       reflect_on_association(assoc).foreign_type || "#{assoc}_type"
     end
+
+    def _brick_all_fields
+      rtans = if respond_to?(:rich_text_association_names)
+                rich_text_association_names&.map { |rtan| rtan.to_s.start_with?('rich_text_') ? rtan[10..-1] : rtan }
+              end
+      columns_hash.keys.map(&:to_sym) + (rtans || [])
+    end
   end
 
   def self._brick_primary_key(relation = nil)
@@ -357,7 +364,7 @@ module ActiveRecord
         # Support nested attributes which use the friendly_id gem
         assoc.klass._brick_nested_friendly_id if Object.const_defined?('FriendlyId') &&
                                                  assoc.klass.instance_variable_get(:@friendly_id_config)
-        new_attrib_text = assoc.klass._brick_find_permits(assoc, (new_permits = assoc.klass.
+        new_attrib_text = assoc.klass._brick_find_permits(assoc, (new_permits = assoc.klass._brick_all_fields), done_permits)
         new_permits << :_destroy
         current_permits << { "#{assoc.name}_attributes".to_sym => new_permits }
         s << "#{assoc.name}_attributes: #{new_attrib_text}"
@@ -2152,13 +2159,7 @@ class Object
        code << " end\n"
        self.define_method :new do
          _schema, @_is_show_schema_list = ::Brick.set_db_schema(params)
-
-         # Convert any Filename objects with nil into an empty string so that #encode can be called on them
-         new_obj.serializable_hash.each do |k, v|
-           new_obj.send("#{k}=", ActiveStorage::Filename.new('')) if v.is_a?(ActiveStorage::Filename) && !v.instance_variable_get(:@filename)
-         end if Object.const_defined?('ActiveStorage')
-         end
-         new_obj.attribute_names.each do |a|
+         new_params = model.attribute_names.each_with_object({}) do |a, s|
            if (val = params["__#{a}"])
              # val = case new_obj.class.column_for_attribute(a).type
              #       when :datetime, :date, :time, :timestamp
@@ -2166,9 +2167,15 @@ class Object
              #       else
              #         val
              #       end
-
+             s[a] = val
            end
          end
+         if (new_obj = model.new(new_params)).respond_to?(:serializable_hash)
+           # Convert any Filename objects with nil into an empty string so that #encode can be called on them
+           new_obj.serializable_hash.each do |k, v|
+             new_obj.send("#{k}=", ActiveStorage::Filename.new('')) if v.is_a?(ActiveStorage::Filename) && !v.instance_variable_get(:@filename)
+           end if Object.const_defined?('ActiveStorage')
+         end
          instance_variable_set("@#{singular_table_name}".to_sym, new_obj)
          add_csp_hash
        end
@@ -2192,7 +2199,8 @@ class Object
            render json: { result: ::Brick.unexclude_column(table_name, col) }
          else
            @_lookup_context.instance_variable_set("@#{singular_table_name}".to_sym,
-                                                  model.send(:create, send(params_name_sym)))
+                                                  (created_obj = model.send(:create, send(params_name_sym))))
+           # %%% Surface any errors to the user in a flash message
            @_lookup_context.instance_variable_set(:@_brick_model, model)
            index
            render :index
@@ -2310,7 +2318,7 @@ class Object
 
      if is_need_params
        code << " def #{params_name}\n"
-       permits_txt = model._brick_find_permits(model, permits = model.
+       permits_txt = model._brick_find_permits(model, permits = model._brick_all_fields)
        code << " params.require(:#{require_name = model.name.underscore.tr('/', '_')
                                  }).permit(#{permits_txt.map(&:inspect).join(', ')})\n"
        code << " end\n"
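The new `_brick_all_fields` class method above is what the permit lists now draw from: it merges the table's column names with any Action Text rich-text attribute names (stripping the `rich_text_` prefix). A minimal sketch of the behaviour, assuming a hypothetical `Article` model with a single rich-text attribute:

  # Hypothetical model, used only for illustration:
  class Article < ActiveRecord::Base   # columns: id, title, created_at
    has_rich_text :body                # adds a rich_text_body association
  end

  Article._brick_all_fields
  # => [:id, :title, :created_at, "body"]
  # (column names as symbols, plus the de-prefixed rich-text association name)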
data/lib/brick/frameworks/rails/engine.rb
CHANGED
@@ -713,12 +713,9 @@ window.addEventListener(\"popstate\", linkSchemas);
             next unless @_brick_model.instance_methods.include?(through) &&
                         (associative = @_brick_model._br_associatives.fetch(hm.first, nil))
 
-
-
-
-            hm_assoc.through_reflection&.name # for standard HMT, which is HM -> BT
-          end
-          # If there is no inverse available for the source belongs_to association, make one based on the class name
+            # Should handle standard HMT, which is HM -> BT, as well as HM -> HM style HMT
+            tbl_nm = hm_assoc.source_reflection&.inverse_of&.name
+            # If there is no inverse available for the source belongs_to association, infer one based on the class name
             unless tbl_nm
               tbl_nm = associative.class_name.underscore
               tbl_nm.slice!(0) if tbl_nm[0] == '/'
@@ -1670,79 +1667,9 @@ end
   end
 %>
 <br><br>
-
-
-
-   # If it's a new record, set any default polymorphic types
-   bts.each do |_k, v|
-     if v[2]
-       @#{obj_name}.send(\"#\{model.brick_foreign_type(v.first)}=\", v[1].first&.first&.name)
-     end
-   end if @#{obj_name}.new_record?
-   @#{obj_name}.attributes.each do |k, val|
-     next if !(col = #{model_name}.columns_hash[k]) ||
-             (#{(pk.map(&:to_s) || []).inspect}.include?(k) && !bts.key?(k)) ||
-             ::Brick.config.metadata_columns.include?(k) %>
-  <tr>
-    <th class=\"show-field\"<%= \" title=\\\"#\{col.comment}\\\"\".html_safe if col.respond_to?(:comment) && !col.comment.blank? %>>
-    <% has_fields = true
-       if (bt = bts[k])
-         # Add a final member in this array with descriptive options to be used in <select> drop-downs
-         bt_name = bt[1].map { |x| x.first.name }.join('/')
-         # %%% Only do this if the user has permissions to edit this bt field
-         if bt[2] # Polymorphic?
-           poly_class_name = orig_poly_name = @#{obj_name}.send(model.brick_foreign_type(bt.first))
-           bt_pair = nil
-           loop do
-             bt_pair = bt[1].find { |pair| pair.first.name == poly_class_name }
-             # Accommodate any valid STI by going up the chain of inheritance
-             break unless bt_pair.nil? && poly_class_name = ::Brick.existing_stis[poly_class_name]
-           end
-           puts \"*** Might be missing an STI class called #\{orig_poly_name\} whose base class should have this:
-*** has_many :#{table_name}, as: :#\{bt.first\}
-*** Can probably auto-configure everything using these lines in an initialiser:
-*** Brick.sti_namespace_prefixes = { '::#\{orig_poly_name\}' => 'SomeParentModel' }
-*** Brick.polymorphics = { '#{table_name}.#\{bt.first\}' => ['SomeParentModel'] }\" if bt_pair.nil?
-           # descrips = @_brick_bt_descrip[bt.first][bt_class]
-           poly_id = @#{obj_name}.send(\"#\{bt.first\}_id\")
-           # bt_class.order(obj_pk = bt_class.primary_key).each { |obj| option_detail << [obj.brick_descrip(nil, obj_pk), obj.send(obj_pk)] }
-         end
-         bt_pair ||= bt[1].first # If there's no polymorphism (or polymorphism status is unknown), just get the first one
-         bt_class = bt_pair&.first
-         if bt.length < 4
-           bt << (option_detail = [[\"(No #\{bt_name\} chosen)\", '^^^brick_NULL^^^']])
-           # %%% Accommodate composite keys for obj.pk at the end here
-           collection, descrip_cols = bt_class&.order(Arel.sql(\"#\{bt_class.table_name}.#\{obj_pk = bt_class.primary_key}\"))&.brick_list
-           collection&.brick_(:each) do |obj|
-             option_detail << [
-               obj.brick_descrip(
-                 descrip_cols&.first&.map { |col| obj.send(col.last) },
-                 obj_pk
-               ), obj.send(obj_pk)
-             ]
-           end
-         end %>
-    BT <%= bt_class&.bt_link(bt.first) || orig_poly_name %>
-    <% else %>
-    <%= #{model_name}.human_attribute_name(k, { default: k }) %>
-    <% end %>
-    </th>
-    <td>
-    <%= f.brick_field(k, html_options = {}, val, col, bt, bt_class, bt_name, bt_pair) %>
-    </td>
-  </tr>
-  <% end
-     if has_fields %>
-  <tr><td colspan=\"2\"><%= f.submit({ class: 'update' }) %></td></tr>
-  <% else %>
-  <tr><td colspan=\"2\">(No displayable fields)</td></tr>
-  <% end %>
-</table>#{
-  "<%= begin
-         ::Brick::Rails.display_binary(obj&.blob&.download, 500_000)&.html_safe
-       rescue
-       end %>" if model_name == 'ActiveStorage::Attachment'}
-<% end %>
+
+<%= # Write out the mega-form
+    brick_form_for(obj, options, #{model_name}, bts, #{pk.inspect}) %>
 
 #{unless args.first == 'new'
    # Was: confirm_are_you_sure = ActionView.version < ::Gem::Version.new('7.0') ? "data: { confirm: 'Delete #\{model_name} -- Are you sure?' }" : "form: { data: { turbo_confirm: 'Delete #\{model_name} -- Are you sure?' } }"
data/lib/brick/frameworks/rails/form_builder.rb
CHANGED
@@ -20,7 +20,14 @@ module Brick::Rails::FormBuilder
 
       html_options[:prompt] = "Select #{bt_name}"
       out << self.select(method.to_sym, bt[3], { value: val || '^^^brick_NULL^^^' }, html_options)
-
+      bt_obj = nil
+      begin
+        bt_obj = bt_class&.find_by(bt_pair[1] => val)
+      rescue ActiveRecord::SubclassNotFound => e
+        # %%% Would be cool to indicate to the user that a subclass is missing.
+        # Its name starts at: e.message.index('failed to locate the subclass: ') + 31
+      end
+      bt_link = if bt_obj
        bt_path = template.send(
          "#{bt_class.base_class._brick_index(:singular)}_path".to_sym,
          bt_obj.send(bt_class.primary_key.to_sym)
@@ -60,9 +67,14 @@ module Brick::Rails::FormBuilder
     when :boolean
       out << self.check_box(method.to_sym)
     when :integer, :decimal, :float
-
-
-
+      if model.respond_to?(:attribute_types) && (enum_type = model.attribute_types[method]).is_a?(ActiveRecord::Enum::EnumType)
+        opts = enum_type.send(:mapping)&.each_with_object([]) { |v, s| s << [v.first, v.first] } || []
+        out << self.select(method.to_sym, [["(No #{method} chosen)", '^^^brick_NULL^^^']] + opts, { value: val || '^^^brick_NULL^^^' }, options)
+      else
+        digit_pattern = col_type == :integer ? '\d*' : '\d*(?:\.\d*|)'
+        # Used to do this for float / decimal: self.number_field method.to_sym
+        out << self.text_field(method.to_sym, { pattern: digit_pattern, class: 'check-validity' })
+      end
     when *DT_PICKERS.keys
       template.instance_variable_set(:@_date_fields_present, true)
       out << self.text_field(method.to_sym, { class: DT_PICKERS[col_type] })
@@ -100,7 +112,7 @@ module Brick::Rails::FormBuilder
       end
       # Because there are so danged many quotes in JSON, escape them specially by converting to backticks.
       # (and previous to this, escape backticks with our own goofy code of ^^br_btick__ )
-      out << (json_field = self.hidden_field(method.to_sym, { class: 'jsonpicker', value: val_str
+      out << (json_field = self.hidden_field(method.to_sym, { class: 'jsonpicker', value: val_str&.gsub('`', '^^br_btick__')&.tr('\"', '`')&.html_safe }))
       out << "<div id=\"_br_json_#{self.field_id(method)}\"></div>"
     else
       is_revert = false
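The new `:integer, :decimal, :float` branch above special-cases Rails enums: when the attribute's type object is an `ActiveRecord::Enum::EnumType`, the field is rendered as a `<select>` of the enum's keys (plus a NULL option) instead of a pattern-checked text field. A minimal sketch of the case it targets, using a hypothetical model:

  # Hypothetical model, for illustration only:
  class Order < ActiveRecord::Base
    enum status: { pending: 0, shipped: 1, cancelled: 2 }  # backed by an integer column
  end

  Order.attribute_types['status'].class  # => ActiveRecord::Enum::EnumType
  # so brick_field emits a drop-down offering "pending" / "shipped" / "cancelled"
  # (plus the "(No status chosen)" option) rather than a plain numeric text field.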
data/lib/brick/frameworks/rails/form_tags.rb
CHANGED
@@ -157,7 +157,7 @@ module Brick::Rails::FormTags
           out << link_to(ho_txt, send("#{hm_klass.base_class._brick_index(:singular)}_path".to_sym, ho_id))
         end
       elsif obj.respond_to?(ct_col = hms_col[1].to_sym) && (ct = obj.send(ct_col)&.to_i)&.positive?
-        predicates = hms_col[2].each_with_object({}) { |v, s| s[v.first] = v.last.is_a?(String) ? v.last : obj.send(v.last) }
+        predicates = hms_col[2].each_with_object({}) { |v, s| s["__#{v.first}"] = v.last.is_a?(String) ? v.last : obj.send(v.last) }
         predicates.each { |k, v| predicates[k] = klass.name if v == '[sti_type]' }
         out << "#{link_to("#{ct || 'View'} #{hms_col.first}",
                           send("#{hm_klass._brick_index}_path".to_sym, predicates))}\n"
@@ -203,6 +203,100 @@ module Brick::Rails::FormTags
     out.html_safe
   end # brick_grid
 
+  # Our mega show/new/update form
+  def brick_form_for(obj, options = {}, model = obj.class, bts = {}, pk = (obj.class.primary_key || []))
+    pk = [pk] unless pk.is_a?(Array)
+    pk.map!(&:to_s)
+    form_for(obj.becomes(model), options) do |f|
+      out = +'<table class="shadow">'
+      has_fields = false
+      # If it's a new record, set any default polymorphic types
+      bts&.each do |_k, v|
+        if v[2]
+          obj.send("#{model.brick_foreign_type(v.first)}=", v[1].first&.first&.name)
+        end
+      end if obj.new_record?
+      rtans = model.rich_text_association_names if model.respond_to?(:rich_text_association_names)
+      (model.column_names + (rtans || [])).each do |k|
+        next if (pk.include?(k) && !bts.key?(k)) ||
+                ::Brick.config.metadata_columns.include?(k)
+
+        col = model.columns_hash[k]
+        if !col && rtans&.include?(k)
+          k = k[10..-1] if k.start_with?('rich_text_')
+          col = (rt_col ||= ActiveRecord::ConnectionAdapters::Column.new(
+                   '', nil, ActiveRecord::ConnectionAdapters::SqlTypeMetadata.new(sql_type: 'varchar', type: :text)
+                 )
+                )
+        end
+        val = obj.attributes[k]
+        out << "
+  <tr>
+    <th class=\"show-field\"#{" title=\"#{col&.comment}\"".html_safe if col&.respond_to?(:comment) && !col&.comment.blank?}>"
+        has_fields = true
+        if (bt = bts[k])
+          # Add a final member in this array with descriptive options to be used in <select> drop-downs
+          bt_name = bt[1].map { |x| x.first.name }.join('/')
+          # %%% Only do this if the user has permissions to edit this bt field
+          if bt[2] # Polymorphic?
+            poly_class_name = orig_poly_name = obj.send(model.brick_foreign_type(bt.first))
+            bt_pair = nil
+            loop do
+              bt_pair = bt[1].find { |pair| pair.first.name == poly_class_name }
+              # Accommodate any valid STI by going up the chain of inheritance
+              break unless bt_pair.nil? && poly_class_name = ::Brick.existing_stis[poly_class_name]
+            end
+            puts "*** Might be missing an STI class called #{orig_poly_name} whose base class should have this:
+*** has_many :#{table_name}, as: :#{bt.first}
+*** Can probably auto-configure everything using these lines in an initialiser:
+*** Brick.sti_namespace_prefixes = { '::#{orig_poly_name}' => 'SomeParentModel' }
+*** Brick.polymorphics = { '#{table_name}.#{bt.first}' => ['SomeParentModel'] }" if bt_pair.nil?
+            # descrips = @_brick_bt_descrip[bt.first][bt_class]
+            poly_id = obj.send("#{bt.first}_id")
+            # bt_class.order(obj_pk = bt_class.primary_key).each { |obj| option_detail << [obj.brick_descrip(nil, obj_pk), obj.send(obj_pk)] }
+          end
+          bt_pair ||= bt[1].first # If there's no polymorphism (or polymorphism status is unknown), just get the first one
+          bt_class = bt_pair&.first
+          if bt.length < 4
+            bt << (option_detail = [["(No #{bt_name} chosen)", '^^^brick_NULL^^^']])
+            # %%% Accommodate composite keys for obj.pk at the end here
+            collection, descrip_cols = bt_class&.order(Arel.sql("#{bt_class.table_name}.#{obj_pk = bt_class.primary_key}"))&.brick_list
+            collection&.brick_(:each) do |obj|
+              option_detail << [
+                obj.brick_descrip(
+                  descrip_cols&.first&.map { |col2| obj.send(col2.last) },
+                  obj_pk
+                ), obj.send(obj_pk)
+              ]
+            end
+          end
+          out << "BT #{bt_class&.bt_link(bt.first) || orig_poly_name}"
+        else
+          out << model.human_attribute_name(k, { default: k })
+        end
+        out << "
+    </th>
+    <td>
+    #{f.brick_field(k, html_options = {}, val, col, bt, bt_class, bt_name, bt_pair)}
+    </td>
+  </tr>"
+      end
+      if has_fields
+        out << "<tr><td colspan=\"2\">#{f.submit({ class: 'update' })}</td></tr>"
+      else
+        out << '<tr><td colspan="2">(No displayable fields)</td></tr>'
+      end
+      out << '</table>'
+      if model.name == 'ActiveStorage::Attachment'
+        begin
+          out << ::Brick::Rails.display_binary(obj&.blob&.download, 500_000)&.html_safe
+        rescue
+        end
+      end
+      out.html_safe
+    end
+  end # brick_form_for
+
   def link_to_brick(*args, **kwargs)
     return unless ::Brick.config.mode == :on
 
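`brick_form_for` replaces the large inline ERB block that engine.rb used to emit (see the removed lines in that file above), so the generated show/new/edit templates now make a single helper call. Because every argument after the record has a default, a hand-written view could in principle call it with just the object -- a sketch, not taken from the gem's documentation:

  # Illustrative only; Brick's own generated templates pass model, bts and pk explicitly:
  #
  #   <%= brick_form_for(@order) %>                      # model defaults to @order.class
  #   <%= brick_form_for(@order, { method: :patch }) %>  # options are forwarded to form_for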
data/lib/brick/version_number.rb
CHANGED
data/lib/brick.rb
CHANGED
@@ -221,8 +221,8 @@ module Brick
       hm_models = ActiveRecord::Base.descendants.select do |m|
         m.reflect_on_all_associations.any? { |assoc| !assoc.belongs_to? && assoc.options[:as]&.to_sym == a.name }
       end
-      # No need to include subclassed models if their parent is already in the list
-      hm_models.reject! { |m| hm_models.any? { |parent| parent != m && m < parent } }
+      # No need to include models with no table, or subclassed models if their parent is already in the list
+      hm_models.reject! { |m| !m.table_exists? || hm_models.any? { |parent| parent != m && m < parent } }
       if hm_models.empty?
         puts "Missing any real indication as to which models \"has_many\" this polymorphic BT in model #{a.active_record.name}:"
         puts "  belongs_to :#{a.name}, polymorphic: true"
@@ -1112,20 +1112,22 @@ require 'active_record/relation'
 require 'active_record/relation/query_methods' if ActiveRecord.version < ::Gem::Version.new('5')
 require 'rails/railtie' if ActiveRecord.version < ::Gem::Version.new('4.2')
 
-
-
-
-
+if Object.const_defined?('Rails')
+  # Rake tasks
+  class Railtie < ::Rails::Railtie
+    Dir.glob("#{File.expand_path(__dir__)}/brick/tasks/**/*.rake").each { |task| load task }
+  end
 
-# Rails < 4.2 does not have env
-module ::Rails
-
-
-
-
+  # Rails < 4.2 does not have env
+  module ::Rails
+    unless respond_to?(:env)
+      def self.env
+        @_env ||= ActiveSupport::StringInquirer.new(ENV["RAILS_ENV"] || ENV["RACK_ENV"] || "development")
+      end
 
-
-
+      def self.env=(environment)
+        @_env = ActiveSupport::StringInquirer.new(environment)
+      end
     end
   end
 end
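The `Rails.env` shim above only defines the reader and writer when they are missing (Rails < 4.2), falling back to `RAILS_ENV`, then `RACK_ENV`, then `"development"`. A short sketch of the resulting behaviour on such an old Rails:

  # With RAILS_ENV and RACK_ENV unset (sketch of the shim's behaviour only):
  Rails.env                 # => "development" (an ActiveSupport::StringInquirer)
  Rails.env.development?    # => true
  Rails.env = 'production'  # uses the added writer
  Rails.env.production?     # => true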
data/lib/generators/brick/install_generator.rb
CHANGED
@@ -28,7 +28,7 @@ module Brick
       relations = ::Brick.relations
       if is_brick_file
         # Need to remove any currently-existing additional_references so that it doesn't cloud the discovery process:
-        ::Brick.config.additional_references
+        ::Brick.config.additional_references&.each do |ar|
           if (fks = relations.fetch(ar[0], nil)&.fetch(:fks, nil))
             fks.delete(fks.find { |k, v| v[:is_bt] && k.start_with?('(brick) ') && v[:fk] == ar[1] }&.first)
           end
data/lib/generators/brick/migration_builder.rb
ADDED
@@ -0,0 +1,343 @@
+module Brick
+  module MigrationBuilder
+    include FancyGets
+
+    # Many SQL types are the same as their migration data type name:
+    #   text, integer, bigint, date, boolean, decimal, float
+    # These however are not:
+    SQL_TYPES = { 'character varying' => 'string',
+                  'character' => 'string', # %%% Need to put in "limit: 1"
+                  'xml' => 'text',
+                  'bytea' => 'binary',
+                  'timestamp without time zone' => 'timestamp',
+                  'timestamp with time zone' => 'timestamp',
+                  'time without time zone' => 'time',
+                  'time with time zone' => 'time',
+                  'double precision' => 'float',
+                  'smallint' => 'integer', # %%% Need to put in "limit: 2"
+                  'ARRAY' => 'string', # Note that we'll also add ", array: true"
+                  # Oracle data types
+                  'VARCHAR2' => 'string',
+                  'CHAR' => 'string',
+                  ['NUMBER', 22] => 'integer',
+                  /^INTERVAL / => 'string', # Time interval stuff like INTERVAL YEAR(2) TO MONTH, INTERVAL '999' DAY(3), etc
+                  'XMLTYPE' => 'xml',
+                  'RAW' => 'binary',
+                  'SDO_GEOMETRY' => 'geometry',
+                  # MSSQL data types
+                  'int' => 'integer',
+                  'nvarchar' => 'string',
+                  'nchar' => 'string',
+                  'datetime2' => 'timestamp',
+                  'bit' => 'boolean',
+                  'varbinary' => 'binary',
+                  # Sqlite data types
+                  'TEXT' => 'text',
+                  '' => 'string',
+                  'INTEGER' => 'integer',
+                  'REAL' => 'float',
+                  'BLOB' => 'binary',
+                  'TIMESTAMP' => 'timestamp',
+                  'DATETIME' => 'timestamp'
+                }
+    # (Still need to find what "inet" and "json" data types map to.)
+
+    class << self
+      def check_folder(is_insert_versions = true, is_delete_versions = false)
+        versions_to_delete_or_append = nil
+        if Dir.exist?(mig_path = ActiveRecord::Migrator.migrations_paths.first || "#{::Rails.root}/db/migrate")
+          if Dir["#{mig_path}/**/*.rb"].present?
+            puts "WARNING: migrations folder #{mig_path} appears to already have ruby files present."
+            mig_path2 = "#{::Rails.root}/tmp/brick_migrations"
+            is_insert_versions = false unless mig_path == mig_path2
+            if Dir.exist?(mig_path2)
+              if Dir["#{mig_path2}/**/*.rb"].present?
+                puts "As well, temporary folder #{mig_path2} also has ruby files present."
+                puts "Choose a destination -- all existing .rb files will be removed:"
+                mig_path2 = gets_list(list: ['Cancel operation!', "Append migration files into #{mig_path} anyway", mig_path, mig_path2])
+                return if mig_path2.start_with?('Cancel')
+
+                existing_mig_files = Dir["#{mig_path2}/**/*.rb"]
+                if (is_insert_versions = mig_path == mig_path2)
+                  versions_to_delete_or_append = existing_mig_files.map { |ver| ver.split('/').last.split('_').first }
+                end
+                if mig_path2.start_with?('Append migration files into ')
+                  mig_path2 = mig_path
+                else
+                  is_delete_versions = true
+                  existing_mig_files.each { |rb| File.delete(rb) }
+                end
+              else
+                puts "Using temporary folder #{mig_path2} for created migration files.\n\n"
+              end
+            else
+              puts "Creating the temporary folder #{mig_path2} for created migration files.\n\n"
+              Dir.mkdir(mig_path2)
+            end
+            mig_path = mig_path2
+          else
+            puts "Using standard migration folder #{mig_path} for created migration files.\n\n"
+          end
+        else
+          puts "Creating standard ActiveRecord migration folder #{mig_path} to hold new migration files.\n\n"
+          Dir.mkdir(mig_path)
+        end
+        [mig_path, is_insert_versions, is_delete_versions]
+      end
+
+      def generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions, relations = ::Brick.relations)
+        is_sqlite = ActiveRecord::Base.connection.adapter_name == 'SQLite'
+        key_type = ((is_sqlite || ActiveRecord.version < ::Gem::Version.new('5.1')) ? 'integer' : 'bigint')
+        is_4x_rails = ActiveRecord.version < ::Gem::Version.new('5.0')
+        ar_version = "[#{ActiveRecord.version.segments[0..1].join('.')}]" unless is_4x_rails
+
+        schemas = chosen.each_with_object({}) do |v, s|
+          if (v_parts = v.split('.')).length > 1
+            s[v_parts.first] = nil unless [::Brick.default_schema, 'public'].include?(v_parts.first)
+          end
+        end
+        # Start the timestamps back the same number of minutes from now as expected number of migrations to create
+        current_mig_time = Time.now - (schemas.length + chosen.length).minutes
+        done = []
+        fks = {}
+        stuck = {}
+        indexes = {} # Track index names to make sure things are unique
+        built_schemas = {} # Track all built schemas so we can place an appropriate drop_schema command only in the first
+                           # migration in which that schema is referenced, thereby allowing rollbacks to function properly.
+        versions_to_create = [] # Resulting versions to be used when updating the schema_migrations table
+        ar_base = Object.const_defined?(:ApplicationRecord) ? ApplicationRecord : Class.new(ActiveRecord::Base)
+        # Start by making migrations for fringe tables (those with no foreign keys).
+        # Continue layer by layer, creating migrations for tables that reference ones already done, until
+        # no more migrations can be created.  (At that point hopefully all tables are accounted for.)
+        while (fringe = chosen.reject do |tbl|
+                 snag_fks = []
+                 snags = relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
+                   v[:is_bt] && !v[:polymorphic] &&
+                   tbl != v[:inverse_table] && # Ignore self-referencing associations (stuff like "parent_id")
+                   !done.include?(v[:inverse_table]) &&
+                   ::Brick.config.ignore_migration_fks.exclude?(snag_fk = "#{tbl}.#{v[:fk]}") &&
+                   snag_fks << snag_fk
+                 end
+                 if snags&.present?
+                   # puts snag_fks.inspect
+                   stuck[tbl] = snags
+                 end
+               end).present?
+          fringe.each do |tbl|
+            next unless (relation = relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present?
+
+            pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [ar_base.primary_key].flatten.sort)
+            # In case things aren't as standard
+            if pkey_cols.empty?
+              pkey_cols = if rpk.empty? && relation[:cols][arpk.first]&.first == key_type
+                            arpk
+                          elsif rpk.first
+                            rpk
+                          end
+            end
+            schema = if (tbl_parts = tbl.split('.')).length > 1
+                       if tbl_parts.first == (::Brick.default_schema || 'public')
+                         tbl_parts.shift
+                         nil
+                       else
+                         tbl_parts.first
+                       end
+                     end
+            unless schema.blank? || built_schemas.key?(schema)
+              mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
+              migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
+              built_schemas[schema] = nil
+            end
+
+            # %%% For the moment we're skipping polymorphics
+            fkey_cols = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
+            # If the primary key is also used as a foreign key, will need to do id: false and then build out
+            # a column definition which includes :primary_key -- %%% also using a data type of bigserial or serial
+            # if this one has come in as bigint or integer.
+            pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
+            # Support missing primary key (by adding:  , id: false)
+            id_option = if pk_is_also_fk || !pkey_cols&.present?
+                          needs_serial_col = true
+                          +', id: false'
+                        elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
+                               (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
+                                                 SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
+                                                 pkey_col_first
+                               ) != key_type
+                              )
+                          case pkey_col_first
+                          when 'integer'
+                            +', id: :serial'
+                          when 'bigint'
+                            +', id: :bigserial'
+                          else
+                            +", id: :#{pkey_col_first}" # Something like:  id: :integer, primary_key: :businessentityid
+                          end +
+                          (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
+                        end
+            if !id_option && pkey_cols.sort != arpk
+              id_option = +", primary_key: :#{pkey_cols.first}"
+            end
+            if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
+              (id_option ||= +'') << ", comment: #{comment.inspect}"
+            end
+            # Find the ActiveRecord class in order to see if the columns have comments
+            unless is_4x_rails
+              klass = begin
+                        tbl.tr('.', '/').singularize.camelize.constantize
+                      rescue StandardError
+                      end
+              if klass
+                unless ActiveRecord::Migration.table_exists?(klass.table_name)
+                  puts "WARNING:  Unable to locate table #{klass.table_name} (for #{klass.name})."
+                  klass = nil
+                end
+              end
+            end
+            # Refer to this table name as a symbol or dotted string as appropriate
+            tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
+            mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
+            mig << " create_table #{tbl_code}#{id_option} do |t|\n"
+            possible_ts = [] # Track possible generic timestamps
+            add_fks = [] # Track foreign keys to add after table creation
+            relation[:cols].each do |col, col_type|
+              sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
+                         SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
+                         col_type.first
+              suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
+              suffix << ', array: true' if (col_type.first == 'ARRAY')
+              if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
+                suffix << ", comment: #{comment.inspect}"
+              end
+              # Determine if this column is used as part of a foreign key
+              if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
+                to_table = fk[:inverse_table].split('.')
+                to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
+                if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
+                  sql_type = new_serial_type
+                  needs_serial_col = false
+                end
+                if fk[:fk] != "#{fk[:assoc_name].singularize}_id" # Need to do our own foreign_key tricks, not use references?
+                  column = fk[:fk]
+                  mig << emit_column(sql_type, column, suffix)
+                  add_fks << [to_table, column, relations[fk[:inverse_table]]]
+                else
+                  suffix << ", type: :#{sql_type}" unless sql_type == key_type
+                  # Will the resulting default index name be longer than what Postgres allows?  (63 characters)
+                  if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
+                    # Try to find a shorter name that hasn't been used yet
+                    unless indexes.key?(shorter = idx_name[0..62]) ||
+                           indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
+                           indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
+                      puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
+                      puts "so have resorted to this GUID-based identifier:  #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
+                    end
+                    suffix << ", index: { name: '#{shorter || idx_name}' }"
+                    indexes[shorter || idx_name] = nil
+                  end
+                  primary_key = nil
+                  begin
+                    primary_key = relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
+                  rescue NameError => e
+                    primary_key = ar_base.primary_key
+                  end
+                  mig << " t.references :#{fk[:assoc_name]}#{suffix}, foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ar_base.primary_key} }\n"
+                end
+              else
+                next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
+
+                # See if there are generic timestamps
+                if sql_type == 'timestamp' && ['created_at','updated_at'].include?(col)
+                  possible_ts << [col, !col_type[3]]
+                else
+                  mig << emit_column(sql_type, col, suffix)
+                end
+              end
+            end
+            if possible_ts.length == 2 && # Both created_at and updated_at
+               # Rails 5 and later timestamps default to NOT NULL
+               (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
+              mig << "\n t.timestamps\n"
+            else # Just one or the other, or a nullability mismatch
+              possible_ts.each { |ts| emit_column('timestamp', ts.first, nil) }
+            end
+            mig << " end\n"
+            if pk_is_also_fk
+              mig << " reversible do |dir|\n"
+              mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
+              mig << " end\n"
+            end
+            add_fks.each do |add_fk|
+              is_commented = false
+              # add_fk[2] holds the inverse relation
+              unless (pk = add_fk[2][:pkey].values.flatten&.first)
+                is_commented = true
+                mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
+                # No official PK, but if coincidentally there's a column of the same name, take a chance on it
+                pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
+              end
+              #                        to_table               column
+              mig << " #{'# ' if is_commented}add_foreign_key #{tbl_code}, #{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
+            end
+            mig << " end\n"
+            versions_to_create << migration_file_write(mig_path, "create_#{tbl_parts.map(&:underscore).join('_')}", current_mig_time += 1.minute, ar_version, mig)
+          end
+          done.concat(fringe)
+          chosen -= done
+        end
+
+        stuck_counts = Hash.new { |h, k| h[k] = 0 }
+        chosen.each do |leftover|
+          puts "Can't do #{leftover} because:\n #{stuck[leftover].map do |snag|
+            stuck_counts[snag.last[:inverse_table]] += 1
+            snag.last[:assoc_name]
+          end.join(', ')}"
+        end
+        if mig_path.start_with?(cur_path = ::Rails.root.to_s)
+          pretty_mig_path = mig_path[cur_path.length..-1]
+        end
+        puts "\n*** Created #{done.length} migration files under #{pretty_mig_path || mig_path} ***"
+        if (stuck_sorted = stuck_counts.to_a.sort { |a, b| b.last <=> a.last }).length.positive?
+          puts "-----------------------------------------"
+          puts "Unable to create migrations for #{stuck_sorted.length} tables#{
+              ".  Here's the top 5 blockers" if stuck_sorted.length > 5
+            }:"
+          pp stuck_sorted[0..4]
+        else # Successful, and now we can update the schema_migrations table accordingly
+          unless ActiveRecord::Migration.table_exists?(ActiveRecord::Base.schema_migrations_table_name)
+            ActiveRecord::SchemaMigration.create_table
+          end
+          # Remove to_delete - to_create
+          if ((versions_to_delete_or_append ||= []) - versions_to_create).present? && is_delete_versions
+            ActiveRecord::Base.execute_sql("DELETE FROM #{
+              ActiveRecord::Base.schema_migrations_table_name} WHERE version IN (#{
+              (versions_to_delete_or_append - versions_to_create).map { |vtd| "'#{vtd}'" }.join(', ')}
+            )")
+          end
+          # Add to_create - to_delete
+          if is_insert_versions && ((versions_to_create ||= []) - versions_to_delete_or_append).present?
+            ActiveRecord::Base.execute_sql("INSERT INTO #{
+              ActiveRecord::Base.schema_migrations_table_name} (version) VALUES #{
+              (versions_to_create - versions_to_delete_or_append).map { |vtc| "('#{vtc}')" }.join(', ')
+            }")
+          end
+        end
+      end
+
+      private
+
+      def emit_column(type, name, suffix)
+        " t.#{type.start_with?('numeric') ? 'decimal' : type} :#{name}#{suffix}\n"
+      end
+
+      def migration_file_write(mig_path, name, current_mig_time, ar_version, mig)
+        File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}.rb", "w") do |f|
+          f.write "class #{name.camelize} < ActiveRecord::Migration#{ar_version}\n"
+          f.write mig
+          f.write "end\n"
+        end
+        version
+      end
+    end
+  end
+end
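Putting `generate_migrations` and `migration_file_write` together, each chosen table ends up as one timestamped file whose class wraps the `def change` string assembled above. An illustrative shape of such a file (the table, columns and timestamp here are hypothetical; the real output depends entirely on the database being scanned):

  # tmp/brick_migrations/20230808120100_create_customers.rb  (illustrative only)
  class CreateCustomers < ActiveRecord::Migration[7.0]
    def change
      return unless reverting? || !table_exists?(:customers)

      create_table :customers do |t|
        t.string :name, null: false
        t.references :sales_rep, foreign_key: { to_table: :employees }

        t.timestamps
      end
    end
  end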
data/lib/generators/brick/migrations_generator.rb
CHANGED
@@ -3,52 +3,12 @@
 require 'rails/generators'
 require 'rails/generators/active_record'
 require 'fancy_gets'
+require 'generators/brick/migration_builder'
 
 module Brick
   # Auto-generates migration files
   class MigrationsGenerator < ::Rails::Generators::Base
-    include
-    # include ::Rails::Generators::Migration
-
-    # Many SQL types are the same as their migration data type name:
-    #   text, integer, bigint, date, boolean, decimal, float
-    # These however are not:
-    SQL_TYPES = { 'character varying' => 'string',
-                  'character' => 'string', # %%% Need to put in "limit: 1"
-                  'xml' => 'text',
-                  'bytea' => 'binary',
-                  'timestamp without time zone' => 'timestamp',
-                  'timestamp with time zone' => 'timestamp',
-                  'time without time zone' => 'time',
-                  'time with time zone' => 'time',
-                  'double precision' => 'float',
-                  'smallint' => 'integer', # %%% Need to put in "limit: 2"
-                  'ARRAY' => 'string', # Note that we'll also add ", array: true"
-                  # Oracle data types
-                  'VARCHAR2' => 'string',
-                  'CHAR' => 'string',
-                  ['NUMBER', 22] => 'integer',
-                  /^INTERVAL / => 'string', # Time interval stuff like INTERVAL YEAR(2) TO MONTH, INTERVAL '999' DAY(3), etc
-                  'XMLTYPE' => 'xml',
-                  'RAW' => 'binary',
-                  'SDO_GEOMETRY' => 'geometry',
-                  # MSSQL data types
-                  'int' => 'integer',
-                  'nvarchar' => 'string',
-                  'nchar' => 'string',
-                  'datetime2' => 'timestamp',
-                  'bit' => 'boolean',
-                  'varbinary' => 'binary',
-                  # Sqlite data types
-                  'TEXT' => 'text',
-                  '' => 'string',
-                  'INTEGER' => 'integer',
-                  'REAL' => 'float',
-                  'BLOB' => 'binary',
-                  'TIMESTAMP' => 'timestamp',
-                  'DATETIME' => 'timestamp'
-                }
-    # (Still need to find what "inet" and "json" data types map to.)
+    include ::Brick::MigrationBuilder
 
     desc 'Auto-generates migration files for an existing database.'
 
@@ -63,294 +23,12 @@ module Brick
        return
      end
 
-
-     key_type = ((is_sqlite || ActiveRecord.version < ::Gem::Version.new('5.1')) ? 'integer' : 'bigint')
-     is_4x_rails = ActiveRecord.version < ::Gem::Version.new('5.0')
-     ar_version = "[#{ActiveRecord.version.segments[0..1].join('.')}]" unless is_4x_rails
-     is_insert_versions = true
-     is_delete_versions = false
-     versions_to_delete_or_append = nil
-     if Dir.exist?(mig_path = ActiveRecord::Migrator.migrations_paths.first || "#{::Rails.root}/db/migrate")
-       if Dir["#{mig_path}/**/*.rb"].present?
-         puts "WARNING: migrations folder #{mig_path} appears to already have ruby files present."
-         mig_path2 = "#{::Rails.root}/tmp/brick_migrations"
-         is_insert_versions = false unless mig_path == mig_path2
-         if Dir.exist?(mig_path2)
-           if Dir["#{mig_path2}/**/*.rb"].present?
-             puts "As well, temporary folder #{mig_path2} also has ruby files present."
-             puts "Choose a destination -- all existing .rb files will be removed:"
-             mig_path2 = gets_list(list: ['Cancel operation!', "Append migration files into #{mig_path} anyway", mig_path, mig_path2])
-             return if mig_path2.start_with?('Cancel')
-
-             existing_mig_files = Dir["#{mig_path2}/**/*.rb"]
-             if (is_insert_versions = mig_path == mig_path2)
-               versions_to_delete_or_append = existing_mig_files.map { |ver| ver.split('/').last.split('_').first }
-             end
-             if mig_path2.start_with?('Append migration files into ')
-               mig_path2 = mig_path
-             else
-               is_delete_versions = true
-               existing_mig_files.each { |rb| File.delete(rb) }
-             end
-           else
-             puts "Using temporary folder #{mig_path2} for created migration files.\n\n"
-           end
-         else
-           puts "Creating the temporary folder #{mig_path2} for created migration files.\n\n"
-           Dir.mkdir(mig_path2)
-         end
-         mig_path = mig_path2
-       else
-         puts "Using standard migration folder #{mig_path} for created migration files.\n\n"
-       end
-     else
-       puts "Creating standard ActiveRecord migration folder #{mig_path} to hold new migration files.\n\n"
-       Dir.mkdir(mig_path)
-     end
+     mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationBuilder.check_folder
 
      # Generate a list of tables that can be chosen
      chosen = gets_list(list: tables, chosen: tables.dup)
-     schemas = chosen.each_with_object({}) do |v, s|
-       if (v_parts = v.split('.')).length > 1
-         s[v_parts.first] = nil unless [::Brick.default_schema, 'public'].include?(v_parts.first)
-       end
-     end
-     # Start the timestamps back the same number of minutes from now as expected number of migrations to create
-     current_mig_time = Time.now - (schemas.length + chosen.length).minutes
-     done = []
-     fks = {}
-     stuck = {}
-     indexes = {} # Track index names to make sure things are unique
-     built_schemas = {} # Track all built schemas so we can place an appropriate drop_schema command only in the first
-                        # migration in which that schema is referenced, thereby allowing rollbacks to function properly.
-     versions_to_create = [] # Resulting versions to be used when updating the schema_migrations table
-     ar_base = Object.const_defined?(:ApplicationRecord) ? ApplicationRecord : Class.new(ActiveRecord::Base)
-     # Start by making migrations for fringe tables (those with no foreign keys).
-     # Continue layer by layer, creating migrations for tables that reference ones already done, until
-     # no more migrations can be created.  (At that point hopefully all tables are accounted for.)
-     while (fringe = chosen.reject do |tbl|
-              snag_fks = []
-              snags = ::Brick.relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
-                v[:is_bt] && !v[:polymorphic] &&
-                tbl != v[:inverse_table] && # Ignore self-referencing associations (stuff like "parent_id")
-                !done.include?(v[:inverse_table]) &&
-                ::Brick.config.ignore_migration_fks.exclude?(snag_fk = "#{tbl}.#{v[:fk]}") &&
-                snag_fks << snag_fk
-              end
-              if snags&.present?
-                # puts snag_fks.inspect
-                stuck[tbl] = snags
-              end
-            end).present?
-       fringe.each do |tbl|
-         next unless (relation = ::Brick.relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present?
-
-         pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [ar_base.primary_key].flatten.sort)
-         # In case things aren't as standard
-         if pkey_cols.empty?
-           pkey_cols = if rpk.empty? && relation[:cols][arpk.first]&.first == key_type
-                         arpk
-                       elsif rpk.first
-                         rpk
-                       end
-         end
-         schema = if (tbl_parts = tbl.split('.')).length > 1
-                    if tbl_parts.first == (::Brick.default_schema || 'public')
-                      tbl_parts.shift
-                      nil
-                    else
-                      tbl_parts.first
-                    end
-                  end
-         unless schema.blank? || built_schemas.key?(schema)
-           mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
-           migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
-           built_schemas[schema] = nil
-         end
 
-
-         fkey_cols = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
-         # If the primary key is also used as a foreign key, will need to do id: false and then build out
-         # a column definition which includes :primary_key -- %%% also using a data type of bigserial or serial
-         # if this one has come in as bigint or integer.
-         pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
-         # Support missing primary key (by adding:  , id: false)
-         id_option = if pk_is_also_fk || !pkey_cols&.present?
-                       needs_serial_col = true
-                       +', id: false'
-                     elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
-                            (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
-                                              SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
-                                              pkey_col_first
-                            ) != key_type
-                           )
-                       case pkey_col_first
-                       when 'integer'
-                         +', id: :serial'
-                       when 'bigint'
-                         +', id: :bigserial'
-                       else
-                         +", id: :#{pkey_col_first}" # Something like:  id: :integer, primary_key: :businessentityid
-                       end +
-                       (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
-                     end
-         if !id_option && pkey_cols.sort != arpk
-           id_option = +", primary_key: :#{pkey_cols.first}"
-         end
-         if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
-           (id_option ||= +'') << ", comment: #{comment.inspect}"
-         end
-         # Find the ActiveRecord class in order to see if the columns have comments
-         unless is_4x_rails
-           klass = begin
-                     tbl.tr('.', '/').singularize.camelize.constantize
-                   rescue StandardError
-                   end
-           if klass
-             unless ActiveRecord::Migration.table_exists?(klass.table_name)
-               puts "WARNING:  Unable to locate table #{klass.table_name} (for #{klass.name})."
-               klass = nil
-             end
-           end
-         end
-         # Refer to this table name as a symbol or dotted string as appropriate
-         tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
-         mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
-         mig << " create_table #{tbl_code}#{id_option} do |t|\n"
-         possible_ts = [] # Track possible generic timestamps
-         add_fks = [] # Track foreign keys to add after table creation
-         relation[:cols].each do |col, col_type|
-           sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
-                      SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
-                      col_type.first
-           suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
-           suffix << ', array: true' if (col_type.first == 'ARRAY')
-           if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
-             suffix << ", comment: #{comment.inspect}"
-           end
-           # Determine if this column is used as part of a foreign key
-           if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
-             to_table = fk[:inverse_table].split('.')
-             to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
-             if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
-               sql_type = new_serial_type
-               needs_serial_col = false
-             end
-             if fk[:fk] != "#{fk[:assoc_name].singularize}_id" # Need to do our own foreign_key tricks, not use references?
-               column = fk[:fk]
-               mig << emit_column(sql_type, column, suffix)
-               add_fks << [to_table, column, ::Brick.relations[fk[:inverse_table]]]
-             else
-               suffix << ", type: :#{sql_type}" unless sql_type == key_type
-               # Will the resulting default index name be longer than what Postgres allows?  (63 characters)
-               if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
-                 # Try to find a shorter name that hasn't been used yet
-                 unless indexes.key?(shorter = idx_name[0..62]) ||
-                        indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
-                        indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
-                   puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
-                   puts "so have resorted to this GUID-based identifier:  #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
-                 end
-                 suffix << ", index: { name: '#{shorter || idx_name}' }"
-                 indexes[shorter || idx_name] = nil
-               end
-               primary_key = ::Brick.relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
-               mig << " t.references :#{fk[:assoc_name]}#{suffix}, foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ar_base.primary_key} }\n"
-             end
-           else
-             next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
-
-             # See if there are generic timestamps
-             if sql_type == 'timestamp' && ['created_at','updated_at'].include?(col)
-               possible_ts << [col, !col_type[3]]
-             else
-               mig << emit_column(sql_type, col, suffix)
-             end
-           end
-         end
-         if possible_ts.length == 2 && # Both created_at and updated_at
-            # Rails 5 and later timestamps default to NOT NULL
-            (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
-           mig << "\n t.timestamps\n"
-         else # Just one or the other, or a nullability mismatch
-           possible_ts.each { |ts| emit_column('timestamp', ts.first, nil) }
-         end
-         mig << " end\n"
-         if pk_is_also_fk
-           mig << " reversible do |dir|\n"
-           mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
-           mig << " end\n"
-         end
-         add_fks.each do |add_fk|
-           is_commented = false
-           # add_fk[2] holds the inverse relation
-           unless (pk = add_fk[2][:pkey].values.flatten&.first)
-             is_commented = true
-             mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
-             # No official PK, but if coincidentally there's a column of the same name, take a chance on it
-             pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
-           end
-           #                        to_table               column
-           mig << " #{'# ' if is_commented}add_foreign_key #{tbl_code}, #{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
-         end
-         mig << " end\n"
-         versions_to_create << migration_file_write(mig_path, "create_#{tbl_parts.map(&:underscore).join('_')}", current_mig_time += 1.minute, ar_version, mig)
-       end
-       done.concat(fringe)
-       chosen -= done
-     end
-
-     stuck_counts = Hash.new { |h, k| h[k] = 0 }
-     chosen.each do |leftover|
-       puts "Can't do #{leftover} because:\n #{stuck[leftover].map do |snag|
-         stuck_counts[snag.last[:inverse_table]] += 1
-         snag.last[:assoc_name]
-       end.join(', ')}"
-     end
-     if mig_path.start_with?(cur_path = ::Rails.root.to_s)
-       pretty_mig_path = mig_path[cur_path.length..-1]
-     end
-     puts "\n*** Created #{done.length} migration files under #{pretty_mig_path || mig_path} ***"
-     if (stuck_sorted = stuck_counts.to_a.sort { |a, b| b.last <=> a.last }).length.positive?
-       puts "-----------------------------------------"
-       puts "Unable to create migrations for #{stuck_sorted.length} tables#{
-           ".  Here's the top 5 blockers" if stuck_sorted.length > 5
-         }:"
-       pp stuck_sorted[0..4]
-     else # Successful, and now we can update the schema_migrations table accordingly
-       unless ActiveRecord::Migration.table_exists?(ActiveRecord::Base.schema_migrations_table_name)
-         ActiveRecord::SchemaMigration.create_table
-       end
-       # Remove to_delete - to_create
-       if ((versions_to_delete_or_append ||= []) - versions_to_create).present? && is_delete_versions
-         ActiveRecord::Base.execute_sql("DELETE FROM #{
-           ActiveRecord::Base.schema_migrations_table_name} WHERE version IN (#{
-           (versions_to_delete_or_append - versions_to_create).map { |vtd| "'#{vtd}'" }.join(', ')}
-         )")
-       end
-       # Add to_create - to_delete
-       if is_insert_versions && ((versions_to_create ||= []) - versions_to_delete_or_append).present?
-         ActiveRecord::Base.execute_sql("INSERT INTO #{
-           ActiveRecord::Base.schema_migrations_table_name} (version) VALUES #{
-           (versions_to_create - versions_to_delete_or_append).map { |vtc| "('#{vtc}')" }.join(', ')
-         }")
-       end
-     end
-   end
-
-   private
-
-   def emit_column(type, name, suffix)
-     " t.#{type.start_with?('numeric') ? 'decimal' : type} :#{name}#{suffix}\n"
-   end
-
-   def migration_file_write(mig_path, name, current_mig_time, ar_version, mig)
-     File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}.rb", "w") do |f|
-       f.write "class #{name.camelize} < ActiveRecord::Migration#{ar_version}\n"
-       f.write mig
-       f.write "end\n"
-     end
-     version
+     ::Brick::MigrationBuilder.generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions)
    end
  end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: brick
 version: !ruby/object:Gem::Version
-  version: 1.0.157
+  version: 1.0.160
 platform: ruby
 authors:
 - Lorin Thwaits
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-08-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord
@@ -255,6 +255,7 @@ files:
 - lib/brick/version_number.rb
 - lib/generators/brick/USAGE
 - lib/generators/brick/install_generator.rb
+- lib/generators/brick/migration_builder.rb
 - lib/generators/brick/migrations_generator.rb
 - lib/generators/brick/models_generator.rb
 - lib/generators/brick/seeds_generator.rb