brick 1.0.189 → 1.0.191
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/brick/config.rb +8 -0
- data/lib/brick/extensions.rb +147 -50
- data/lib/brick/frameworks/rails/engine.rb +28 -13
- data/lib/brick/frameworks/rails/form_tags.rb +6 -3
- data/lib/brick/version_number.rb +1 -1
- data/lib/brick.rb +12 -7
- data/lib/generators/brick/migration_builder.rb +223 -159
- data/lib/generators/brick/migrations_generator.rb +1 -0
- data/lib/generators/brick/salesforce_migrations_generator.rb +101 -0
- data/lib/generators/brick/salesforce_schema.rb +105 -0
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 193982ca353a787d619e46d02b5d7a671c2b05936a2bf96c7f4c92fbba2b467c
+  data.tar.gz: 065f9abdc32f6d6413caf955ab7f01cd2a300b24532f215471aadf82ec9bdc93
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 363c321fac7e13b4ccbe4bd9ed8b10f1a525641c0da3db0c5ee2bc49f6b1a7b0b0254c11fdb9d42c4ab2ab2c9c7e673a4f6c25e063315bcafcede48cc3096692
+  data.tar.gz: 9f27685f8621482b2130a66d5c7d149d41b108dc73f800d060f26ab757f09f06bb3c675b22db0dc37e666a55cdcd0b5ead274232913893d48881809cbf58e72e
data/lib/brick/config.rb
CHANGED
@@ -399,6 +399,14 @@ module Brick
       @mutex.synchronize { @not_nullables = columns }
     end
 
+    def omit_empty_tables_in_dropdown
+      @mutex.synchronize { @omit_empty_tables_in_dropdown }
+    end
+
+    def omit_empty_tables_in_dropdown=(field_set)
+      @mutex.synchronize { @omit_empty_tables_in_dropdown = field_set }
+    end
+
     def always_load_fields
       @mutex.synchronize { @always_load_fields || {} }
     end
data/lib/brick/extensions.rb
CHANGED
@@ -98,6 +98,7 @@ module ActiveRecord
     end
 
     def _br_quoted_name(name)
+      name = name.join('.') if name.is_a?(Array)
       if name == '*'
         name
       elsif is_mysql
@@ -328,27 +329,10 @@ module ActiveRecord
     end
 
     # Providing a relation object allows auto-modules built from table name prefixes to work
-    def self._brick_index(mode = nil, separator =
+    def self._brick_index(mode = nil, separator = nil, relation = nil)
       return if abstract_class?
 
-
-      tbl_parts.shift if ::Brick.apartment_multitenant && tbl_parts.length > 1 && tbl_parts.first == ::Brick.apartment_default_tenant
-      if (aps = relation&.fetch(:auto_prefixed_schema, nil)) && tbl_parts.last.start_with?(aps)
-        last_part = tbl_parts.last[aps.length..-1]
-        aps = aps[0..-2] if aps[-1] == '_'
-        tbl_parts[-1] = aps
-        tbl_parts << last_part
-      end
-      path_prefix = []
-      if ::Brick.config.path_prefix
-        tbl_parts.unshift(::Brick.config.path_prefix)
-        path_prefix << ::Brick.config.path_prefix
-      end
-      index = tbl_parts.map(&:underscore).join(separator)
-      # Rails applies an _index suffix to that route when the resource name isn't something plural
-      index << '_index' if mode != :singular && separator == '_' &&
-                           index == (path_prefix + [name&.underscore&.tr('/', '_') || '_']).join(separator)
-      index
+      ::Brick._brick_index(table_name, mode, separator, relation)
     end
 
     def self.brick_import_template
@@ -685,7 +669,7 @@ module ActiveRecord
         (cust_col_override || klass._br_cust_cols).each do |k, cc|
           if rel_dupe.respond_to?(k) # Name already taken?
             # %%% Use ensure_unique here in this kind of fashion:
-            # cnstr_name = ensure_unique(+"(brick) #{for_tbl}_#{pri_tbl}", bts, hms)
+            # cnstr_name = ensure_unique(+"(brick) #{for_tbl}_#{pri_tbl}", nil, bts, hms)
             # binding.pry
             next
           end
@@ -805,6 +789,7 @@ module ActiveRecord
 
         # Add derived table JOIN for the has_many counts
         nix = []
+        previous = []
         klass._br_hm_counts.each do |k, hm|
           count_column = if hm.options[:through]
                            # Build the chain of JOINs going to the final destination HMT table
@@ -831,7 +816,7 @@
                              through_sources.push(this_hm = src_ref.active_record.reflect_on_association(thr))
                            end
                            through_sources.push(src_ref) unless src_ref.belongs_to?
-                           from_clause = +"#{through_sources.first.table_name} br_t0"
+                           from_clause = +"#{_br_quoted_name(through_sources.first.table_name)} br_t0"
                            fk_col = through_sources.shift.foreign_key
 
                            idx = 0
@@ -899,7 +884,7 @@
             next
           end
 
-          tbl_alias = "b_r_#{hm.name}"
+          tbl_alias = unique63("b_r_#{hm.name}", previous)
           on_clause = []
           hm_selects = if fk_col.is_a?(Array) # Composite key?
                          fk_col.each_with_index { |fk_col_part, idx| on_clause << "#{tbl_alias}.#{fk_col_part} = #{pri_tbl.table_name}.#{pri_key[idx]}" }
@@ -1104,6 +1089,19 @@ Might want to add this in your brick.rb:
     def shift_or_first(ary)
       ary.length > 1 ? ary.shift : ary.first
     end
+
+    def unique63(name, previous)
+      name = name[0..62] if name.length > 63
+      unique_num = 1
+      loop do
+        break unless previous.include?(name)
+
+        unique_suffix = "_#{unique_num += 1}"
+        name = "#{name[0..name.length - unique_suffix.length - 1]}#{unique_suffix}"
+      end
+      previous << name
+      name
+    end
   end
 
   module Inheritance
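unique63 keeps the generated b_r_ derived-table aliases inside the 63-character identifier limit that Postgres enforces, bumping a numeric suffix when a truncated alias collides with one already handed out. A rough illustration of that behaviour (not taken from the gem's test suite):

    previous = []
    long_name = "b_r_#{'x' * 70}"
    unique63(long_name, previous) # => first 63 characters of long_name
    unique63(long_name, previous) # => same 63 characters, but now ending in "_2"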
@@ -1714,7 +1712,8 @@ class Object
           # options[:class_name] = hm.first[:inverse_table].singularize.camelize
           # options[:foreign_key] = hm.first[:fk].to_sym
           far_assoc = relations[hm.first[:inverse_table]][:fks].find { |_k, v| v[:assoc_name] == hm[1] }
-
+          # Was: ::Brick.namify(far_assoc.last[:inverse_table], :underscore).camelize
+          options[:class_name] = relations[far_assoc.last[:inverse_table]][:class_name]
           options[:foreign_key] = far_assoc.last[:fk].to_sym
         end
         options[:source] ||= hm[1].to_sym unless hmt_name.singularize == hm[1]
@@ -1801,9 +1800,9 @@ class Object
            ::Brick.config.schema_behavior[:multitenant] && singular_table_parts.first == 'public'
           singular_table_parts.shift
         end
-
-
-
+        if need_class_name
+          options[:class_name] = "::#{assoc[:primary_class]&.name || ::Brick.relations[inverse_table][:class_name]}"
+        end
         if need_fk # Funky foreign key?
           options_fk_key = :foreign_key
           if assoc[:fk].is_a?(Array)
@@ -2558,6 +2557,8 @@ class Object
           assoc_name = assoc_parts.join('.')
         else
           class_name_parts = ::Brick.namify(hm_assoc[:inverse_table], :underscore).split('.')
+          last_idx = class_name_parts.length - 1
+          class_name_parts[last_idx] = class_name_parts[last_idx].singularize
           real_name = class_name_parts.map(&:camelize).join('::')
           needs_class = (real_name != hm_assoc[:inverse_table].camelize)
         end
@@ -2895,24 +2896,32 @@ ORDER BY 1, 2, c.internal_column_id, acc.position"
       # end
       # end
       # schema = ::Brick.default_schema # Reset back for this next round of fun
+      kcus = nil
       case ActiveRecord::Base.connection.adapter_name
       when 'PostgreSQL', 'Mysql2', 'Trilogy', 'SQLServer'
-
-
+        # All KCUs -- use this to virtually JOIN against fk_references in Ruby code
+        sql = "SELECT CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME, ORDINAL_POSITION,
+          TABLE_NAME, COLUMN_NAME
+        FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE#{"
+        WHERE CONSTRAINT_SCHEMA = COALESCE(current_setting('SEARCH_PATH'), 'public')" if is_postgres && schema }"
+        kcus = ActiveRecord::Base.execute_sql(sql).each_with_object({}) do |v, s|
+          key = "#{v.fetch('constraint_name', v[2])}.#{v.fetch('constraint_schema', v[1])}.#{v.fetch('constraint_catalog', v[0])}.#{v.fetch('ordinal_position', v[3])}"
+          key << ".#{v.fetch('table_name', v[4])}.#{v.fetch('column_name', v[5])}" unless is_postgres || is_mssql
+          s[key] = [v.fetch('constraint_schema', v[1]), v.fetch('table_name', v[4])]
+        end
+
+        sql = "SELECT kcu.CONSTRAINT_SCHEMA, kcu.TABLE_NAME, kcu.COLUMN_NAME,
+          #{# These will get filled in with real values (effectively doing the JOIN in Ruby)
+            is_postgres || is_mssql ? 'NULL as primary_schema, NULL as primary_table' :
+                                      'kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME'},
+          kcu.CONSTRAINT_NAME AS CONSTRAINT_SCHEMA_FK,
+          rc.UNIQUE_CONSTRAINT_NAME, rc.UNIQUE_CONSTRAINT_SCHEMA, rc.UNIQUE_CONSTRAINT_CATALOG, kcu.ORDINAL_POSITION
         FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS rc
-          INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS
-            ON
-            AND
-            AND
-
-            ON kcu2.CONSTRAINT_CATALOG = rc.UNIQUE_CONSTRAINT_CATALOG
-            AND kcu2.CONSTRAINT_SCHEMA = rc.UNIQUE_CONSTRAINT_SCHEMA
-            AND kcu2.CONSTRAINT_NAME = rc.UNIQUE_CONSTRAINT_NAME#{"
-            AND kcu2.TABLE_NAME = kcu1.REFERENCED_TABLE_NAME
-            AND kcu2.COLUMN_NAME = kcu1.REFERENCED_COLUMN_NAME" unless is_postgres || is_mssql }
-            AND kcu2.ORDINAL_POSITION = kcu1.ORDINAL_POSITION#{"
-        WHERE kcu1.CONSTRAINT_SCHEMA = COALESCE(current_setting('SEARCH_PATH'), 'public')" if is_postgres && schema }"
-          # AND kcu2.TABLE_NAME = ?;", Apartment::Tenant.current, table_name
+          INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS kcu
+            ON kcu.CONSTRAINT_CATALOG = rc.CONSTRAINT_CATALOG
+            AND kcu.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA
+            AND kcu.CONSTRAINT_NAME = rc.CONSTRAINT_NAME#{"
+        WHERE kcu.CONSTRAINT_SCHEMA = COALESCE(current_setting('SEARCH_PATH'), 'public')" if is_postgres && schema }"
         fk_references = ActiveRecord::Base.execute_sql(sql)
       when 'SQLite'
         sql = "SELECT NULL AS constraint_schema, m.name, fkl.\"from\", NULL AS primary_schema, fkl.\"table\", m.name || '_' || fkl.\"from\" AS constraint_name
@@ -2940,9 +2949,14 @@ ORDER BY 1, 2, c.internal_column_id, acc.position"
       end
       ::Brick.is_oracle = true if ActiveRecord::Base.connection.adapter_name == 'OracleEnhanced'
       # ::Brick.default_schema ||= schema ||= 'public' if ActiveRecord::Base.connection.adapter_name == 'PostgreSQL'
-      ::Brick.default_schema ||= 'public' if
+      ::Brick.default_schema ||= 'public' if is_postgres
       fk_references&.each do |fk|
         fk = fk.values unless fk.is_a?(Array)
+        # Virtually JOIN against fk_references in order to change out the primary schema and primary table
+        if (kcu = kcus&.fetch("#{fk[6]}.#{fk[7]}.#{fk[8]}.#{fk[9]}", nil))
+          fk[3] = kcu[0]
+          fk[4] = kcu[1]
+        end
         # Multitenancy makes things a little more general overall, except for non-tenanted tables
         if ::Brick.is_apartment_excluded_table(::Brick.namify(fk[1]))
           fk[0] = ::Brick.apartment_default_tenant
@@ -2967,6 +2981,7 @@ ORDER BY 1, 2, c.internal_column_id, acc.position"
         end
         ::Brick._add_bt_and_hm(fk, relations)
       end
+      kcus = nil # Allow this large item to be garbage collected
     end
 
     table_name_lookup = (::Brick.table_name_lookup ||= {})
@@ -2985,18 +3000,71 @@ ORDER BY 1, 2, c.internal_column_id, acc.position"
            &.find { |k1, _v1| singular.start_with?(k1) && singular.length > k1.length }
          ).present?
         v[:auto_prefixed_schema] = tnp.first
-        v[:resource] = rel_name.last[
-        [tnp.last, singular[
+        # v[:resource] = rel_name.last[tnp.first.length..-1]
+        [tnp.last, singular[tnp.first.length..-1]]
       else
-        v[:resource] = rel_name.last
+        # v[:resource] = rel_name.last
         [singular]
       end
-
+      proposed_name_parts = (schema_names + name_parts).map { |p| ::Brick.namify(p, :underscore).camelize }
+      # Find out if the proposed name leads to a module or class that already exists and is not an AR class
+      colliding_thing = nil
+      loop do
+        klass = Object
+        proposed_name_parts.each do |part|
+          if klass.const_defined?(part)
+            klass = klass.const_get(part)
+          else
+            klass = nil
+            break
+          end
+        end
+        break if !klass || (klass < ActiveRecord::Base) # Break if all good -- no conflicts
+
+        # Find a unique name since there's already something that's non-AR with that same name
+        last_idx = proposed_name_parts.length - 1
+        proposed_name_parts[last_idx] = ::Brick.ensure_unique(proposed_name_parts[last_idx], 'X')
+        colliding_thing ||= klass
+      end
+      v[:class_name] = proposed_name_parts.join('::')
+      # Was: v[:resource] = v[:class_name].underscore.tr('/', '.').pluralize
+      v[:resource] = proposed_name_parts.last.underscore.pluralize
+      if colliding_thing
+        message_start = if colliding_thing.is_a?(Module) && Object.const_defined?(:Rails) &&
+                           colliding_thing.constants.find { |c| colliding_thing.const_get(c) < Rails::Application }
+                          "The module for the Rails application itself, \"#{colliding_thing.name}\","
+                        else
+                          "Non-AR #{colliding_thing.class.name.downcase} \"#{colliding_thing.name}\""
+                        end
+        puts "WARNING: #{message_start} already exists.\n Will set up to auto-create model #{v[:class_name]} for table #{k}."
+      end
       # Track anything that's out-of-the-ordinary
       table_name_lookup[v[:class_name]] = k unless v[:class_name].underscore.pluralize == k
     end
     ::Brick.load_additional_references if ::Brick.initializer_loaded
 
+    if is_postgres
+      ActiveRecord::Base.execute_sql("-- inherited and partitioned tables counts
+        SELECT parent.relname,
+          ((SUM(child.reltuples::float) / greatest(SUM(child.relpages), 1))) *
+          (SUM(pg_relation_size(child.oid))::float / (current_setting('block_size')::float))::integer AS rowcount
+        FROM pg_inherits
+          INNER JOIN pg_class parent ON pg_inherits.inhparent = parent.oid
+          INNER JOIN pg_class child ON pg_inherits.inhrelid = child.oid
+        GROUP BY parent.relname, child.reltuples, child.relpages, child.oid
+
+        UNION ALL
+
+        -- table count
+        SELECT relname,
+          (reltuples::float / greatest(relpages, 1)) *
+          (pg_relation_size(pg_class.oid)::float / (current_setting('block_size')::float))::integer AS rowcount
+        FROM pg_class
+        GROUP BY relname, reltuples, relpages, oid").each do |tblcount|
+        relations.fetch(tblcount['relname'], nil)&.[]=(:rowcount, tblcount['rowcount'].round)
+      end
+    end
+
     if orig_schema && (orig_schema = (orig_schema - ['pg_catalog', 'pg_toast', 'heroku_ext']).first)
       puts "Now switching back to \"#{orig_schema}\" schema."
       ActiveRecord::Base.execute_sql("SET SEARCH_PATH = ?", orig_schema)
@@ -3136,7 +3204,7 @@ module Brick
       # For any appended references (those that come from config), arrive upon a definitely unique constraint name
       pri_tbl = is_class ? fk[4][:class].underscore : pri_tbl
       pri_tbl = "#{bt_assoc_name}_#{pri_tbl}" if pri_tbl&.singularize != bt_assoc_name
-      cnstr_name = ensure_unique(+"(brick) #{for_tbl}_#{pri_tbl}", bts, hms)
+      cnstr_name = ensure_unique(+"(brick) #{for_tbl}_#{pri_tbl}", nil, bts, hms)
       missing = []
       missing << fk[1] unless relations.key?(fk[1])
       missing << primary_table unless is_class || relations.key?(primary_table)
@@ -3270,15 +3338,17 @@ module Brick
       end
     end
 
-    def ensure_unique(name, *sources)
+    def ensure_unique(name, delimiter, *sources)
       base = name
-
+      delimiter ||= '_'
+      # By default ends up building this regex: /_(\d+)$/
+      if (added_num = name.slice!(Regexp.new("#{delimiter}(\d+)$")))
         added_num = added_num[1..-1].to_i
       else
         added_num = 1
       end
       while (
-        name = "#{base}
+        name = "#{base}#{delimiter}#{added_num += 1}"
         sources.each_with_object(nil) do |v, s|
           s || case v
                when Hash
@@ -3356,6 +3426,33 @@ module Brick
       end
     end
 
+    def _brick_index(tbl_name, mode = nil, separator = nil, relation = nil)
+      separator ||= '_'
+      res_name = (tbl_name_parts = tbl_name.split('.'))[0..-2].first
+      res_name << '.' if res_name
+      (res_name ||= +'') << (relation || ::Brick.relations.fetch(tbl_name, nil)&.fetch(:resource, nil) || tbl_name_parts.last)
+
+      res_parts = ((mode == :singular) ? res_name.singularize : res_name).split('.')
+      res_parts.shift if ::Brick.apartment_multitenant && res_parts.length > 1 && res_parts.first == ::Brick.apartment_default_tenant
+      if (aps = relation&.fetch(:auto_prefixed_schema, nil)) && res_parts.last.start_with?(aps)
+        last_part = res_parts.last[aps.length..-1]
+        aps = aps[0..-2] if aps[-1] == '_'
+        res_parts[-1] = aps
+        res_parts << last_part
+      end
+      path_prefix = []
+      if ::Brick.config.path_prefix
+        res_parts.unshift(::Brick.config.path_prefix)
+        path_prefix << ::Brick.config.path_prefix
+      end
+      index = res_parts.map(&:underscore).join(separator)
+      index = index.tr('_', 'x') if separator == 'x'
+      # Rails applies an _index suffix to that route when the resource name isn't something plural
+      index << '_index' if mode != :singular && separator == '_' &&
+                           index == (path_prefix + [name&.underscore&.tr('/', '_') || '_']).join(separator)
+      index
+    end
+
     def find_col_renaming(api_ver_path, relation_name)
       ::Brick.config.api_column_renaming&.fetch(
         api_ver_path,
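The class-level ActiveRecord _brick_index now delegates to this module-level ::Brick._brick_index, and the 'x' separator is what the migration builder below uses to derive file-system-friendly names for schema-qualified tables. A rough sketch of the intent, assuming no path_prefix, no multitenancy, and no pre-registered relation entry for the table (outputs are illustrative, not taken from the gem's tests):

    ::Brick._brick_index('sales.orders', nil, '/')        # => something like "sales/orders"
    ::Brick._brick_index('create_sales.orders', nil, 'x') # => something like "createxsalesxorders"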
data/lib/brick/frameworks/rails/engine.rb
CHANGED
@@ -738,8 +738,11 @@ window.addEventListener(\"popstate\", linkSchemas);
             hms_columns << hm_entry
           end
         when 'show', 'new', 'update'
+          predicates = nil
           hm_stuff << if hm_fk_name
-                        if
+                        if (hm_fk_name.is_a?(Array) && # Composite key?
+                            hm_fk_name.all? { |hm_fk_part| hm_assoc.klass.column_names.include?(hm_fk_part) }) ||
+                           hm_assoc.klass.column_names.include?(hm_fk_name.to_s) ||
                            (hm_fk_name.is_a?(String) && hm_fk_name.include?('.')) # HMT? (Could do a better check for this)
                           predicates = path_keys(hm_assoc, hm_fk_name, pk).map do |k, v|
                             if v == '[sti_type]'
@@ -771,9 +774,18 @@ window.addEventListener(\"popstate\", linkSchemas);
       end
       # %%% If we are not auto-creating controllers (or routes) then omit by default, and if enabled anyway, such as in a development
       # environment or whatever, then get either the controllers or routes list instead
-
-
-
+      table_rels = if ::Brick.config.omit_empty_tables_in_dropdown
+                     ::Brick.relations.reject { |k, v| k.is_a?(Symbol) || v[:rowcount] == 0 }
+                   else
+                     ::Brick.relations
+                   end
+      table_options = table_rels.sort do |a, b|
+                        a[0] = '' if a[0].is_a?(Symbol)
+                        b[0] = '' if b[0].is_a?(Symbol)
+                        a.first <=> b.first
+                      end.each_with_object(+'') do |rel, s|
+        next if rel.first.blank? || rel.last[:cols].empty? ||
+                ::Brick.config.exclude_tables.include?(rel.first)
 
         tbl_parts = rel.first.split('.')
         if (aps = rel.last.fetch(:auto_prefixed_schema, nil))
@@ -781,16 +793,16 @@ window.addEventListener(\"popstate\", linkSchemas);
           aps = aps[0..-2] if aps[-1] == '_'
           tbl_parts[-2] = aps
         end
-        if tbl_parts.first == apartment_default_schema
-          tbl_parts.shift
-        end
+        tbl_parts.shift if tbl_parts.first == apartment_default_schema
         # %%% When table_name_prefixes are use then during rendering empty non-TNP
         # entries get added at some point when an attempt is made to find the table.
         # Will have to hunt that down at some point.
-
-
-
+        if (rowcount = rel.last.fetch(:rowcount, nil))
+          rowcount = rowcount > 0 ? " (#{rowcount})" : nil
+        end
+        s << "<option value=\"#{::Brick._brick_index(rel.first, nil, '/')}\">#{rel.first}#{rowcount}</option>"
       end.html_safe
+      prefix = "#{::Brick.config.path_prefix}/" if ::Brick.config.path_prefix
       table_options << "<option value=\"#{prefix}brick_status\">(Status)</option>".html_safe if ::Brick.config.add_status
       table_options << "<option value=\"#{prefix}brick_orphans\">(Orphans)</option>".html_safe if is_orphans
       table_options << "<option value=\"#{prefix}brick_crosstab\">(Crosstab)</option>".html_safe if is_crosstab
@@ -1473,7 +1485,7 @@ end
 %>
   <tr>
     <td><%= begin
-              kls = Object.const_get(::Brick.relations.fetch(r[0], nil)&.fetch(:class_name, nil))
+              kls = Object.const_get((rel = ::Brick.relations.fetch(r[0], nil))&.fetch(:class_name, nil))
             rescue
             end
             if kls.is_a?(Class) && (path_helper = respond_to?(bi_path = \"#\{kls._brick_index}_path\".to_sym) ? bi_path : nil)
@@ -1486,7 +1498,10 @@ end
        else
          ' class=\"dimmed\"'
        end&.html_safe %>><%= # Table
-
+          if (rowcount = rel&.fetch(:rowcount, nil))
+            rowcount = (rowcount > 0 ? \" (#\{rowcount})\" : nil)
+          end
+          \"#\{r[1]}#\{rowcount}\" %></td>
     <td<%= lines = r[2]&.map { |line| \"#\{line.first}:#\{line.last}\" }
            ' class=\"dimmed\"'.html_safe unless r[2] %>><%= # Migration
           lines&.join('<br>')&.html_safe %></td>
@@ -1638,7 +1653,7 @@ end
             end
             s << "<table id=\"#{hm_name}\" class=\"shadow\">
         <tr><th>#{hm[1]}#{' poly' if hm[0].options[:as]} #{hm[3]}
-          <% if respond_to?(:new_#{partial_new_path_name = hm.first.klass._brick_index(:singular)}_path) %>
+          <% if predicates && respond_to?(:new_#{partial_new_path_name = hm.first.klass._brick_index(:singular)}_path) %>
             <span class = \"add-hm-related\"><%=
               pk_val = (obj_pk = model.primary_key).is_a?(String) ? obj.send(obj_pk) : obj_pk.map { |pk_part| obj.send(pk_part) }
               pk_val_arr = [pk_val] unless pk_val.is_a?(Array)
data/lib/brick/frameworks/rails/form_tags.rb
CHANGED
@@ -151,7 +151,7 @@ module Brick::Rails::FormTags
     # ActiveRecord::StatementTimeout in Warehouse::ColdRoomTemperatures_Archive#index
     # TinyTds::Error: Adaptive Server connection timed out
     # (After restarting the server it worked fine again.)
-
+    row_count = 0
     relation.each do |obj|
       out << "<tr>\n"
       out << "<td class=\"col-sticky\">#{link_to('⇛', send("#{klass._brick_index(:singular)}_path".to_sym,
@@ -252,13 +252,16 @@ module Brick::Rails::FormTags
         out << '</td>'
       end
       out << '</tr>'
-
+      row_count += 1
+    end
+    if (total_row_count = ::Brick.relations[table_name].fetch(:rowcount, nil))
+      total_row_count = total_row_count > row_count ? " (out of #{total_row_count})" : nil
     end
     out << " </tbody>
 </table>
 <script>
 var rowCount = document.getElementById(\"rowCount\");
-if (rowCount) rowCount.innerHTML = \"#{pluralize(
+if (rowCount) rowCount.innerHTML = \"#{pluralize(row_count, "row")}#{total_row_count} \";
 </script>
 "
 
data/lib/brick/version_number.rb
CHANGED
data/lib/brick.rb
CHANGED
@@ -619,6 +619,10 @@ module Brick
       Brick.config.license = key
     end
 
+    def omit_empty_tables_in_dropdown=(setting)
+      Brick.config.omit_empty_tables_in_dropdown = setting
+    end
+
     def always_load_fields=(field_set)
       Brick.config.always_load_fields = field_set
     end
@@ -948,13 +952,14 @@ In config/initializers/brick.rb appropriate entries would look something like:
         object_name = k.split('.').last # Take off any first schema part
 
         full_schema_prefix = if (full_aps = aps = v.fetch(:auto_prefixed_schema, nil))
-
-
-
-
-
+                               aps = aps[0..-2] if aps[-1] == '_'
+                               (schema_prefix&.dup || +'') << "#{aps}."
+                             else
+                               schema_prefix
+                             end
 
         # Track routes being built
+        resource_name = v.fetch(:resource, nil) || k
         if (class_name = v.fetch(:class_name, nil))
           if v.key?(:isView)
             view_class_length = class_name.length if class_name.length > view_class_length
@@ -962,7 +967,7 @@ In config/initializers/brick.rb appropriate entries would look something like:
           else
             table_class_length = class_name.length if class_name.length > table_class_length
             tables
-          end << [class_name, aps,
+          end << [class_name, aps, resource_name.tr('.', '/')[full_aps&.length || 0 .. -1]]
         end
 
         options = {}
@@ -973,7 +978,7 @@ In config/initializers/brick.rb appropriate entries would look something like:
         prefixes << [aps, v[:class_name]&.split('::')[-2]&.underscore] if aps
         prefixes << schema_name if schema_name
         prefixes << path_prefix if path_prefix
-        brick_namespace_create.call(prefixes,
+        brick_namespace_create.call(prefixes, resource_name, options)
         sti_subclasses.fetch(class_name, nil)&.each do |sc| # Add any STI subclass routes for this relation
           brick_namespace_create.call(prefixes, sc.underscore.tr('/', '_').pluralize, options)
         end
data/lib/generators/brick/migration_builder.rb
CHANGED
@@ -89,7 +89,13 @@ module Brick
       [mig_path, is_insert_versions, is_delete_versions]
     end
 
-    def generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions,
+    def generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions,
+                            relations = ::Brick.relations, do_fks_last: nil, do_schema_migrations: true)
+      if do_fks_last.nil?
+        puts 'Would you like for the foreign keys to be built inline inside of each migration file, or as a final migration?'
+        do_fks_last = (gets_list(list: ['Inline', 'Separate final migration for all FKs']).start_with?('Separate'))
+      end
+
       is_sqlite = ActiveRecord::Base.connection.adapter_name == 'SQLite'
       key_type = ((is_sqlite || ActiveRecord.version < ::Gem::Version.new('5.1')) ? 'integer' : 'bigint')
       is_4x_rails = ActiveRecord.version < ::Gem::Version.new('5.0')
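generate_migrations now also takes the relations hash plus two keyword options, and only asks interactively when do_fks_last is left nil. The Salesforce generator added later in this diff calls it non-interactively, roughly like this:

    ::Brick::MigrationBuilder.generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions,
                                                  relations,
                                                  do_fks_last: true,           # defer every FK to one final migration
                                                  do_schema_migrations: false) # skip writing schema_migrations rows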
@@ -112,6 +118,7 @@ module Brick
       # Start by making migrations for fringe tables (those with no foreign keys).
       # Continue layer by layer, creating migrations for tables that reference ones already done, until
       # no more migrations can be created. (At that point hopefully all tables are accounted for.)
+      after_fks = [] # Track foreign keys to add after table creation
       while (fringe = chosen.reject do |tbl|
                snag_fks = []
                snags = relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
@@ -131,166 +138,58 @@ module Brick
                end
              end).present?
         fringe.each do |tbl|
-
+          mig = gen_migration_columns(relations, tbl, (tbl_parts = tbl.split('.')), (add_fks = []),
+                                      key_type, is_4x_rails, ar_version, do_fks_last)
+          after_fks.concat(add_fks) if do_fks_last
+          versions_to_create << migration_file_write(mig_path, ::Brick._brick_index("create_#{tbl}", nil, 'x'), current_mig_time += 1.minute, ar_version, mig)
+        end
+        done.concat(fringe)
+        chosen -= done
+      end
 
-
-
-
-
-
-
-
-
-
-
-            if tbl_parts.first == (::Brick.default_schema || 'public')
-              tbl_parts.shift
-              nil
-            else
-              tbl_parts.first
-            end
-          end
-          unless schema.blank? || built_schemas.key?(schema)
-            mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
-            migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
-            built_schemas[schema] = nil
-          end
+      if do_fks_last
+        # Write out any more tables that haven't been done yet
+        chosen.each do |tbl|
+          mig = gen_migration_columns(relations, tbl, (tbl_parts = tbl.split('.')), (add_fks = []),
+                                      key_type, is_4x_rails, ar_version, do_fks_last)
+          after_fks.concat(add_fks)
+          migration_file_write(mig_path, ::Brick._brick_index("create_#{tbl}", nil, 'x'), current_mig_time += 1.minute, ar_version, mig)
+        end
+        done.concat(chosen)
+        chosen.clear
 
-
-
-
-
-          # if this one has come in as bigint or integer.
-          pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
-          # Support missing primary key (by adding: , id: false)
-          id_option = if pk_is_also_fk || !pkey_cols&.present?
-                        needs_serial_col = true
-                        +', id: false'
-                      elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
-                             (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
-                                               SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
-                                               pkey_col_first
-                             ) != key_type
-                            )
-                        case pkey_col_first
-                        when 'integer'
-                          +', id: :serial'
-                        when 'bigint'
-                          +', id: :bigserial'
-                        else
-                          +", id: :#{pkey_col_first}" # Something like: id: :integer, primary_key: :businessentityid
-                        end +
-                        (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
-                      end
-          if !id_option && pkey_cols.sort != arpk
-            id_option = +", primary_key: :#{pkey_cols.first}"
-          end
-          if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
-            (id_option ||= +'') << ", comment: #{comment.inspect}"
-          end
-          # Find the ActiveRecord class in order to see if the columns have comments
-          unless is_4x_rails
-            klass = begin
-                      tbl.tr('.', '/').singularize.camelize.constantize
-                    rescue StandardError
-                    end
-            if klass
-              unless ActiveRecord::Migration.table_exists?(klass.table_name)
-                puts "WARNING: Unable to locate table #{klass.table_name} (for #{klass.name})."
-                klass = nil
-              end
-            end
-          end
-          # Refer to this table name as a symbol or dotted string as appropriate
-          tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
-          mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
-          mig << " create_table #{tbl_code}#{id_option} do |t|\n"
-          possible_ts = [] # Track possible generic timestamps
-          add_fks = [] # Track foreign keys to add after table creation
-          relation[:cols].each do |col, col_type|
-            sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
-                       SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
-                       col_type.first
-            suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
-            suffix << ', array: true' if (col_type.first == 'ARRAY')
-            if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
-              suffix << ", comment: #{comment.inspect}"
-            end
-            # Determine if this column is used as part of a foreign key
-            if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
-              to_table = fk[:inverse_table].split('.')
-              to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
-              if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
-                sql_type = new_serial_type
-                needs_serial_col = false
-              end
-              if fk[:fk] != "#{fk[:assoc_name].singularize}_id" # Need to do our own foreign_key tricks, not use references?
-                column = fk[:fk]
-                mig << emit_column(sql_type, column, suffix)
-                add_fks << [to_table, column, relations[fk[:inverse_table]]]
-              else
-                suffix << ", type: :#{sql_type}" unless sql_type == key_type
-                # Will the resulting default index name be longer than what Postgres allows? (63 characters)
-                if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
-                  # Try to find a shorter name that hasn't been used yet
-                  unless indexes.key?(shorter = idx_name[0..62]) ||
-                         indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
-                         indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
-                    puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
-                    puts "so have resorted to this GUID-based identifier: #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
-                  end
-                  suffix << ", index: { name: '#{shorter || idx_name}' }"
-                  indexes[shorter || idx_name] = nil
-                end
-                primary_key = nil
-                begin
-                  primary_key = relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
-                rescue NameError => e
-                  primary_key = ::Brick.ar_base.primary_key
-                end
-                mig << " t.references :#{fk[:assoc_name]}#{suffix}, foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ::Brick.ar_base.primary_key} }\n"
-              end
-            else
-              next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
+        # Add a final migration to create all the foreign keys
+        mig = +" def change\n"
+        after_fks.each do |add_fk|
+          next unless add_fk[2] # add_fk[2] holds the inverse relation
 
-
-
-
-            else
-              mig << emit_column(sql_type, col, suffix)
-            end
-          end
-        end
-        if possible_ts.length == 2 && # Both created_at and updated_at
-           # Rails 5 and later timestamps default to NOT NULL
-           (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
-          mig << "\n t.timestamps\n"
-        else # Just one or the other, or a nullability mismatch
-          possible_ts.each { |ts| emit_column('timestamp', ts.first, nil) }
+          unless (pk = add_fk[2][:pkey].values.flatten&.first)
+            # No official PK, but if coincidentally there's a column of the same name, take a chance on it
+            pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
           end
-          mig << "
-
-
-            mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
-            mig << " end\n"
-          end
-          add_fks.each do |add_fk|
-            is_commented = false
-            # add_fk[2] holds the inverse relation
-            unless (pk = add_fk[2][:pkey].values.flatten&.first)
-              is_commented = true
-              mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
-              # No official PK, but if coincidentally there's a column of the same name, take a chance on it
-              pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
-            end
-            # to_table column
-            mig << " #{'# ' if is_commented}add_foreign_key #{tbl_code}, #{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
-          end
-          mig << " end\n"
-          versions_to_create << migration_file_write(mig_path, "create_#{tbl_parts.map(&:underscore).join('_')}", current_mig_time += 1.minute, ar_version, mig)
+          mig << " add_foreign_key #{add_fk[3]}, " # The tbl_code
+          # to_table column
+          mig << "#{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
         end
-
-
+        if after_fks.length > 500
+          minutes = (after_fks.length + 1000) / 1500
+          mig << " if ActiveRecord::Base.connection.adapter_name == 'PostgreSQL'\n"
+          mig << " puts 'NOTE: It could take around #{minutes} #{'minute'.pluralize(minutes)} on a FAST machine for Postgres to do all the final processing for these foreign keys. Please be patient!'\n"
 
+          mig << " # Vacuum takes only about ten seconds when all the tables are empty,
+            # and about 2 minutes when the tables are fairly full.
+            execute('COMMIT')
+            execute('VACUUM FULL')
+            execute('BEGIN TRANSACTION')
+          end\n"
+        end
+
+        mig << +" end\n"
+        migration_file_write(mig_path, 'create_brick_fks.rbx', current_mig_time += 1.minute, ar_version, mig)
+        puts "Have written out a final migration called 'create_brick_fks.rbx' which creates #{after_fks.length} foreign keys.
+This file extension (.rbx) will cause it not to run yet when you do a 'rails db:migrate'.
+The idea here is to do all data loading first, and then rename that migration file back
+into having a .rb extension, and run a final db:migrate to put the foreign keys in place."
       end
 
       stuck_counts = Hash.new { |h, k| h[k] = 0 }
@@ -310,7 +209,7 @@ module Brick
            ". Here's the top 5 blockers" if stuck_sorted.length > 5
          }:"
         pp stuck_sorted[0..4]
-
+      elsif do_schema_migrations # Successful, and now we can update the schema_migrations table accordingly
         unless ActiveRecord::Migration.table_exists?(ActiveRecord::Base.schema_migrations_table_name)
           ActiveRecord::SchemaMigration.create_table
         end
@@ -333,13 +232,178 @@ module Brick
 
     private
 
+    def gen_migration_columns(relations, tbl, tbl_parts, add_fks,
+                              key_type, is_4x_rails, ar_version, do_fks_last)
+      return unless (relation = relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present?
+
+      mig = +''
+      pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [::Brick.ar_base.primary_key].flatten.sort)
+      # In case things aren't as standard
+      if pkey_cols.empty?
+        pkey_cols = if rpk.empty? && relation[:cols][arpk.first]&.first == key_type
+                      arpk
+                    elsif rpk.first
+                      rpk
+                    end
+      end
+      schema = if tbl_parts.length > 1
+                 if tbl_parts.first == (::Brick.default_schema || 'public')
+                   tbl_parts.shift
+                   nil
+                 else
+                   tbl_parts.first
+                 end
+               end
+      unless schema.blank? || built_schemas.key?(schema)
+        mig = +" def change\n create_schema(:#{schema}) unless schema_exists?(:#{schema})\n end\n"
+        migration_file_write(mig_path, "create_db_schema_#{schema.underscore}", current_mig_time += 1.minute, ar_version, mig)
+        built_schemas[schema] = nil
+      end
+
+      # %%% For the moment we're skipping polymorphics
+      fkey_cols = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
+      # If the primary key is also used as a foreign key, will need to do id: false and then build out
+      # a column definition which includes :primary_key -- %%% also using a data type of bigserial or serial
+      # if this one has come in as bigint or integer.
+      pk_is_also_fk = fkey_cols.any? { |assoc| pkey_cols&.first == assoc[:fk] } ? pkey_cols&.first : nil
+      id_option = if pk_is_also_fk || !pkey_cols&.present?
+                    needs_serial_col = true
+                    +', id: false' # Support missing primary key (by adding: , id: false)
+                  elsif ((pkey_col_first = (col_def = relation[:cols][pkey_cols&.first])&.first) &&
+                         (pkey_col_first = SQL_TYPES[pkey_col_first] || SQL_TYPES[col_def&.[](0..1)] ||
+                                           SQL_TYPES.find { |r| r.first.is_a?(Regexp) && pkey_col_first =~ r.first }&.last ||
+                                           pkey_col_first
+                         ) != key_type
+                        )
+                    case pkey_col_first
+                    when 'integer'
+                      +', id: :serial'
+                    when 'bigint'
+                      +', id: :bigserial'
+                    else
+                      +", id: :#{pkey_col_first}" # Something like: id: :integer, primary_key: :businessentityid
+                    end +
+                    (pkey_cols.first ? ", primary_key: :#{pkey_cols.first}" : '')
+                  end
+      if !id_option && pkey_cols.sort != arpk
+        id_option = +", primary_key: :#{pkey_cols.first}"
+      end
+      if !is_4x_rails && (comment = relation&.fetch(:description, nil))&.present?
+        (id_option ||= +'') << ", comment: #{comment.inspect}"
+      end
+      # Find the ActiveRecord class in order to see if the columns have comments
+      unless is_4x_rails
+        klass = begin
+                  tbl.tr('.', '/').singularize.camelize.constantize
+                rescue StandardError
+                end
+        if klass
+          unless ActiveRecord::Migration.table_exists?(klass.table_name)
+            puts "WARNING: Unable to locate table #{klass.table_name} (for #{klass.name})."
+            klass = nil
+          end
+        end
+      end
+      # Refer to this table name as a symbol or dotted string as appropriate
+      tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
+      mig = +" def change\n return unless reverting? || !table_exists?(#{tbl_code})\n\n"
+      mig << " create_table #{tbl_code}#{id_option} do |t|\n"
+      possible_ts = [] # Track possible generic timestamps
+      relation[:cols].each do |col, col_type|
+        sql_type = SQL_TYPES[col_type.first] || SQL_TYPES[col_type[0..1]] ||
+                   SQL_TYPES.find { |r| r.first.is_a?(Regexp) && col_type.first =~ r.first }&.last ||
+                   col_type.first
+        suffix = col_type[3] || pkey_cols&.include?(col) ? +', null: false' : +''
+        suffix << ', array: true' if (col_type.first == 'ARRAY')
+        if !is_4x_rails && klass && (comment = klass.columns_hash.fetch(col, nil)&.comment)&.present?
+          suffix << ", comment: #{comment.inspect}"
+        end
+        # Determine if this column is used as part of a foreign key
+        if (fk = fkey_cols.find { |assoc| col == assoc[:fk] })
+          to_table = fk[:inverse_table].split('.')
+          to_table = to_table.length == 1 ? ":#{to_table.first}" : "'#{fk[:inverse_table]}'"
+          if needs_serial_col && pkey_cols&.include?(col) && (new_serial_type = {'integer' => 'serial', 'bigint' => 'bigserial'}[sql_type])
+            sql_type = new_serial_type
+            needs_serial_col = false
+          end
+          if do_fks_last || (fk[:fk] != "#{fk[:assoc_name].singularize}_id") # Need to do our own foreign_key tricks, not use references?
+            column = fk[:fk]
+            mig << emit_column(sql_type, column, suffix)
+            add_fks << [to_table, column, relations[fk[:inverse_table]], tbl_code]
+          else
+            suffix << ", type: :#{sql_type}" unless sql_type == key_type
+            # Will the resulting default index name be longer than what Postgres allows? (63 characters)
+            if (idx_name = ActiveRecord::Base.connection.index_name(tbl, {column: col})).length > 63
+              # Try to find a shorter name that hasn't been used yet
+              unless indexes.key?(shorter = idx_name[0..62]) ||
+                     indexes.key?(shorter = idx_name.tr('_', '')[0..62]) ||
+                     indexes.key?(shorter = idx_name.tr('aeio', '')[0..62])
+                puts "Unable to easily find unique name for index #{idx_name} that is shorter than 64 characters,"
+                puts "so have resorted to this GUID-based identifier: #{shorter = "#{tbl[0..25]}_#{::SecureRandom.uuid}"}."
+              end
+              suffix << ", index: { name: '#{shorter || idx_name}' }"
+              indexes[shorter || idx_name] = nil
+            end
+            next if do_fks_last
+
+            primary_key = nil
+            begin
+              primary_key = relations[fk[:inverse_table]][:class_name]&.constantize&.primary_key
+            rescue NameError => e
+              primary_key = ::Brick.ar_base.primary_key
+            end
+            fk_stuff = ", foreign_key: { to_table: #{to_table}#{", primary_key: :#{primary_key}" if primary_key != ::Brick.ar_base.primary_key} }"
+            mig << " t.references :#{fk[:assoc_name]}#{suffix}#{fk_stuff}\n"
+          end
+        else
+          next if !id_option&.end_with?('id: false') && pkey_cols&.include?(col)
+
+          # See if there are generic timestamps
+          if sql_type == 'timestamp' && ['created_at','updated_at'].include?(col)
+            possible_ts << [col, !col_type[3]]
+          else
+            mig << emit_column(sql_type, col, suffix)
+          end
+        end
+      end
+      if possible_ts.length == 2 && # Both created_at and updated_at
+         # Rails 5 and later timestamps default to NOT NULL
+         (possible_ts.first.last == is_4x_rails && possible_ts.last.last == is_4x_rails)
+        mig << "\n t.timestamps\n"
+      else # Just one or the other, or a nullability mismatch
+        possible_ts.each { |ts| emit_column('timestamp', ts.first, nil) }
+      end
+      mig << " end\n"
+      if pk_is_also_fk
+        mig << " reversible do |dir|\n"
+        mig << " dir.up { execute('ALTER TABLE #{tbl} ADD PRIMARY KEY (#{pk_is_also_fk})') }\n"
+        mig << " end\n"
+      end
+      add_fks.each do |add_fk|
+        next unless add_fk[2]
+
+        is_commented = false
+        # add_fk[2] holds the inverse relation
+        unless (pk = add_fk[2][:pkey]&.values&.flatten&.first)
+          is_commented = true
+          mig << " # (Unable to create relationship because primary key is missing on table #{add_fk[0]})\n"
+          # No official PK, but if coincidentally there's a column of the same name, take a chance on it
+          pk = (add_fk[2][:cols].key?(add_fk[1]) && add_fk[1]) || '???'
+        end
+        mig << " #{'# ' if do_fks_last}#{'# ' if is_commented}add_foreign_key #{tbl_code}, "
+        # to_table column
+        mig << "#{add_fk[0]}, column: :#{add_fk[1]}, primary_key: :#{pk}\n"
+      end
+      mig << " end\n"
+    end
+
     def emit_column(type, name, suffix)
       " t.#{type.start_with?('numeric') ? 'decimal' : type} :#{name}#{suffix}\n"
     end
 
     def migration_file_write(mig_path, name, current_mig_time, ar_version, mig)
-      File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}.rb", "w") do |f|
-        f.write "class #{name.camelize} < ActiveRecord::Migration#{ar_version}\n"
+      File.open("#{mig_path}/#{version = current_mig_time.strftime('%Y%m%d%H%M00')}_#{name}#{'.rb' unless name.index('.')}", "w") do |f|
+        f.write "class #{name.split('.').first.camelize} < ActiveRecord::Migration#{ar_version}\n"
         f.write mig
         f.write "end\n"
       end
data/lib/generators/brick/salesforce_migrations_generator.rb
ADDED
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'brick'
+require 'rails/generators'
+require 'fancy_gets'
+require 'generators/brick/migration_builder'
+require 'generators/brick/salesforce_schema'
+
+module Brick
+  # Auto-generates migration files
+  class SalesforceMigrationsGenerator < ::Rails::Generators::Base
+    include FancyGets
+    desc 'Auto-generates migration files for a set of Salesforce tables and columns.'
+
+    argument :wsdl_file, type: :string, default: ''
+
+    def brick_salesforce_migrations
+      ::Brick.apply_double_underscore_patch
+      # ::Brick.mode = :on
+      # ActiveRecord::Base.establish_connection
+
+      # Runs at the end of parsing Salesforce WSDL, and uses the discovered tables and columns to create migrations
+      relations = nil
+      end_document_proc = lambda do |salesforce_tables|
+        # p [:end_document]
+        mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationBuilder.check_folder
+        return unless mig_path
+
+        # Generate a list of tables that can be chosen
+        table_names = salesforce_tables.keys
+        chosen = gets_list(list: table_names, chosen: table_names.dup)
+
+        soap_data_types = {
+          'tns:ID' => 'string',
+          'xsd:string' => 'string',
+          'xsd:dateTime' => 'datetime',
+          'xsd:boolean' => 'boolean',
+          'xsd:double' => 'float',
+          'xsd:int' => 'integer',
+          'xsd:date' => 'date',
+          'xsd:anyType' => 'string', # Don't fully know on this
+          'xsd:long' => 'bigint',
+          'xsd:base64Binary' => 'bytea',
+          'xsd:time' => 'time'
+        }
+        fk_idx = 0
+        # Build out a '::Brick.relations' hash that represents this Salesforce schema
+        relations = chosen.each_with_object({}) do |tbl_name, s|
+          tbl = salesforce_tables[tbl_name]
+          # Build out columns and foreign keys
+          cols = { 'id'=>['string', nil, false, true] }
+          fks = {}
+          tbl[:cols].each do |col|
+            next if col[:name] == 'Id'
+
+            dt = soap_data_types[col[:data_type]] || 'string'
+            cols[col[:name]] = [dt, nil, col[:nillable], false]
+            if (ref_to = col[:fk_reference_to])
+              fk_hash = {
+                is_bt: true,
+                fk: col[:name],
+                assoc_name: "#{col[:name]}_bt",
+                inverse_table: ref_to
+              }
+              fks["fk_salesforce_#{fk_idx += 1}"] = fk_hash
+            end
+          end
+          # Put it all into a relation entry, named the same as the table
+          s[tbl_name] = {
+            pkey: { "#{tbl_name}_pkey" => ['id'] },
+            cols: cols,
+            fks: fks
+          }
+        end
+        # Build but do not have foreign keys established yet, and do not put version entries info the schema_migrations table
+        ::Brick::MigrationBuilder.generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions, relations,
+                                                      do_fks_last: true, do_schema_migrations: false)
+      end
+      parser = Nokogiri::XML::SAX::Parser.new(::Brick::SalesforceSchema.new(end_document_proc))
+      # The WSDL file must have a .xml extension, and can be in any folder in the project
+      # Alternatively the user can supply this option on the command line
+      @wsdl_file = nil if @wsdl_file == ''
+      loop do
+        break if (@wsdl_file ||= gets_list(Dir['**/*.xml'] + ['* Cancel *'])) == '* Cancel *'
+
+        parser.parse(File.read(@wsdl_file))
+
+        if relations.length > 300
+          puts "A Salesforce installation generally has hundreds to a few thousand tables, and many are empty.
+In order to more easily navigate just those tables that have content, you might want to add this
+to brick.rb:
+  ::Brick.omit_empty_tables_in_dropdown = true"
+        end
+        break
+      rescue Errno::ENOENT
+        puts "File \"#{@wsdl_file}\" is not found."
+        @wsdl_file = nil
+      end
+    end
+  end
+end
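A note on usage, inferred from the class and file naming rather than stated in the diff:

    # Hypothetical invocation (generator namespace inferred from the class/file name):
    #   bin/rails generate brick:salesforce_migrations path/to/salesforce_wsdl.xml
    # With no argument the generator lists the project's .xml files to choose from.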
data/lib/generators/brick/salesforce_schema.rb
ADDED
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+module Brick
+  class SalesforceSchema < Nokogiri::XML::SAX::Document
+    include ::Brick::MigrationBuilder
+
+    attr_reader :end_document_proc
+
+    def initialize(end_doc_proc)
+      @end_document_proc = end_doc_proc
+    end
+
+    def start_document
+      # p [:start_document]
+      @salesforce_tables = {}
+      @text_stack = []
+      @all_extends = {}
+      puts 'Each dot is a table:'
+    end
+
+    def end_document
+      puts
+      end_document_proc&.call(@salesforce_tables)
+    end
+
+    def start_element_namespace(name, attrs = [], prefix = nil, uri = nil, ns = [])
+      # p [:start_element, name, attrs, prefix, uri, ns]
+      case name
+      when 'complexType' # Table
+        @last_table = attrs.find { |a| a.localname == 'name' }&.value
+        @fks = {}
+        # if attrs.first&.value&.end_with?('__c') # Starts as a string
+      when 'extension'
+        @last_extension = attrs.find { |a| a.localname == 'base' }.value
+      when 'element' # Column
+        # Extremely rarely this is nil!
+        data_type = attrs.find { |a| a.localname == 'type' }&.value
+        return if !@last_table || data_type.nil? || data_type == 'tns:QueryResult'
+
+        # Promoted to a real SalesforceTable object
+        if @last_table.is_a?(String)
+          @last_table = @salesforce_tables[@last_table] = { extend: @salesforce_tables[@last_extension] }
+        end
+
+        col_name = attrs.find { |a| a.localname == 'name' }&.value
+
+        # Foreign key reference?
+        if data_type&.start_with?('ens:')
+          foreign_table = data_type[4..]
+          if col_name.end_with?('__r')
+            @fks["#{col_name[0..-2]}c"] = foreign_table
+          else # if col_name.end_with?('Id')
+            @fks["#{col_name}Id"] = foreign_table
+          end
+          return
+        end
+
+        # Rarely this is nil
+        nillable = attrs.find { |a| a.localname == 'nillable' }&.value == 'true'
+        min_occurs = attrs.find { |a| a.localname == 'minOccurs' }&.value || -2
+        min_occurs = -1 if min_occurs == 'unbounded'
+        max_occurs = attrs.find { |a| a.localname == 'maxOccurs' }&.value || -2
+        max_occurs = -1 if max_occurs == 'unbounded'
+        col_options = { name: col_name, data_type: :data_type, nillable: :nillable, min_occurs: :min_occurs, max_occurs: :max_occurs }
+
+        (@last_table[:cols] ||= []) << col_options
+      end
+      @text_stack.push +''
+    end
+
+    def end_element_namespace(name, prefix = nil, uri = nil)
+      # p [:end_element, name, prefix, uri]
+      texts = @text_stack.pop
+      case name
+      when 'extension'
+        @last_extension = nil
+      when 'complexType'
+        if @last_table && !@last_table.is_a?(String)
+          # Do up any foreign keys
+          @fks.each do |k, v|
+            # Only a few records set up like this, going to sObject
+            if
+              # (k.downcase.end_with?('recordid') &&
+              #  (fk_col = @last_table[:cols].find { |t| t[:name] == "#{k[0..-9]}Id" })
+              # ) ||
+              (fk_col = @last_table[:cols].find { |t| t[:name] == k })
+              fk_col[:fk_reference_to] = v
+              # puts "Skipping #{@last_table[:name]} / #{k}"
+            end
+          end
+        end
+        print '.'
+        @last_table = nil
+      end
+      # p [:end_element_texts, name, texts]
+    end
+
+    def characters(string)
+      # p [:characters, string]
+      @text_stack.each do |text|
+        text << string
+      end
+    end
+  end
+end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: brick
 version: !ruby/object:Gem::Version
-  version: 1.0.
+  version: 1.0.191
 platform: ruby
 authors:
 - Lorin Thwaits
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-12-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord
@@ -258,6 +258,8 @@ files:
 - lib/generators/brick/migration_builder.rb
 - lib/generators/brick/migrations_generator.rb
 - lib/generators/brick/models_generator.rb
+- lib/generators/brick/salesforce_migrations_generator.rb
+- lib/generators/brick/salesforce_schema.rb
 - lib/generators/brick/seeds_generator.rb
 - lib/generators/brick/templates/add_object_changes_to_versions.rb.erb
 - lib/generators/brick/templates/create_versions.rb.erb
|