brick 1.0.229 → 1.0.230
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/brick/extensions.rb +59 -57
- data/lib/brick/rails/engine.rb +11 -7
- data/lib/brick/reflect_tables.rb +21 -7
- data/lib/brick/version_number.rb +1 -1
- data/lib/generators/brick/airtable_api_caller.rb +171 -0
- data/lib/generators/brick/airtable_migrations_generator.rb +24 -0
- data/lib/generators/brick/airtable_seeds_generator.rb +19 -0
- data/lib/generators/brick/{migration_builder.rb → migrations_builder.rb} +7 -3
- data/lib/generators/brick/migrations_generator.rb +4 -4
- data/lib/generators/brick/salesforce_migrations_generator.rb +3 -3
- data/lib/generators/brick/salesforce_schema.rb +1 -1
- data/lib/generators/brick/seeds_builder.rb +329 -0
- data/lib/generators/brick/seeds_generator.rb +2 -242
- metadata +7 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5230864d4450f23406da3efcab54e2c337ae9029c2b9fea3d1b8df1bb34583dd
+  data.tar.gz: b16f195b58d0df45611f8a2251e005ae6cc841970fb1c6eea17119af678d62a8
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 55e9c658e0c258be349866a82f7c5a62234779aa27e7a5e04517979dbccdf9475948fecb2de8f78604daf773094e9a19f8a3c32b9d64ef5c8948cbd3fa4dffe2
+  data.tar.gz: a00b991cd2d4125e10baada1d070440d4b25a99994e462e543acafdcdcf4cf5a4703e7b0fc60dd2e6989e933828a9eb0355610f6901b9b2ca9d5a047a61260a3
data/lib/brick/extensions.rb
CHANGED
@@ -1749,57 +1749,6 @@ class Object
 built_model = Class.new(base_model) do |new_model_class|
   (schema_module || Object).const_set(chosen_name, new_model_class) unless is_generator
   @_brick_relation = relation
-  # Enable Elasticsearch based on the table name?
-  if (@_brick_es_crud = ::Brick.elasticsearch_models&.fetch(table_name, nil))
-    include ::Elasticsearch::Model
-    if @_brick_es_crud.index('i') # Enable auto-creation of indexes on import?
-      class << self
-        alias _original_import import
-        def import(options={}, &block)
-          unless self.__elasticsearch__.index_exists?
-            self.__elasticsearch__.create_index!
-            ::Brick.elasticsearch_existings << self.table_name
-          end
-          _original_import(options={}, &block)
-        end
-      end
-    end
-    if ::Elasticsearch::Model.const_defined?('Callbacks') &&
-       @_brick_es_crud.index('c') || @_brick_es_crud.index('u') || @_brick_es_crud.index('d')
-      include ::Elasticsearch::Model::Callbacks
-      is_include_es_callbacks = true
-    end
-    # Create mappings for all text columns
-    mappings do
-      has_mappings = nil
-      new_model_class.columns.select { |col| [:string, :text].include?(col.type) }.each do |string_col|
-        unless has_mappings
-          code << " include Elasticsearch::Model\n"
-          code << " include Elasticsearch::Model::Callbacks\n" if is_include_es_callbacks
-          code << " mappings do\n"
-          has_mappings = true
-        end
-        code << " indexes :#{string_col.name}, type: #{string_col.type.to_s.inspect}\n"
-        indexes string_col.name.to_sym, type: string_col.type.to_s
-      end
-      code << " end\n" if has_mappings
-    end
-    if @_brick_es_crud.index('r')
-      class << self
-        # Search and hydrate records using only Elasticsearch data
-        define_method :search do |q|
-          self.__elasticsearch__.search(q).raw_response.body['hits']['hits'].map do |hit|
-            obj = self.new(hit['_source'])
-            obj.instance_variable_set(:@new_record, false) # Don't want to accidentally save a new one
-            obj
-          end
-        rescue Elastic::Transport::Transport::Errors::NotFound => e
-          self.create_index! if @_brick_es_crud.index('i')
-          []
-        end
-      end
-    end
-  end
   if inheritable_name
     new_model_class.define_singleton_method :inherited do |subclass|
       super(subclass)
@@ -1872,6 +1821,59 @@ class Object
     end
   end
 
+  # Enable Elasticsearch based on the table name?
+  if (@_brick_es_crud = ::Brick.elasticsearch_models&.fetch(matching, nil))
+    include ::Elasticsearch::Model
+    if @_brick_es_crud.index('i') # Enable auto-creation of indexes on import?
+      class << self
+        alias _original_import import
+        def import(options={}, &block)
+          unless self.__elasticsearch__.index_exists?
+            self.__elasticsearch__.create_index!
+            ::Brick.elasticsearch_existings[self.table_name] = self.table_name.tr('.', '-').pluralize
+          end
+          _original_import(options={}, &block)
+        end
+      end
+    end
+    if ::Elasticsearch::Model.const_defined?('Callbacks') &&
+       @_brick_es_crud.index('c') || @_brick_es_crud.index('u') || @_brick_es_crud.index('d')
+      include ::Elasticsearch::Model::Callbacks
+      is_include_es_callbacks = true
+    end
+    # Create mappings for all text columns
+    mappings do
+      has_mappings = nil
+      new_model_class.columns.select { |col| [:string, :text].include?(col.type) }.each do |string_col|
+        unless has_mappings
+          code << " include Elasticsearch::Model\n"
+          code << " include Elasticsearch::Model::Callbacks\n" if is_include_es_callbacks
+          code << " mappings do\n"
+          has_mappings = true
+        end
+        code << " indexes :#{string_col.name}, type: 'text'\n"
+        indexes string_col.name.to_sym, type: 'text'
+      end
+      code << " end\n" if has_mappings
+    end
+    if @_brick_es_crud.index('r')
+      class << self
+        # Search and hydrate records using only Elasticsearch data
+        define_method :search do |q|
+          self.__elasticsearch__.search(q).raw_response.body['hits']['hits'].map do |hit|
+            obj = self.new(hit['_source'])
+            obj.instance_variable_set(:@new_record, false) # Don't want to accidentally save a new one
+            obj
+          end
+        rescue Elastic::Transport::Transport::Errors::NotFound => e
+          self.__elasticsearch__.create_index! if @_brick_es_crud.index('i')
+          ::Brick.elasticsearch_existings[self.table_name] = self.table_name.tr('.', '-').pluralize
+          []
+        end
+      end
+    end
+  end
+
   unless is_sti
     fks = relation[:fks] || {}
     # Do the bulk of the has_many / belongs_to processing, and store details about HMT so they can be done at the very last
@@ -2288,14 +2290,14 @@ class Object
 
   self.define_method :search do
     # TODO: Make sure at least one index is actually present which allows for reading before attempting
-    if (
-      @indexes = ::Brick.elasticsearch_existings
-      hits = Elasticsearch::Model.client.search({index: @indexes.join(','), q:
+    if (@qry = params['qry'] || params['_brick_es']) # Elasticsearch query?
+      @indexes = ::Brick.elasticsearch_existings&.keys
+      hits = Elasticsearch::Model.client.search({index: @indexes.join(','), q: @qry, size: 100})
      model_infos = {}
      # Number of indexes used: hits.body['_shards']['total']
      @count = hits.body['hits']['total']['value']
      @results = hits.body['hits']['hits'].map do |x|
-        klass = ::Brick.relations[x['_index']][:class_name].constantize
+        klass = ::Brick.relations[::Brick.elasticsearch_existings[x['_index']]][:class_name].constantize
        model_info = model_infos[klass] ||= [
          klass.primary_key,
          klass.brick_parse_dsl(join_array = ::Brick::JoinArray.new, [], translations = {}, false, nil, true)
@@ -2644,7 +2646,7 @@ class Object
   if @_brick_es_crud.index('i')
     self.__elasticsearch__.create_index!
     # model.import
-    ::Brick.elasticsearch_existings
+    ::Brick.elasticsearch_existings[self.table_name] = self.table_name.tr('.', '-').pluralize
     model.__elasticsearch__.search(q)
   else
     []
@@ -3320,7 +3322,7 @@ module Brick
   # Rails applies an _index suffix to that route when the resource name isn't something plural
   index << '_index' if mode != :singular && !not_path &&
                        index == (
-                         index2 + [relation[:class_name][(relation&.fetch(:auto_prefixed_class, nil)&.length&.+ 2) || 0..-1
+                         index2 + [relation[:class_name]&.[]((relation&.fetch(:auto_prefixed_class, nil)&.length&.+ 2) || 0..-1)&.underscore&.tr('/', '_') || '_']
                        ).join(separator)
 end
 index
data/lib/brick/rails/engine.rb
CHANGED
@@ -671,12 +671,16 @@ window.addEventListener(\"popstate\", linkSchemas);
 def find_template(*args, **options)
   find_template_err = nil
   unless (model_name = @_brick_model&.name) ||
-         (
-
-
-
-
-
+         (
+           args[1].first == 'brick_gem' &&
+           ((is_search = ::Brick.config.add_search && args[0] == 'search' &&
+                         ::Brick.elasticsearch_existings&.length&.positive?
+            ) ||
+            (is_status = ::Brick.config.add_status && args[0] == 'status') ||
+            (is_orphans = ::Brick.config.add_orphans && args[0] == 'orphans') ||
+            (is_crosstab = args[0] == 'crosstab')
+           )
+         )
     begin
       if (possible_template = _brick_find_template(*args, **options))
         return possible_template
@@ -1220,7 +1224,7 @@ end
 #{schema_options}" if schema_options}
 <select id=\"tbl\">#{table_options}</select><br><br>
 <form method=\"get\">
-<input type=\"text\" name=\"qry\"
+<input type=\"text\" name=\"qry\"<%= \" value=\\\"#\{@qry}\\\"\".html_safe unless @qry.blank? %>><input type=\"submit\", value=\"Search\">
 </form>
 <% if @results.present? %>
 <div id=\"rowCount\"><b><%= @count %> results from: </b><%= @indexes.sort.join(', ') %></div>
data/lib/brick/reflect_tables.rb
CHANGED
@@ -40,7 +40,7 @@ module Brick
       end
     end
   end
-  if ::Elasticsearch.const_defined?('Model')
+  if ::Elasticsearch.const_defined?('Model')
     # By setting the environment variable ELASTICSEARCH_URL then you can specify an Elasticsearch/Opensearch host
     host = (client = ::Elasticsearch::Model.client).transport.hosts.first
     es_uri = URI.parse("#{host[:protocol]}://#{host[:host]}:#{host[:port]}")
@@ -528,18 +528,32 @@ ORDER BY 1, 2, c.internal_column_id, acc.position"
 ::Brick.elasticsearch_models = unless access.blank?
   # Find all existing indexes
   client = Elastic::Transport::Client.new
-  ::Brick.elasticsearch_existings = client.perform_request('GET', '_aliases').body.each_with_object(
-
-    entry.
+  ::Brick.elasticsearch_existings = client.perform_request('GET', '_aliases').body.each_with_object({}) do |entry, s|
+    rel_name = entry.first.tr('-', '.')
+    s[entry.first] = rel_name if relations.include?(entry.first)
+    s[entry.first] = rel_name.singularize if relations.include?(rel_name.singularize)
+    entry.last.fetch('aliases', nil)&.each do |k, _v|
+      rel_name = k.tr('-', '.')
+      s[k] = rel_name if relations.include?(rel_name)
+      s[k] = rel_name.singularize if relations.include?(rel_name.singularize)
+    end
   end
   # Add this either if...
   if access.is_a?(String) # ...they have permissions over absolutely anything,
-    relations.each_with_object({})
+    relations.each_with_object({}) do |rel, s|
+      next if rel.first.is_a?(Symbol)
+
+      perms = rel.last.fetch(:isView, nil) ? access.tr('cud', '') : access
+      s[rel.first] = perms
+    end
   else # or there are specific permissions for each resource, so find the matching indexes
     client = Elastic::Transport::Client.new
     ::Brick.elasticsearch_existings.each_with_object({}) do |index, s|
-
-
+      this_access = access.is_a?(String) ? access : access[index.first] || '' # Look up permissions from above
+      next unless (rel = relations.fetch(index.first, nil))
+
+      perms = rel&.fetch(:isView, nil) ? this_access.tr('cud', '') : this_access
+      s[index.first] = perms unless perms.blank?
     end
   end
 end
data/lib/brick/version_number.rb
CHANGED
data/lib/generators/brick/airtable_api_caller.rb
ADDED
@@ -0,0 +1,171 @@
+# frozen_string_literal: true
+
+module Brick
+  class AirtableApiCaller
+    class << self
+      include FancyGets
+
+      def pick_tables(usage = :migrations)
+        puts "In order to reference Airtable data you will need a Personal Access Token (PAT) which can be generated by referencing this URL:
+https://airtable.com/create/tokens
+You need only #{usage == :migrations ? 'this scope:' : "these three scopes:
+data.records:read
+data.recordComments:read"}
+schema.bases:read
+
+Please provide your Airtable PAT:"
+        pat = gets_password
+        require 'net/http'
+        # Generate a list of bases that can be chosen
+        bases = https_get('https://api.airtable.com/v0/meta/bases', pat)
+        base = gets_list(bases.fetch('bases', nil)&.map { |z| AirtableTable.new(z['id'], z['name']) })
+        puts
+        # Generate a list of tables that can be chosen
+        objects = https_get("https://api.airtable.com/v0/meta/bases/#{base.id}/tables", pat).fetch('tables', nil)
+        if objects.blank?
+          puts "No tables found in base #{base.name}."
+          return
+        end
+
+        tables = objects.map { |z| AirtableTable.new(z['id'], z['name'], z['primaryFieldId'], z['fields'], z['views'], base.id) }
+        chosen = gets_list(tables, tables.dup)
+        puts
+
+        # Build out a '::Brick.relations' hash that represents this Airtable schema
+        fks = []
+        associatives = {}
+        relations = chosen.each_with_object({}) do |table, s|
+          tbl_name = sane_table_name(table.name)
+          # Build out columns and foreign keys
+          cols = {}
+          table.fields.each do |col|
+            col_name = sane_name(col['name'])
+            # This is like a has_many or has_many through
+            if col['type'] == 'multipleRecordLinks'
+              # binding.pry if col['options']['isReversed']
+              if (frn_tbl = sane_table_name(
+                   chosen.find { |t| t.id == col['options']['linkedTableId'] }&.name
+                 ))
+                if col['options']['prefersSingleRecordLink'] # 1:M
+                  fks << [frn_tbl, "#{col_name}_id", tbl_name, col_name]
+                else # N:M
+                  # Queue up to build associative table with two foreign keys
+                  camelized = (assoc_name = "#{tbl_name}_#{col_name}_#{frn_tbl}").camelize
+                  if associatives.keys.any? { |a| a.camelize == camelized }
+                    puts "Strangely have found two columns in \"#{table.name}\" with a name similar to \"#{col_name}\". Skipping this to avoid a conflict."
+                    next
+
+                  end
+                  associatives[assoc_name] = [col_name, frn_tbl, tbl_name]
+                  fks << [assoc_name, frn_tbl, frn_tbl, col_name.underscore, tbl_name]
+                end
+              end
+            else
+              # puts col['type']
+              dt = case col['type']
+                   when 'singleLineText', 'url', 'singleSelect'
+                     'string'
+                   when 'multilineText'
+                     'text'
+                   when 'number'
+                     'decimal'
+                   when 'checkbox'
+                     'boolean'
+                   when 'date'
+                     'date'
+                   when 'multipleSelects'
+                     # Sqlite3 can do json
+                     'json'
+                   when 'formula', 'count', 'rollup', 'multipleAttachments'
+                     next
+                   # else
+                   #   binding.pry
+                   end
+              cols[col_name] = [dt, nil, true, false] # true is the col[:nillable]
+            end
+          end
+          # Put it all into a relation entry, named the same as the table
+          pkey = table.fields.find { |f| f['id'] == table.primary_key }['name']
+          s[tbl_name] = {
+            pkey: { "#{tbl_name}_pkey" => [sane_name(pkey)] },
+            cols: cols,
+            fks: {},
+            airtable_table: table
+          }
+        end
+        associatives.each do |k, v|
+          pri_pk_col = relations[v[1]][:pkey]&.first&.last&.first
+          frn_pk_col = relations[v[2]][:pkey]&.first&.last&.first
+          pri_fk_name = "#{v[1]}_id"
+          frn_fk_name = (frn_fk_name == pri_fk_name) ?
+                          "#{v[2]}_2_id" # Self-referencing N:M
+                          : "#{v[2]}_id" # Standard N:M
+          relations[k] = {
+            pkey: { "#{k}_pkey" => ['id'] },
+            cols: { 'id' => ['integer', nil, false, false] }
+          }
+          fks << [v[1], pri_fk_name, k, pri_fk_name.underscore]
+          fks << [v[2], frn_fk_name, k, frn_fk_name.underscore]
+        end
+        fk_idx = 0
+        fks.each do |pri_tbl, fk_col, frn_tbl, airtable_col, assoc_tbl|
+          pri_pk_col = relations[pri_tbl][:pkey].first.last.first
+          # binding.pry unless relations.key?(frn_tbl) && relations[pri_tbl][:cols][pri_pk_col]
+          unless assoc_tbl # It's a 1:M -- make a FK column
+            relations[frn_tbl][:cols][fk_col] = [relations[pri_tbl][:cols][pri_pk_col][0], nil, true, false]
+          end
+          # And the actual relation
+          frn_fks = ((relations[frn_tbl] ||= {})[:fks] ||= {})
+          this_fk = frn_fks["fk_airtable_#{fk_idx += 1}"] = {
+            is_bt: !assoc_tbl, # Normal foreign key is true, and N:M is really a has_many, so false
+            fk: fk_col,
+            assoc_name: airtable_col,
+            inverse_table: pri_tbl
+          }
+          this_fk[:assoc_tbl] = assoc_tbl if assoc_tbl
+        end
+
+        relations
+      end
+
+      def https_get(uri, pat = nil)
+        uri = URI(uri) unless uri.is_a?(URI)
+        https = Net::HTTP.new(uri.host, uri.port)
+        request = Net::HTTP::Get.new("#{uri.path}?#{uri.query}")
+        request['Authorization'] = "Bearer #{@bearer ||= pat}"
+        response = Net::HTTP.start(uri.hostname, uri.port, { use_ssl: true }) do |http|
+          http.request(request)
+        end
+        # if response.code&.to_i > 299
+        # end
+        JSON.parse(response.body)
+      end
+
+      def sane_name(col_name)
+        sane_table_name(col_name.gsub('&', 'and').tr('()?', ''))
+      end
+
+      def sane_table_name(tbl_name)
+        tbl_name&.downcase&.tr(': -', '_')
+      end
+
+      class AirtableTable
+        attr_accessor :id, :name, :primary_key, :fields, :views, :base_id, :objects
+        def initialize(id, name,
+                       primary_key = nil, fields = nil, views = nil, base_id = nil)
+          self.id = id
+          self.name = name
+          self.primary_key = primary_key
+          self.fields = fields
+          self.views = views
+          self.base_id = base_id
+          self.objects = {}
+        end
+
+        def to_s
+          name
+        end
+      end
+    end
+  end
+end
data/lib/generators/brick/airtable_migrations_generator.rb
ADDED
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'brick'
+require 'rails/generators'
+require 'rails/generators/active_record'
+require 'fancy_gets'
+require 'generators/brick/migrations_builder'
+require 'generators/brick/airtable_api_caller'
+
+module Brick
+  # Auto-generates Airtable migration files
+  class AirtableMigrationsGenerator < ::Rails::Generators::Base
+    desc 'Auto-generates migration files for an existing Airtable "base".'
+
+    def airtable_migrations
+      mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationsBuilder.check_folder
+      return unless mig_path &&
+                    (relations = ::Brick::AirtableApiCaller.pick_tables)
+
+      ::Brick::MigrationsBuilder.generate_migrations(relations.keys, mig_path, is_insert_versions, is_delete_versions, relations,
+                                                     do_fks_last: 'Separate', do_schema_migrations: false)
+    end
+  end
+end
data/lib/generators/brick/airtable_seeds_generator.rb
ADDED
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'brick'
+require 'rails/generators'
+require 'rails/generators/active_record'
+require 'generators/brick/seeds_builder'
+require 'generators/brick/airtable_api_caller'
+
+module Brick
+  class AirtableSeedsGenerator < ::Rails::Generators::Base
+    desc 'Auto-generates a seeds file from existing data in an Airtable "base".'
+
+    def airtable_seeds
+      return unless (relations = ::Brick::AirtableApiCaller.pick_tables(:seeds))
+
+      ::Brick::SeedsBuilder.generate_seeds(relations)
+    end
+  end
+end
data/lib/generators/brick/{migration_builder.rb → migrations_builder.rb}
RENAMED
@@ -1,5 +1,7 @@
+# frozen_string_literal: true
+
 module Brick
-  module
+  module MigrationsBuilder
    # Many SQL types are the same as their migration data type name:
    # text, integer, bigint, date, boolean, decimal, float
    # These however are not:
@@ -147,7 +149,7 @@ module Brick
                                  key_type, is_4x_rails, ar_version, do_fks_last, versions_to_create)
        after_fks.concat(add_fks) if do_fks_last
        current_mig_time[0] += 1.minute
-        versions_to_create << migration_file_write(mig_path, "create_#{::Brick._brick_index(tbl, nil, separator)}", current_mig_time, ar_version, mig)
+        versions_to_create << migration_file_write(mig_path, "create_#{::Brick._brick_index(tbl, nil, separator, relations[tbl])}", current_mig_time, ar_version, mig)
      end
      done.concat(fringe)
      chosen -= done
@@ -160,7 +162,9 @@ module Brick
                                  key_type, is_4x_rails, ar_version, do_fks_last, versions_to_create)
        after_fks.concat(add_fks)
        current_mig_time[0] += 1.minute
-        versions_to_create << migration_file_write(mig_path, "create_#{
+        versions_to_create << migration_file_write(mig_path, "create_#{
+          ::Brick._brick_index(tbl, :migration, separator, relations[tbl])
+        }", current_mig_time, ar_version, mig)
      end
      done.concat(chosen)
      chosen.clear
data/lib/generators/brick/migrations_generator.rb
CHANGED
@@ -4,12 +4,12 @@ require 'brick'
 require 'rails/generators'
 require 'rails/generators/active_record'
 require 'fancy_gets'
-require 'generators/brick/
+require 'generators/brick/migrations_builder'
 
 module Brick
   # Auto-generates migration files
   class MigrationsGenerator < ::Rails::Generators::Base
-    include ::Brick::
+    include ::Brick::MigrationsBuilder
     include FancyGets
 
     desc 'Auto-generates migration files for an existing database.'
@@ -25,13 +25,13 @@ module Brick
         return
       end
 
-      mig_path, is_insert_versions, is_delete_versions = ::Brick::
+      mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationsBuilder.check_folder
      return unless mig_path
 
      # Generate a list of tables that can be chosen
      chosen = gets_list(list: tables, chosen: tables.dup)
 
-      ::Brick::
+      ::Brick::MigrationsBuilder.generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions)
    end
  end
 end
data/lib/generators/brick/salesforce_migrations_generator.rb
CHANGED
@@ -3,7 +3,7 @@
 require 'brick'
 require 'rails/generators'
 require 'fancy_gets'
-require 'generators/brick/
+require 'generators/brick/migrations_builder'
 require 'generators/brick/salesforce_schema'
 
 module Brick
@@ -23,7 +23,7 @@ module Brick
      relations = nil
      end_document_proc = lambda do |salesforce_tables|
        # p [:end_document]
-        mig_path, is_insert_versions, is_delete_versions = ::Brick::
+        mig_path, is_insert_versions, is_delete_versions = ::Brick::MigrationsBuilder.check_folder
        return unless mig_path
 
        # Generate a list of tables that can be chosen
@@ -73,7 +73,7 @@ module Brick
          }
        end
        # Build but do not have foreign keys established yet, and do not put version entries info the schema_migrations table
-        ::Brick::
+        ::Brick::MigrationsBuilder.generate_migrations(chosen, mig_path, is_insert_versions, is_delete_versions, relations,
                                                        do_fks_last: 'Separate', do_schema_migrations: false)
      end
      parser = Nokogiri::XML::SAX::Parser.new(::Brick::SalesforceSchema.new(end_document_proc))
data/lib/generators/brick/seeds_builder.rb
ADDED
@@ -0,0 +1,329 @@
+# frozen_string_literal: true
+
+require 'fancy_gets'
+
+module Brick
+  class SeedsBuilder
+    class << self
+      include FancyGets
+
+      SeedModel = Struct.new(:table_name, :klass, :is_brick, :airtable_table)
+      SeedModel.define_method(:to_s) do
+        "#{klass.name}#{' (brick-generated)' if is_brick}"
+      end
+
+      def generate_seeds(relations = nil)
+        if File.exist?(seed_file_path = "#{::Rails.root}/db/seeds.rb")
+          puts "WARNING: seeds file #{seed_file_path} appears to already be present.\nOverwrite?"
+          return unless gets_list(list: ['No', 'Yes']) == 'Yes'
+
+          puts "\n"
+        end
+
+        if relations
+          is_airtable = true # So far the only thing that feeds us relations is Airtable
+          require 'generators/brick/airtable_api_caller'
+          # include ::Brick::MigrationsBuilder
+          chosen = relations.map { |k, v| SeedModel.new(k, nil, false, v[:airtable_table]) }
+        else
+          ::Brick.mode = :on
+          ActiveRecord::Base.establish_connection
+          relations = ::Brick.relations
+
+          # Load all models
+          ::Brick.eager_load_classes
+
+          # Generate a list of viable models that can be chosen
+          # First start with any existing models that have been defined ...
+          existing_models = ActiveRecord::Base.descendants.each_with_object({}) do |m, s|
+            s[m.table_name] = SeedModel.new(m.table_name, m, false) if !m.abstract_class? && !m.is_view? && m.table_exists?
+          end
+
+          models = (existing_models.values +
+            # ... then add models which can be auto-built by Brick
+            relations.reject do |k, v|
+              k.is_a?(Symbol) || (v.key?(:isView) && v[:isView] == true) || existing_models.key?(k)
+            end.map { |k, v| SeedModel.new(k, v[:class_name].constantize, true) }
+          ).sort { |a, b| a.to_s <=> b.to_s }
+          if models.empty?
+            puts "No viable models found for database #{ActiveRecord::Base.connection.current_database}."
+            return
+          end
+
+          chosen = gets_list(list: models, chosen: models.dup)
+          schemas = chosen.each_with_object({}) do |v, s|
+            if (v_parts = v.table_name.split('.')).length > 1
+              s[v_parts.first] = nil unless [::Brick.default_schema, 'public'].include?(v_parts.first)
+            end
+          end
+        end
+
+        seeds = +'# Seeds file for '
+        if (arbc = ActiveRecord::Base.connection).respond_to?(:current_database) # SQLite3 can't do this!
+          seeds << "#{arbc.current_database}:\n"
+        elsif (filename = arbc.instance_variable_get(:@connection_parameters)&.fetch(:database, nil))
+          seeds << "#{filename}:\n"
+        end
+        done = []
+        fks = {}
+        stuck = {}
+        indexes = {} # Track index names to make sure things are unique
+        ar_base = Object.const_defined?(:ApplicationRecord) ? ApplicationRecord : Class.new(ActiveRecord::Base)
+        atrt_idx = 0 # ActionText::RichText unique index number
+        airtable_assoc_recids = Hash.new { |h, k| h[k] = [] }
+        @has_atrts = nil # Any ActionText::RichText present?
+        # Start by making entries for fringe models (those with no foreign keys).
+        # Continue layer by layer, creating entries for models that reference ones already done, until
+        # no more entries can be created. (At that point hopefully all models are accounted for.)
+        while (fringe = chosen.reject do |seed_model|
+          tbl = seed_model.table_name
+          snag_fks = []
+          snags = relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
+            # Skip any foreign keys which should be deferred ...
+            !Brick.drfgs[tbl]&.any? do |drfg|
+              drfg[0] == v.fetch(:fk, nil) && drfg[1] == v.fetch(:inverse_table, nil)
+            end &&
+            v[:is_bt] && !v[:polymorphic] && # ... and polymorphics ...
+            tbl != v[:inverse_table] && # ... and self-referencing associations (stuff like "parent_id")
+            !done.any? { |done_seed_model| done_seed_model.table_name == v[:inverse_table] } &&
+            ::Brick.config.ignore_migration_fks.exclude?(snag_fk = "#{tbl}.#{v[:fk]}") &&
+            snag_fks << snag_fk
+          end
+          if snags&.present?
+            # puts snag_fks.inspect
+            stuck[tbl] = snags
+          end
+        end
+        ).present?
+          seeds << "\n"
+          unless is_airtable
+            # Search through the fringe to see if we should bump special dependent classes forward to the next fringe.
+            # (Currently only ActiveStorage::Attachment if there's also an ActiveStorage::VariantRecord in the same
+            # fringe, and always have ActionText::EncryptedRichText at the very end.)
+            fringe_classes = fringe.map { |f| f.klass.name }
+            unless (asa_idx = fringe_classes.index('ActiveStorage::Attachment')).nil?
+              fringe.slice!(asa_idx) if fringe_classes.include?('ActiveStorage::VariantRecord')
+            end
+            unless (atert_idx = fringe_classes.index('ActionText::EncryptedRichText')).nil?
+              fringe.slice!(atert_idx) if fringe_classes.length > 1
+            end
+          end
+          fringe.each do |seed_model|
+            tbl = seed_model.table_name
+            next unless ::Brick.config.exclude_tables.exclude?(tbl) &&
+                        (relation = relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present? &&
+                        (is_airtable || (klass = seed_model.klass).table_exists?)
+
+            pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [ar_base.primary_key].flatten.sort)
+            # In case things aren't as standard
+            if pkey_cols.empty?
+              pkey_cols = if rpk.empty? # && relation[:cols][arpk.first]&.first == key_type
+                            arpk
+                          elsif rpk.first
+                            rpk
+                          end
+            end
+            schema = if (tbl_parts = tbl.split('.')).length > 1
+                       if tbl_parts.first == (::Brick.default_schema || 'public')
+                         tbl_parts.shift
+                         nil
+                       else
+                         tbl_parts.first
+                       end
+                     end
+
+            # %%% For the moment we're skipping polymorphics
+            fkeys = if is_airtable
+                      tbl = tbl.singularize
+                      relation[:fks]&.values&.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
+                    else
+                      klass.reflect_on_all_associations.select { |a| a.belongs_to? && !a.polymorphic? }.map do |fk|
+                        { fk: fk.foreign_key, assoc_name: fk.name.to_s, inverse_table: fk.table_name }
+                      end
+                    end
+            # Refer to this table name as a symbol or dotted string as appropriate
+            # tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
+
+            has_rows = false
+            is_empty = true
+            klass_name = is_airtable ? ::Brick::AirtableApiCaller.sane_table_name(relation[:airtable_table]&.name)&.singularize&.camelize : klass.name
+            # Pull the records
+            collection = if is_airtable
+                           if (airtable_table = relation[:airtable_table])
+                             ::Brick::AirtableApiCaller.https_get("https://api.airtable.com/v0/#{airtable_table.base_id}/#{airtable_table.id}").fetch('records', nil)
+                           end
+                         else
+                           klass.order(*pkey_cols)
+                         end
+            collection&.each do |obj|
+              if is_airtable
+                fields = obj['fields'].each_with_object({}) do |field, s|
+                  if relation[:cols].keys.include?(col_name = ::Brick::AirtableApiCaller.sane_name(field.first))
+                    s[col_name] = obj['fields'][field.first]
+                  else # Consider N:M fks
+                    nm_fk = relation[:fks].find do |_k, fk1|
+                      relations[fk1[:assoc_tbl]]&.fetch(:fks, nil)&.find { |_k, fk2| fk2[:assoc_name] == col_name }
+                    end&.last
+                    if (t_table = nm_fk&.fetch(:inverse_table, nil))
+                      field.last.each do |nm_rec|
+                        nm_fk_col = nm_fk[:assoc_tbl]
+                        airtable_assoc_recids[t_table] << "#{nm_fk[:fk]}: #{nm_fk[:fk].singularize}_#{obj['id'][3..-1]}, " \
+                                                          "#{nm_fk_col}: #{nm_fk_col.singularize}_#{nm_rec[3..-1]}"
+                      end
+                    end
+                  end
+                end
+                objects = relation[:airtable_table].objects
+                obj = objects[airtable_id = obj['id']] = AirtableObject.new(seed_model, obj['fields'], obj['createdTime'])
+              end
+              unless has_rows
+                has_rows = true
+                seeds << " puts 'Seeding: #{klass_name}'\n"
+              end
+              is_empty = false
+              # For Airtable, take off the "rec___" prefix
+              pk_val = is_airtable ? airtable_id[3..-1] : brick_escape(obj.attributes_before_type_cast[pkey_cols.first])
+              var_name = "#{tbl.gsub('.', '__')}_#{pk_val}"
+              fk_vals = []
+              data = []
+              updates = []
+              relation[:cols].each do |col, _col_type|
+                # Skip primary key columns, unless they are part of a foreign key.
+                # (But always add all columns if it's Airtable!)
+                next if !(fk = fkeys.find { |assoc| col == assoc[:fk] }) &&
+                        pkey_cols.include?(col) &&
+                        !is_airtable
+
+                # Used to be: obj.send(col)
+                # (and with that it was possible to raise ActiveRecord::Encryption::Errors::Configuration...)
+                # %%% should test further and see if that is possible with this code!)
+                if (val = obj.attributes_before_type_cast[col]) && (val.is_a?(Time) || val.is_a?(Date))
+                  val = val.to_s
+                end
+                if fk
+                  inv_tbl = fk[:inverse_table].gsub('.', '__')
+                  fk_val = if is_airtable
+                             # Used to be: fk[:airtable_col]
+                             # Take off the "rec___" prefix
+                             obj.attributes_before_type_cast[fk[:assoc_name]]&.first&.[](3..-1)
+                           else
+                             brick_escape(val)
+                           end
+                  fk_vals << "#{fk[:assoc_name]}: #{inv_tbl}_#{fk_val}" if fk_val
+                else
+                  val = case val.class.name
+                        when 'ActiveStorage::Filename'
+                          val.to_s.inspect
+                        when 'ActionText::RichText'
+                          ensure_has_atrts(updates)
+                          atrt_var = "atrt#{atrt_idx += 1}"
+                          atrt_create = "(#{atrt_var} = #{val.class.name}.create(name: #{val.name.inspect}, body: #{val.to_trix_html.inspect
+                            }, record_type: #{val.record_type.inspect}, record_id: #{var_name}.#{pkey_cols.first
+                            }, created_at: DateTime.parse('#{val.created_at.inspect}'), updated_at: DateTime.parse('#{val.updated_at.inspect}')))"
+                          updates << "#{var_name}.update(#{col}: #{atrt_create})\n"
+                          # obj.send(col)&.embeds_blobs&.each do |blob|
+                          updates << "atrt_ids[[#{val.id}, '#{val.class.name}']] = #{atrt_var}.id\n"
+                          # end
+                          next
+                        else
+                          val.inspect
+                        end
+                  data << "#{col}: #{val}" unless val == 'nil'
+                end
+              end
+              case klass_name
+              when 'ActiveStorage::VariantRecord'
+                ensure_has_atrts(updates)
+                updates << "atrt_ids[[#{obj.id}, '#{klass_name}']] = #{var_name}.id\n"
+              end
+              # Make sure that ActiveStorage::Attachment and ActionText::EncryptedRichText get
+              # wired up to the proper record_id
+              if klass_name == 'ActiveStorage::Attachment' || klass_name == 'ActionText::EncryptedRichText'
+                record_class = data.find { |d| d.start_with?('record_type: ') }[14..-2]
+                record_id = data.find { |d| d.start_with?('record_id: ') }[11..-1]
+                data.reject! { |d| d.start_with?('record_id: ') || d.start_with?('created_at: ') || d.start_with?('updated_at: ') }
+                data << "record_id: atrt_ids[[#{record_id}, '#{record_class}']]"
+                seeds << "#{var_name} = #{klass_name}.find_or_create_by(#{(fk_vals + data).join(', ')}) do |asa|
+asa.created_at = DateTime.parse('#{obj.created_at.inspect}')#{"
+asa.updated_at = DateTime.parse('#{obj.updated_at.inspect}')" if obj.respond_to?(:updated_at)}
+end\n"
+              else
+                seeds << "#{var_name} = #{klass_name}.create(#{(fk_vals + data).join(', ')})\n"
+                unless is_airtable
+                  klass.attachment_reflections.each do |k, v|
+                    if (attached = obj.send(k))
+                      ensure_has_atrts(updates)
+                      updates << "atrt_ids[[#{obj.id}, '#{klass_name}']] = #{var_name}.id\n"
+                    end
+                  end if klass.respond_to?(:attachment_reflections)
+                end
+              end
+              updates.each { |update| seeds << update } # Anything that needs patching up after-the-fact
+            end
+            seeds << " # (Skipping #{klass_name} as it has no rows)\n" unless has_rows
+          end
+          done.concat(fringe)
+          chosen -= done
+        end
+        airtable_assoc_recids.each do |k, v| # N:M links
+          v.each do |link|
+            seeds << "#{k.singularize.camelize}.create(#{link})\n"
+          end
+        end
+
+        File.open(seed_file_path, "w") { |f| f.write seeds }
+        stuck_counts = Hash.new { |h, k| h[k] = 0 }
+        chosen.each do |leftover|
+          puts "Can't do #{leftover.klass_name} because:\n #{stuck[leftover.table_name].map do |snag|
+            stuck_counts[snag.last[:inverse_table]] += 1
+            snag.last[:assoc_name]
+          end.join(', ')}"
+        end
+        puts "\n*** Created seeds for #{done.length} models in db/seeds.rb ***"
+        if (stuck_sorted = stuck_counts.to_a.sort { |a, b| b.last <=> a.last }).length.positive?
+          puts "-----------------------------------------"
+          puts "Unable to create seeds for #{stuck_sorted.length} tables#{
+            ". Here's the top 5 blockers" if stuck_sorted.length > 5
+          }:"
+          pp stuck_sorted[0..4]
+        end
+      end
+
+      private
+
+      def brick_escape(val)
+        val = val.to_s if val.is_a?(Date) || val.is_a?(Time) # Accommodate when for whatever reason a primary key is a date or time
+        case val
+        when String
+          ret = +''
+          val.each_char do |ch|
+            if ch < '0' || (ch > '9' && ch < 'A') || ch > 'Z'
+              ret << (ch == '_' ? ch : "x#{'K'.unpack('H*')[0]}")
+            else
+              ret << ch
+            end
+          end
+          ret
+        else
+          val
+        end
+      end
+
+      def ensure_has_atrts(array)
+        unless @has_atrts
+          array << "atrt_ids = {}\n"
+          @has_atrts = true
+        end
+      end
+
+      class AirtableObject
+        attr_accessor :table, :attributes_before_type_cast, :created_at
+        def initialize(table, attributes, created_at)
+          self.table = table
+          self.attributes_before_type_cast = attributes.each_with_object({}) { |a, s| s[::Brick::AirtableApiCaller.sane_name(a.first)] = a.last }
+          self.created_at = created_at
+        end
+      end
+    end
+  end
+end
data/lib/generators/brick/seeds_generator.rb
CHANGED
@@ -2,256 +2,16 @@
 
 require 'brick'
 require 'rails/generators'
-require '
+require 'generators/brick/seeds_builder'
 
 module Brick
   class SeedsGenerator < ::Rails::Generators::Base
-    include FancyGets
-
     desc 'Auto-generates a seeds file from existing data.'
 
-    SeedModel = Struct.new(:table_name, :klass, :is_brick)
-    SeedModel.define_method(:to_s) do
-      "#{klass.name}#{' (brick-generated)' if is_brick}"
-    end
-
     def brick_seeds
       # %%% If Apartment is active and there's no schema_to_analyse, ask which schema they want
 
-      ::Brick.
-      ActiveRecord::Base.establish_connection
-
-      # Load all models
-      ::Brick.eager_load_classes
-
-      # Generate a list of viable models that can be chosen
-      # First start with any existing models that have been defined ...
-      existing_models = ActiveRecord::Base.descendants.each_with_object({}) do |m, s|
-        s[m.table_name] = SeedModel.new(m.table_name, m, false) if !m.abstract_class? && !m.is_view? && m.table_exists?
-      end
-      models = (existing_models.values +
-        # ... then add models which can be auto-built by Brick
-        ::Brick.relations.reject do |k, v|
-          k.is_a?(Symbol) || (v.key?(:isView) && v[:isView] == true) || existing_models.key?(k)
-        end.map { |k, v| SeedModel.new(k, v[:class_name].constantize, true) }
-      ).sort { |a, b| a.to_s <=> b.to_s }
-      if models.empty?
-        puts "No viable models found for database #{ActiveRecord::Base.connection.current_database}."
-        return
-      end
-
-      if File.exist?(seed_file_path = "#{::Rails.root}/db/seeds.rb")
-        puts "WARNING: seeds file #{seed_file_path} appears to already be present.\nOverwrite?"
-        return unless gets_list(list: ['No', 'Yes']) == 'Yes'
-
-        puts "\n"
-      end
-
-      chosen = gets_list(list: models, chosen: models.dup)
-      schemas = chosen.each_with_object({}) do |v, s|
-        if (v_parts = v.table_name.split('.')).length > 1
-          s[v_parts.first] = nil unless [::Brick.default_schema, 'public'].include?(v_parts.first)
-        end
-      end
-      seeds = +'# Seeds file for '
-      if (arbc = ActiveRecord::Base.connection).respond_to?(:current_database) # SQLite3 can't do this!
-        seeds << "#{arbc.current_database}:\n"
-      elsif (filename = arbc.instance_variable_get(:@connection_parameters)&.fetch(:database, nil))
-        seeds << "#{filename}:\n"
-      end
-      done = []
-      fks = {}
-      stuck = {}
-      indexes = {} # Track index names to make sure things are unique
-      ar_base = Object.const_defined?(:ApplicationRecord) ? ApplicationRecord : Class.new(ActiveRecord::Base)
-      atrt_idx = 0 # ActionText::RichText unique index number
-      @has_atrts = nil # Any ActionText::RichText present?
-      # Start by making entries for fringe models (those with no foreign keys).
-      # Continue layer by layer, creating entries for models that reference ones already done, until
-      # no more entries can be created. (At that point hopefully all models are accounted for.)
-      while (fringe = chosen.reject do |seed_model|
-        tbl = seed_model.table_name
-        snag_fks = []
-        snags = ::Brick.relations.fetch(tbl, nil)&.fetch(:fks, nil)&.select do |_k, v|
-          # Skip any foreign keys which should be deferred ...
-          !Brick.drfgs[tbl]&.any? do |drfg|
-            drfg[0] == v.fetch(:fk, nil) && drfg[1] == v.fetch(:inverse_table, nil)
-          end &&
-          v[:is_bt] && !v[:polymorphic] && # ... and polymorphics ...
-          tbl != v[:inverse_table] && # ... and self-referencing associations (stuff like "parent_id")
-          !done.any? { |done_seed_model| done_seed_model.table_name == v[:inverse_table] } &&
-          ::Brick.config.ignore_migration_fks.exclude?(snag_fk = "#{tbl}.#{v[:fk]}") &&
-          snag_fks << snag_fk
-        end
-        if snags&.present?
-          # puts snag_fks.inspect
-          stuck[tbl] = snags
-        end
-      end
-      ).present?
-        seeds << "\n"
-        # Search through the fringe to see if we should bump special dependent classes forward to the next fringe.
-        # (Currently only ActiveStorage::Attachment if there's also an ActiveStorage::VariantRecord in the same
-        # fringe, and always have ActionText::EncryptedRichText at the very end.)
-        fringe_classes = fringe.map { |f| f.klass.name }
-        unless (asa_idx = fringe_classes.index('ActiveStorage::Attachment')).nil?
-          fringe.slice!(asa_idx) if fringe_classes.include?('ActiveStorage::VariantRecord')
-        end
-        unless (atert_idx = fringe_classes.index('ActionText::EncryptedRichText')).nil?
-          fringe.slice!(atert_idx) if fringe_classes.length > 1
-        end
-        fringe.each do |seed_model|
-          tbl = seed_model.table_name
-          next unless ::Brick.config.exclude_tables.exclude?(tbl) &&
-                      (relation = ::Brick.relations.fetch(tbl, nil))&.fetch(:cols, nil)&.present? &&
-                      (klass = seed_model.klass).table_exists?
-
-          pkey_cols = (rpk = relation[:pkey].values.flatten) & (arpk = [ar_base.primary_key].flatten.sort)
-          # In case things aren't as standard
-          if pkey_cols.empty?
-            pkey_cols = if rpk.empty? # && relation[:cols][arpk.first]&.first == key_type
-                          arpk
-                        elsif rpk.first
-                          rpk
-                        end
-          end
-          schema = if (tbl_parts = tbl.split('.')).length > 1
-                     if tbl_parts.first == (::Brick.default_schema || 'public')
-                       tbl_parts.shift
-                       nil
-                     else
-                       tbl_parts.first
-                     end
-                   end
-
-          # %%% For the moment we're skipping polymorphics
-          fkeys = relation[:fks].values.select { |assoc| assoc[:is_bt] && !assoc[:polymorphic] }
-          # Refer to this table name as a symbol or dotted string as appropriate
-          # tbl_code = tbl_parts.length == 1 ? ":#{tbl_parts.first}" : "'#{tbl}'"
-
-          has_rows = false
-          is_empty = true
-          klass.order(*pkey_cols).each do |obj|
-            unless has_rows
-              has_rows = true
-              seeds << " puts 'Seeding: #{klass.name}'\n"
-            end
-            is_empty = false
-            pk_val = obj.send(pkey_cols.first)
-            var_name = "#{tbl.gsub('.', '__')}_#{brick_escape(pk_val)}"
-            fk_vals = []
-            data = []
-            updates = []
-            relation[:cols].each do |col, _col_type|
-              next if !(fk = fkeys.find { |assoc| col == assoc[:fk] }) &&
-                      pkey_cols.include?(col)
-
-              begin
-                # Used to be: obj.send(col)
-                if (val = obj.attributes_before_type_cast[col]) && (val.is_a?(Time) || val.is_a?(Date))
-                  val = val.to_s
-                end
-              rescue StandardError => e # ActiveRecord::Encryption::Errors::Configuration
-              end
-              if fk
-                inv_tbl = fk[:inverse_table].gsub('.', '__')
-                fk_vals << "#{fk[:assoc_name]}: #{inv_tbl}_#{brick_escape(val)}" if val
-              else
-                val = case val.class.name
-                      when 'ActiveStorage::Filename'
-                        val.to_s.inspect
-                      when 'ActionText::RichText'
-                        ensure_has_atrts(updates)
-                        atrt_var = "atrt#{atrt_idx += 1}"
-                        atrt_create = "(#{atrt_var} = #{val.class.name}.create(name: #{val.name.inspect}, body: #{val.to_trix_html.inspect
-                          }, record_type: #{val.record_type.inspect}, record_id: #{var_name}.#{pkey_cols.first
-                          }, created_at: DateTime.parse('#{val.created_at.inspect}'), updated_at: DateTime.parse('#{val.updated_at.inspect}')))"
-                        updates << "#{var_name}.update(#{col}: #{atrt_create})\n"
-                        # obj.send(col)&.embeds_blobs&.each do |blob|
-                        updates << "atrt_ids[[#{val.id}, '#{val.class.name}']] = #{atrt_var}.id\n"
-                        # end
-                        next
-                      else
-                        val.inspect
-                      end
-                data << "#{col}: #{val}" unless val == 'nil'
-              end
-            end
-            case klass.name
-            when 'ActiveStorage::VariantRecord'
-              ensure_has_atrts(updates)
-              updates << "atrt_ids[[#{obj.id}, '#{klass.name}']] = #{var_name}.id\n"
-            end
-            # Make sure that ActiveStorage::Attachment and ActionText::EncryptedRichText get
-            # wired up to the proper record_id
-            if klass.name == 'ActiveStorage::Attachment' || klass.name == 'ActionText::EncryptedRichText'
-              record_class = data.find { |d| d.start_with?('record_type: ') }[14..-2]
-              record_id = data.find { |d| d.start_with?('record_id: ') }[11..-1]
-              data.reject! { |d| d.start_with?('record_id: ') || d.start_with?('created_at: ') || d.start_with?('updated_at: ') }
-              data << "record_id: atrt_ids[[#{record_id}, '#{record_class}']]"
-              seeds << "#{var_name} = #{klass.name}.find_or_create_by(#{(fk_vals + data).join(', ')}) do |asa|
-asa.created_at = DateTime.parse('#{obj.created_at.inspect}')#{"
-asa.updated_at = DateTime.parse('#{obj.updated_at.inspect}')" if obj.respond_to?(:updated_at)}
-end\n"
-            else
-              seeds << "#{var_name} = #{seed_model.klass.name}.create(#{(fk_vals + data).join(', ')})\n"
-              klass.attachment_reflections.each do |k, v|
-                if (attached = obj.send(k))
-                  ensure_has_atrts(updates)
-                  updates << "atrt_ids[[#{obj.id}, '#{klass.name}']] = #{var_name}.id\n"
-                end
-              end if klass.respond_to?(:attachment_reflections)
-            end
-            updates.each { |update| seeds << update } # Anything that needs patching up after-the-fact
-          end
-          seeds << " # (Skipping #{seed_model.klass.name} as it has no rows)\n" unless has_rows
-          File.open(seed_file_path, "w") { |f| f.write seeds }
-        end
-        done.concat(fringe)
-        chosen -= done
-      end
-      stuck_counts = Hash.new { |h, k| h[k] = 0 }
-      chosen.each do |leftover|
-        puts "Can't do #{leftover.klass.name} because:\n #{stuck[leftover.table_name].map do |snag|
-          stuck_counts[snag.last[:inverse_table]] += 1
-          snag.last[:assoc_name]
-        end.join(', ')}"
-      end
-      puts "\n*** Created seeds for #{done.length} models in db/seeds.rb ***"
-      if (stuck_sorted = stuck_counts.to_a.sort { |a, b| b.last <=> a.last }).length.positive?
-        puts "-----------------------------------------"
-        puts "Unable to create seeds for #{stuck_sorted.length} tables#{
-          ". Here's the top 5 blockers" if stuck_sorted.length > 5
-        }:"
-        pp stuck_sorted[0..4]
-      end
-    end
-
-    private
-
-    def brick_escape(val)
-      val = val.to_s if val.is_a?(Date) || val.is_a?(Time) # Accommodate when for whatever reason a primary key is a date or time
-      case val
-      when String
-        ret = +''
-        val.each_char do |ch|
-          if ch < '0' || (ch > '9' && ch < 'A') || ch > 'Z'
-            ret << (ch == '_' ? ch : "x#{'K'.unpack('H*')[0]}")
-          else
-            ret << ch
-          end
-        end
-        ret
-      else
-        val
-      end
-    end
-
-    def ensure_has_atrts(array)
-      unless @has_atrts
-        array << "atrt_ids = {}\n"
-        @has_atrts = true
-      end
+      ::Brick::SeedsBuilder.generate_seeds
    end
  end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: brick
 version: !ruby/object:Gem::Version
-  version: 1.0.
+  version: 1.0.230
 platform: ruby
 authors:
 - Lorin Thwaits
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2025-
+date: 2025-03-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord
@@ -228,13 +228,17 @@ files:
 - lib/brick/util.rb
 - lib/brick/version_number.rb
 - lib/generators/brick/USAGE
+- lib/generators/brick/airtable_api_caller.rb
+- lib/generators/brick/airtable_migrations_generator.rb
+- lib/generators/brick/airtable_seeds_generator.rb
 - lib/generators/brick/controllers_generator.rb
 - lib/generators/brick/install_generator.rb
-- lib/generators/brick/
+- lib/generators/brick/migrations_builder.rb
 - lib/generators/brick/migrations_generator.rb
 - lib/generators/brick/models_generator.rb
 - lib/generators/brick/salesforce_migrations_generator.rb
 - lib/generators/brick/salesforce_schema.rb
+- lib/generators/brick/seeds_builder.rb
 - lib/generators/brick/seeds_generator.rb
 - lib/generators/brick/templates/add_object_changes_to_versions.rb.erb
 - lib/generators/brick/templates/create_versions.rb.erb