pg_meta 0.1.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+ metadata.gz: 9a7711ea44e1ae9ff9fd3ed45016e7ba2a00a1554af08e5b311e036cb29c6f01
+ data.tar.gz: 5045d4b33fe8a0b2aeacac9958d7a2f80e8ed2ae3b27c1b69a457acc4d343875
+ SHA512:
+ metadata.gz: 2e342aef3595dd33c6728ad702efa4a7e4e80640cecccf6c345ad5839f5d3c913b30215b8be7f858fa90faa84eef5c497083978d423c271e008dfb13737c8ba7
+ data.tar.gz: 38b665922c5005182d764db3126948defc7e87aeab3355782f695f3d89a3976e648aa405e191b871863da1ca42377be3b6ff1fcaeb9ea4657c5c272ddc40c3f4
data/.gitignore ADDED
@@ -0,0 +1,19 @@
+ /.bundle/
+ /.yardoc
+ /_yardoc/
+ /coverage/
+ /doc/
+ /pkg/
+ /spec/reports/
+ /tmp/
+
+ # rspec failure tracking
+ .rspec_status
+
+ # Ignore auto-generated main file
+ /main
+
+ # Ignore Gemfile.lock. See https://stackoverflow.com/questions/4151495/should-gemfile-lock-be-included-in-gitignore
+ /Gemfile.lock
+
+ # Put your personal ignore files in /home/clr/.config/git/ignore
data/.rspec ADDED
@@ -0,0 +1,3 @@
+ --format documentation
+ --color
+ --require spec_helper
data/.ruby-version ADDED
@@ -0,0 +1 @@
+ ruby-2.7.1
data/.travis.yml ADDED
@@ -0,0 +1,7 @@
+ ---
+ sudo: false
+ language: ruby
+ cache: bundler
+ rvm:
+ - 2.6.6
+ before_install: gem install bundler -v 1.17.3
data/Gemfile ADDED
@@ -0,0 +1,6 @@
+ source "https://rubygems.org"
+
+ git_source(:github) {|repo_name| "https://github.com/#{repo_name}" }
+
+ # Specify your gem's dependencies in pg_meta.gemspec
+ gemspec
data/README.md ADDED
@@ -0,0 +1,35 @@
+ # PgMeta
+
+ Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/pg_meta`. To experiment with that code, run `bin/console` for an interactive prompt.
+
+ TODO: Delete this and the text above, and describe your gem
+
+ ## Installation
+
+ Add this line to your application's Gemfile:
+
+ ```ruby
+ gem 'pg_meta'
+ ```
+
+ And then execute:
+
+ $ bundle
+
+ Or install it yourself as:
+
+ $ gem install pg_meta
+
+ ## Usage
+
+ TODO: Write usage instructions here
+
+ ## Development
+
+ After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
+
+ To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+
+ ## Contributing
+
+ Bug reports and pull requests are welcome on GitHub at https://github.com/[USERNAME]/pg_meta.
data/Rakefile ADDED
@@ -0,0 +1,6 @@
+ require "bundler/gem_tasks"
+ require "rspec/core/rake_task"
+
+ RSpec::Core::RakeTask.new(:spec)
+
+ task :default => :spec
data/TODO ADDED
@@ -0,0 +1,3 @@
+
+ o Needs tests of functions and triggers (works on mikras)
+ o ::load_yaml doesn't create depending/defining tables/relations on Table and View
data/bin/console ADDED
@@ -0,0 +1,14 @@
+ #!/usr/bin/env ruby
+
+ require "bundler/setup"
+ require "pg_meta"
+
+ # You can add fixtures and/or initialization code here to make experimenting
+ # with your gem easier. You can also use a different console, if you like.
+
+ # (If you use this, don't forget to add pry to your Gemfile!)
+ # require "pry"
+ # Pry.start
+
+ require "irb"
+ IRB.start(__FILE__)
data/bin/setup ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env bash
+ set -euo pipefail
+ IFS=$'\n\t'
+ set -vx
+
+ bundle install
+
+ # Do any other automated setup that you need to do here
data/exe/pg_meta ADDED
@@ -0,0 +1,28 @@
+ #!/usr/bin/env ruby
+
+ require 'shellopts'
+ require 'pg_meta'
+
+ include ShellOpts
+
+ SPEC = %(
+ -f,format=FORMAT:yaml,marshal,dump
+ Control format of output. FORMAT can be one of 'yaml', 'marshal' or the
+ human-readable 'dump' format (the default)
+
+ -- DATABASE
+ )
+
+ opts, args = ShellOpts.process(SPEC, ARGV, version: PgMeta::VERSION)
+
+ format = opts.format
+ database = args.expect(1)
+ conn = PgConn.new(database)
+ meta = PgMeta.new(conn)
+
+ case format
+ when "yaml"; puts meta.to_yaml
+ when "marshal"; puts meta.to_marshal
+ else meta.dump
+ end
+
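The executable above doubles as a usage reference for the library itself. Below is a minimal sketch of the same flow in plain Ruby; it assumes the external `pg_conn` gem (providing `PgConn.new`) and the `PgMeta.new`/`#to_yaml`/`#to_marshal`/`#dump` entry points used in `exe/pg_meta`, and `"mydb"` is a placeholder database name:

```ruby
require 'pg_conn'   # assumed external dependency, as used by exe/pg_meta
require 'pg_meta'

# Connect to an existing database and introspect its metadata
conn = PgConn.new("mydb")          # "mydb" is a placeholder database name
meta = PgMeta.new(conn)

# Emit the metadata in one of the formats supported by exe/pg_meta
puts meta.to_yaml                  # machine-readable YAML
# puts meta.to_marshal             # marshalled Ruby objects
# meta.dump                        # human-readable dump (the default)
```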
data/lib/ext/hash.rb ADDED
@@ -0,0 +1,9 @@
+
+ module Ext
+ module Hash
+ refine ::Hash do
+ def project(*keys) keys.map { |key| self[key] } end
+ end
+ end
+ end
+
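The `Hash#project` refinement above is what the loaders use to pull a fixed set of keys out of each result row (`row.project(:name, :owner)` and friends). A small illustration with a hypothetical row:

```ruby
using Ext::Hash

row = { name: "users", owner: "postgres", oid: 1234 }   # hypothetical result row
row.project(:name, :owner)                              # => ["users", "postgres"]
```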
@@ -0,0 +1,37 @@
+
+ require 'indented_io'
+
+ module PgMeta
+ class Node
+ def dump
+ dump_value(to_h)
+ end
+
+ def dump_value(v)
+ case v
+ when Hash
+ puts "{"
+ indent {
+ v.each { |k,v|
+ print "#{k}: "
+ dump_value(v)
+ }
+ }
+ puts "}"
+ when []
+ puts "[]"
+ when Array
+ puts "["
+ indent {
+ v.each { |v|
+ dump_value(v)
+ }
+ }
+ puts "]"
+ else
+ puts (v.nil? ? "nil" : v)
+ end
+ end
+ end
+ end
+
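For orientation, `Node#dump` walks the `#to_h` representation and prints hashes as `{ ... }` blocks and arrays as `[ ... ]` blocks, one entry per line. A rough sketch of the output shape, assuming indented_io's default indentation and a hypothetical `#to_h` value:

```ruby
# Given a node whose #to_h returns
#   { name: "users", columns: ["id", "name"], comment: nil }
# #dump prints approximately:
#
#   {
#     name: users
#     columns: [
#       id
#       name
#     ]
#     comment: nil
#   }
```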
@@ -0,0 +1,356 @@
+
+ module PgMeta
+ class Database
+ using Ext::Hash
+
+ # Load the database from the given PgConn::Connection object. This method is a
+ # transaction wrapper around #do_load_from_connection
+ def self.load_conn(pg_conn)
+ pg_conn.pg_connection.transaction { |conn|
+ conn.exec "set transaction isolation level serializable read only deferrable"
+ do_load_from_connection(conn)
+ }
+ end
+
+ private
+ # Load the given database into self. Note that +conn+ is a PG::Connection object
+ def self.do_load_from_connection(conn)
+ # Get database owner
+ r = conn.exec %(
+ select rolname::varchar
+ from pg_database d
+ join pg_roles r on r.oid = d.datdba
+ where d.datname = '#{conn.db}'
+ )
+ owner = r.values.first.first
+
+ # Create database object
+ db = Database.new(conn.db, owner)
+
+ # Build schemas
+ conn.exec(%(
+ select schema_name::text as name,
+ schema_owner::text as owner
+ from information_schema.schemata
+ where schema_name !~ '^pg_' and schema_name <> 'information_schema'
+ )).each { |row|
+ Schema.new(db, *row.project(:name, :owner))
+ }
+
+ # Build tables and views
+ conn.exec(%(
+ select n.nspname::text as "schema",
+ c.relname::text as "name",
+ c.relkind,
+ coalesce(i.is_insertable_into, 'NO') = 'YES' as "insertable?",
+ coalesce(i.is_typed, 'NO') = 'YES' as "typed?"
+ from pg_class c
+ join pg_namespace n on n.oid = c.relnamespace
+ left join information_schema.tables i on
+ i.table_schema = n.nspname
+ and i.table_name = c.relname
+ where n.nspname !~ '^pg_' and n.nspname <> 'information_schema'
+ and c.relkind in ('r', 'v', 'm')
+ )).each { |row|
+ schema = db.schemas[row[:schema]]
+ klass =
+ case row[:relkind]
+ when 'r'; Table
+ when 'v'; View
+ when 'm'; MaterializedView
+ end
+ klass.new(schema, *row.project(:name, :insertable?, :typed?))
+ }
+
+ # Find transitive closure of dependencies between tables
+ conn.exec(%(
+ with recursive chain as (
+ select distinct
+ c.conrelid as this,
+ c.confrelid as that
+ from pg_depend d
+ join pg_constraint c on c.oid = d.objid
+ join pg_class t1 on t1.oid = c.conrelid
+ join pg_namespace n1 on n1.oid = t1.relnamespace
+ join pg_class t2 on t2.oid = c.confrelid
+ join pg_namespace n2 on n2.oid = t2.relnamespace
+ where n1.nspname = 'auth'
+ and deptype = 'n'
+ union
+ select ch.this as this,
+ c.confrelid as that
+ from pg_depend d
+ join pg_constraint c on c.oid = d.objid
+ join chain ch on ch.that = c.conrelid
+ )
+ select n1.nspname as this_schema,
+ t1.relname as this_table,
+ n2.nspname as that_schema,
+ t2.relname as that_table
+ from chain c
+ join pg_class t1 on t1.oid = c.this
+ join pg_namespace n1 on n1.oid = t1.relnamespace
+ join pg_class t2 on t2.oid = c.that
+ join pg_namespace n2 on n2.oid = t2.relnamespace
+ )).each { |row|
+ this_table = db.schemas[row[:this_schema]].tables[row[:this_table]]
+ that_table = db.schemas[row[:that_schema]].tables[row[:that_table]]
+ that_table.send(:add_depending_table, this_table)
+ }
+
+ # Build defining-relation dependencies
+ conn.exec(%(
+ select distinct
+ n2.nspname::text as "dep_schema",
+ c2.relname::text as "dep_view",
+ n.nspname::text as "def_schema",
+ c.relname::text as "def_relation"
+ from pg_class c
+ join pg_namespace n on n.oid = c.relnamespace
+ join pg_depend d on d.refobjid = c.oid
+ join pg_rewrite r on r.oid = d.objid
+ join pg_class c2 on c2.oid = r.ev_class
+ join pg_namespace n2 on n2.oid = c2.relnamespace
+ where n.nspname !~ '^pg_' and n.nspname <> 'information_schema'
+ and c.relkind in ('r', 'v', 'm')
+ and d.classid = 'pg_rewrite'::regclass
+ and d.refclassid = 'pg_class'::regclass
+ and d.deptype = 'n'
+ and c2.relkind in ('r', 'v', 'm')
+ and c.oid <> c2.oid
+ )).each { |row|
+ depending_view = db.schemas[row[:dep_schema]].tables[row[:dep_view]]
+ defining_relation = db.schemas[row[:def_schema]].tables[row[:def_relation]]
+
+ defining_relation.send(:add_depending_view, depending_view)
+ depending_view.send(:add_defining_relation, defining_relation)
+ }
+
+ # Build columns
+ conn.exec(%(
+ select n.nspname::text as "schema",
+ c.relname::text as "table",
+ a.attname::text as "name",
+ a.attnum as "ordinal",
+ t.typname::text as "type",
+ t2.typname::text as "element_type",
+ a.attndims::integer as "dimensions",
+ i.column_default::text as "default",
+ coalesce(i.is_identity, 'NO') = 'YES' as "identity?",
+ coalesce(i.is_generated, 'NO') = 'ALWAYS' as "generated?",
+ coalesce(i.is_nullable, 'NO') = 'YES' as "nullable?",
+ coalesce(i.is_updatable, 'NO') = 'YES' as "updatable?"
+ from pg_namespace n
+ join pg_class c on c.relnamespace = n.oid
+ join pg_attribute a on a.attrelid = c.oid
+ join pg_type t on t.oid = a.atttypid
+ left join pg_type t2 on t2.oid = t.typelem
+ left join information_schema.columns i on
+ i.table_schema = n.nspname
+ and i.table_name = c.relname
+ and i.column_name = a.attname
+ where n.nspname !~ '^pg_' and n.nspname <> 'information_schema'
+ and c.relkind in ('r', 'v', 'm')
+ and a.attnum > 0
+ )).each { |row|
+ table = db.schemas[row[:schema]].tables[row[:table]]
+ Column.new(table, *row.project(
+ :name, :ordinal, :type, :element_type, :dimensions, :default, :identity?, :generated?,
+ :nullable?, :updatable?))
+ }
+
+ # Build simple constraints
+ conn.exec(%(
+ select c.table_schema::text as schema,
+ c.table_name::text as table,
+ c.constraint_type::text,
+ c.constraint_name::text as name,
+ cc.check_clause as expression,
+ (
+ select array_agg(column_name::text)
+ from information_schema.constraint_column_usage ccu
+ where ccu.table_schema = c.table_schema
+ and ccu.table_name = c.table_name
+ and ccu.constraint_schema = c.constraint_schema
+ and ccu.constraint_name = c.constraint_name
+ ) as columns
+ from information_schema.table_constraints c
+ left join information_schema.check_constraints cc
+ on cc.constraint_schema = c.table_schema and
+ cc.constraint_name = c.constraint_name
+ where c.table_schema !~ '^pg_' and c.table_schema <> 'information_schema'
+ and c.constraint_type in ('PRIMARY KEY', 'UNIQUE', 'CHECK')
+ )).each { |row|
+ table = db.schemas[row[:schema]].tables[row[:table]]
+ name = row[:name]
+ columns = lookup_columns(table, row[:columns] || [])
+ expression = row[:expression]
+ case row[:constraint_type]
+ when "PRIMARY KEY"; PrimaryKeyConstraint.new(table, name, columns)
+ when "UNIQUE"; UniqueConstraint.new(table, name, columns)
+ when "CHECK"
+ constraint = CheckConstraint.new(table, name, expression)
+ # patch-up column if constraint is a not null constraint
+ if constraint.not_null?
+ constraint.column.instance_variable_set(:@is_nullable, false)
+ end
+ else
+ raise ArgumentError
+ end
+ }
+
+ # Build implicit unique constraints from indexes. Only indexes not used
+ # in constraints are considered
+ conn.exec(%(
+ select n.nspname as "schema",
+ c.relname as "table",
+ array_agg(a.attname::text) as "fields",
+ i.relname as "index",
+ x.indisunique as "unique"
+ from pg_class c
+ join pg_namespace n on n.oid = c.relnamespace
+ join pg_index x on x.indrelid = c.oid
+ join pg_class i on i.oid = x.indexrelid
+ join ( select attrelid, attname from pg_attribute order by attnum ) a on a.attrelid = i.oid
+ where n.nspname !~ '^pg_' and n.nspname <> 'information_schema'
+ and i.oid not in (select conindid from pg_constraint)
+ and x.indisunique
+ group by
+ "schema",
+ "table",
+ "index",
+ "unique"
+ order by
+ "schema",
+ "table",
+ "index"
+ )).each { |row|
+ table = db.schemas[row[:schema]].tables[row[:table]]
+ name = "#{row[:schema]}.#{row[:table]}.#{row[:index]}_constr"
+ columns = lookup_columns(table, row[:fields] || [])
+ UniqueConstraint.new(table, name, columns)
+ }
+
+ # Build referential constraints
+ #
+ # Referential constraints have to be initialized after unique constraints
+ #
+ # The GROUP BY is necessary because we re-assign constraints from schema to
+ # table. This requires joining key_column_usage again to get the name of
+ # the referenced table, and that yields a row for each column in the unique
+ # key (TODO: Can this be omitted?)
+ #
+ conn.exec(%(
+ select rc.constraint_schema::text as schema,
+ rc.constraint_name::text as name,
+ cu_refing.table_schema::text as "referencing_schema",
+ cu_refing.table_name::text as "referencing_table",
+ (
+ select array_agg(column_name::text order by ordinal_position)
+ from information_schema.key_column_usage kcu
+ where kcu.constraint_schema = rc.constraint_schema
+ and kcu.constraint_name = rc.constraint_name
+ ) as "referencing_columns",
+ cu_refed.table_schema::text as referenced_schema,
+ cu_refed.table_name::text as referenced_table,
+ cu_refed.constraint_name::text as referenced_constraint
+ from information_schema.referential_constraints rc
+ join information_schema.key_column_usage cu_refing
+ on cu_refing.constraint_schema = rc.constraint_schema
+ and cu_refing.constraint_name = rc.constraint_name
+ join information_schema.key_column_usage cu_refed
+ on cu_refed.constraint_schema = rc.unique_constraint_schema
+ and cu_refed.constraint_name = rc.unique_constraint_name
+ where cu_refing.table_schema !~ '^pg_' and cu_refing.table_schema <> 'information_schema'
+ group by
+ rc.constraint_schema,
+ rc.constraint_name,
+ cu_refing.table_schema,
+ cu_refing.table_name,
+ cu_refed.table_schema,
+ cu_refed.table_name,
+ cu_refed.constraint_name
+ )).each { |row|
+ schema = db.schemas[row[:schema]]
+ name = row[:name]
+ referencing_table = schema.tables[row[:referencing_table]]
+ referencing_columns = lookup_columns(referencing_table, row[:referencing_columns])
+ referenced_constraint =
+ db.schemas[row[:referenced_schema]] \
+ .tables[row[:referenced_table]] \
+ .constraints[row[:referenced_constraint]]
+ ref = ReferentialConstraint.new(referencing_table, name, referencing_columns, referenced_constraint)
+ ref.referenced_table.send(:add_depending_table, referencing_table)
+ }
+
+ # Build functions and procedures
+ conn.exec(%(
+ select s.nspname::text as "schema",
+ pg_get_userbyid(p.proowner)::text as "owner",
+ format('%I(%s)', p.proname, oidvectortypes(p.proargtypes))
+ || ': '
+ || format_type(p.prorettype, null) as "name",
+ case format_type(p.prorettype, null)
+ when 'void' then 'procedure'
+ else 'function'
+ end as "kind",
+ case
+ when prosecdef then 'definer'
+ else 'invoker'
+ end as "security"
+ from pg_proc p
+ join pg_namespace s on (p.pronamespace = s.oid)
+ where s.nspname !~ '^pg_' and s.nspname <> 'information_schema'
+ )).each { |row|
+ schema = db.schemas[row[:schema]]
+ klass = (row[:kind] == 'function' ? Function : Procedure)
+ klass.new(schema, *row.project(:name, :owner, :security))
+ }
+
+ # Build user-defined triggers
+ conn.exec(%(
+ select n.nspname::text as "schema",
+ c.relname::text as "table",
+ t.tgname::text as "name",
+ fn.nspname::text as "function_schema",
+ format('%I(%s)', p.proname::text, oidvectortypes(p.proargtypes))
+ || ': '
+ || format_type(p.prorettype, null) as "function_name",
+ case when (tgtype::int::bit(7) & b'0000001')::int = 0 then 'stmt' else 'row' end as "level",
+ coalesce(
+ case when (tgtype::int::bit(7) & b'0000010')::int = 0 then null else 'before' end,
+ case when (tgtype::int::bit(7) & b'0000010')::int = 0 then 'after' else null end,
+ case when (tgtype::int::bit(7) & b'1000000')::int = 0 then null else 'instead' end,
+ ''
+ )::text as "timing",
+ (case when (tgtype::int::bit(7) & b'0000100')::int = 0 then '' else ' insert' end) ||
+ (case when (tgtype::int::bit(7) & b'0001000')::int = 0 then '' else ' delete' end) ||
+ (case when (tgtype::int::bit(7) & b'0010000')::int = 0 then '' else ' update' end) ||
+ (case when (tgtype::int::bit(7) & b'0100000')::int = 0 then '' else ' truncate' end)
+ as "events"
+ from pg_trigger t
+ join pg_proc p on t.tgfoid = p.oid
+ join pg_class c on c.oid = t.tgrelid
+ join pg_namespace n on n.oid = c.relnamespace
+ join pg_namespace fn on fn.oid = p.pronamespace
+ where not t.tgisinternal
+ )).each { |row|
+ schema = db.schemas[row[:schema]]
+ table = schema.tables[row[:table]]
+ name, level, timing = *row.project(:name, :level, :timing)
+ function_schema = db.schemas[row[:function_schema]]
+ function = function_schema.functions[row[:function_name]]
+ events = row[:events].split.map(&:to_sym)
+ Trigger.new(table, name, function, level, timing, events)
+ }
+
+ # Return database object
+ db
+ end
+
+ def self.lookup_columns(table, column_names)
+ column_names.map { |n| table.columns[n] }
+ end
+ end
+ end
+
@@ -0,0 +1,76 @@
+
+ module PgMeta
+ class Database
+ using Ext::Hash
+
+ # Create a Database object from the given YAML representation and return
+ # it. The YAML representation can be generated by Database#to_h
+ def self.load_yaml(database_yaml)
+ database = Database.new *database_yaml.project(:name, :owner)
+
+ # Accumulators for second pass
+ views = []
+ referential_constraints = []
+
+ # First pass: Build everything except referential constraints
+ for schema_yaml in database_yaml[:schemas]
+ schema = Schema.new database, *schema_yaml.project(:name, :owner)
+ for table_yaml in schema_yaml[:tables]
+ klass = table_yaml[:table?] ? Table : (table_yaml[:materialized?] ? MaterializedView : View)
+ table = klass.new schema, *table_yaml.project(:name, :insertable?, :typed?)
+ views << [table, table_yaml[:defining_relations]] if klass <= View || klass <= MaterializedView
+ for column_yaml in table_yaml[:columns]
+ column = Column.new(table,
+ *column_yaml.project(
+ :name, :ordinal, :type, :element_type, :dimensions, :default, :identity?, :generated?,
+ :nullable?, :updatable?))
+ end
+ for constraint_yaml in table_yaml[:constraints]
+ name = constraint_yaml[:name]
+ case kind = constraint_yaml[:kind]
+ when :primary_key
+ columns = constraint_yaml[:columns]&.map { |name| table.columns[name] }
+ PrimaryKeyConstraint.new(table, name, columns)
+ when :unique
+ columns = constraint_yaml[:columns]&.map { |name| table.columns[name] }
+ UniqueConstraint.new(table, name, columns)
+ when :check
+ CheckConstraint.new(table, name, constraint_yaml[:expression])
+ when :referential
+ # Postpone to second pass
+ columns = constraint_yaml[:referencing_columns]&.map { |name| table.columns[name] }
+ referential_constraints << [table, name, columns, constraint_yaml[:referenced_constraint]]
+ end
+ end
+ for trigger_yaml in table_yaml[:triggers]
+ Trigger.new(table, *trigger_yaml.project(:name, :function, :level, :timing, :events))
+ end
+ end
+ for function_yaml in schema_yaml[:functions]
+ klass = function_yaml[:function?] ? Function : Procedure
+ klass.new(schema, *function_yaml.project(:name, :owner, :security))
+ end
+ end
+
+ # Second pass: Add referential constraints
+ referential_constraints.each { |table, name, columns, referenced_constraint_uid|
+ referenced_constraint = database[referenced_constraint_uid] or
+ raise Error, "Can't find UID '#{referenced_constraint_uid}'"
+ ReferentialConstraint.new(table, name, columns, referenced_constraint)
+ referenced_constraint.table.send(:add_depending_table, table)
+ }
+
+ # Second pass: Build defining tables
+ views.each { |view, defining_relation_uids|
+ defining_relation_uids.each { |uid|
+ relation = database[uid]
+ relation.send(:add_depending_view, view)
+ view.send(:add_defining_relation, relation)
+ }
+ }
+
+ database
+ end
+ end
+ end
+
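Together with the connection-based loader above, this gives a dump/reload cycle that does not need a live database on the reloading side. A minimal sketch, assuming `PgConn.new`, `Database.load_conn` and `Database#to_h` behave as the code and comments above suggest, and using hypothetical database and file names:

```ruby
require 'yaml'
require 'pg_conn'   # assumed external dependency
require 'pg_meta'

# Capture metadata from a live database once...
conn = PgConn.new("mydb")                       # placeholder database name
db   = PgMeta::Database.load_conn(conn)         # runs the introspection queries above
File.write("mydb_meta.yml", db.to_h.to_yaml)    # #to_h per the ::load_yaml comment

# ...and rebuild the object graph later without a connection
yaml = YAML.load_file("mydb_meta.yml")          # symbol keys round-trip under Psych 3 (Ruby 2.7)
db2  = PgMeta::Database.load_yaml(yaml)
```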