dynamic_migrations 1.0.0
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +13 -0
- data/CODE_OF_CONDUCT.md +84 -0
- data/LICENSE.txt +21 -0
- data/README.md +71 -0
- data/lib/dynamic_migrations/expected_boolean_error.rb +4 -0
- data/lib/dynamic_migrations/expected_integer_error.rb +4 -0
- data/lib/dynamic_migrations/expected_string_error.rb +4 -0
- data/lib/dynamic_migrations/expected_symbol_error.rb +4 -0
- data/lib/dynamic_migrations/invalid_source_error.rb +7 -0
- data/lib/dynamic_migrations/module_included_into_unexpected_target_error.rb +4 -0
- data/lib/dynamic_migrations/postgres/connections.rb +42 -0
- data/lib/dynamic_migrations/postgres/data_types.rb +273 -0
- data/lib/dynamic_migrations/postgres/server/database/configured_schemas.rb +55 -0
- data/lib/dynamic_migrations/postgres/server/database/connection.rb +39 -0
- data/lib/dynamic_migrations/postgres/server/database/differences.rb +292 -0
- data/lib/dynamic_migrations/postgres/server/database/keys_and_unique_constraints_loader.rb +149 -0
- data/lib/dynamic_migrations/postgres/server/database/loaded_schemas.rb +55 -0
- data/lib/dynamic_migrations/postgres/server/database/loaded_schemas_builder.rb +86 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/column.rb +84 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/columns.rb +58 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/foreign_key_constraint.rb +132 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/foreign_key_constraints.rb +62 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/index.rb +144 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/indexes.rb +63 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/primary_key.rb +83 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/unique_constraint.rb +101 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/unique_constraints.rb +59 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/validation.rb +90 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table/validations.rb +59 -0
- data/lib/dynamic_migrations/postgres/server/database/schema/table.rb +73 -0
- data/lib/dynamic_migrations/postgres/server/database/schema.rb +72 -0
- data/lib/dynamic_migrations/postgres/server/database/source.rb +37 -0
- data/lib/dynamic_migrations/postgres/server/database/structure_loader.rb +242 -0
- data/lib/dynamic_migrations/postgres/server/database/validations_loader.rb +81 -0
- data/lib/dynamic_migrations/postgres/server/database.rb +54 -0
- data/lib/dynamic_migrations/postgres/server.rb +33 -0
- data/lib/dynamic_migrations/postgres.rb +8 -0
- data/lib/dynamic_migrations/version.rb +5 -0
- data/lib/dynamic_migrations.rb +44 -0
- metadata +113 -0

data/lib/dynamic_migrations/postgres/server/database/schema/table/unique_constraints.rb

```diff
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module DynamicMigrations
+  module Postgres
+    class Server
+      class Database
+        class Schema
+          class Table < Source
+            # This module has all the tables methods for working with unique_constraints
+            module UniqueConstraints
+              class UniqueConstraintDoesNotExistError < StandardError
+              end
+
+              class UniqueConstraintAlreadyExistsError < StandardError
+              end
+
+              # returns the unique_constraint object for the provided unique_constraint name, and raises an
+              # error if the unique_constraint does not exist
+              def unique_constraint unique_constraint_name
+                raise ExpectedSymbolError, unique_constraint_name unless unique_constraint_name.is_a? Symbol
+                raise UniqueConstraintDoesNotExistError unless has_unique_constraint? unique_constraint_name
+                @unique_constraints[unique_constraint_name]
+              end
+
+              # returns true if this table has a unique_constraint with the provided name, otherwise false
+              def has_unique_constraint? unique_constraint_name
+                raise ExpectedSymbolError, unique_constraint_name unless unique_constraint_name.is_a? Symbol
+                @unique_constraints.key? unique_constraint_name
+              end
+
+              # returns an array of this tables unique_constraints
+              def unique_constraints
+                @unique_constraints.values
+              end
+
+              def unique_constraints_hash
+                @unique_constraints
+              end
+
+              # adds a new unique_constraint to this table, and returns it
+              def add_unique_constraint unique_constraint_name, column_names, **unique_constraint_options
+                if has_unique_constraint? unique_constraint_name
+                  raise(UniqueConstraintAlreadyExistsError, "unique_constraint #{unique_constraint_name} already exists")
+                end
+                columns = column_names.map { |column_name| column column_name }
+                included_target = self
+                if included_target.is_a? Table
+                  @unique_constraints[unique_constraint_name] = UniqueConstraint.new source, included_target, columns, unique_constraint_name, **unique_constraint_options
+                else
+                  raise ModuleIncludedIntoUnexpectedTargetError, included_target
+                end
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
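
The module above is mixed into Table and keys constraints by their Symbol names. A minimal usage sketch, assuming a `table` object with an `:email` column has already been built through this gem's schema API:

```ruby
# Hypothetical sketch: `table` and its :email column are assumed to have been
# built elsewhere via this gem's configured-schema API.
constraint = table.add_unique_constraint :users_email_uniq, [:email]

table.has_unique_constraint? :users_email_uniq # => true
table.unique_constraint :users_email_uniq      # => constraint
table.unique_constraints                       # => [constraint]

# adding the same name again raises UniqueConstraintAlreadyExistsError
```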

data/lib/dynamic_migrations/postgres/server/database/schema/table/validation.rb

```diff
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+module DynamicMigrations
+  module Postgres
+    class Server
+      class Database
+        class Schema
+          class Table
+            # This class represents a postgres table validation
+            class Validation < Source
+              class ExpectedTableError < StandardError
+              end
+
+              class ExpectedArrayOfColumnsError < StandardError
+              end
+
+              class DuplicateColumnError < StandardError
+              end
+
+              attr_reader :table
+              attr_reader :validation_name
+              attr_reader :check_clause
+              attr_reader :deferrable
+              attr_reader :initially_deferred
+
+              # initialize a new object to represent a validation in a postgres table
+              def initialize source, table, columns, validation_name, check_clause, deferrable: false, initially_deferred: false
+                super source
+                raise ExpectedTableError, table unless table.is_a? Table
+                @table = table
+
+                # assert that the provided columns is an array
+                unless columns.is_a?(Array) && columns.count > 0
+                  raise ExpectedArrayOfColumnsError
+                end
+
+                @columns = {}
+                columns.each do |column|
+                  add_column column
+                end
+
+                raise ExpectedSymbolError, validation_name unless validation_name.is_a? Symbol
+                @validation_name = validation_name
+
+                raise ExpectedStringError, check_clause unless check_clause.is_a? String
+                @check_clause = check_clause
+
+                raise ExpectedBooleanError, deferrable unless [true, false].include?(deferrable)
+                @deferrable = deferrable
+
+                raise ExpectedBooleanError, initially_deferred unless [true, false].include?(initially_deferred)
+                @initially_deferred = initially_deferred
+              end
+
+              # return an array of this validations columns
+              def columns
+                @columns.values
+              end
+
+              def column_names
+                @columns.keys
+              end
+
+              private
+
+              # used internally to set the columns from this objects initialize method
+              def add_column column
+                # assert that the provided dsl name is an array of Columns
+                unless column.is_a? Column
+                  raise ExpectedArrayOfColumnsError
+                end
+
+                # assert that the provided column exists within this validations table
+                unless @table.has_column? column.column_name
+                  raise ExpectedArrayOfColumnsError, "One or more columns do not exist in this validations table"
+                end
+
+                if @columns.key? column.column_name
+                  raise(DuplicateColumnError, "Column #{column.column_name} already exists")
+                end
+
+                @columns[column.column_name] = column
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
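
Validation objects are normally created for you by Table#add_validation (see validations.rb below), but the initializer above can also be called directly. A hypothetical sketch, assuming `table` already exists with a `:price` column; Table#column comes from the Columns mixin (columns.rb), which is not shown in this diff:

```ruby
# Hypothetical sketch: `table` is assumed to exist and to have a :price column.
validation = DynamicMigrations::Postgres::Server::Database::Schema::Table::Validation.new(
  :configuration,          # source, must be :configuration or :database
  table,                   # the Table the validation belongs to
  [table.column(:price)],  # a non-empty array of Column objects
  :price_is_positive,      # validation name (Symbol)
  "price > 0",             # CHECK clause (String)
  deferrable: false,
  initially_deferred: false
)

validation.column_names # => [:price]
validation.check_clause # => "price > 0"
```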

data/lib/dynamic_migrations/postgres/server/database/schema/table/validations.rb

```diff
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module DynamicMigrations
+  module Postgres
+    class Server
+      class Database
+        class Schema
+          class Table < Source
+            # This module has all the tables methods for working with validations
+            module Validations
+              class ValidationDoesNotExistError < StandardError
+              end
+
+              class ValidationAlreadyExistsError < StandardError
+              end
+
+              # returns the validation object for the provided validation name, and raises an
+              # error if the validation does not exist
+              def validation validation_name
+                raise ExpectedSymbolError, validation_name unless validation_name.is_a? Symbol
+                raise ValidationDoesNotExistError unless has_validation? validation_name
+                @validations[validation_name]
+              end
+
+              # returns true if this table has a validation with the provided name, otherwise false
+              def has_validation? validation_name
+                raise ExpectedSymbolError, validation_name unless validation_name.is_a? Symbol
+                @validations.key? validation_name
+              end
+
+              # returns an array of this tables validations
+              def validations
+                @validations.values
+              end
+
+              def validations_hash
+                @validations
+              end
+
+              # adds a new validation to this table, and returns it
+              def add_validation validation_name, column_names, check_clause, **validation_options
+                if has_validation? validation_name
+                  raise(ValidationAlreadyExistsError, "Validation #{validation_name} already exists")
+                end
+                columns = column_names.map { |column_name| column column_name }
+                included_target = self
+                if included_target.is_a? Table
+                  @validations[validation_name] = Validation.new source, included_target, columns, validation_name, check_clause, **validation_options
+                else
+                  raise ModuleIncludedIntoUnexpectedTargetError, included_target
+                end
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
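
In practice this mixin is the usual entry point: `add_validation` resolves the named columns on the table and builds the Validation itself. A short sketch under the same assumptions as before:

```ruby
# Hypothetical sketch: `table` with a :price column is assumed to exist.
table.add_validation :price_is_positive, [:price], "price > 0"

table.has_validation? :price_is_positive # => true
table.validation :price_is_positive      # => the Validation built above
table.validation :no_such_validation     # raises ValidationDoesNotExistError
```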

data/lib/dynamic_migrations/postgres/server/database/schema/table.rb

```diff
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module DynamicMigrations
+  module Postgres
+    class Server
+      class Database
+        class Schema
+          # This class represents a postgres table.
+          class Table < Source
+            class ExpectedSchemaError < StandardError
+            end
+
+            class PrimaryKeyDoesNotExistError < StandardError
+            end
+
+            include Columns
+            include Validations
+            include Indexes
+            include ForeignKeyConstraints
+            include UniqueConstraints
+
+            attr_reader :schema
+            attr_reader :table_name
+            attr_reader :description
+
+            # initialize a new object to represent a postgres table
+            def initialize source, schema, table_name, description = nil
+              super source
+              raise ExpectedSchemaError, schema unless schema.is_a? Schema
+              raise ExpectedSymbolError, table_name unless table_name.is_a? Symbol
+              unless description.nil?
+                raise ExpectedStringError, description unless description.is_a? String
+                @description = description
+              end
+              @schema = schema
+              @table_name = table_name
+              @columns = {}
+              @validations = {}
+              @indexes = {}
+              @foreign_key_constraints = {}
+              @unique_constraints = {}
+            end
+
+            # returns true if this table has a description, otehrwise false
+            def has_description?
+              !@description.nil?
+            end
+
+            # add a primary key to this table
+            def add_primary_key primary_key_name, column_names, **primary_key_options
+              raise PrimaryKeyAlreadyExistsError if @primary_key
+              columns = column_names.map { |column_name| column column_name }
+              @primary_key = PrimaryKey.new source, self, columns, primary_key_name, **primary_key_options
+            end
+
+            # returns true if this table has a primary key, otherwise false
+            def has_primary_key?
+              !@primary_key.nil?
+            end
+
+            # returns a primary key if one exists, else raises an error
+            def primary_key
+              unless @primary_key
+                raise PrimaryKeyDoesNotExistError
+              end
+              @primary_key
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
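
Table combines the column, validation, index and constraint mixins and adds a single primary key on top. A hypothetical sketch, assuming `schema` exists; the `add_column` call mirrors how recursively_build_schemas_from_database (structure_loader.rb, below) uses it, with the keyword options omitted:

```ruby
# Hypothetical sketch: `schema` is assumed to have been built elsewhere.
table = schema.add_table :users, "application users"

table.has_description? # => true
table.has_primary_key? # => false

table.add_column :id, :bigint
table.add_primary_key :users_pkey, [:id]

table.has_primary_key? # => true
table.primary_key      # => the PrimaryKey object
```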

data/lib/dynamic_migrations/postgres/server/database/schema.rb

```diff
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module DynamicMigrations
+  module Postgres
+    class Server
+      class Database
+        # This class represents a postgres schema. A schema is essentially a namespace within
+        # a postgres database. Each schema is a collection of tables, functions and other
+        # database objects.
+        class Schema < Source
+          class ExpectedDatabaseError < StandardError
+          end
+
+          class TableAlreadyExistsError < StandardError
+          end
+
+          class TableDoesNotExistError < StandardError
+          end
+
+          attr_reader :database
+          attr_reader :schema_name
+
+          # initialize a new object to represent a postgres schema
+          def initialize source, database, schema_name
+            super source
+            raise ExpectedDatabaseError, database unless database.is_a? Database
+            raise ExpectedSymbolError, schema_name unless schema_name.is_a? Symbol
+            @database = database
+            @schema_name = schema_name
+            @tables = {}
+          end
+
+          # create and add a new table from a provided table name
+          def add_table table_name, description = nil
+            raise ExpectedSymbolError, table_name unless table_name.is_a? Symbol
+            if has_table? table_name
+              raise(TableAlreadyExistsError, "Table #{table_name} already exists")
+            end
+            included_target = self
+            if included_target.is_a? Schema
+              @tables[table_name] = Table.new source, included_target, table_name, description
+            else
+              raise ModuleIncludedIntoUnexpectedTargetError, included_target
+            end
+          end
+
+          # return a table by its name, raises an error if the table does not exist
+          def table table_name
+            raise ExpectedSymbolError, table_name unless table_name.is_a? Symbol
+            raise TableDoesNotExistError unless has_table? table_name
+            @tables[table_name]
+          end
+
+          # returns true/false representing if a table with the provided name exists
+          def has_table? table_name
+            raise ExpectedSymbolError, table_name unless table_name.is_a? Symbol
+            @tables.key? table_name
+          end
+
+          # returns an array of all tables in the schema
+          def tables
+            @tables.values
+          end
+
+          def tables_hash
+            @tables
+          end
+        end
+      end
+    end
+  end
+end
```
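
Schema applies the same rules to tables: names must be Symbols and duplicates raise. A hypothetical sketch, assuming a `database` object from the surrounding Server/Database classes (not shown in this section):

```ruby
# Hypothetical sketch: `database` is assumed to come from the Server/Database
# classes elsewhere in this gem.
schema = DynamicMigrations::Postgres::Server::Database::Schema.new :configuration, database, :public

schema.add_table :users
schema.has_table? :users # => true
schema.table :users      # => the Table object
schema.tables            # => array containing that Table

schema.add_table "users" # raises ExpectedSymbolError (table names must be Symbols)
```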

data/lib/dynamic_migrations/postgres/server/database/source.rb

```diff
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module DynamicMigrations
+  module Postgres
+    class Server
+      class Database
+        class Source
+          attr_reader :source
+
+          # initialize a new object to represent a postgres schema
+          def initialize source
+            unless source == :configuration || source == :database
+              raise InvalidSourceError, source
+            end
+            @source = source
+          end
+
+          def from_configuration?
+            @source == :configuration
+          end
+
+          def from_database?
+            @source == :database
+          end
+
+          def assert_is_a_symbol! value
+            if value.is_a? Symbol
+              true
+            else
+              raise ExpectedSymbolError, "expected Symbol but got #{value}"
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
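
Source is the shared base class: every schema, table and constraint above records whether it was defined from configuration or loaded from the actual database, which is presumably what differences.rb compares. A minimal sketch of that contract:

```ruby
# Minimal sketch of the Source contract (the schema, table and constraint
# classes above all inherit from it).
source = DynamicMigrations::Postgres::Server::Database::Source.new :configuration
source.from_configuration? # => true
source.from_database?      # => false

DynamicMigrations::Postgres::Server::Database::Source.new :elsewhere
# => raises InvalidSourceError
```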

data/lib/dynamic_migrations/postgres/server/database/structure_loader.rb

```diff
@@ -0,0 +1,242 @@
+# frozen_string_literal: true
+
+module DynamicMigrations
+  module Postgres
+    class Server
+      class Database
+        module StructureLoader
+          def create_database_structure_cache
+            connection.exec(<<~SQL)
+              CREATE MATERIALIZED VIEW public.dynamic_migrations_structure_cache as
+              SELECT
+                -- Name of the schema containing the table
+                schemata.schema_name,
+                -- Name of the table
+                tables.table_name,
+                -- The comment which has been added to the table (if any)
+                table_description.description as table_description,
+                -- Name of the column
+                columns.column_name,
+                -- The comment which has been added to the column (if any)
+                column_description.description as column_description,
+                -- Default expression of the column
+                columns.column_default,
+                -- YES if the column is possibly nullable, NO if
+                -- it is known not nullable
+                columns.is_nullable,
+                -- Data type of the column, if it is a built-in type,
+                -- or ARRAY if it is some array (in that case, see the
+                -- view element_types), else USER-DEFINED (in that case,
+                -- the type is identified in udt_name and associated
+                -- columns). If the column is based on a domain, this
+                -- column refers to the type underlying the domain (and
+                -- the domain is identified in domain_name and associated
+                -- columns).
+                columns.data_type,
+                -- If data_type identifies a character or bit string type,
+                -- the declared maximum length; null for all other data
+                -- types or if no maximum length was declared.
+                columns.character_maximum_length,
+                -- If data_type identifies a character type, the maximum
+                -- possible length in octets (bytes) of a datum; null for
+                -- all other data types. The maximum octet length depends
+                -- on the declared character maximum length (see above)
+                -- and the server encoding.
+                columns.character_octet_length,
+                -- If data_type identifies a numeric type, this column
+                -- contains the (declared or implicit) precision of the type
+                -- for this column. The precision indicates the number of
+                -- significant digits. It can be expressed in decimal (base 10)
+                -- or binary (base 2) terms, as specified in the column
+                -- numeric_precision_radix. For all other data types, this
+                -- column is null.
+                columns.numeric_precision,
+                -- If data_type identifies a numeric type, this column indicates
+                -- in which base the values in the columns numeric_precision and
+                -- numeric_scale are expressed. The value is either 2 or 10. For
+                -- all other data types, this column is null.
+                columns.numeric_precision_radix,
+                -- If data_type identifies an exact numeric type, this column
+                -- contains the (declared or implicit) scale of the type for this
+                -- column. The scale indicates the number of significant digits to
+                -- the right of the decimal point. It can be expressed in decimal
+                -- (base 10) or binary (base 2) terms, as specified in the column
+                -- numeric_precision_radix. For all other data types, this column
+                -- is null.
+                columns.numeric_scale,
+                -- If data_type identifies a date, time, timestamp, or interval
+                -- type, this column contains the (declared or implicit) fractional
+                -- seconds precision of the type for this column, that is, the
+                -- number of decimal digits maintained following the decimal point
+                -- in the seconds value. For all other data types, this column is
+                -- null.
+                columns.datetime_precision,
+                -- If data_type identifies an interval type, this column contains
+                -- the specification which fields the intervals include for this
+                -- column, e.g., YEAR TO MONTH, DAY TO SECOND, etc. If no field
+                -- restrictions were specified (that is, the interval accepts all
+                -- fields), and for all other data types, this field is null.
+                columns.interval_type,
+                -- Name of the schema that the column data type (the underlying
+                --type of the domain, if applicable) is defined in
+                columns.udt_schema,
+                -- Name of the column data type (the underlying type of the domain,
+                -- if applicable)
+                columns.udt_name,
+                -- YES if the column is updatable, NO if not (Columns in base tables
+                -- are always updatable, columns in views not necessarily)
+                columns.is_updatable
+              FROM information_schema.schemata
+              LEFT JOIN information_schema.tables ON schemata.schema_name = tables.table_schema AND left(tables.table_name, 3) != 'pg_'
+              LEFT JOIN information_schema.columns ON tables.table_name = columns.table_name
+              -- required for the column and table description/comment joins
+              LEFT JOIN pg_catalog.pg_statio_all_tables ON pg_statio_all_tables.schemaname = schemata.schema_name AND pg_statio_all_tables.relname = tables.table_name
+              -- required for the table description/comment
+              LEFT JOIN pg_catalog.pg_description table_description ON table_description.objoid = pg_statio_all_tables.relid AND table_description.objsubid = 0
+              -- required for the column description/comment
+              LEFT JOIN pg_catalog.pg_description column_description ON column_description.objoid = pg_statio_all_tables.relid AND column_description.objsubid = columns.ordinal_position
+              WHERE schemata.schema_name != 'information_schema'
+                AND schemata.schema_name != 'postgis'
+                AND left(schemata.schema_name, 3) != 'pg_'
+              -- order by the schema and table names alphabetically, then by the column position in the table
+              ORDER BY schemata.schema_name, tables.table_schema, columns.ordinal_position
+            SQL
+            connection.exec(<<~SQL)
+              COMMENT ON MATERIALIZED VIEW public.dynamic_migrations_structure_cache IS 'A cached representation of the database structure. This is used by the dynamic migrations library and is created automatically and updated automatically after migrations have run.';
+            SQL
+          end
+
+          # fetch all columns from the database and build and return a
+          # useful hash representing the structure of your database
+          def fetch_structure
+            begin
+              rows = connection.exec_params(<<~SQL)
+                SELECT * FROM public.dynamic_migrations_structure_cache
+              SQL
+            rescue PG::UndefinedTable
+              create_database_structure_cache
+              rows = connection.exec_params(<<~SQL)
+                SELECT * FROM public.dynamic_migrations_structure_cache
+              SQL
+            end
+
+            schemas = {}
+            rows.each do |row|
+              schema_name = row["schema_name"].to_sym
+              schema = schemas[schema_name] ||= {
+                tables: {}
+              }
+
+              unless row["table_name"].nil?
+                table_name = row["table_name"].to_sym
+                table = schema[:tables][table_name] ||= {
+                  description: row["table_description"],
+                  columns: {}
+                }
+
+                unless row["column_name"].nil?
+                  column_name = row["column_name"].to_sym
+                  column = table[:columns][column_name] ||= {}
+
+                  column[:data_type] = row["data_type"].to_sym
+                  column[:null] = row["is_nullable"] == "YES"
+                  column[:default] = row["column_default"]
+                  column[:description] = row["column_description"]
+                  column[:character_maximum_length] = row["character_maximum_length"].nil? ? nil : row["character_maximum_length"].to_i
+                  column[:character_octet_length] = row["character_octet_length"].nil? ? nil : row["character_octet_length"].to_i
+                  column[:numeric_precision] = row["numeric_precision"].nil? ? nil : row["numeric_precision"].to_i
+                  column[:numeric_precision_radix] = row["numeric_precision_radix"].nil? ? nil : row["numeric_precision_radix"].to_i
+                  column[:numeric_scale] = row["numeric_scale"].nil? ? nil : row["numeric_scale"].to_i
+                  column[:datetime_precision] = row["datetime_precision"].nil? ? nil : row["datetime_precision"].to_i
+                  column[:interval_type] = row["interval_type"].nil? ? nil : row["interval_type"].to_sym
+                  column[:udt_schema] = row["udt_schema"].to_sym
+                  column[:udt_name] = row["udt_name"].to_sym
+                  column[:updatable] = row["is_updatable"] == "YES"
+                end
+              end
+            end
+            schemas
+          end
+
+          # recursively process the database and build all the schemas,
+          # tables and columns
+          def recursively_build_schemas_from_database
+            fetch_structure.each do |schema_name, schema_definition|
+              schema = add_loaded_schema schema_name
+
+              schema_definition[:tables].each do |table_name, table_definition|
+                table = schema.add_table table_name, table_definition[:description]
+
+                table_definition[:columns].each do |column_name, column_definition|
+                  # we only need these for arrays and user-defined types
+                  # (user-defined is usually ENUMS)
+                  if [:ARRAY, :"USER-DEFINED"].include? column_definition[:data_type]
+                    udt_schema = column_definition[:udt_schema]
+                    udt_name = column_definition[:udt_name]
+                  else
+                    udt_schema = nil
+                    udt_name = nil
+                  end
+
+                  table.add_column column_name, column_definition[:data_type],
+                    null: column_definition[:null],
+                    default: column_definition[:default],
+                    description: column_definition[:description],
+                    character_maximum_length: column_definition[:character_maximum_length],
+                    character_octet_length: column_definition[:character_octet_length],
+                    numeric_precision: column_definition[:numeric_precision],
+                    numeric_precision_radix: column_definition[:numeric_precision_radix],
+                    numeric_scale: column_definition[:numeric_scale],
+                    datetime_precision: column_definition[:datetime_precision],
+                    udt_schema: udt_schema,
+                    udt_name: udt_name,
+                    updatable: column_definition[:updatable]
+                end
+              end
+            end
+          end
+
+          # returns a list of the schema names in this database
+          def fetch_schema_names
+            rows = connection.exec(<<-SQL)
+              SELECT schema_name
+              FROM information_schema.schemata;
+            SQL
+            schema_names = rows.map { |row| row["schema_name"] }
+            schema_names.reject! { |schema_name| schema_name == "information_schema" }
+            schema_names.reject! { |schema_name| schema_name == "public" }
+            schema_names.reject! { |schema_name| schema_name.start_with? "pg_" }
+            schema_names.sort
+          end
+
+          # returns a list of the table names in the provided schema
+          def fetch_table_names schema_name
+            rows = connection.exec_params(<<-SQL, [schema_name.to_s])
+              SELECT table_name FROM information_schema.tables
+              WHERE table_schema = $1
+            SQL
+            table_names = rows.map { |row| row["table_name"] }
+            table_names.reject! { |table_name| table_name.start_with? "pg_" }
+            table_names.sort
+          end
+
+          # returns a list of columns definitions for the provided table
+          def fetch_columns schema_name, table_name
+            rows = connection.exec_params(<<-SQL, [schema_name.to_s, table_name.to_s])
+              SELECT column_name, is_nullable, data_type, character_octet_length, column_default, numeric_precision, numeric_precision_radix, numeric_scale, udt_schema, udt_name
+              FROM information_schema.columns
+              WHERE table_schema = $1
+              AND table_name = $2;
+            SQL
+            rows.map do |row|
+              {
+                column_name: row["column_name"].to_sym,
+                type: row["data_type"].to_sym
+              }
+            end
+          end
+        end
+      end
+    end
+  end
+end
```
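
For reference, the hash returned by fetch_structure (and consumed by recursively_build_schemas_from_database) has roughly this shape; the schema, table and values shown are illustrative only:

```ruby
# Illustrative shape of the fetch_structure return value (example values only).
{
  my_schema: {
    tables: {
      users: {
        description: "application users",
        columns: {
          id: {
            data_type: :bigint,
            null: false,
            default: nil,
            description: nil,
            character_maximum_length: nil,
            character_octet_length: nil,
            numeric_precision: 64,
            numeric_precision_radix: 2,
            numeric_scale: 0,
            datetime_precision: nil,
            interval_type: nil,
            udt_schema: :pg_catalog,
            udt_name: :int8,
            updatable: true
          }
        }
      }
    }
  }
}
```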