@dbml/cli 3.7.0 → 3.7.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/__test__/db2dbml/mysql/options.json +1 -1
- package/lib/cli/connector.js +4 -2
- package/package.json +7 -3
- package/src/cli/connector.js +4 -2
- package/LICENSE +0 -202
- package/__test__/db2dbml/mssql/dbml-error.log +0 -467
- package/__test__/db2dbml/mysql/dbml-error.log +0 -281
- package/__test__/db2dbml/mysql/out-files/schema.dbml +0 -180
- package/__test__/db2dbml/postgres/dbml-error.log +0 -252
- package/__test__/db2dbml/postgres/out-files/schema.dbml +0 -140
- package/__test__/dbml2sql/filename --mysql --out-file/dbml-error.log +0 -0
- package/__test__/dbml2sql/filename --mysql --out-file/out-files/schema.sql +0 -65
- package/__test__/dbml2sql/filename --mysql stdout/dbml-error.log +0 -0
- package/__test__/dbml2sql/filename --oracle --out-file/dbml-error.log +0 -0
- package/__test__/dbml2sql/filename --oracle --out-file/out-files/schema.sql +0 -61
- package/__test__/dbml2sql/filename --oracle stdout/dbml-error.log +0 -0
- package/__test__/dbml2sql/filename --out-file/dbml-error.log +0 -0
- package/__test__/dbml2sql/filename --out-file/out-files/schema.sql +0 -77
- package/__test__/dbml2sql/filename --postgres --out-file/dbml-error.log +0 -0
- package/__test__/dbml2sql/filename --postgres --out-file/out-files/schema.sql +0 -77
- package/__test__/dbml2sql/filename --postgres stdout/dbml-error.log +0 -0
- package/__test__/dbml2sql/filename stdout/dbml-error.log +0 -0
- package/__test__/dbml2sql/filenames --mysql --out-file/dbml-error.log +0 -0
- package/__test__/dbml2sql/filenames --mysql --out-file/out-files/schema.sql +0 -172
- package/__test__/dbml2sql/filenames --mysql stdout/dbml-error.log +0 -0
- package/__test__/dbml2sql/filenames --oracle --out-file/dbml-error.log +0 -0
- package/__test__/dbml2sql/filenames --oracle --out-file/out-files/schema.sql +0 -172
- package/__test__/dbml2sql/filenames --oracle stdout/dbml-error.log +0 -0
- package/__test__/dbml2sql/filenames --out-file/dbml-error.log +0 -0
- package/__test__/dbml2sql/filenames --out-file/out-files/schema.sql +0 -172
- package/__test__/dbml2sql/filenames --postgres --out-file/dbml-error.log +0 -0
- package/__test__/dbml2sql/filenames --postgres --out-file/out-files/schema.sql +0 -172
- package/__test__/dbml2sql/filenames --postgres stdout/dbml-error.log +0 -0
- package/__test__/dbml2sql/filenames stdout/dbml-error.log +0 -0
- package/__test__/dbml2sql/multiple_schema_mssql/dbml-error.log +0 -0
- package/__test__/dbml2sql/multiple_schema_mssql/out-files/multiple_schema.out.sql +0 -62
- package/__test__/dbml2sql/multiple_schema_mysql/dbml-error.log +0 -0
- package/__test__/dbml2sql/multiple_schema_mysql/out-files/multiple_schema.out.sql +0 -50
- package/__test__/dbml2sql/multiple_schema_oracle/dbml-error.log +0 -0
- package/__test__/dbml2sql/multiple_schema_oracle/out-files/multiple_schema.out.sql +0 -88
- package/__test__/dbml2sql/multiple_schema_pg/dbml-error.log +0 -0
- package/__test__/dbml2sql/multiple_schema_pg/out-files/multiple_schema.out.sql +0 -67
- package/__test__/dbml2sql/syntax-error/dbml-error.log +0 -12
- package/__test__/sql2dbml/filename --mssql --out-file/dbml-error.log +0 -0
- package/__test__/sql2dbml/filename --mssql --out-file/out-files/schema.dbml +0 -25
- package/__test__/sql2dbml/filename --mysql --out-file/dbml-error.log +0 -0
- package/__test__/sql2dbml/filename --mysql --out-file/out-files/schema.dbml +0 -74
- package/__test__/sql2dbml/filename --mysql stdout/dbml-error.log +0 -0
- package/__test__/sql2dbml/filename --out-file/dbml-error.log +0 -0
- package/__test__/sql2dbml/filename --out-file/out-files/schema.dbml +0 -74
- package/__test__/sql2dbml/filename --postgres --out-file/dbml-error.log +0 -0
- package/__test__/sql2dbml/filename --postgres --out-file/out-files/schema.dbml +0 -74
- package/__test__/sql2dbml/filename --postgres stdout/dbml-error.log +0 -0
- package/__test__/sql2dbml/filename --snowflake stdout/dbml-error.log +0 -0
- package/__test__/sql2dbml/filename stdout/dbml-error.log +0 -0
- package/__test__/sql2dbml/filenames --mysql --out-file/dbml-error.log +0 -0
- package/__test__/sql2dbml/filenames --mysql --out-file/out-files/schema.dbml +0 -170
- package/__test__/sql2dbml/filenames --mysql stdout/dbml-error.log +0 -0
- package/__test__/sql2dbml/filenames --out-file/dbml-error.log +0 -0
- package/__test__/sql2dbml/filenames --out-file/out-files/schema.dbml +0 -170
- package/__test__/sql2dbml/filenames --postgres --out-file/dbml-error.log +0 -0
- package/__test__/sql2dbml/filenames --postgres --out-file/out-files/schema.dbml +0 -170
- package/__test__/sql2dbml/filenames --postgres stdout/dbml-error.log +0 -0
- package/__test__/sql2dbml/filenames stdout/dbml-error.log +0 -0
- package/__test__/sql2dbml/multiple_schema_mssql/dbml-error.log +0 -0
- package/__test__/sql2dbml/multiple_schema_mssql/out-files/multiple_schema.out.dbml +0 -106
- package/__test__/sql2dbml/multiple_schema_mysql/dbml-error.log +0 -0
- package/__test__/sql2dbml/multiple_schema_mysql/out-files/multiple_schema.out.dbml +0 -136
- package/__test__/sql2dbml/multiple_schema_pg/dbml-error.log +0 -0
- package/__test__/sql2dbml/multiple_schema_pg/out-files/multiple_schema.out.dbml +0 -101
- package/__test__/sql2dbml/syntax-error/dbml-error.log +0 -12
- package/__test__/sql2dbml/syntax-error-duplicate-endpoints --mssql/dbml-error.log +0 -12
- package/__test__/sql2dbml/syntax-error-duplicate-endpoints --mysql/dbml-error.log +0 -12
- package/dbml-error.log +0 -53
- package/lib/connectors/Connector.js +0 -19
- package/lib/connectors/MssqlConnector.js +0 -483
- package/lib/connectors/PostgresConnector.js +0 -450
@@ -1,450 +0,0 @@
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.fetchSchemaJson = void 0;
-var _pg = require("pg");
-/* eslint-disable camelcase */
-
-const connectPg = async connection => {
-  const client = new _pg.Client(connection);
-  // bearer:disable javascript_lang_logger
-  client.on('error', err => console.log('PG connection error:', err));
-  await client.connect();
-  return client;
-};
-const convertQueryBoolean = val => val === 'YES';
-const getFieldType = (data_type, udt_name, character_maximum_length, numeric_precision, numeric_scale) => {
-  if (data_type === 'ARRAY') {
-    return `${udt_name.slice(1, udt_name.length)}[]`;
-  }
-  if (character_maximum_length) {
-    return `${udt_name}(${character_maximum_length})`;
-  }
-  if (numeric_precision && numeric_scale) {
-    return `${udt_name}(${numeric_precision},${numeric_scale})`;
-  }
-  return udt_name;
-};
-const getDbdefault = (data_type, column_default, default_type) => {
-  if (data_type === 'ARRAY') {
-    const values = column_default.slice(6, -1).split(',').map(value => {
-      return value.split('::')[0];
-    });
-    return {
-      type: default_type,
-      value: `ARRAY[${values.join(', ')}]`
-    };
-  }
-  if (default_type === 'string') {
-    const defaultValues = column_default.split('::')[0];
-    const isJson = data_type === 'json' || data_type === 'jsonb';
-    const type = isJson ? 'expression' : 'string';
-    return {
-      type,
-      value: defaultValues.slice(1, -1)
-    };
-  }
-  return {
-    type: default_type,
-    value: column_default
-  };
-};
-const generateField = row => {
-  const {
-    column_name,
-    data_type,
-    character_maximum_length,
-    numeric_precision,
-    numeric_scale,
-    udt_schema,
-    udt_name,
-    identity_increment,
-    is_nullable,
-    column_default,
-    default_type,
-    column_comment
-  } = row;
-  const dbdefault = column_default && default_type !== 'increment' ? getDbdefault(data_type, column_default, default_type) : null;
-  const fieldType = data_type === 'USER-DEFINED' ? {
-    type_name: udt_name,
-    schemaName: udt_schema
-  } : {
-    type_name: getFieldType(data_type, udt_name, character_maximum_length, numeric_precision, numeric_scale),
-    schemaname: null
-  };
-  return {
-    name: column_name,
-    type: fieldType,
-    dbdefault,
-    not_null: !convertQueryBoolean(is_nullable),
-    increment: !!identity_increment || default_type === 'increment',
-    note: column_comment ? {
-      value: column_comment
-    } : {
-      value: ''
-    }
-  };
-};
-const generateTablesAndFields = async client => {
-  const fields = {};
-  const tablesAndFieldsSql = `
-    WITH comments AS (
-      SELECT
-        pc.relname AS table_name,
-        pn.nspname AS table_schema,
-        pa.attname AS column_name,
-        pd.description
-      FROM
-        pg_description pd
-      JOIN
-        pg_class pc ON pd.objoid = pc.oid
-      JOIN
-        pg_namespace pn ON pc.relnamespace = pn.oid
-      LEFT JOIN
-        pg_attribute pa ON pd.objoid = pa.attrelid AND pd.objsubid = pa.attnum
-      WHERE
-        pc.relkind = 'r'
-        AND pn.nspname NOT IN ('pg_catalog', 'information_schema')
-    )
-    SELECT
-      t.table_schema,
-      t.table_name,
-      c.column_name,
-      c.data_type,
-      c.character_maximum_length,
-      c.numeric_precision,
-      c.numeric_scale,
-      c.udt_schema,
-      c.udt_name,
-      c.identity_increment,
-      c.is_nullable,
-      c.column_default,
-      CASE
-        WHEN c.column_default IS NULL THEN NULL
-        WHEN c.column_default LIKE 'nextval(%' THEN 'increment'
-        WHEN c.column_default LIKE '''%' THEN 'string'
-        WHEN c.column_default = 'true' OR c.column_default = 'false' THEN 'boolean'
-        WHEN c.column_default ~ '^-?[0-9]+(.[0-9]+)?$' THEN 'number'
-        ELSE 'expression'
-      END AS default_type,
-      (SELECT description FROM comments WHERE table_name = t.table_name AND table_schema = t.table_schema AND column_name IS NULL) AS table_comment,
-      (SELECT description FROM comments WHERE table_name = t.table_name AND table_schema = t.table_schema AND column_name = c.column_name) AS column_comment
-    FROM
-      information_schema.columns c
-    JOIN
-      information_schema.tables t ON c.table_name = t.table_name AND c.table_schema = t.table_schema
-    WHERE
-      t.table_type = 'BASE TABLE'
-      AND t.table_schema NOT IN ('pg_catalog', 'information_schema')
-    ORDER BY
-      t.table_schema,
-      t.table_name,
-      c.ordinal_position
-    ;
-  `;
-  const tablesAndFieldsResult = await client.query(tablesAndFieldsSql);
-  const tables = tablesAndFieldsResult.rows.reduce((acc, row) => {
-    const {
-      table_schema,
-      table_name,
-      table_comment
-    } = row;
-    if (!acc[table_name]) {
-      acc[table_name] = {
-        name: table_name,
-        schemaName: table_schema,
-        note: table_comment ? {
-          value: table_comment
-        } : {
-          value: ''
-        }
-      };
-    }
-    if (!fields[table_name]) fields[table_name] = [];
-    const field = generateField(row);
-    fields[table_name].push(field);
-    return acc;
-  }, {});
-  return {
-    tables: Object.values(tables),
-    fields
-  };
-};
-const generateRawRefs = async client => {
-  const refs = [];
-  const refsListSql = `
-    SELECT
-      tc.table_schema,
-      tc.table_name,
-      tc.constraint_name as fk_constraint_name,
-      STRING_AGG(DISTINCT kcu.column_name, ',') AS column_names,
-      ccu.table_schema AS foreign_table_schema,
-      ccu.table_name AS foreign_table_name,
-      STRING_AGG(DISTINCT ccu.column_name, ',') AS foreign_column_names,
-      tc.constraint_type,
-      rc.delete_rule AS on_delete,
-      rc.update_rule AS on_update
-    FROM information_schema.table_constraints AS tc
-    JOIN information_schema.key_column_usage AS kcu
-      ON tc.constraint_name = kcu.constraint_name
-      AND tc.table_schema = kcu.table_schema
-    JOIN information_schema.constraint_column_usage AS ccu
-      ON ccu.constraint_name = tc.constraint_name
-    JOIN information_schema.referential_constraints AS rc
-      ON tc.constraint_name = rc.constraint_name
-      AND tc.table_schema = rc.constraint_schema
-    WHERE tc.constraint_type = 'FOREIGN KEY'
-      AND tc.table_schema NOT IN ('pg_catalog', 'information_schema')
-    GROUP BY
-      tc.table_schema,
-      tc.table_name,
-      tc.constraint_name,
-      ccu.table_schema,
-      ccu.table_name,
-      tc.constraint_type,
-      rc.delete_rule,
-      rc.update_rule
-    ORDER BY
-      tc.table_schema,
-      tc.table_name;
-  `;
-  const refsQueryResult = await client.query(refsListSql);
-  refsQueryResult.rows.forEach(refRow => {
-    const {
-      table_schema,
-      fk_constraint_name,
-      table_name,
-      column_names,
-      foreign_table_schema,
-      foreign_table_name,
-      foreign_column_names,
-      on_delete,
-      on_update
-    } = refRow;
-    const ep1 = {
-      tableName: table_name,
-      schemaName: table_schema,
-      fieldNames: column_names.split(','),
-      relation: '*'
-    };
-    const ep2 = {
-      tableName: foreign_table_name,
-      schemaName: foreign_table_schema,
-      fieldNames: foreign_column_names.split(','),
-      relation: '1'
-    };
-    refs.push({
-      name: fk_constraint_name,
-      endpoints: [ep1, ep2],
-      onDelete: on_delete === 'NO ACTION' ? null : on_delete,
-      onUpdate: on_update === 'NO ACTION' ? null : on_update
-    });
-  });
-  return refs;
-};
-const generateIndexes = async client => {
-  // const tableConstraints = {};
-  const indexListSql = `
-    WITH user_tables AS (
-      SELECT tablename
-      FROM pg_tables
-      WHERE schemaname NOT IN ('pg_catalog', 'information_schema') -- Exclude system schemas
-        AND tablename NOT LIKE 'pg_%' -- Exclude PostgreSQL system tables
-        AND tablename NOT LIKE 'sql_%' -- Exclude SQL standard tables
-    ),
-    index_info AS (
-      SELECT
-        t.relname AS table_name,
-        i.relname AS index_name,
-        ix.indisunique AS is_unique,
-        ix.indisprimary AS is_primary,
-        am.amname AS index_type,
-        array_to_string(array_agg(a.attname ORDER BY x.n), ', ') AS columns,
-        pg_get_expr(ix.indexprs, ix.indrelid) AS expressions,
-        CASE
-          WHEN ix.indisprimary THEN 'PRIMARY KEY'
-          WHEN ix.indisunique THEN 'UNIQUE'
-          ELSE NULL
-        END AS constraint_type
-      FROM
-        pg_class t
-      JOIN pg_index ix ON t.oid = ix.indrelid
-      JOIN pg_class i ON i.oid = ix.indexrelid
-      LEFT JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
-      JOIN pg_am am ON i.relam = am.oid
-      LEFT JOIN generate_subscripts(ix.indkey, 1) AS x(n) ON a.attnum = ix.indkey[x.n]
-      WHERE
-        t.relkind = 'r'
-        AND t.relname NOT LIKE 'pg_%'
-        AND t.relname NOT LIKE 'sql_%'
-      GROUP BY
-        t.relname, i.relname, ix.indisunique, ix.indisprimary, am.amname, ix.indexprs, ix.indrelid
-    )
-    SELECT
-      ut.tablename AS table_name,
-      ii.index_name,
-      ii.is_unique,
-      ii.is_primary,
-      ii.index_type,
-      ii.columns,
-      ii.expressions,
-      ii.constraint_type -- Added constraint type
-    FROM
-      user_tables ut
-    LEFT JOIN
-      index_info ii ON ut.tablename = ii.table_name
-    WHERE ii.columns IS NOT NULL
-    ORDER BY
-      ut.tablename,
-      ii.constraint_type,
-      ii.index_name
-    ;
-  `;
-  const indexListResult = await client.query(indexListSql);
-  const {
-    outOfLineConstraints,
-    inlineConstraints
-  } = indexListResult.rows.reduce((acc, row) => {
-    const {
-      constraint_type,
-      columns
-    } = row;
-    if (columns === 'null' || columns.trim() === '') return acc;
-    const isSingleColumn = columns.split(',').length === 1;
-    const isInlineConstraint = isSingleColumn && (constraint_type === 'PRIMARY KEY' || constraint_type === 'UNIQUE');
-    if (isInlineConstraint) {
-      acc.inlineConstraints.push(row);
-    } else {
-      acc.outOfLineConstraints.push(row);
-    }
-    return acc;
-  }, {
-    outOfLineConstraints: [],
-    inlineConstraints: []
-  });
-  const indexes = outOfLineConstraints.reduce((acc, indexRow) => {
-    const {
-      table_name,
-      index_name,
-      index_type,
-      columns,
-      expressions
-    } = indexRow;
-    const indexColumns = columns.split(',').map(column => {
-      return {
-        type: 'column',
-        value: column.trim()
-      };
-    });
-    const indexExpressions = expressions ? expressions.split(',').map(expression => {
-      return {
-        type: 'expression',
-        value: expression
-      };
-    }) : [];
-    const index = {
-      name: index_name,
-      type: index_type,
-      columns: [...indexColumns, ...indexExpressions]
-    };
-    if (acc[table_name]) {
-      acc[table_name].push(index);
-    } else {
-      acc[table_name] = [index];
-    }
-    return acc;
-  }, {});
-  const tableConstraints = inlineConstraints.reduce((acc, row) => {
-    const {
-      table_name,
-      columns,
-      constraint_type
-    } = row;
-    if (!acc[table_name]) acc[table_name] = {};
-    const columnNames = columns.split(',').map(column => column.trim());
-    columnNames.forEach(columnName => {
-      if (!acc[table_name][columnName]) acc[table_name][columnName] = {};
-      if (constraint_type === 'PRIMARY KEY') {
-        acc[table_name][columnName].pk = true;
-      }
-      if (constraint_type === 'UNIQUE' && !acc[table_name][columnName].pk) {
-        acc[table_name][columnName].unique = true;
-      }
-    });
-    return acc;
-  }, {});
-  return {
-    indexes,
-    tableConstraints
-  };
-};
-const generateRawEnums = async client => {
-  const enumListSql = `
-    SELECT
-      n.nspname AS schema_name,
-      t.typname AS enum_type,
-      e.enumlabel AS enum_value,
-      e.enumsortorder AS sort_order
-    FROM
-      pg_enum e
-    JOIN
-      pg_type t ON e.enumtypid = t.oid
-    JOIN
-      pg_namespace n ON t.typnamespace = n.oid
-    ORDER BY
-      schema_name,
-      enum_type,
-      sort_order;
-    ;
-  `;
-  const enumListResult = await client.query(enumListSql);
-  const enums = enumListResult.rows.reduce((acc, row) => {
-    const {
-      schema_name,
-      enum_type,
-      enum_value
-    } = row;
-    if (!acc[enum_type]) {
-      acc[enum_type] = {
-        name: enum_type,
-        schemaName: schema_name,
-        values: []
-      };
-    }
-    acc[enum_type].values.push({
-      name: enum_value
-    });
-    return acc;
-  }, {});
-  return Object.values(enums);
-};
-const fetchSchemaJson = async connection => {
-  const client = await connectPg(connection);
-  const tablesAndFieldsRes = generateTablesAndFields(client);
-  const indexesRes = generateIndexes(client);
-  const refsRes = generateRawRefs(client);
-  const enumsRes = generateRawEnums(client);
-  const res = await Promise.all([tablesAndFieldsRes, indexesRes, refsRes, enumsRes]);
-  client.end();
-  const {
-    tables,
-    fields
-  } = res[0];
-  const {
-    indexes,
-    tableConstraints
-  } = res[1];
-  const refs = res[2];
-  const enums = res[3];
-  return {
-    tables,
-    fields,
-    refs,
-    enums,
-    indexes,
-    tableConstraints
-  };
-};
-exports.fetchSchemaJson = fetchSchemaJson;
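
For context only (not part of the published diff): a minimal sketch of how the removed connector's fetchSchemaJson export could have been called via a deep import into the pre-3.7.2 package layout. The connection values below are hypothetical placeholders for a local PostgreSQL instance.

// Illustrative sketch — deep import of the file removed in this release.
const { fetchSchemaJson } = require('@dbml/cli/lib/connectors/PostgresConnector');

// Hypothetical connection settings; any options accepted by pg.Client work here,
// since the connector passes this object straight to `new Client(connection)`.
const connection = {
  host: 'localhost',
  port: 5432,
  user: 'postgres',
  password: 'postgres',
  database: 'mydb',
};

// Resolves to { tables, fields, refs, enums, indexes, tableConstraints }.
fetchSchemaJson(connection)
  .then(schema => console.log(JSON.stringify(schema, null, 2)))
  .catch(err => console.error('schema fetch failed:', err));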