@dbml/cli 3.7.0 → 3.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/__test__/db2dbml/mysql/options.json +1 -1
  2. package/lib/cli/connector.js +4 -2
  3. package/package.json +7 -3
  4. package/src/cli/connector.js +4 -2
  5. package/LICENSE +0 -202
  6. package/__test__/db2dbml/mssql/dbml-error.log +0 -467
  7. package/__test__/db2dbml/mysql/dbml-error.log +0 -281
  8. package/__test__/db2dbml/mysql/out-files/schema.dbml +0 -180
  9. package/__test__/db2dbml/postgres/dbml-error.log +0 -252
  10. package/__test__/db2dbml/postgres/out-files/schema.dbml +0 -140
  11. package/__test__/dbml2sql/filename --mysql --out-file/dbml-error.log +0 -0
  12. package/__test__/dbml2sql/filename --mysql --out-file/out-files/schema.sql +0 -65
  13. package/__test__/dbml2sql/filename --mysql stdout/dbml-error.log +0 -0
  14. package/__test__/dbml2sql/filename --oracle --out-file/dbml-error.log +0 -0
  15. package/__test__/dbml2sql/filename --oracle --out-file/out-files/schema.sql +0 -61
  16. package/__test__/dbml2sql/filename --oracle stdout/dbml-error.log +0 -0
  17. package/__test__/dbml2sql/filename --out-file/dbml-error.log +0 -0
  18. package/__test__/dbml2sql/filename --out-file/out-files/schema.sql +0 -77
  19. package/__test__/dbml2sql/filename --postgres --out-file/dbml-error.log +0 -0
  20. package/__test__/dbml2sql/filename --postgres --out-file/out-files/schema.sql +0 -77
  21. package/__test__/dbml2sql/filename --postgres stdout/dbml-error.log +0 -0
  22. package/__test__/dbml2sql/filename stdout/dbml-error.log +0 -0
  23. package/__test__/dbml2sql/filenames --mysql --out-file/dbml-error.log +0 -0
  24. package/__test__/dbml2sql/filenames --mysql --out-file/out-files/schema.sql +0 -172
  25. package/__test__/dbml2sql/filenames --mysql stdout/dbml-error.log +0 -0
  26. package/__test__/dbml2sql/filenames --oracle --out-file/dbml-error.log +0 -0
  27. package/__test__/dbml2sql/filenames --oracle --out-file/out-files/schema.sql +0 -172
  28. package/__test__/dbml2sql/filenames --oracle stdout/dbml-error.log +0 -0
  29. package/__test__/dbml2sql/filenames --out-file/dbml-error.log +0 -0
  30. package/__test__/dbml2sql/filenames --out-file/out-files/schema.sql +0 -172
  31. package/__test__/dbml2sql/filenames --postgres --out-file/dbml-error.log +0 -0
  32. package/__test__/dbml2sql/filenames --postgres --out-file/out-files/schema.sql +0 -172
  33. package/__test__/dbml2sql/filenames --postgres stdout/dbml-error.log +0 -0
  34. package/__test__/dbml2sql/filenames stdout/dbml-error.log +0 -0
  35. package/__test__/dbml2sql/multiple_schema_mssql/dbml-error.log +0 -0
  36. package/__test__/dbml2sql/multiple_schema_mssql/out-files/multiple_schema.out.sql +0 -62
  37. package/__test__/dbml2sql/multiple_schema_mysql/dbml-error.log +0 -0
  38. package/__test__/dbml2sql/multiple_schema_mysql/out-files/multiple_schema.out.sql +0 -50
  39. package/__test__/dbml2sql/multiple_schema_oracle/dbml-error.log +0 -0
  40. package/__test__/dbml2sql/multiple_schema_oracle/out-files/multiple_schema.out.sql +0 -88
  41. package/__test__/dbml2sql/multiple_schema_pg/dbml-error.log +0 -0
  42. package/__test__/dbml2sql/multiple_schema_pg/out-files/multiple_schema.out.sql +0 -67
  43. package/__test__/dbml2sql/syntax-error/dbml-error.log +0 -12
  44. package/__test__/sql2dbml/filename --mssql --out-file/dbml-error.log +0 -0
  45. package/__test__/sql2dbml/filename --mssql --out-file/out-files/schema.dbml +0 -25
  46. package/__test__/sql2dbml/filename --mysql --out-file/dbml-error.log +0 -0
  47. package/__test__/sql2dbml/filename --mysql --out-file/out-files/schema.dbml +0 -74
  48. package/__test__/sql2dbml/filename --mysql stdout/dbml-error.log +0 -0
  49. package/__test__/sql2dbml/filename --out-file/dbml-error.log +0 -0
  50. package/__test__/sql2dbml/filename --out-file/out-files/schema.dbml +0 -74
  51. package/__test__/sql2dbml/filename --postgres --out-file/dbml-error.log +0 -0
  52. package/__test__/sql2dbml/filename --postgres --out-file/out-files/schema.dbml +0 -74
  53. package/__test__/sql2dbml/filename --postgres stdout/dbml-error.log +0 -0
  54. package/__test__/sql2dbml/filename --snowflake stdout/dbml-error.log +0 -0
  55. package/__test__/sql2dbml/filename stdout/dbml-error.log +0 -0
  56. package/__test__/sql2dbml/filenames --mysql --out-file/dbml-error.log +0 -0
  57. package/__test__/sql2dbml/filenames --mysql --out-file/out-files/schema.dbml +0 -170
  58. package/__test__/sql2dbml/filenames --mysql stdout/dbml-error.log +0 -0
  59. package/__test__/sql2dbml/filenames --out-file/dbml-error.log +0 -0
  60. package/__test__/sql2dbml/filenames --out-file/out-files/schema.dbml +0 -170
  61. package/__test__/sql2dbml/filenames --postgres --out-file/dbml-error.log +0 -0
  62. package/__test__/sql2dbml/filenames --postgres --out-file/out-files/schema.dbml +0 -170
  63. package/__test__/sql2dbml/filenames --postgres stdout/dbml-error.log +0 -0
  64. package/__test__/sql2dbml/filenames stdout/dbml-error.log +0 -0
  65. package/__test__/sql2dbml/multiple_schema_mssql/dbml-error.log +0 -0
  66. package/__test__/sql2dbml/multiple_schema_mssql/out-files/multiple_schema.out.dbml +0 -106
  67. package/__test__/sql2dbml/multiple_schema_mysql/dbml-error.log +0 -0
  68. package/__test__/sql2dbml/multiple_schema_mysql/out-files/multiple_schema.out.dbml +0 -136
  69. package/__test__/sql2dbml/multiple_schema_pg/dbml-error.log +0 -0
  70. package/__test__/sql2dbml/multiple_schema_pg/out-files/multiple_schema.out.dbml +0 -101
  71. package/__test__/sql2dbml/syntax-error/dbml-error.log +0 -12
  72. package/__test__/sql2dbml/syntax-error-duplicate-endpoints --mssql/dbml-error.log +0 -12
  73. package/__test__/sql2dbml/syntax-error-duplicate-endpoints --mysql/dbml-error.log +0 -12
  74. package/dbml-error.log +0 -53
  75. package/lib/connectors/Connector.js +0 -19
  76. package/lib/connectors/MssqlConnector.js +0 -483
  77. package/lib/connectors/PostgresConnector.js +0 -450
package/lib/connectors/MssqlConnector.js
@@ -1,483 +0,0 @@
- "use strict";
-
- Object.defineProperty(exports, "__esModule", {
-   value: true
- });
- exports.fetchSchemaJson = void 0;
- var _mssql = _interopRequireDefault(require("mssql"));
- function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
- /* eslint-disable camelcase */
-
- const MSSQL_DATE_TYPES = ['date', 'datetime', 'datetime2', 'smalldatetime', 'datetimeoffset', 'time'];
- const connect = async connection => {
-   const options = connection.split(';').reduce((acc, option) => {
-     const [key, value] = option.split('=');
-     acc[key] = value;
-     return acc;
-   }, {});
-   const [host, port] = options['Data Source'].split(',');
-   const config = {
-     user: options['User ID'],
-     password: options.Password,
-     server: host,
-     database: options['Initial Catalog'],
-     options: {
-       encrypt: options.Encrypt === 'True',
-       trustServerCertificate: options['Trust Server Certificate'] === 'True',
-       port: port || 1433
-     }
-   };
-   try {
-     // Connect to the database using the connection string
-     const client = await _mssql.default.connect(config);
-     return client;
-   } catch (err) {
-     console.log('MSSQL connection error:', err);
-     return null;
-   }
- };
- const convertQueryBoolean = val => val === 'YES';
- const getFieldType = (data_type, default_type, character_maximum_length, numeric_precision, numeric_scale) => {
-   if (MSSQL_DATE_TYPES.includes(data_type)) {
-     return data_type;
-   }
-   if (data_type === 'bit') {
-     return data_type;
-   }
-   if (numeric_precision && numeric_scale && default_type === 'number') {
-     return `${data_type}(${numeric_precision},${numeric_scale})`;
-   }
-   if (character_maximum_length && character_maximum_length > 0 && default_type === 'string') {
-     return `${data_type}(${character_maximum_length})`;
-   }
-   return data_type;
- };
- const getDbdefault = (data_type, column_default, default_type) => {
-   // The regex below is used to extract the value from the default value
-   // \( and \) are used to escape parentheses
-   // [^()]+ is used to match any character except parentheses
-   // Example: (1) => 1, ('hello') => hello, getdate()-(1) => getdate()-1
-   const value = column_default.slice(1, -1).replace(/\(([^()]+)\)/g, '$1');
-   return {
-     type: default_type,
-     value: default_type === 'string' ? value.slice(1, -1) : value // Remove the quotes for string values
-   };
- };
-
- const getEnumValues = definition => {
-   // Use the example below to understand the regex:
-   // ([quantity]>(0))
-   // ([unit_price]>(0))
-   // ([status]='cancelled' OR [status]='delivered' OR [status]='shipped' OR [status]='processing' OR [status]='pending')
-   // ([total_amount]>(0))
-   // ([price]>(0))
-   // ([stock_quantity]>=(0))
-   // ([age_start]<=[age_end])
-   // ([age_start]<=[age_end])
-   // ([gender]='Other' OR [gender]='Female' OR [gender]='Male')
-   // ([date_of_birth]<=dateadd(year,(-13),getdate()))
-   // ([email] like '%_@_%._%')
-   if (!definition) return null;
-   const values = definition.match(/\[([^\]]+)\]='([^']+)'/g); // Extracting the enum values when the definition contains ]='
-   if (!values) return null;
-   const enumValues = values.map(value => {
-     const enumValue = value.split("]='")[1];
-     return {
-       name: enumValue.slice(0, -1)
-     };
-   });
-   return enumValues;
- };
- const generateField = row => {
-   const {
-     column_name,
-     data_type,
-     character_maximum_length,
-     numeric_precision,
-     numeric_scale,
-     identity_increment,
-     is_nullable,
-     column_default,
-     default_type,
-     column_comment
-   } = row;
-   const dbdefault = column_default && default_type !== 'increment' ? getDbdefault(data_type, column_default, default_type) : null;
-   const fieldType = {
-     type_name: getFieldType(data_type, default_type, character_maximum_length, numeric_precision, numeric_scale),
-     schemaname: null
-   };
-   return {
-     name: column_name,
-     type: fieldType,
-     dbdefault,
-     not_null: !convertQueryBoolean(is_nullable),
-     increment: !!identity_increment,
-     note: column_comment ? {
-       value: column_comment
-     } : {
-       value: ''
-     }
-   };
- };
- const generateTablesFieldsAndEnums = async client => {
-   const fields = {};
-   const enums = [];
-   const tablesAndFieldsSql = `
-     WITH tables_and_fields AS (
-       SELECT
-         s.name AS table_schema,
-         t.name AS table_name,
-         c.name AS column_name,
-         ty.name AS data_type,
-         c.max_length AS character_maximum_length,
-         c.precision AS numeric_precision,
-         c.scale AS numeric_scale,
-         c.is_identity AS identity_increment,
-         CASE
-           WHEN c.is_nullable = 1 THEN 'YES'
-           ELSE 'NO'
-         END AS is_nullable,
-         CASE
-           WHEN c.default_object_id = 0 THEN NULL
-           ELSE OBJECT_DEFINITION(c.default_object_id)
-         END AS column_default,
-         -- Fetching table comments
-         p.value AS table_comment,
-         ep.value AS column_comment
-       FROM
-         sys.tables t
-       JOIN
-         sys.schemas s ON t.schema_id = s.schema_id
-       JOIN
-         sys.columns c ON t.object_id = c.object_id
-       JOIN
-         sys.types ty ON c.user_type_id = ty.user_type_id
-       LEFT JOIN
-         sys.extended_properties p ON p.major_id = t.object_id
-         AND p.name = 'MS_Description'
-         AND p.minor_id = 0 -- Ensure minor_id is 0 for table comments
-       LEFT JOIN
-         sys.extended_properties ep ON ep.major_id = c.object_id
-         AND ep.minor_id = c.column_id
-         AND ep.name = 'MS_Description'
-       WHERE
-         t.type = 'U' -- User-defined tables
-     )
-     SELECT
-       tf.table_schema,
-       tf.table_name,
-       tf.column_name,
-       tf.data_type,
-       tf.character_maximum_length,
-       tf.numeric_precision,
-       tf.numeric_scale,
-       tf.identity_increment,
-       tf.is_nullable,
-       tf.column_default,
-       tf.table_comment,
-       tf.column_comment,
-       cc.name AS check_constraint_name, -- Adding CHECK constraint name
-       cc.definition AS check_constraint_definition, -- Adding CHECK constraint definition
-       CASE
-         WHEN tf.column_default LIKE '((%))' THEN 'number'
-         WHEN tf.column_default LIKE '(''%'')' THEN 'string'
-         ELSE 'expression'
-       END AS default_type
-     FROM
-       tables_and_fields AS tf
-     LEFT JOIN
-       sys.check_constraints cc ON cc.parent_object_id = OBJECT_ID(tf.table_schema + '.' + tf.table_name)
-       AND cc.definition LIKE '%' + tf.column_name + '%' -- Ensure the constraint references the column
-     ORDER BY
-       tf.table_schema,
-       tf.table_name,
-       tf.column_name;
-   `;
-   const tablesAndFieldsResult = await client.query(tablesAndFieldsSql);
-   const tables = tablesAndFieldsResult.recordset.reduce((acc, row) => {
-     const {
-       table_schema,
-       table_name,
-       table_comment,
-       check_constraint_name,
-       check_constraint_definition
-     } = row;
-     if (!acc[table_name]) {
-       acc[table_name] = {
-         name: table_name,
-         schemaName: table_schema,
-         note: table_comment ? {
-           value: table_comment
-         } : {
-           value: ''
-         }
-       };
-     }
-     const enumValues = getEnumValues(check_constraint_definition);
-     if (enumValues) {
-       enums.push({
-         name: check_constraint_name,
-         schemaName: table_schema,
-         values: enumValues
-       });
-     }
-     if (!fields[table_name]) fields[table_name] = [];
-     const field = generateField(row);
-     if (enumValues) {
-       field.type = {
-         type_name: check_constraint_name,
-         schemaName: table_schema
-       };
-     }
-     fields[table_name].push(field);
-     return acc;
-   }, {});
-   return {
-     tables: Object.values(tables),
-     fields,
-     enums
-   };
- };
- const generateRefs = async client => {
-   const refs = [];
-   const refsListSql = `
-     SELECT
-       s.name AS table_schema,
-       t.name AS table_name,
-       fk.name AS fk_constraint_name,
-       STUFF((
-         SELECT ',' + c1.name
-         FROM sys.foreign_key_columns AS fkc
-         JOIN sys.columns AS c1 ON fkc.parent_object_id = c1.object_id AND fkc.parent_column_id = c1.column_id
-         WHERE fkc.constraint_object_id = fk.object_id
-         FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '') AS column_names,
-       s2.name AS foreign_table_schema,
-       t2.name AS foreign_table_name,
-       STUFF((
-         SELECT ',' + c2.name
-         FROM sys.foreign_key_columns AS fkc
-         JOIN sys.columns AS c2 ON fkc.referenced_object_id = c2.object_id AND fkc.referenced_column_id = c2.column_id
-         WHERE fkc.constraint_object_id = fk.object_id
-         FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 1, '') AS foreign_column_names,
-       fk.type_desc AS constraint_type,
-       fk.delete_referential_action_desc AS on_delete,
-       fk.update_referential_action_desc AS on_update
-     FROM sys.foreign_keys AS fk
-     JOIN sys.tables AS t ON fk.parent_object_id = t.object_id
-     JOIN sys.schemas AS s ON t.schema_id = s.schema_id
-     JOIN sys.tables AS t2 ON fk.referenced_object_id = t2.object_id
-     JOIN sys.schemas AS s2 ON t2.schema_id = s2.schema_id
-     WHERE s.name NOT IN ('sys', 'information_schema')
-     ORDER BY
-       s.name,
-       t.name;
-   `;
-   const refsQueryResult = await client.query(refsListSql);
-   refsQueryResult.recordset.forEach(refRow => {
-     const {
-       table_schema,
-       fk_constraint_name,
-       table_name,
-       column_names,
-       foreign_table_schema,
-       foreign_table_name,
-       foreign_column_names,
-       on_delete,
-       on_update
-     } = refRow;
-     const ep1 = {
-       tableName: table_name,
-       schemaName: table_schema,
-       fieldNames: column_names.split(','),
-       relation: '*'
-     };
-     const ep2 = {
-       tableName: foreign_table_name,
-       schemaName: foreign_table_schema,
-       fieldNames: foreign_column_names.split(','),
-       relation: '1'
-     };
-     refs.push({
-       name: fk_constraint_name,
-       endpoints: [ep1, ep2],
-       onDelete: on_delete === 'NO_ACTION' ? null : on_delete,
-       onUpdate: on_update === 'NO_ACTION' ? null : on_update
-     });
-   });
-   return refs;
- };
- const generateIndexes = async client => {
-   const indexListSql = `
-     WITH user_tables AS (
-       SELECT
-         TABLE_NAME
-       FROM
-         INFORMATION_SCHEMA.TABLES
-       WHERE
-         TABLE_SCHEMA = 'dbo'
-         AND TABLE_TYPE = 'BASE TABLE' -- Ensure we are only getting base tables
-         AND TABLE_NAME NOT LIKE 'dt%'
-         AND TABLE_NAME NOT LIKE 'syscs%'
-         AND TABLE_NAME NOT LIKE 'sysss%'
-         AND TABLE_NAME NOT LIKE 'sysrs%'
-         AND TABLE_NAME NOT LIKE 'sysxlgc%'
-     ),
-     index_info AS (
-       SELECT
-         OBJECT_NAME(i.object_id) AS table_name,
-         i.name AS index_name,
-         i.is_unique,
-         CASE
-           WHEN i.type = 1 THEN 1
-           ELSE 0
-         END AS is_primary,
-         i.type_desc AS index_type,
-         STUFF((
-           SELECT
-             ', ' + c.name
-           FROM
-             sys.index_columns ic
-           JOIN sys.columns c ON ic.column_id = c.column_id AND ic.object_id = c.object_id
-           WHERE
-             ic.index_id = i.index_id
-             AND ic.object_id = i.object_id
-             AND OBJECT_NAME(ic.object_id) IN (SELECT TABLE_NAME FROM user_tables) -- Filter for user tables
-           ORDER BY
-             ic.key_ordinal
-           FOR XML PATH('')
-         ), 1, 2, '') AS columns,
-         CASE
-           WHEN i.type = 1 THEN 'PRIMARY KEY'
-           WHEN i.is_unique = 1 THEN 'UNIQUE'
-           ELSE NULL
-         END AS constraint_type
-       FROM
-         sys.indexes i
-       JOIN sys.tables t ON i.object_id = t.object_id
-       WHERE
-         t.is_ms_shipped = 0
-         AND i.type <> 0
-     )
-     SELECT
-       ut.TABLE_NAME AS table_name,
-       ii.index_name,
-       ii.is_unique,
-       ii.is_primary,
-       ii.index_type,
-       ii.columns,
-       ii.constraint_type
-     FROM
-       user_tables ut
-     LEFT JOIN
-       index_info ii ON ut.TABLE_NAME = ii.table_name
-     WHERE
-       ii.columns IS NOT NULL
-     ORDER BY
-       ut.TABLE_NAME,
-       ii.constraint_type,
-       ii.index_name;
-   `;
-   const indexListResult = await client.query(indexListSql);
-   const {
-     outOfLineConstraints,
-     inlineConstraints
-   } = indexListResult.recordset.reduce((acc, row) => {
-     const {
-       constraint_type,
-       columns
-     } = row;
-     if (columns === 'null' || columns.trim() === '') return acc;
-     if (constraint_type === 'PRIMARY KEY' || constraint_type === 'UNIQUE') {
-       acc.inlineConstraints.push(row);
-     } else {
-       acc.outOfLineConstraints.push(row);
-     }
-     return acc;
-   }, {
-     outOfLineConstraints: [],
-     inlineConstraints: []
-   });
-   const indexes = outOfLineConstraints.reduce((acc, indexRow) => {
-     const {
-       table_name,
-       index_name,
-       index_type,
-       columns,
-       expressions
-     } = indexRow;
-     const indexColumns = columns.split(',').map(column => {
-       return {
-         type: 'column',
-         value: column.trim()
-       };
-     });
-     const indexExpressions = expressions ? expressions.split(',').map(expression => {
-       return {
-         type: 'expression',
-         value: expression
-       };
-     }) : [];
-     const index = {
-       name: index_name,
-       type: index_type,
-       columns: [...indexColumns, ...indexExpressions]
-     };
-     if (acc[table_name]) {
-       acc[table_name].push(index);
-     } else {
-       acc[table_name] = [index];
-     }
-     return acc;
-   }, {});
-   const tableConstraints = inlineConstraints.reduce((acc, row) => {
-     const {
-       table_name,
-       columns,
-       constraint_type
-     } = row;
-     if (!acc[table_name]) acc[table_name] = {};
-     const columnNames = columns.split(',').map(column => column.trim());
-     columnNames.forEach(columnName => {
-       if (!acc[table_name][columnName]) acc[table_name][columnName] = {};
-       if (constraint_type === 'PRIMARY KEY') {
-         acc[table_name][columnName].pk = true;
-       }
-       if (constraint_type === 'UNIQUE' && !acc[table_name][columnName].pk) {
-         acc[table_name][columnName].unique = true;
-       }
-     });
-     return acc;
-   }, {});
-   return {
-     indexes,
-     tableConstraints
-   };
- };
- const fetchSchemaJson = async connection => {
-   const client = await connect(connection);
-   if (!client) throw new Error('Failed to connect to the database');
-   const tablesFieldsAndEnumsRes = generateTablesFieldsAndEnums(client);
-   const indexesRes = generateIndexes(client);
-   const refsRes = generateRefs(client);
-   const res = await Promise.all([tablesFieldsAndEnumsRes, indexesRes, refsRes]);
-   client.close();
-   const {
-     tables,
-     fields,
-     enums
-   } = res[0];
-   const {
-     indexes,
-     tableConstraints
-   } = res[1];
-   const refs = res[2];
-   return {
-     tables,
-     fields,
-     enums,
-     refs,
-     indexes,
-     tableConstraints
-   };
- };
- exports.fetchSchemaJson = fetchSchemaJson;
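
For readers skimming the removed connector, here is a minimal standalone sketch (not part of the package) that exercises its two extraction regexes against the sample inputs quoted in the file's own comments: the default-value unwrapping in getDbdefault and the enum extraction from CHECK constraint definitions in getEnumValues.

```js
// Default-value extraction: strip the outer parentheses, then unwrap any
// remaining single-level parentheses, as in the removed getDbdefault.
const stripDefault = (columnDefault) =>
  columnDefault.slice(1, -1).replace(/\(([^()]+)\)/g, '$1');

console.log(stripDefault("((1))"));           // "1"
console.log(stripDefault("('hello')"));       // "'hello'" (quotes removed later for string defaults)
console.log(stripDefault("(getdate()-(1))")); // "getdate()-1"

// Enum extraction from a CHECK constraint definition: collect every
// [column]='value' pair and keep only the quoted value, as in getEnumValues.
const getEnumValues = (definition) => {
  const values = definition.match(/\[([^\]]+)\]='([^']+)'/g);
  if (!values) return null;
  return values.map((v) => ({ name: v.split("]='")[1].slice(0, -1) }));
};

console.log(getEnumValues("([gender]='Other' OR [gender]='Female' OR [gender]='Male')"));
// [ { name: 'Other' }, { name: 'Female' }, { name: 'Male' } ]
```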