rake-db 2.4.42 → 2.4.44
This diff compares publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their public registries.
- package/dist/index.d.ts +22 -22
- package/dist/index.js +54 -53
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +40 -32
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import { ColumnsShape, Db as Db$1, ColumnType, EnumColumn, ColumnTypes, Adapter, DbResult, DefaultColumnTypes, TransactionAdapter, QueryLogObject, IndexColumnOptions, IndexOptions, ForeignKeyOptions, TextColumn, NoPrimaryKeyOption, TableData, SingleColumnIndexOptions, AdapterOptions, QueryLogOptions } from 'pqb';
 import { EmptyObject, RawExpression, ColumnTypesBase, raw, MaybeArray } from 'orchid-core';

-
+type CreateTableResult<Table extends string, Shape extends ColumnsShape> = {
 table: Db$1<Table, Shape>;
 };

@@ -12,12 +12,12 @@ declare function add(this: ColumnTypesBase, emptyObject: EmptyObject): EmptyObje
 declare function add(this: ColumnTypesBase, items: Record<string, ColumnType>, options?: {
 dropMode?: DropMode;
 }): Record<string, RakeDbAst.ChangeTableItem.Column>;
-
-
+type Change = RakeDbAst.ChangeTableItem.Change & ChangeOptions;
+type ChangeOptions = {
 usingUp?: RawExpression;
 usingDown?: RawExpression;
 };
-
+type TableChangeMethods = typeof tableChangeMethods;
 declare const tableChangeMethods: {
 add: typeof add;
 drop: typeof add;
@@ -29,43 +29,43 @@ declare const tableChangeMethods: {
 rename(name: string): RakeDbAst.ChangeTableItem.Rename;
 enum(this: ColumnTypesBase, name: string): EnumColumn<string, [string, ...string[]]>;
 };
-
-
+type TableChanger = MigrationColumnTypes & TableChangeMethods;
+type TableChangeData = Record<string, RakeDbAst.ChangeTableItem.Column | RakeDbAst.ChangeTableItem.Rename | Change | EmptyObject>;

-
-
+type DropMode = 'CASCADE' | 'RESTRICT';
+type TableOptions = {
 dropMode?: DropMode;
 comment?: string;
 noPrimaryKey?: boolean;
 snakeCase?: boolean;
 };
-
-
+type TextColumnCreator = () => TextColumn;
+type MigrationColumnTypes = Omit<ColumnTypes, 'text' | 'string' | 'enum'> & {
 text: TextColumnCreator;
 string: TextColumnCreator;
 citext: TextColumnCreator;
 enum: (name: string) => EnumColumn;
 };
-
+type ColumnsShapeCallback<Shape extends ColumnsShape = ColumnsShape> = (t: MigrationColumnTypes & {
 raw: typeof raw;
 }) => Shape;
-
+type ChangeTableOptions = {
 snakeCase?: boolean;
 comment?: string | [string, string] | null;
 };
-
-
+type ChangeTableCallback = (t: TableChanger) => TableChangeData;
+type ColumnComment = {
 column: string;
 comment: string | null;
 };
-
+type SilentQueries = {
 silentQuery: Adapter['query'];
 silentArrays: Adapter['arrays'];
 };
-
+type Migration = DbResult<DefaultColumnTypes> & MigrationBase & {
 adapter: SilentQueries;
 };
-
+type ConstraintArg = {
 name?: string;
 references?: [
 columns: [string, ...string[]],
@@ -124,7 +124,7 @@ declare class MigrationBase {
 constraintExists(constraintName: string): Promise<boolean>;
 }

-
+type RakeDbAst = RakeDbAst.Table | RakeDbAst.ChangeTable | RakeDbAst.RenameTable | RakeDbAst.Schema | RakeDbAst.Extension | RakeDbAst.Enum | RakeDbAst.Domain | RakeDbAst.Constraint | RakeDbAst.View;
 declare namespace RakeDbAst {
 type Table = {
 type: 'table';
@@ -258,8 +258,8 @@ declare namespace RakeDbAst {
 };
 }

-
-
+type Db = DbResult<DefaultColumnTypes>;
+type RakeDbConfig = {
 basePath: string;
 migrationsPath: string;
 migrationsTable: string;
@@ -274,7 +274,7 @@ declare type RakeDbConfig = {
 beforeRollback?(db: Db): Promise<void>;
 afterRollback?(db: Db): Promise<void>;
 } & QueryLogOptions;
-
+type AppCodeUpdater = (params: {
 ast: RakeDbAst;
 options: AdapterOptions;
 basePath: string;
@@ -290,7 +290,7 @@ declare const writeMigrationFile: (config: RakeDbConfig, version: string, name:
 declare const generate: (config: RakeDbConfig, args: string[]) => Promise<void>;
 declare const makeFileTimeStamp: () => string;

-
+type ChangeCallback = (db: Migration, up: boolean) => Promise<void>;
 declare const change: (fn: ChangeCallback) => void;

 declare const migrateOrRollback: (options: MaybeArray<AdapterOptions>, config: RakeDbConfig, args: string[], up: boolean) => Promise<void>;
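The declarations above describe the migration API these builds export. As a usage sketch only, grounded in the `ChangeCallback`, `Migration`, and `ColumnsShapeCallback` types shown here — `createTable` is declared on the migration object elsewhere in rake-db's typings, and the column helpers come from pqb, so treat the exact calls as assumptions:

```ts
// Hypothetical migration module written against the declarations above —
// not code shipped in this package.
import { change } from 'rake-db';

change(async (db, up) => {
  // `up` is true when migrating and false when rolling back
  // (compare the `up` argument of migrateOrRollback above).
  await db.createTable('example', (t) => ({
    id: t.integer().primaryKey(),
    name: t.text(), // TextColumnCreator takes no arguments in these typings
  }));
});
```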
package/dist/index.js
CHANGED
@@ -1,7 +1,5 @@
 'use strict';

-Object.defineProperty(exports, '__esModule', { value: true });
-
 var pqb = require('pqb');
 var orchidCore = require('orchid-core');
 var path = require('path');
@@ -9,10 +7,7 @@ var promises = require('fs/promises');
 var prompts = require('prompts');
 var url = require('url');

-function
-
-function _interopNamespace(e) {
-if (e && e.__esModule) return e;
+function _interopNamespaceDefault(e) {
 var n = Object.create(null);
 if (e) {
 Object.keys(e).forEach(function (k) {
@@ -25,13 +20,11 @@ function _interopNamespace(e) {
 }
 });
 }
-n
+n.default = e;
 return Object.freeze(n);
 }

-var
-var prompts__default = /*#__PURE__*/_interopDefaultLegacy(prompts);
-var url__namespace = /*#__PURE__*/_interopNamespace(url);
+var url__namespace = /*#__PURE__*/_interopNamespaceDefault(url);

 var __defProp$6 = Object.defineProperty;
 var __defProps$5 = Object.defineProperties;
@@ -53,11 +46,11 @@ var __spreadValues$6 = (a, b) => {
 };
 var __spreadProps$5 = (a, b) => __defProps$5(a, __getOwnPropDescs$5(b));
 const migrationConfigDefaults = {
-migrationsPath:
+migrationsPath: path.join("src", "db", "migrations"),
 migrationsTable: "schemaMigrations",
 snakeCase: false,
 commands: {},
-import: (path2) => (
+import: (path2) => import(path2),
 log: true,
 logger: console,
 useCodeUpdater: true
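The defaults hunk above shows the package's built-in configuration: migrations are read from src/db/migrations, applied versions are tracked in a "schemaMigrations" table, and migration files are now loaded with a plain dynamic `import()`. A sketch of overriding those keys — the object shape follows `migrationConfigDefaults` and the `RakeDbConfig` type from index.d.ts; how the object is passed to rake-db's entry function is not shown in this diff:

```ts
// Illustrative config override — keys mirror migrationConfigDefaults above.
const config = {
  migrationsPath: 'db/migrations', // default: path.join('src', 'db', 'migrations')
  migrationsTable: 'schemaMigrations',
  snakeCase: false,
  log: true,        // QueryLogOptions from pqb
  logger: console,
  useCodeUpdater: true,
};
```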
@@ -76,10 +69,10 @@ const processRakeDbConfig = (config) => {
 Error.prepareStackTrace = original;
 if (stack) {
 const thisFile = (_a = stack[0]) == null ? void 0 : _a.getFileName();
-const thisDir = thisFile &&
+const thisDir = thisFile && path.dirname(thisFile);
 for (const item of stack) {
 let file = item.getFileName();
-if (!file ||
+if (!file || path.dirname(file) === thisDir || /\bnode_modules\b/.test(file)) {
 continue;
 }
 if (/file:\/\/\/\w+:\//.test(file)) {
@@ -90,7 +83,7 @@ const processRakeDbConfig = (config) => {
 } catch (_) {
 }
 }
-result.basePath =
+result.basePath = path.dirname(file);
 break;
 }
 }
@@ -100,8 +93,8 @@ const processRakeDbConfig = (config) => {
 );
 }
 }
-if (!
-result.migrationsPath =
+if (!path.isAbsolute(result.migrationsPath)) {
+result.migrationsPath = path.resolve(
 result.basePath,
 result.migrationsPath
 );
@@ -140,7 +133,7 @@ const setAdapterOptions = (options, set) => {
 }
 };
 const setAdminCredentialsToOptions = async (options, create) => {
-const confirm = await
+const confirm = await prompts([
 {
 message: `Would you like to share admin credentials to ${create ? "create" : "drop"} a database`,
 type: "confirm",
@@ -151,7 +144,7 @@ const setAdminCredentialsToOptions = async (options, create) => {
 if (!confirm.confirm) {
 return;
 }
-const values = await
+const values = await prompts([
 {
 message: "Enter admin user:",
 type: "text",
@@ -235,7 +228,7 @@ const getMigrationFiles = async (config, up) => {
 );
 }
 return {
-path:
+path: path.resolve(migrationsPath, file),
 version: timestampMatch[1]
 };
 });
@@ -1756,7 +1749,7 @@ const execute = async (options, sql) => {
 }
 };
 const createOrDrop = async (options, adminOptions, config, args) => {
-var _a, _b, _c, _d;
+var _a, _b, _c, _d, _e, _f;
 const params = getDatabaseAndUserFromOptions(options);
 const result = await execute(
 setAdapterOptions(adminOptions, { database: "postgres" }),
@@ -1794,6 +1787,21 @@ Don't use this command for database service providers, only for a local db.`;
 if (!args.create)
 return;
 const db = new pqb.Adapter(options);
+const { schema } = db;
+if (schema) {
+db.schema = void 0;
+try {
+await db.query(`CREATE SCHEMA "${schema}"`);
+(_e = config.logger) == null ? void 0 : _e.log(`Created schema ${schema}`);
+} catch (err) {
+if (err.code === "42P06") {
+(_f = config.logger) == null ? void 0 : _f.log(`Schema ${schema} already exists`);
+} else {
+throw err;
+}
+}
+db.schema = schema;
+}
 await createSchemaMigrations(db, config);
 await db.close();
 };
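The `createOrDrop` hunk above is the main behavioral change in this range: when the connection options carry a schema, the create command now issues `CREATE SCHEMA "<schema>"` (with `db.schema` temporarily cleared so the statement runs against the database itself) and treats Postgres error code 42P06, duplicate_schema, as "already exists". The same pattern in isolation, as a sketch — the `ensureSchema` helper and its client shape are illustrative, not part of the package:

```ts
// Illustrative helper, not rake-db API: create a schema if it is missing,
// mirroring the error handling added in the hunk above.
async function ensureSchema(
  client: { query(sql: string): Promise<unknown> },
  schema: string,
): Promise<void> {
  try {
    await client.query(`CREATE SCHEMA "${schema}"`);
  } catch (err) {
    // 42P06 = duplicate_schema: the schema already exists, which is fine here;
    // any other error is rethrown.
    if ((err as { code?: string }).code !== '42P06') throw err;
  }
}
```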
@@ -1837,7 +1845,7 @@ const resetDb = async (arg, config) => {
 const writeMigrationFile = async (config, version, name, content) => {
 var _a;
 await promises.mkdir(config.migrationsPath, { recursive: true });
-const filePath =
+const filePath = path.resolve(config.migrationsPath, `${version}_${name}.ts`);
 await promises.writeFile(filePath, content);
 (_a = config.logger) == null ? void 0 : _a.log(`Created ${orchidCore.pathToLog(filePath)}`);
 };
@@ -1854,7 +1862,7 @@ const generate = async (config, args) => {
 );
 };
 const makeFileTimeStamp = () => {
-const now = new Date();
+const now = /* @__PURE__ */ new Date();
 return [
 now.getUTCFullYear(),
 now.getUTCMonth() + 1,
@@ -2007,7 +2015,12 @@ ORDER BY "name"`
 `SELECT
 nspname AS "schemaName",
 relname AS "name",
-obj_description(c.oid) AS comment
+obj_description(c.oid) AS comment,
+(SELECT coalesce(json_agg(t), '[]') FROM (${columnsSql({
+schema: "n",
+table: "c",
+where: "a.attrelid = c.oid"
+})}) t) AS "columns"
 FROM pg_class c
 JOIN pg_catalog.pg_namespace n ON n.oid = relnamespace
 WHERE relkind = 'r'
@@ -2069,17 +2082,6 @@ WHERE ${filterSchema("n.nspname")}`
 );
 return rows;
 }
-async getColumns() {
-const { rows } = await this.db.query(
-columnsSql({
-schema: "nc",
-table: "c",
-join: `JOIN pg_class c ON a.attrelid = c.oid AND c.relkind = 'r' JOIN pg_namespace nc ON nc.oid = c.relnamespace`,
-where: filterSchema("nc.nspname")
-})
-);
-return rows;
-}
 async getIndexes() {
 const { rows } = await this.db.query(
 `SELECT
@@ -2368,6 +2370,7 @@ const matchMap = {
 };
 const fkeyActionMap = {
 a: void 0,
+// default
 r: "RESTRICT",
 c: "CASCADE",
 n: "SET NULL",
@@ -2423,7 +2426,7 @@ const structureToAst = async (ctx, db) => {
 type: "extension",
 action: "create",
 name: it.name,
-schema: it.schemaName ===
+schema: it.schemaName === ctx.currentSchema ? void 0 : it.schemaName,
 version: it.version
 });
 }
@@ -2432,7 +2435,7 @@ const structureToAst = async (ctx, db) => {
 type: "enum",
 action: "create",
 name: it.name,
-schema: it.schemaName ===
+schema: it.schemaName === ctx.currentSchema ? void 0 : it.schemaName,
 values: it.values
 });
 }
@@ -2440,7 +2443,7 @@ const structureToAst = async (ctx, db) => {
 ast.push({
 type: "domain",
 action: "create",
-schema: it.schemaName ===
+schema: it.schemaName === ctx.currentSchema ? void 0 : it.schemaName,
 name: it.name,
 baseType: domains[`${it.schemaName}.${it.name}`],
 notNull: it.notNull,
@@ -2473,10 +2476,10 @@ const structureToAst = async (ctx, db) => {
 );
 }
 for (const [fkey, table] of outerConstraints) {
-ast.push(__spreadProps(__spreadValues({}, constraintToAst(fkey)), {
+ast.push(__spreadProps(__spreadValues({}, constraintToAst(ctx, fkey)), {
 type: "constraint",
 action: "create",
-tableSchema: table.schemaName ===
+tableSchema: table.schemaName === ctx.currentSchema ? void 0 : table.schemaName,
 tableName: fkey.tableName
 }));
 }
@@ -2490,7 +2493,6 @@ const getData = async (db) => {
 schemas,
 tables,
 views,
-columns,
 constraints,
 indexes,
 extensions,
@@ -2500,7 +2502,6 @@ const getData = async (db) => {
 db.getSchemas(),
 db.getTables(),
 db.getViews(),
-db.getColumns(),
 db.getConstraints(),
 db.getIndexes(),
 db.getExtensions(),
@@ -2511,7 +2512,6 @@ const getData = async (db) => {
 schemas,
 tables,
 views,
-columns,
 constraints,
 indexes,
 extensions,
@@ -2579,13 +2579,12 @@ const getColumnType = (type, isSerial) => {
 return type === "int2" ? "smallserial" : type === "int4" ? "serial" : "bigserial";
 };
 const pushTableAst = (ctx, ast, data, domains, table, pendingTables, innerConstraints = data.constraints) => {
-const { schemaName, name: tableName } = table;
+const { schemaName, name: tableName, columns } = table;
 const key = `${schemaName}.${table.name}`;
 delete pendingTables[key];
 if (tableName === "schemaMigrations")
 return;
 const belongsToTable = makeBelongsToTable(schemaName, tableName);
-const columns = data.columns.filter(belongsToTable);
 let primaryKey;
 for (const item of data.constraints) {
 if (belongsToTable(item) && item.primaryKey)
@@ -2600,7 +2599,7 @@ const pushTableAst = (ctx, ast, data, domains, table, pendingTables, innerConstr
 const constraint = {
 references: references ? {
 columns: references.columns,
-fnOrTable: getReferencesTable(references),
+fnOrTable: getReferencesTable(ctx, references),
 foreignColumns: references.foreignColumns,
 options: {
 match: matchMap[references.match],
@@ -2646,7 +2645,7 @@ const pushTableAst = (ctx, ast, data, domains, table, pendingTables, innerConstr
 ast.push({
 type: "table",
 action: "create",
-schema: schemaName ===
+schema: schemaName === ctx.currentSchema ? void 0 : schemaName,
 comment: table.comment,
 name: tableName,
 shape,
@@ -2685,7 +2684,7 @@ const pushTableAst = (ctx, ast, data, domains, table, pendingTables, innerConstr
 }
 }
 };
-const constraintToAst = (item) => {
+const constraintToAst = (ctx, item) => {
 var _a;
 const result = {};
 const { references, check } = item;
@@ -2693,7 +2692,7 @@ const constraintToAst = (item) => {
 const options = {};
 result.references = {
 columns: references.columns,
-fnOrTable: getReferencesTable(references),
+fnOrTable: getReferencesTable(ctx, references),
 foreignColumns: references.foreignColumns,
 options
 };
@@ -2718,8 +2717,8 @@ const constraintToAst = (item) => {
 }
 return result;
 };
-const getReferencesTable = (references) => {
-return references.foreignSchema !==
+const getReferencesTable = (ctx, references) => {
+return references.foreignSchema !== ctx.currentSchema ? `${references.foreignSchema}.${references.foreignTable}` : references.foreignTable;
 };
 const isColumnCheck = (it) => {
 var _a, _b;
@@ -2745,7 +2744,7 @@ const viewToAst = (ctx, data, domains, view) => {
 return {
 type: "view",
 action: "create",
-schema: view.schemaName ===
+schema: view.schemaName === ctx.currentSchema ? void 0 : view.schemaName,
 name: view.name,
 shape,
 sql: orchidCore.raw(view.sql),
@@ -3021,10 +3020,12 @@ const createView = (ast) => {
 const pullDbStructure = async (options, config) => {
 var _a, _b, _c;
 const adapter = new pqb.Adapter(options);
+const currentSchema = adapter.schema || "public";
 const db = new DbStructure(adapter);
 const ctx = {
 unsupportedTypes: {},
-snakeCase: config.snakeCase
+snakeCase: config.snakeCase,
+currentSchema
 };
 const ast = await structureToAst(ctx, db);
 await adapter.close();
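Taken together, the `ctx.currentSchema` changes above mean `db pull` now resolves schema names against the adapter's configured schema (falling back to "public") rather than assuming "public": objects in the current schema are emitted without an explicit schema, while everything else stays fully qualified. A minimal sketch of that rule — the type and function names here are illustrative, not the package's internals:

```ts
// Sketch of the schema-normalization rule applied throughout structureToAst above.
type Ctx = { currentSchema: string };

// Objects in the current schema get no explicit schema in the generated AST;
// objects elsewhere keep theirs.
const schemaForAst = (ctx: Ctx, schemaName: string): string | undefined =>
  schemaName === ctx.currentSchema ? undefined : schemaName;

// Foreign-key targets follow the same rule, folded into a "schema.table" string.
const referencedTable = (ctx: Ctx, schema: string, table: string): string =>
  schema === ctx.currentSchema ? table : `${schema}.${table}`;
```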