rake-db 2.4.43 → 2.4.44
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +38 -31
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +38 -31
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -1749,7 +1749,7 @@ const execute = async (options, sql) => {
   }
 };
 const createOrDrop = async (options, adminOptions, config, args) => {
-  var _a, _b, _c, _d;
+  var _a, _b, _c, _d, _e, _f;
   const params = getDatabaseAndUserFromOptions(options);
   const result = await execute(
     setAdapterOptions(adminOptions, { database: "postgres" }),
@@ -1787,6 +1787,21 @@ Don't use this command for database service providers, only for a local db.`;
   if (!args.create)
     return;
   const db = new pqb.Adapter(options);
+  const { schema } = db;
+  if (schema) {
+    db.schema = void 0;
+    try {
+      await db.query(`CREATE SCHEMA "${schema}"`);
+      (_e = config.logger) == null ? void 0 : _e.log(`Created schema ${schema}`);
+    } catch (err) {
+      if (err.code === "42P06") {
+        (_f = config.logger) == null ? void 0 : _f.log(`Schema ${schema} already exists`);
+      } else {
+        throw err;
+      }
+    }
+    db.schema = schema;
+  }
   await createSchemaMigrations(db, config);
   await db.close();
 };
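Note: the block added above makes the create command also create a non-default schema (when the connection options specify one) before the schemaMigrations table is set up, treating Postgres error code 42P06 (duplicate_schema) as "already exists". A minimal standalone sketch of the same pattern, not taken from the package, using node-postgres with a placeholder connection string:

import { Client } from "pg";

// Create the schema if it is missing; error code 42P06 (duplicate_schema)
// means it already exists, which is acceptable for an idempotent setup step.
const ensureSchema = async (connectionString: string, schema: string) => {
  const client = new Client({ connectionString });
  await client.connect();
  try {
    await client.query(`CREATE SCHEMA "${schema}"`);
    console.log(`Created schema ${schema}`);
  } catch (err) {
    if ((err as { code?: string }).code === "42P06") {
      console.log(`Schema ${schema} already exists`);
    } else {
      throw err;
    }
  } finally {
    await client.end();
  }
};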
@@ -2000,7 +2015,12 @@ ORDER BY "name"`
       `SELECT
 nspname AS "schemaName",
 relname AS "name",
-obj_description(c.oid) AS comment
+obj_description(c.oid) AS comment,
+(SELECT coalesce(json_agg(t), '[]') FROM (${columnsSql({
+        schema: "n",
+        table: "c",
+        where: "a.attrelid = c.oid"
+      })}) t) AS "columns"
 FROM pg_class c
 JOIN pg_catalog.pg_namespace n ON n.oid = relnamespace
 WHERE relkind = 'r'
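Note: with this change the tables query returns each table's columns inline via a correlated json_agg subquery, which is what lets the separate getColumns() query (removed in the next hunk) go away. A rough, illustrative sketch of the resulting SQL shape and row type; columnsSql is assumed to expand to a per-column SELECT over pg_attribute, and the column list below is simplified:

// Illustrative row type for the pulled table metadata after this change.
type PulledTable = {
  schemaName: string;
  name: string;
  comment: string | null;
  columns: { name: string }[]; // JSON-aggregated per-table column info
};

// json_agg folds the matching pg_attribute rows into one JSON array per table,
// so a second round trip for columns is no longer needed.
const tablesSql = `
SELECT
  nspname AS "schemaName",
  relname AS "name",
  obj_description(c.oid) AS comment,
  (SELECT coalesce(json_agg(t), '[]') FROM (
    SELECT attname AS "name"
    FROM pg_attribute a
    WHERE a.attrelid = c.oid AND a.attnum > 0 AND NOT a.attisdropped
  ) t) AS "columns"
FROM pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = relnamespace
WHERE relkind = 'r'`;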
@@ -2062,17 +2082,6 @@ WHERE ${filterSchema("n.nspname")}`
     );
     return rows;
   }
-  async getColumns() {
-    const { rows } = await this.db.query(
-      columnsSql({
-        schema: "nc",
-        table: "c",
-        join: `JOIN pg_class c ON a.attrelid = c.oid AND c.relkind = 'r' JOIN pg_namespace nc ON nc.oid = c.relnamespace`,
-        where: filterSchema("nc.nspname")
-      })
-    );
-    return rows;
-  }
   async getIndexes() {
     const { rows } = await this.db.query(
       `SELECT
@@ -2417,7 +2426,7 @@ const structureToAst = async (ctx, db) => {
       type: "extension",
       action: "create",
       name: it.name,
-      schema: it.schemaName === "public" ? void 0 : it.schemaName,
+      schema: it.schemaName === ctx.currentSchema ? void 0 : it.schemaName,
       version: it.version
     });
   }
@@ -2426,7 +2435,7 @@ const structureToAst = async (ctx, db) => {
       type: "enum",
       action: "create",
       name: it.name,
-      schema: it.schemaName === "public" ? void 0 : it.schemaName,
+      schema: it.schemaName === ctx.currentSchema ? void 0 : it.schemaName,
       values: it.values
     });
   }
@@ -2434,7 +2443,7 @@ const structureToAst = async (ctx, db) => {
     ast.push({
       type: "domain",
       action: "create",
-      schema: it.schemaName === "public" ? void 0 : it.schemaName,
+      schema: it.schemaName === ctx.currentSchema ? void 0 : it.schemaName,
       name: it.name,
       baseType: domains[`${it.schemaName}.${it.name}`],
       notNull: it.notNull,
@@ -2467,10 +2476,10 @@ const structureToAst = async (ctx, db) => {
     );
   }
   for (const [fkey, table] of outerConstraints) {
-    ast.push(__spreadProps(__spreadValues({}, constraintToAst(fkey)), {
+    ast.push(__spreadProps(__spreadValues({}, constraintToAst(ctx, fkey)), {
       type: "constraint",
       action: "create",
-      tableSchema: table.schemaName === "public" ? void 0 : table.schemaName,
+      tableSchema: table.schemaName === ctx.currentSchema ? void 0 : table.schemaName,
       tableName: fkey.tableName
     }));
   }
@@ -2484,7 +2493,6 @@ const getData = async (db) => {
     schemas,
     tables,
     views,
-    columns,
     constraints,
     indexes,
     extensions,
@@ -2494,7 +2502,6 @@ const getData = async (db) => {
     db.getSchemas(),
     db.getTables(),
     db.getViews(),
-    db.getColumns(),
     db.getConstraints(),
     db.getIndexes(),
     db.getExtensions(),
@@ -2505,7 +2512,6 @@ const getData = async (db) => {
     schemas,
     tables,
     views,
-    columns,
     constraints,
     indexes,
     extensions,
@@ -2573,13 +2579,12 @@ const getColumnType = (type, isSerial) => {
   return type === "int2" ? "smallserial" : type === "int4" ? "serial" : "bigserial";
 };
 const pushTableAst = (ctx, ast, data, domains, table, pendingTables, innerConstraints = data.constraints) => {
-  const { schemaName, name: tableName } = table;
+  const { schemaName, name: tableName, columns } = table;
   const key = `${schemaName}.${table.name}`;
   delete pendingTables[key];
   if (tableName === "schemaMigrations")
     return;
   const belongsToTable = makeBelongsToTable(schemaName, tableName);
-  const columns = data.columns.filter(belongsToTable);
   let primaryKey;
   for (const item of data.constraints) {
     if (belongsToTable(item) && item.primaryKey)
@@ -2594,7 +2599,7 @@ const pushTableAst = (ctx, ast, data, domains, table, pendingTables, innerConstr
       const constraint = {
         references: references ? {
           columns: references.columns,
-          fnOrTable: getReferencesTable(references),
+          fnOrTable: getReferencesTable(ctx, references),
           foreignColumns: references.foreignColumns,
           options: {
             match: matchMap[references.match],
@@ -2640,7 +2645,7 @@ const pushTableAst = (ctx, ast, data, domains, table, pendingTables, innerConstr
   ast.push({
     type: "table",
     action: "create",
-    schema: schemaName === "public" ? void 0 : schemaName,
+    schema: schemaName === ctx.currentSchema ? void 0 : schemaName,
     comment: table.comment,
     name: tableName,
     shape,
@@ -2679,7 +2684,7 @@ const pushTableAst = (ctx, ast, data, domains, table, pendingTables, innerConstr
     }
   }
 };
-const constraintToAst = (item) => {
+const constraintToAst = (ctx, item) => {
   var _a;
   const result = {};
   const { references, check } = item;
@@ -2687,7 +2692,7 @@ const constraintToAst = (item) => {
     const options = {};
     result.references = {
       columns: references.columns,
-      fnOrTable: getReferencesTable(references),
+      fnOrTable: getReferencesTable(ctx, references),
       foreignColumns: references.foreignColumns,
       options
     };
@@ -2712,8 +2717,8 @@ const constraintToAst = (item) => {
   }
   return result;
 };
-const getReferencesTable = (references) => {
-  return references.foreignSchema !== "public" ? `${references.foreignSchema}.${references.foreignTable}` : references.foreignTable;
+const getReferencesTable = (ctx, references) => {
+  return references.foreignSchema !== ctx.currentSchema ? `${references.foreignSchema}.${references.foreignTable}` : references.foreignTable;
 };
 const isColumnCheck = (it) => {
   var _a, _b;
@@ -2739,7 +2744,7 @@ const viewToAst = (ctx, data, domains, view) => {
   return {
     type: "view",
     action: "create",
-    schema: view.schemaName === "public" ? void 0 : view.schemaName,
+    schema: view.schemaName === ctx.currentSchema ? void 0 : view.schemaName,
     name: view.name,
     shape,
     sql: orchidCore.raw(view.sql),
@@ -3015,10 +3020,12 @@ const createView = (ast) => {
 const pullDbStructure = async (options, config) => {
   var _a, _b, _c;
   const adapter = new pqb.Adapter(options);
+  const currentSchema = adapter.schema || "public";
   const db = new DbStructure(adapter);
   const ctx = {
     unsupportedTypes: {},
-    snakeCase: config.snakeCase
+    snakeCase: config.snakeCase,
+    currentSchema
   };
   const ast = await structureToAst(ctx, db);
   await adapter.close();
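Note: taken together, the pull command now resolves the working schema once (adapter.schema, falling back to "public"), threads it through ctx as currentSchema, and the hunks above compare against it instead of a fixed schema, so generated structures omit the schema prefix only when it matches the current one. A small sketch of that rule; schemaForAst is an illustrative helper name, not part of the package:

// Hypothetical helper mirroring the repeated ternary in the hunks above.
type Ctx = { currentSchema: string };

const schemaForAst = (ctx: Ctx, schemaName: string): string | undefined =>
  schemaName === ctx.currentSchema ? undefined : schemaName;

// With currentSchema = "custom", a table in "custom" is emitted without a
// schema prefix, while a table in "other" keeps its explicit schema.
const ctx: Ctx = { currentSchema: "custom" };
console.log(schemaForAst(ctx, "custom")); // undefined
console.log(schemaForAst(ctx, "other"));  // "other"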