rake-db 2.4.8 → 2.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +17 -3
- package/dist/index.js +187 -34
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +188 -35
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { quote, getRaw, EnumColumn, UnknownColumn, columnTypes, getColumnTypes, getTableData, ColumnType, resetTableData, TransactionAdapter, logParamToLogObject, createDb as createDb$1, Adapter, columnsByType, instantiateColumn, ArrayColumn, columnCode, codeToString, primaryKeyToCode, indexToCode, foreignKeyToCode, TimestampColumn, foreignKeyArgsToCode } from 'pqb';
|
|
1
|
+
import { quote, getRaw, EnumColumn, UnknownColumn, columnTypes, getColumnTypes, getTableData, ColumnType, resetTableData, TransactionAdapter, logParamToLogObject, createDb as createDb$1, Adapter, columnsByType, instantiateColumn, DomainColumn, ArrayColumn, columnCode, codeToString, rawToCode, primaryKeyToCode, indexToCode, foreignKeyToCode, TimestampColumn, foreignKeyArgsToCode } from 'pqb';
|
|
2
2
|
import { singleQuote, isRaw, toArray, snakeCaseKey, nameKey, emptyObject, pathToLog, raw, addCode, quoteObjectKey } from 'orchid-core';
|
|
3
3
|
import path from 'path';
|
|
4
4
|
import { readdir, mkdir, writeFile } from 'fs/promises';
|
|
@@ -274,8 +274,11 @@ var __spreadValues$5 = (a, b) => {
|
|
|
274
274
|
return a;
|
|
275
275
|
};
|
|
276
276
|
var __spreadProps$4 = (a, b) => __defProps$4(a, __getOwnPropDescs$4(b));
|
|
277
|
+
const columnTypeToSql = (item) => {
|
|
278
|
+
return item.data.isOfCustomType ? `"${item.toSQL()}"` : item.toSQL();
|
|
279
|
+
};
|
|
277
280
|
const columnToSql = (key, item, values, hasMultiplePrimaryKeys) => {
|
|
278
|
-
const line = [`"${item.data.name || key}" ${item
|
|
281
|
+
const line = [`"${item.data.name || key}" ${columnTypeToSql(item)}`];
|
|
279
282
|
if (item.data.compression) {
|
|
280
283
|
line.push(`COMPRESSION ${item.data.compression}`);
|
|
281
284
|
}
|
|
@@ -480,7 +483,7 @@ var __spreadValues$4 = (a, b) => {
|
|
|
480
483
|
return a;
|
|
481
484
|
};
|
|
482
485
|
var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b));
|
|
483
|
-
var __objRest = (source, exclude) => {
|
|
486
|
+
var __objRest$1 = (source, exclude) => {
|
|
484
487
|
var target = {};
|
|
485
488
|
for (var prop in source)
|
|
486
489
|
if (__hasOwnProp$4.call(source, prop) && exclude.indexOf(prop) < 0)
|
|
@@ -508,7 +511,7 @@ const createTable$1 = async (migration, up, tableName, options, fn) => {
|
|
|
508
511
|
validatePrimaryKey(ast);
|
|
509
512
|
const queries = astToQueries$1(ast);
|
|
510
513
|
for (const _a of queries) {
|
|
511
|
-
const _b = _a, { then } = _b, query = __objRest(_b, ["then"]);
|
|
514
|
+
const _b = _a, { then } = _b, query = __objRest$1(_b, ["then"]);
|
|
512
515
|
const result = await migration.adapter.arrays(query);
|
|
513
516
|
then == null ? void 0 : then(result);
|
|
514
517
|
}
|
|
@@ -889,8 +892,9 @@ const astToQueries = (ast) => {
|
|
|
889
892
|
} else if (item.type === "change") {
|
|
890
893
|
const { from, to } = item;
|
|
891
894
|
if (to.type && (from.type !== to.type || from.collate !== to.collate)) {
|
|
895
|
+
const type = !to.column || to.column.data.isOfCustomType ? `"${to.type}"` : to.type;
|
|
892
896
|
alterTable.push(
|
|
893
|
-
`ALTER COLUMN "${item.name || key}" TYPE ${
|
|
897
|
+
`ALTER COLUMN "${item.name || key}" TYPE ${type}${to.collate ? ` COLLATE ${quote(to.collate)}` : ""}${item.using ? ` USING ${getRaw(item.using, values)}` : ""}`
|
|
894
898
|
);
|
|
895
899
|
}
|
|
896
900
|
if (from.default !== to.default) {
|
|
@@ -1158,6 +1162,12 @@ class MigrationBase {
|
|
|
1158
1162
|
dropEnum(name, values, options) {
|
|
1159
1163
|
return createEnum$1(this, !this.up, name, values, options);
|
|
1160
1164
|
}
|
|
1165
|
+
createDomain(name, fn, options) {
|
|
1166
|
+
return createDomain$1(this, this.up, name, fn, options);
|
|
1167
|
+
}
|
|
1168
|
+
dropDomain(name, fn, options) {
|
|
1169
|
+
return createDomain$1(this, !this.up, name, fn, options);
|
|
1170
|
+
}
|
|
1161
1171
|
async tableExists(tableName) {
|
|
1162
1172
|
return queryExists(this, {
|
|
1163
1173
|
text: `SELECT 1 FROM "information_schema"."tables" WHERE "table_name" = $1`,
|
|
@@ -1252,6 +1262,34 @@ const createEnum$1 = async (migration, up, name, values, options = {}) => {
|
|
|
1252
1262
|
await migration.adapter.query(query);
|
|
1253
1263
|
migration.migratedAsts.push(ast);
|
|
1254
1264
|
};
|
|
1265
|
+
const createDomain$1 = async (migration, up, name, fn, options) => {
|
|
1266
|
+
const [schema, domainName] = getSchemaAndTableFromName(name);
|
|
1267
|
+
const ast = __spreadValues$2({
|
|
1268
|
+
type: "domain",
|
|
1269
|
+
action: up ? "create" : "drop",
|
|
1270
|
+
schema,
|
|
1271
|
+
name: domainName,
|
|
1272
|
+
baseType: fn(columnTypes)
|
|
1273
|
+
}, options);
|
|
1274
|
+
let query;
|
|
1275
|
+
const values = [];
|
|
1276
|
+
const quotedName = quoteWithSchema(ast);
|
|
1277
|
+
if (ast.action === "create") {
|
|
1278
|
+
query = `CREATE DOMAIN ${quotedName} AS ${columnTypeToSql(ast.baseType)}${ast.collation ? `
|
|
1279
|
+
COLLATION ${singleQuote(ast.collation)}` : ""}${ast.default ? `
|
|
1280
|
+
DEFAULT ${getRaw(ast.default, values)}` : ""}${ast.notNull || ast.check ? "\n" : ""}${[
|
|
1281
|
+
ast.notNull && "NOT NULL",
|
|
1282
|
+
ast.check && `CHECK ${getRaw(ast.check, values)}`
|
|
1283
|
+
].filter(Boolean).join(" ")}`;
|
|
1284
|
+
} else {
|
|
1285
|
+
query = `DROP DOMAIN ${quotedName}${ast.cascade ? " CASCADE" : ""}`;
|
|
1286
|
+
}
|
|
1287
|
+
await migration.adapter.query({
|
|
1288
|
+
text: query,
|
|
1289
|
+
values
|
|
1290
|
+
});
|
|
1291
|
+
migration.migratedAsts.push(ast);
|
|
1292
|
+
};
|
|
1255
1293
|
const queryExists = (db, sql) => {
|
|
1256
1294
|
return db.adapter.query(sql).then(({ rowCount }) => rowCount > 0);
|
|
1257
1295
|
};
|
|
@@ -1895,6 +1933,38 @@ ORDER BY c.conname`);
|
|
|
1895
1933
|
}
|
|
1896
1934
|
return rows;
|
|
1897
1935
|
}
|
|
1936
|
+
async getDomains() {
|
|
1937
|
+
const { rows } = await this.db.query(`SELECT
|
|
1938
|
+
n.nspname AS "schemaName",
|
|
1939
|
+
d.typname AS "name",
|
|
1940
|
+
t.typname AS "type",
|
|
1941
|
+
s.nspname AS "typeSchema",
|
|
1942
|
+
d.typnotnull AS "notNull",
|
|
1943
|
+
d.typcategory = 'A' AS "isArray",
|
|
1944
|
+
character_maximum_length AS "maxChars",
|
|
1945
|
+
numeric_precision AS "numericPrecision",
|
|
1946
|
+
numeric_scale AS "numericScale",
|
|
1947
|
+
datetime_precision AS "dateTimePrecision",
|
|
1948
|
+
collation_name AS "collation",
|
|
1949
|
+
domain_default AS "default",
|
|
1950
|
+
pg_get_expr(conbin, conrelid) AS "expression"
|
|
1951
|
+
FROM pg_catalog.pg_type d
|
|
1952
|
+
JOIN pg_catalog.pg_namespace n ON n.oid = d.typnamespace
|
|
1953
|
+
JOIN information_schema.domains i
|
|
1954
|
+
ON i.domain_schema = nspname
|
|
1955
|
+
AND i.domain_name = d.typname
|
|
1956
|
+
JOIN pg_catalog.pg_type t
|
|
1957
|
+
ON (
|
|
1958
|
+
CASE WHEN d.typcategory = 'A'
|
|
1959
|
+
THEN t.typarray
|
|
1960
|
+
ELSE t.oid
|
|
1961
|
+
END
|
|
1962
|
+
) = d.typbasetype
|
|
1963
|
+
JOIN pg_catalog.pg_namespace s ON s.oid = t.typnamespace
|
|
1964
|
+
LEFT JOIN pg_catalog.pg_constraint c ON c.contypid = d.oid
|
|
1965
|
+
WHERE d.typtype = 'd' AND ${filterSchema("n.nspname")}`);
|
|
1966
|
+
return rows;
|
|
1967
|
+
}
|
|
1898
1968
|
}
|
|
1899
1969
|
|
|
1900
1970
|
var __defProp = Object.defineProperty;
|
|
@@ -1916,6 +1986,18 @@ var __spreadValues = (a, b) => {
|
|
|
1916
1986
|
return a;
|
|
1917
1987
|
};
|
|
1918
1988
|
var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b));
|
|
1989
|
+
var __objRest = (source, exclude) => {
|
|
1990
|
+
var target = {};
|
|
1991
|
+
for (var prop in source)
|
|
1992
|
+
if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0)
|
|
1993
|
+
target[prop] = source[prop];
|
|
1994
|
+
if (source != null && __getOwnPropSymbols)
|
|
1995
|
+
for (var prop of __getOwnPropSymbols(source)) {
|
|
1996
|
+
if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop))
|
|
1997
|
+
target[prop] = source[prop];
|
|
1998
|
+
}
|
|
1999
|
+
return target;
|
|
2000
|
+
};
|
|
1919
2001
|
class RakeDbEnumColumn extends EnumColumn {
|
|
1920
2002
|
toCode(t) {
|
|
1921
2003
|
return columnCode(this, t, `enum('${this.enumName}')`);
|
|
@@ -1959,10 +2041,21 @@ const structureToAst = async (db) => {
|
|
|
1959
2041
|
}
|
|
1960
2042
|
pendingTables[key] = { table, dependsOn };
|
|
1961
2043
|
}
|
|
2044
|
+
const domains = {};
|
|
2045
|
+
for (const it of data.domains) {
|
|
2046
|
+
domains[`${it.schemaName}.${it.name}`] = getColumn(data, domains, {
|
|
2047
|
+
schemaName: it.schemaName,
|
|
2048
|
+
name: it.name,
|
|
2049
|
+
type: it.type,
|
|
2050
|
+
typeSchema: it.typeSchema,
|
|
2051
|
+
isArray: it.isArray,
|
|
2052
|
+
isSerial: false
|
|
2053
|
+
});
|
|
2054
|
+
}
|
|
1962
2055
|
for (const key in pendingTables) {
|
|
1963
2056
|
const { table, dependsOn } = pendingTables[key];
|
|
1964
2057
|
if (!dependsOn.size) {
|
|
1965
|
-
pushTableAst(ast, data, table, pendingTables);
|
|
2058
|
+
pushTableAst(ast, data, domains, table, pendingTables);
|
|
1966
2059
|
}
|
|
1967
2060
|
}
|
|
1968
2061
|
const outerFKeys = [];
|
|
@@ -1984,6 +2077,19 @@ const structureToAst = async (db) => {
|
|
|
1984
2077
|
values: it.values
|
|
1985
2078
|
});
|
|
1986
2079
|
}
|
|
2080
|
+
for (const it of data.domains) {
|
|
2081
|
+
ast.push({
|
|
2082
|
+
type: "domain",
|
|
2083
|
+
action: "create",
|
|
2084
|
+
schema: it.schemaName === "public" ? void 0 : it.schemaName,
|
|
2085
|
+
name: it.name,
|
|
2086
|
+
baseType: domains[`${it.schemaName}.${it.name}`],
|
|
2087
|
+
notNull: it.notNull,
|
|
2088
|
+
collation: it.collation,
|
|
2089
|
+
default: it.default ? raw(it.default) : void 0,
|
|
2090
|
+
check: it.check ? raw(it.check) : void 0
|
|
2091
|
+
});
|
|
2092
|
+
}
|
|
1987
2093
|
for (const key in pendingTables) {
|
|
1988
2094
|
const innerFKeys = [];
|
|
1989
2095
|
const { table } = pendingTables[key];
|
|
@@ -1997,7 +2103,7 @@ const structureToAst = async (db) => {
|
|
|
1997
2103
|
outerFKeys.push([fkey, table]);
|
|
1998
2104
|
}
|
|
1999
2105
|
}
|
|
2000
|
-
pushTableAst(ast, data, table, pendingTables, innerFKeys);
|
|
2106
|
+
pushTableAst(ast, data, domains, table, pendingTables, innerFKeys);
|
|
2001
2107
|
}
|
|
2002
2108
|
for (const [fkey, table] of outerFKeys) {
|
|
2003
2109
|
ast.push(__spreadProps(__spreadValues({}, foreignKeyToAst(fkey)), {
|
|
@@ -2019,7 +2125,8 @@ const getData = async (db) => {
|
|
|
2019
2125
|
foreignKeys,
|
|
2020
2126
|
extensions,
|
|
2021
2127
|
enums,
|
|
2022
|
-
checks
|
|
2128
|
+
checks,
|
|
2129
|
+
domains
|
|
2023
2130
|
] = await Promise.all([
|
|
2024
2131
|
db.getSchemas(),
|
|
2025
2132
|
db.getTables(),
|
|
@@ -2029,7 +2136,8 @@ const getData = async (db) => {
|
|
|
2029
2136
|
db.getForeignKeys(),
|
|
2030
2137
|
db.getExtensions(),
|
|
2031
2138
|
db.getEnums(),
|
|
2032
|
-
db.getChecks()
|
|
2139
|
+
db.getChecks(),
|
|
2140
|
+
db.getDomains()
|
|
2033
2141
|
]);
|
|
2034
2142
|
return {
|
|
2035
2143
|
schemas,
|
|
@@ -2040,7 +2148,8 @@ const getData = async (db) => {
|
|
|
2040
2148
|
foreignKeys,
|
|
2041
2149
|
extensions,
|
|
2042
2150
|
enums,
|
|
2043
|
-
checks
|
|
2151
|
+
checks,
|
|
2152
|
+
domains
|
|
2044
2153
|
};
|
|
2045
2154
|
};
|
|
2046
2155
|
const makeBelongsToTable = (schema, table) => (item) => item.schemaName === schema && item.tableName === table;
|
|
@@ -2054,12 +2163,52 @@ const getIsSerial = (item) => {
|
|
|
2054
2163
|
}
|
|
2055
2164
|
return false;
|
|
2056
2165
|
};
|
|
2166
|
+
const getColumn = (data, domains, _a) => {
|
|
2167
|
+
var _b = _a, {
|
|
2168
|
+
schemaName,
|
|
2169
|
+
tableName,
|
|
2170
|
+
name,
|
|
2171
|
+
type,
|
|
2172
|
+
typeSchema,
|
|
2173
|
+
isArray,
|
|
2174
|
+
isSerial
|
|
2175
|
+
} = _b, params = __objRest(_b, [
|
|
2176
|
+
"schemaName",
|
|
2177
|
+
"tableName",
|
|
2178
|
+
"name",
|
|
2179
|
+
"type",
|
|
2180
|
+
"typeSchema",
|
|
2181
|
+
"isArray",
|
|
2182
|
+
"isSerial"
|
|
2183
|
+
]);
|
|
2184
|
+
let column;
|
|
2185
|
+
const klass = columnsByType[getColumnType(type, isSerial)];
|
|
2186
|
+
if (klass) {
|
|
2187
|
+
column = instantiateColumn(klass, params);
|
|
2188
|
+
} else {
|
|
2189
|
+
const domainColumn = domains[`${typeSchema}.${type}`];
|
|
2190
|
+
if (domainColumn) {
|
|
2191
|
+
column = new DomainColumn({}, type).as(domainColumn);
|
|
2192
|
+
} else {
|
|
2193
|
+
const enumType = data.enums.find(
|
|
2194
|
+
(item) => item.name === type && item.schemaName === typeSchema
|
|
2195
|
+
);
|
|
2196
|
+
if (!enumType) {
|
|
2197
|
+
throw new Error(
|
|
2198
|
+
`Cannot handle ${tableName ? "column" : "domain"} ${schemaName}${tableName ? `.${tableName}` : ""}.${name}: column type \`${type}\` is not supported`
|
|
2199
|
+
);
|
|
2200
|
+
}
|
|
2201
|
+
column = new RakeDbEnumColumn({}, type, enumType.values);
|
|
2202
|
+
}
|
|
2203
|
+
}
|
|
2204
|
+
return isArray ? new ArrayColumn({}, column) : column;
|
|
2205
|
+
};
|
|
2057
2206
|
const getColumnType = (type, isSerial) => {
|
|
2058
2207
|
if (!isSerial)
|
|
2059
2208
|
return type;
|
|
2060
2209
|
return type === "int2" ? "smallserial" : type === "int4" ? "serial" : "bigserial";
|
|
2061
2210
|
};
|
|
2062
|
-
const pushTableAst = (ast, data, table, pendingTables, innerFKeys = data.foreignKeys) => {
|
|
2211
|
+
const pushTableAst = (ast, data, domains, table, pendingTables, innerFKeys = data.foreignKeys) => {
|
|
2063
2212
|
const { schemaName, name } = table;
|
|
2064
2213
|
const key = `${schemaName}.${table.name}`;
|
|
2065
2214
|
delete pendingTables[key];
|
|
@@ -2082,26 +2231,12 @@ const pushTableAst = (ast, data, table, pendingTables, innerFKeys = data.foreign
|
|
|
2082
2231
|
if (isSerial) {
|
|
2083
2232
|
item = __spreadProps(__spreadValues({}, item), { default: void 0 });
|
|
2084
2233
|
}
|
|
2085
|
-
let column;
|
|
2086
2234
|
const isArray = item.dataType === "ARRAY";
|
|
2087
|
-
|
|
2088
|
-
|
|
2089
|
-
|
|
2090
|
-
|
|
2091
|
-
}
|
|
2092
|
-
const { type: type2, typeSchema } = item;
|
|
2093
|
-
const enumType = data.enums.find(
|
|
2094
|
-
(item2) => item2.name === type2 && item2.schemaName === typeSchema
|
|
2095
|
-
);
|
|
2096
|
-
if (!enumType) {
|
|
2097
|
-
throw new Error(
|
|
2098
|
-
`Cannot handle column ${item.schemaName}.${item.tableName}.${item.name}: column type \`${item.type}\` is not supported`
|
|
2099
|
-
);
|
|
2100
|
-
}
|
|
2101
|
-
column = new RakeDbEnumColumn({}, type2, enumType.values);
|
|
2102
|
-
}
|
|
2103
|
-
if (isArray)
|
|
2104
|
-
column = new ArrayColumn({}, column);
|
|
2235
|
+
let column = getColumn(data, domains, __spreadProps(__spreadValues({}, item), {
|
|
2236
|
+
type: isArray ? item.type.slice(1) : item.type,
|
|
2237
|
+
isArray,
|
|
2238
|
+
isSerial
|
|
2239
|
+
}));
|
|
2105
2240
|
if ((primaryKey == null ? void 0 : primaryKey.columnNames.length) === 1 && (primaryKey == null ? void 0 : primaryKey.columnNames[0]) === item.name) {
|
|
2106
2241
|
column = column.primaryKey();
|
|
2107
2242
|
}
|
|
@@ -2180,7 +2315,7 @@ const pushTableAst = (ast, data, table, pendingTables, innerFKeys = data.foreign
|
|
|
2180
2315
|
for (const otherKey in pendingTables) {
|
|
2181
2316
|
const item = pendingTables[otherKey];
|
|
2182
2317
|
if (item.dependsOn.delete(key) && item.dependsOn.size === 0) {
|
|
2183
|
-
pushTableAst(ast, data, item.table, pendingTables);
|
|
2318
|
+
pushTableAst(ast, data, domains, item.table, pendingTables);
|
|
2184
2319
|
}
|
|
2185
2320
|
}
|
|
2186
2321
|
};
|
|
@@ -2210,7 +2345,11 @@ const astToMigration = (config, ast) => {
|
|
|
2210
2345
|
} else if (item.type === "enum" && item.action === "create") {
|
|
2211
2346
|
if (first.length)
|
|
2212
2347
|
first.push([]);
|
|
2213
|
-
first.push(
|
|
2348
|
+
first.push(createEnum(item));
|
|
2349
|
+
} else if (item.type === "domain" && item.action === "create") {
|
|
2350
|
+
if (first.length)
|
|
2351
|
+
first.push([]);
|
|
2352
|
+
first.push(...createDomain(item));
|
|
2214
2353
|
} else if (item.type === "table" && item.action === "create") {
|
|
2215
2354
|
tables.push(createTable(config, item));
|
|
2216
2355
|
} else if (item.type === "foreignKey") {
|
|
@@ -2267,12 +2406,26 @@ const createExtension = (ast) => {
|
|
|
2267
2406
|
return code;
|
|
2268
2407
|
};
|
|
2269
2408
|
const createEnum = (ast) => {
|
|
2409
|
+
return `await db.createEnum(${quoteSchemaTable(ast)}, [${ast.values.map(singleQuote).join(", ")}]);`;
|
|
2410
|
+
};
|
|
2411
|
+
const createDomain = (ast) => {
|
|
2270
2412
|
const code = [
|
|
2271
|
-
`await db.
|
|
2413
|
+
`await db.createDomain(${quoteSchemaTable(
|
|
2414
|
+
ast
|
|
2415
|
+
)}, (t) => ${ast.baseType.toCode("t")}`
|
|
2272
2416
|
];
|
|
2273
|
-
if (ast.
|
|
2417
|
+
if (ast.notNull || ast.collation || ast.default || ast.check) {
|
|
2418
|
+
const props = [];
|
|
2419
|
+
if (ast.notNull)
|
|
2420
|
+
props.push(`notNull: true,`);
|
|
2421
|
+
if (ast.collation)
|
|
2422
|
+
props.push(`collation: ${singleQuote(ast.collation)},`);
|
|
2423
|
+
if (ast.default)
|
|
2424
|
+
props.push(`default: ${rawToCode("db", ast.default)},`);
|
|
2425
|
+
if (ast.check)
|
|
2426
|
+
props.push(`check: ${rawToCode("db", ast.check)},`);
|
|
2274
2427
|
addCode(code, ", {");
|
|
2275
|
-
code.push(
|
|
2428
|
+
code.push(props);
|
|
2276
2429
|
addCode(code, "}");
|
|
2277
2430
|
}
|
|
2278
2431
|
addCode(code, ");");
|