durcno 1.0.0-alpha.3 → 1.0.0-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -5
- package/dist/bin.cjs +36 -33
- package/dist/src/columns/common.d.mts +1 -1
- package/dist/src/columns/enum.d.mts +1 -1
- package/dist/src/connectors/common.d.mts +19 -1
- package/dist/src/connectors/common.mjs +17 -0
- package/dist/src/connectors/pglite.d.mts +8 -0
- package/dist/src/connectors/pglite.mjs +8 -0
- package/dist/src/db.d.mts +1 -1
- package/dist/src/filters/array.d.mts +2 -2
- package/dist/src/filters/custom.d.mts +1 -1
- package/dist/src/filters/index.d.mts +3 -3
- package/dist/src/index.d.mts +9 -9
- package/dist/src/migration/ddl/enum.d.mts +100 -0
- package/dist/src/migration/ddl/enum.mjs +138 -0
- package/dist/src/migration/ddl/index.d.mts +211 -0
- package/dist/src/migration/ddl/index.mjs +81 -0
- package/dist/src/migration/ddl/indexes.d.mts +110 -0
- package/dist/src/migration/ddl/indexes.mjs +151 -0
- package/dist/src/migration/ddl/schema.d.mts +56 -0
- package/dist/src/migration/ddl/schema.mjs +62 -0
- package/dist/src/migration/ddl/sequence.d.mts +77 -0
- package/dist/src/migration/ddl/sequence.mjs +86 -0
- package/dist/src/migration/{statement.d.mts → ddl/statement.d.mts} +13 -10
- package/dist/src/migration/{statement.mjs → ddl/statement.mjs} +4 -4
- package/dist/src/migration/ddl/table.d.mts +305 -0
- package/dist/src/migration/{ddl.mjs → ddl/table.mjs} +8 -493
- package/dist/src/migration/ddl/types.d.mts +117 -0
- package/dist/src/migration/ddl/types.mjs +187 -0
- package/dist/src/migration/index.d.mts +2 -2
- package/dist/src/migration/index.mjs +2 -2
- package/dist/src/migration/snapshot.d.mts +1 -1
- package/dist/src/models.d.mts +1 -1
- package/dist/src/query-builders/aggregates.d.mts +2 -2
- package/dist/src/query-builders/count.d.mts +2 -2
- package/dist/src/query-builders/delete.d.mts +2 -2
- package/dist/src/query-builders/distinct.d.mts +2 -2
- package/dist/src/query-builders/exists.d.mts +2 -2
- package/dist/src/query-builders/first.d.mts +2 -2
- package/dist/src/query-builders/insert-returning.d.mts +2 -2
- package/dist/src/query-builders/insert.d.mts +2 -2
- package/dist/src/query-builders/pre.d.mts +2 -2
- package/dist/src/query-builders/raw.d.mts +1 -1
- package/dist/src/query-builders/rq.d.mts +2 -2
- package/dist/src/query-builders/select.d.mts +2 -2
- package/dist/src/query-builders/update.d.mts +2 -2
- package/dist/src/sql.d.mts +1 -1
- package/dist/src/table.d.mts +2 -2
- package/package.json +4 -4
- package/dist/src/migration/ddl.d.mts +0 -764
package/README.md
CHANGED
|
@@ -1,8 +1,6 @@
|
|
|
1
1
|
<p align="center">
|
|
2
|
-
<
|
|
3
|
-
|
|
4
|
-
</a>
|
|
5
|
-
<img src="https://img.shields.io/badge/Node.js-24%2B-339933?style=flat&logo=node.js&logoColor=white" alt="Node.js 24+" />
|
|
2
|
+
<img alt="NPM Downloads" src="https://img.shields.io/npm/dw/durcno" alt="Downloads" />
|
|
3
|
+
<img src="https://img.shields.io/badge/Node.js-24%2B-339935?style=flat&logo=node.js&logoColor=white" alt="Node.js 24+" />
|
|
6
4
|
<img src="https://img.shields.io/badge/PostgreSQL-14%2B-336791?style=flat&logo=postgresql&logoColor=white" alt="PostgreSQL 14+" />
|
|
7
5
|
<img src="https://img.shields.io/badge/License-Apache%202.0-blue?style=flat" alt="License" />
|
|
8
6
|
<img src="https://img.shields.io/badge/vitest--green?logo=vitest" alt="License" />
|
|
@@ -31,7 +29,7 @@
|
|
|
31
29
|
## Setup
|
|
32
30
|
|
|
33
31
|
```bash
|
|
34
|
-
npm
|
|
32
|
+
npm add durcno@alpha
|
|
35
33
|
```
|
|
36
34
|
|
|
37
35
|
```bash
|
package/dist/bin.cjs
CHANGED
|
@@ -8961,25 +8961,24 @@ async function runDownMigration(migrationDirName, isFirstMigration, migrationsDi
|
|
|
8961
8961
|
const options = migrationModule.options ?? {};
|
|
8962
8962
|
const useTransaction = options.transaction ?? true;
|
|
8963
8963
|
const execution = options.execution ?? "joined";
|
|
8964
|
-
if (useTransaction) {
|
|
8965
|
-
await client.query("BEGIN;");
|
|
8966
|
-
}
|
|
8967
8964
|
try {
|
|
8968
8965
|
if (statements.length > 0) {
|
|
8969
8966
|
if (execution === "sequential") {
|
|
8967
|
+
if (useTransaction) await client.query("BEGIN;");
|
|
8970
8968
|
for (const st of statements) {
|
|
8971
8969
|
await client.query(st.toSQL());
|
|
8972
8970
|
}
|
|
8971
|
+
if (useTransaction) await client.query("COMMIT;");
|
|
8973
8972
|
} else {
|
|
8974
|
-
|
|
8973
|
+
let sql = "";
|
|
8974
|
+
if (useTransaction) sql += "BEGIN;\n";
|
|
8975
|
+
sql += statements.map((st) => st.toSQL()).join("\n");
|
|
8976
|
+
if (useTransaction) sql += "\nCOMMIT;";
|
|
8975
8977
|
await client.query(sql);
|
|
8976
8978
|
}
|
|
8977
8979
|
}
|
|
8978
|
-
if (useTransaction) {
|
|
8979
|
-
await client.query("COMMIT;");
|
|
8980
|
-
}
|
|
8981
8980
|
} catch (e) {
|
|
8982
|
-
if (useTransaction) {
|
|
8981
|
+
if (useTransaction && execution === "sequential") {
|
|
8983
8982
|
await client.query("ROLLBACK;");
|
|
8984
8983
|
}
|
|
8985
8984
|
throw e;
|
|
@@ -12275,12 +12274,14 @@ async function generate(options) {
|
|
|
12275
12274
|
ssCurrent,
|
|
12276
12275
|
renamedTables
|
|
12277
12276
|
);
|
|
12277
|
+
const connectorMigrationOpts = config2.connector.constructor.migrationOptions;
|
|
12278
12278
|
const migrationUpTs = generateMigration(
|
|
12279
12279
|
ssPrevious,
|
|
12280
12280
|
ssCurrent,
|
|
12281
12281
|
"up",
|
|
12282
12282
|
renamedTables,
|
|
12283
|
-
renamedColumns
|
|
12283
|
+
renamedColumns,
|
|
12284
|
+
connectorMigrationOpts
|
|
12284
12285
|
);
|
|
12285
12286
|
const reverseRenamedTables = {};
|
|
12286
12287
|
for (const [oldKey, newKey] of Object.entries(renamedTables)) {
|
|
@@ -12299,7 +12300,8 @@ async function generate(options) {
|
|
|
12299
12300
|
ssPrevious,
|
|
12300
12301
|
"down",
|
|
12301
12302
|
reverseRenamedTables,
|
|
12302
|
-
reverseRenamedColumns
|
|
12303
|
+
reverseRenamedColumns,
|
|
12304
|
+
connectorMigrationOpts
|
|
12303
12305
|
);
|
|
12304
12306
|
if (migrationUpTs === null) {
|
|
12305
12307
|
console.log(yellow3("No changes detected. Skipping migration creation."));
|
|
@@ -12311,14 +12313,14 @@ async function generate(options) {
|
|
|
12311
12313
|
(0, import_node_fs2.writeFileSync)((0, import_node_path4.resolve)(migrationDir, "up.ts"), migrationUpTs);
|
|
12312
12314
|
(0, import_node_fs2.writeFileSync)(
|
|
12313
12315
|
(0, import_node_path4.resolve)(migrationDir, "down.ts"),
|
|
12314
|
-
migrationDnTs ?? generateNoOpMigration()
|
|
12316
|
+
migrationDnTs ?? generateNoOpMigration(connectorMigrationOpts)
|
|
12315
12317
|
);
|
|
12316
12318
|
const migrationsRelativePath = (0, import_node_path4.relative)(process.cwd(), migrationsDir);
|
|
12317
12319
|
console.log(
|
|
12318
12320
|
`${bgGreen2.white.bold("[CREATED]")} ${cyan3(migrationName)} at ${cyan3(`${migrationsRelativePath}/`)}`
|
|
12319
12321
|
);
|
|
12320
12322
|
}
|
|
12321
|
-
function generateMigration(prev, curr, direction, renamedTables = {}, renamedColumns = {}) {
|
|
12323
|
+
function generateMigration(prev, curr, direction, renamedTables = {}, renamedColumns = {}, defaultOptions) {
|
|
12322
12324
|
const statements = [];
|
|
12323
12325
|
const renamedFromKeys = new Set(Object.keys(renamedTables));
|
|
12324
12326
|
const renamedToKeys = new Set(Object.values(renamedTables));
|
|
@@ -12351,7 +12353,7 @@ function generateMigration(prev, curr, direction, renamedTables = {}, renamedCol
|
|
|
12351
12353
|
for (const enumName in prev.enums) {
|
|
12352
12354
|
if (!(enumName in curr.enums)) {
|
|
12353
12355
|
const enm = prev.enums[enumName];
|
|
12354
|
-
statements.push(`ddl.
|
|
12356
|
+
statements.push(`ddl.dropType("${enm.schema}", "${enm.name}")`);
|
|
12355
12357
|
}
|
|
12356
12358
|
}
|
|
12357
12359
|
for (const enumName in curr.enums) {
|
|
@@ -12359,7 +12361,7 @@ function generateMigration(prev, curr, direction, renamedTables = {}, renamedCol
|
|
|
12359
12361
|
const enm = curr.enums[enumName];
|
|
12360
12362
|
const values = enm.values.map((v) => `"${v}"`).join(", ");
|
|
12361
12363
|
statements.push(
|
|
12362
|
-
`ddl.
|
|
12364
|
+
`ddl.createType("${enm.schema}", "${enm.name}", { asEnum: [${values}] })`
|
|
12363
12365
|
);
|
|
12364
12366
|
} else {
|
|
12365
12367
|
const prevValues = prev.enums[enumName].values;
|
|
@@ -12417,15 +12419,15 @@ function generateMigration(prev, curr, direction, renamedTables = {}, renamedCol
|
|
|
12417
12419
|
}
|
|
12418
12420
|
if (afterValue !== null) {
|
|
12419
12421
|
statements.push(
|
|
12420
|
-
`ddl.
|
|
12422
|
+
`ddl.alterType("${enm.schema}", "${enm.name}").addValue("${addedValue}", { after: "${afterValue}" })`
|
|
12421
12423
|
);
|
|
12422
12424
|
} else if (beforeValue !== null) {
|
|
12423
12425
|
statements.push(
|
|
12424
|
-
`ddl.
|
|
12426
|
+
`ddl.alterType("${enm.schema}", "${enm.name}").addValue("${addedValue}", { before: "${beforeValue}" })`
|
|
12425
12427
|
);
|
|
12426
12428
|
} else {
|
|
12427
12429
|
statements.push(
|
|
12428
|
-
`ddl.
|
|
12430
|
+
`ddl.alterType("${enm.schema}", "${enm.name}").addValue("${addedValue}")`
|
|
12429
12431
|
);
|
|
12430
12432
|
}
|
|
12431
12433
|
}
|
|
@@ -12524,25 +12526,25 @@ function generateMigration(prev, curr, direction, renamedTables = {}, renamedCol
|
|
|
12524
12526
|
if (statements.length === 0) return null;
|
|
12525
12527
|
return `import { type DDLStatement, ddl, type MigrationOptions } from "durcno/migration";
|
|
12526
12528
|
|
|
12527
|
-
export const options: MigrationOptions = {
|
|
12528
|
-
transaction: true,
|
|
12529
|
-
};
|
|
12529
|
+
export const options: MigrationOptions = ${stringifyMigrationOpts(defaultOptions ?? { transaction: true })};
|
|
12530
12530
|
|
|
12531
12531
|
export const statements: DDLStatement[] = [
|
|
12532
12532
|
${statements.join(",\n ")},
|
|
12533
12533
|
];
|
|
12534
12534
|
`;
|
|
12535
12535
|
}
|
|
12536
|
-
function generateNoOpMigration() {
|
|
12536
|
+
function generateNoOpMigration(defaultOptions) {
|
|
12537
12537
|
return `import { type DDLStatement, ddl, type MigrationOptions } from "durcno/migration";
|
|
12538
12538
|
|
|
12539
|
-
export const options: MigrationOptions = {
|
|
12540
|
-
transaction: true,
|
|
12541
|
-
};
|
|
12539
|
+
export const options: MigrationOptions = ${stringifyMigrationOpts(defaultOptions ?? { transaction: true })};
|
|
12542
12540
|
|
|
12543
12541
|
export const statements: DDLStatement[] = [];
|
|
12544
12542
|
`;
|
|
12545
12543
|
}
|
|
12544
|
+
function stringifyMigrationOpts(opts) {
|
|
12545
|
+
if (opts.transaction === void 0) opts.transaction = true;
|
|
12546
|
+
return JSON.stringify(opts, null, 2);
|
|
12547
|
+
}
|
|
12546
12548
|
function generateAlterTableStmts(prevTable, currTable, tableName, curr, statements, columnRenames) {
|
|
12547
12549
|
const alterStatements = [];
|
|
12548
12550
|
const renamedFromCols = new Set(
|
|
@@ -13060,23 +13062,22 @@ async function runUpMigration(migrationDirName, migrationsDir, client, config2)
|
|
|
13060
13062
|
const options = migrationModule.options ?? {};
|
|
13061
13063
|
const useTransaction = options.transaction ?? true;
|
|
13062
13064
|
const execution = options.execution ?? "joined";
|
|
13063
|
-
if (useTransaction) {
|
|
13064
|
-
await client.query("BEGIN;");
|
|
13065
|
-
}
|
|
13066
13065
|
try {
|
|
13067
13066
|
if (statements.length > 0) {
|
|
13068
13067
|
if (execution === "sequential") {
|
|
13068
|
+
if (useTransaction) await client.query("BEGIN;");
|
|
13069
13069
|
for (const st of statements) {
|
|
13070
13070
|
await client.query(st.toSQL());
|
|
13071
13071
|
}
|
|
13072
|
+
if (useTransaction) await client.query("COMMIT;");
|
|
13072
13073
|
} else {
|
|
13073
|
-
|
|
13074
|
+
let sql = "";
|
|
13075
|
+
if (useTransaction) sql += "BEGIN;\n";
|
|
13076
|
+
sql += statements.map((st) => st.toSQL()).join("\n");
|
|
13077
|
+
if (useTransaction) sql += "\nCOMMIT;";
|
|
13074
13078
|
await client.query(sql);
|
|
13075
13079
|
}
|
|
13076
13080
|
}
|
|
13077
|
-
if (useTransaction) {
|
|
13078
|
-
await client.query("COMMIT;");
|
|
13079
|
-
}
|
|
13080
13081
|
console.log(
|
|
13081
13082
|
bgGreen3.white.bold("[APPLIED]") + " " + green2(`Migration ${cyan5(migrationDirName)}`) + dim3(".")
|
|
13082
13083
|
);
|
|
@@ -13089,7 +13090,7 @@ async function runUpMigration(migrationDirName, migrationsDir, client, config2)
|
|
|
13089
13090
|
});
|
|
13090
13091
|
await db.close();
|
|
13091
13092
|
} catch (e) {
|
|
13092
|
-
if (useTransaction) {
|
|
13093
|
+
if (useTransaction && execution === "sequential") {
|
|
13093
13094
|
await client.query("ROLLBACK;");
|
|
13094
13095
|
}
|
|
13095
13096
|
throw e;
|
|
@@ -13472,6 +13473,8 @@ async function status(options) {
|
|
|
13472
13473
|
console.log(source_default.yellow("No migrations found."));
|
|
13473
13474
|
process.exit(0);
|
|
13474
13475
|
}
|
|
13476
|
+
connector.pool = { ...connector.pool, max: 1 };
|
|
13477
|
+
connector.logger = void 0;
|
|
13475
13478
|
const db = (0, import_durcno4.database)({ Migrations: import_durcno4.Migrations }, config2);
|
|
13476
13479
|
const migrationsQuery = db.from(import_durcno4.Migrations).select();
|
|
13477
13480
|
let migrations;
|
|
@@ -13505,7 +13508,7 @@ async function status(options) {
|
|
|
13505
13508
|
}
|
|
13506
13509
|
|
|
13507
13510
|
// src/cli/index.ts
|
|
13508
|
-
program.version("1.0.0-alpha.
|
|
13511
|
+
program.version("1.0.0-alpha.3");
|
|
13509
13512
|
var Options = {
|
|
13510
13513
|
config: ["--config <path>", "Path to the config file"]
|
|
13511
13514
|
};
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
import { Sql } from "../sql.mjs";
|
|
2
2
|
import { entityType } from "../symbols.mjs";
|
|
3
|
-
import { StdTableColumn, TableColumn } from "../table.mjs";
|
|
4
3
|
import { Arg } from "../query-builders/pre.mjs";
|
|
4
|
+
import { StdTableColumn, TableColumn } from "../table.mjs";
|
|
5
5
|
import * as z from "zod";
|
|
6
6
|
|
|
7
7
|
//#region src/columns/common.d.ts
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { DurcnoLogger } from "../logger.mjs";
|
|
2
2
|
import { Query } from "../query-builders/query.mjs";
|
|
3
|
+
import { MigrationOptions } from "../migration/index.mjs";
|
|
3
4
|
import { ConnectionOptions } from "node:tls";
|
|
4
5
|
|
|
5
6
|
//#region src/connectors/common.d.ts
|
|
@@ -55,6 +56,23 @@ type ConnectorOptions = {
|
|
|
55
56
|
* @abstract
|
|
56
57
|
*/
|
|
57
58
|
declare abstract class Connector {
|
|
59
|
+
/**
|
|
60
|
+
* Default migration options applied to generated migration files for this
|
|
61
|
+
* connector. When set, the `generate` CLI command will use these values as
|
|
62
|
+
* the `options` export in the produced `up.ts` / `down.ts` files instead of
|
|
63
|
+
* the built-in defaults.
|
|
64
|
+
*
|
|
65
|
+
* @example
|
|
66
|
+
* ```typescript
|
|
67
|
+
* class MyConnector extends Connector {
|
|
68
|
+
* static override migrationOptions: MigrationOptions = {
|
|
69
|
+
* transaction: false,
|
|
70
|
+
* execution: "sequential",
|
|
71
|
+
* };
|
|
72
|
+
* }
|
|
73
|
+
* ```
|
|
74
|
+
*/
|
|
75
|
+
static migrationOptions?: MigrationOptions;
|
|
58
76
|
/**
|
|
59
77
|
* The original options passed to the connector constructor.
|
|
60
78
|
* Provides full access to `dbCredentials`, `pool`, and `logger`.
|
|
@@ -207,4 +225,4 @@ declare abstract class $Pool extends $QueryExecutor {
|
|
|
207
225
|
*/
|
|
208
226
|
type QueryExecutor = $Client | $Pool;
|
|
209
227
|
//#endregion
|
|
210
|
-
export { $Pool, Connector, ConnectorOptions, QueryExecutor };
|
|
228
|
+
export { $Client, $Pool, Connector, ConnectorOptions, QueryExecutor };
|
|
@@ -30,6 +30,23 @@ function getUrlFromDbCredentials(dbCredentials) {
|
|
|
30
30
|
* @abstract
|
|
31
31
|
*/
|
|
32
32
|
var Connector = class {
|
|
33
|
+
/**
|
|
34
|
+
* Default migration options applied to generated migration files for this
|
|
35
|
+
* connector. When set, the `generate` CLI command will use these values as
|
|
36
|
+
* the `options` export in the produced `up.ts` / `down.ts` files instead of
|
|
37
|
+
* the built-in defaults.
|
|
38
|
+
*
|
|
39
|
+
* @example
|
|
40
|
+
* ```typescript
|
|
41
|
+
* class MyConnector extends Connector {
|
|
42
|
+
* static override migrationOptions: MigrationOptions = {
|
|
43
|
+
* transaction: false,
|
|
44
|
+
* execution: "sequential",
|
|
45
|
+
* };
|
|
46
|
+
* }
|
|
47
|
+
* ```
|
|
48
|
+
*/
|
|
49
|
+
static migrationOptions;
|
|
33
50
|
/**
|
|
34
51
|
* The original options passed to the connector constructor.
|
|
35
52
|
* Provides full access to `dbCredentials`, `pool`, and `logger`.
|
|
@@ -14,6 +14,14 @@ import { PGliteOptions } from "@electric-sql/pglite";
|
|
|
14
14
|
*/
|
|
15
15
|
declare class PgLiteConnector extends Connector {
|
|
16
16
|
#private;
|
|
17
|
+
/**
|
|
18
|
+
* PGlite does not support DDL statements inside transactions and requires
|
|
19
|
+
* sequential execution, so migrations are generated with these defaults.
|
|
20
|
+
*/
|
|
21
|
+
static migrationOptions: {
|
|
22
|
+
transaction: boolean;
|
|
23
|
+
execution: "sequential";
|
|
24
|
+
};
|
|
17
25
|
constructor(options: ConnectorOptions, driverOptions?: PGliteOptions);
|
|
18
26
|
getClient(): PgLiteClient;
|
|
19
27
|
getPool(): PgLitePool;
|
|
@@ -12,6 +12,14 @@ import { PGlite } from "@electric-sql/pglite";
|
|
|
12
12
|
* @see https://www.npmjs.com/package/@electric-sql/pglite
|
|
13
13
|
*/
|
|
14
14
|
var PgLiteConnector = class extends Connector {
|
|
15
|
+
/**
|
|
16
|
+
* PGlite does not support DDL statements inside transactions and requires
|
|
17
|
+
* sequential execution, so migrations are generated with these defaults.
|
|
18
|
+
*/
|
|
19
|
+
static migrationOptions = {
|
|
20
|
+
transaction: false,
|
|
21
|
+
execution: "sequential"
|
|
22
|
+
};
|
|
15
23
|
#driverOptions;
|
|
16
24
|
constructor(options, driverOptions) {
|
|
17
25
|
super(options);
|
package/dist/src/db.d.mts
CHANGED
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
import { AnyColumn, AnyRelations, IsTableWC, Relations, TColsToLeftRight, TableWCorNever, TableWithColumns } from "./table.mjs";
|
|
2
1
|
import { SelectBuilder } from "./query-builders/select.mjs";
|
|
3
2
|
import { BuildFilterExpression } from "./filters/index.mjs";
|
|
4
3
|
import { Config } from "./index.mjs";
|
|
@@ -13,6 +12,7 @@ import { InsertReturningQuery } from "./query-builders/insert-returning.mjs";
|
|
|
13
12
|
import { RawQuery } from "./query-builders/raw.mjs";
|
|
14
13
|
import { RelationQueryBuilder } from "./query-builders/rq.mjs";
|
|
15
14
|
import { UpdateBuilder } from "./query-builders/update.mjs";
|
|
15
|
+
import { AnyColumn, AnyRelations, IsTableWC, Relations, TColsToLeftRight, TableWCorNever, TableWithColumns } from "./table.mjs";
|
|
16
16
|
import { $Pool, QueryExecutor } from "./connectors/common.mjs";
|
|
17
17
|
|
|
18
18
|
//#region src/db.d.ts
|
|
@@ -1,6 +1,6 @@
|
|
|
1
|
-
import { AnyColumn, TableColumn } from "../table.mjs";
|
|
2
|
-
import { Filter } from "./custom.mjs";
|
|
3
1
|
import { Query } from "../query-builders/query.mjs";
|
|
2
|
+
import { Filter } from "./custom.mjs";
|
|
3
|
+
import { AnyColumn, TableColumn } from "../table.mjs";
|
|
4
4
|
|
|
5
5
|
//#region src/filters/array.d.ts
|
|
6
6
|
/** Shorthand for the Filter's TableColumn constraint. */
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import { AnyColumn, TableColumn } from "../table.mjs";
|
|
2
1
|
import { Query } from "../query-builders/query.mjs";
|
|
2
|
+
import { AnyColumn, TableColumn } from "../table.mjs";
|
|
3
3
|
|
|
4
4
|
//#region src/filters/custom.d.ts
|
|
5
5
|
/** Abstract base class for SQL filter expressions used in `WHERE`/`ON` clauses. */
|
|
@@ -1,9 +1,9 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { AnyColumn, AnyTableWithColumns, TColsToLeftRight, TableColumn, TableColumnArgs } from "../table.mjs";
|
|
1
|
+
import { Query } from "../query-builders/query.mjs";
|
|
3
2
|
import { SelectQuery } from "../query-builders/select.mjs";
|
|
3
|
+
import { Sql } from "../sql.mjs";
|
|
4
4
|
import { Filter } from "./custom.mjs";
|
|
5
5
|
import { Arg } from "../query-builders/pre.mjs";
|
|
6
|
-
import {
|
|
6
|
+
import { AnyColumn, AnyTableWithColumns, TColsToLeftRight, TableColumn, TableColumnArgs } from "../table.mjs";
|
|
7
7
|
|
|
8
8
|
//#region src/filters/index.d.ts
|
|
9
9
|
type ConditionExpression<Left extends TableColumnArgs, Right extends Record<string, TableColumnArgs>, TArg extends boolean = false> = EqualValCondition<Left[0], Left[1], Left[2], Left[3], Left[3]["ValType"]> | (TArg extends true ? EqualValCondition<Left[0], Left[1], Left[2], Left[3], Arg<Left[3]["ValType"]>> : never) | { [RightKey in keyof Right]: EqualColCondition<Left[0], Left[1], Left[2], Left[3], Right[RightKey][0], Right[RightKey][1], Right[RightKey][2], Right[RightKey][3]> }[keyof Right] | GreaterEqualValCondition<Left[0], Left[1], Left[2], Left[3], Left[3]["ValType"] | (TArg extends true ? Arg<Left[3]["ValType"]> : never)> | { [RightKey in keyof Right]: GreaterEqualColCondition<Left[0], Left[1], Left[2], Left[3], Right[RightKey][0], Right[RightKey][1], Right[RightKey][2], Right[RightKey][3]> }[keyof Right] | GreaterThanValCondition<Left[0], Left[1], Left[2], Left[3], Left[3]["ValType"] | (TArg extends true ? Arg<Left[3]["ValType"]> : never)> | { [RightKey in keyof Right]: GreaterThanColCondition<Left[0], Left[1], Left[2], Left[3], Right[RightKey][0], Right[RightKey][1], Right[RightKey][2], Right[RightKey][3]> }[keyof Right] | LessThanValCondition<Left[0], Left[1], Left[2], Left[3], Left[3]["ValType"] | (TArg extends true ? Arg<Left[3]["ValType"]> : never)> | { [RightKey in keyof Right]: LessThanColCondition<Left[0], Left[1], Left[2], Left[3], Right[RightKey][0], Right[RightKey][1], Right[RightKey][2], Right[RightKey][3]> }[keyof Right] | LessEqualValCondition<Left[0], Left[1], Left[2], Left[3], Left[3]["ValType"] | (TArg extends true ? Arg<Left[3]["ValType"]> : never)> | { [RightKey in keyof Right]: LessEqualColCondition<Left[0], Left[1], Left[2], Left[3], Right[RightKey][0], Right[RightKey][1], Right[RightKey][2], Right[RightKey][3]> }[keyof Right] | IsNullCondition<Left[0], Left[1], Left[2], Left[3]> | InCondition<Left[0], Left[1], Left[2], Left[3], TArg>;
|
package/dist/src/index.d.mts
CHANGED
|
@@ -1,11 +1,7 @@
|
|
|
1
1
|
import { DurcnoLogger } from "./logger.mjs";
|
|
2
|
-
import {
|
|
3
|
-
import { Column, ColumnConfig, notNull, primaryKey, unique } from "./columns/common.mjs";
|
|
4
|
-
import { PrimaryKeyConstraint, primaryKeyConstraint } from "./constraints/primary-key.mjs";
|
|
5
|
-
import { UniqueConstraint, uniqueConstraint } from "./constraints/unique.mjs";
|
|
6
|
-
import { index, uniqueIndex } from "./indexes.mjs";
|
|
7
|
-
import { AnyColumn, AnyTableColumn, Table, TableColumn, fk, many, one, relations, table } from "./table.mjs";
|
|
2
|
+
import { Query } from "./query-builders/query.mjs";
|
|
8
3
|
import { asc, desc } from "./query-builders/orderby-clause.mjs";
|
|
4
|
+
import { Sql, sql } from "./sql.mjs";
|
|
9
5
|
import { Filter } from "./filters/custom.mjs";
|
|
10
6
|
import { and, eq, gt, gte, isIn, isNotNull, isNull, lt, lte, ne, or } from "./filters/index.mjs";
|
|
11
7
|
import { Sequence, SequenceOptions, sequence } from "./sequence.mjs";
|
|
@@ -33,13 +29,17 @@ import { time } from "./columns/time.mjs";
|
|
|
33
29
|
import { timestamp } from "./columns/timestamp.mjs";
|
|
34
30
|
import { UuidVersion, uuid } from "./columns/uuid.mjs";
|
|
35
31
|
import { varchar } from "./columns/varchar.mjs";
|
|
32
|
+
import { PrimaryKeyConstraint, primaryKeyConstraint } from "./constraints/primary-key.mjs";
|
|
33
|
+
import { UniqueConstraint, uniqueConstraint } from "./constraints/unique.mjs";
|
|
36
34
|
import { arrayAll, arrayContainedBy, arrayContains, arrayHas, arrayOverlaps } from "./filters/array.mjs";
|
|
37
35
|
import { now, uuidv4, uuidv7 } from "./functions/index.mjs";
|
|
36
|
+
import { index, uniqueIndex } from "./indexes.mjs";
|
|
38
37
|
import { Migrations, pk } from "./models.mjs";
|
|
39
38
|
import { database } from "./db.mjs";
|
|
40
39
|
import { Arg, prequery } from "./query-builders/pre.mjs";
|
|
41
|
-
import {
|
|
42
|
-
import {
|
|
40
|
+
import { Column, ColumnConfig, notNull, primaryKey, unique } from "./columns/common.mjs";
|
|
41
|
+
import { AnyColumn, AnyTableColumn, Table, TableColumn, fk, many, one, relations, table } from "./table.mjs";
|
|
42
|
+
import { $Client, Connector, ConnectorOptions } from "./connectors/common.mjs";
|
|
43
43
|
|
|
44
44
|
//#region src/index.d.ts
|
|
45
45
|
declare const $: {
|
|
@@ -101,4 +101,4 @@ declare global {
|
|
|
101
101
|
type Prettify<T> = { [K in keyof T]: T[K] } & {};
|
|
102
102
|
}
|
|
103
103
|
//#endregion
|
|
104
|
-
export { $, type AnyColumn, type AnyTableColumn, Arg, Column, type ColumnConfig, Config, type ConnectorOptions, type DurcnoLogger, Filter, Migrations, PrimaryKeyConstraint, Query, Sql, type TableColumn, UniqueConstraint, type UuidVersion, and, arrayAll, arrayContainedBy, arrayContains, arrayHas, arrayOverlaps, asc, bigint, bigserial, boolean, bytea, char, cidr, database, date, defineConfig, desc, enumed, enumtype, eq, fk, geography, gt, gte, index, inet, integer, isIn, isNotNull, isNull, json, jsonb, lt, lte, macaddr, many, ne, notNull, now, numeric, one, or, pk, prequery, primaryKey, primaryKeyConstraint, relations, sequence, serial, smallint, smallserial, sql, table, text, time, timestamp, unique, uniqueConstraint, uniqueIndex, uuid, uuidv4, uuidv7, varchar };
|
|
104
|
+
export { $, type $Client, type AnyColumn, type AnyTableColumn, Arg, Column, type ColumnConfig, Config, type ConnectorOptions, type DurcnoLogger, Filter, Migrations, PrimaryKeyConstraint, Query, Sql, type TableColumn, UniqueConstraint, type UuidVersion, and, arrayAll, arrayContainedBy, arrayContains, arrayHas, arrayOverlaps, asc, bigint, bigserial, boolean, bytea, char, cidr, database, date, defineConfig, desc, enumed, enumtype, eq, fk, geography, gt, gte, index, inet, integer, isIn, isNotNull, isNull, json, jsonb, lt, lte, macaddr, many, ne, notNull, now, numeric, one, or, pk, prequery, primaryKey, primaryKeyConstraint, relations, sequence, serial, smallint, smallserial, sql, table, text, time, timestamp, unique, uniqueConstraint, uniqueIndex, uuid, uuidv4, uuidv7, varchar };
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { Snapshot } from "../snapshot.mjs";
|
|
2
|
+
import { DDLStatement } from "./statement.mjs";
|
|
3
|
+
|
|
4
|
+
//#region src/migration/ddl/enum.d.ts
|
|
5
|
+
/**
|
|
6
|
+
* DDL statement that creates a new PostgreSQL enum type.
|
|
7
|
+
*
|
|
8
|
+
* Generates: `CREATE TYPE "schema"."name" AS ENUM('val1', 'val2', ...);`
|
|
9
|
+
*
|
|
10
|
+
* @deprecated Use {@link CreateTypeStatement} via `ddl.createType()` instead.
|
|
11
|
+
*
|
|
12
|
+
* @example
|
|
13
|
+
* ```typescript
|
|
14
|
+
* ddl.createEnum('public', 'user_type', ['admin', 'user', 'guest']);
|
|
15
|
+
* // CREATE TYPE "public"."user_type" AS ENUM('admin', 'user', 'guest');
|
|
16
|
+
* ```
|
|
17
|
+
*/
|
|
18
|
+
declare class CreateEnumStatement extends DDLStatement {
|
|
19
|
+
private readonly schema;
|
|
20
|
+
private readonly name;
|
|
21
|
+
private readonly values;
|
|
22
|
+
readonly type: "createEnum";
|
|
23
|
+
/**
|
|
24
|
+
* @param schema - The schema the enum belongs to.
|
|
25
|
+
* @param name - The name of the enum type to create.
|
|
26
|
+
* @param values - Ordered list of allowed enum values.
|
|
27
|
+
*/
|
|
28
|
+
constructor(schema: string, name: string, values: string[]);
|
|
29
|
+
toSQL(): string;
|
|
30
|
+
applyToSnapshot(snapshot: Snapshot): void;
|
|
31
|
+
}
|
|
32
|
+
/**
|
|
33
|
+
* DDL statement that adds a value to an existing PostgreSQL enum type.
|
|
34
|
+
*
|
|
35
|
+
* Generates: `ALTER TYPE "schema"."name" ADD VALUE IF NOT EXISTS 'value' [AFTER|BEFORE 'ref'];`
|
|
36
|
+
*
|
|
37
|
+
* @remarks
|
|
38
|
+
* PostgreSQL does not support removing values from an enum.
|
|
39
|
+
* Values can only be added, optionally positioned relative to an existing value.
|
|
40
|
+
*
|
|
41
|
+
* @deprecated Use {@link AlterTypeBuilder} via `ddl.alterType()` instead.
|
|
42
|
+
*
|
|
43
|
+
* @example
|
|
44
|
+
* ```typescript
|
|
45
|
+
* // Append a value
|
|
46
|
+
* ddl.alterEnumAddValue('public', 'user_type', 'moderator');
|
|
47
|
+
*
|
|
48
|
+
* // Insert after a specific value
|
|
49
|
+
* ddl.alterEnumAddValue('public', 'user_type', 'moderator', { after: 'admin' });
|
|
50
|
+
*
|
|
51
|
+
* // Insert before a specific value
|
|
52
|
+
* ddl.alterEnumAddValue('public', 'user_type', 'moderator', { before: 'user' });
|
|
53
|
+
* ```
|
|
54
|
+
*/
|
|
55
|
+
declare class AlterEnumAddValueStatement extends DDLStatement {
|
|
56
|
+
private readonly schema;
|
|
57
|
+
private readonly name;
|
|
58
|
+
private readonly value;
|
|
59
|
+
private readonly position?;
|
|
60
|
+
readonly type: "alterEnum";
|
|
61
|
+
/**
|
|
62
|
+
* @param schema - The schema the enum belongs to.
|
|
63
|
+
* @param name - The enum type name.
|
|
64
|
+
* @param value - The new value to add.
|
|
65
|
+
* @param position - Optional positioning: `{ after: 'val' }` or `{ before: 'val' }`.
|
|
66
|
+
*/
|
|
67
|
+
constructor(schema: string, name: string, value: string, position?: {
|
|
68
|
+
after?: string;
|
|
69
|
+
before?: string;
|
|
70
|
+
} | undefined);
|
|
71
|
+
toSQL(): string;
|
|
72
|
+
applyToSnapshot(snapshot: Snapshot): void;
|
|
73
|
+
}
|
|
74
|
+
/**
|
|
75
|
+
* DDL statement that drops an existing PostgreSQL enum type.
|
|
76
|
+
*
|
|
77
|
+
* Generates: `DROP TYPE "schema"."name";`
|
|
78
|
+
*
|
|
79
|
+
* @deprecated Use {@link DropTypeStatement} via `ddl.dropType()` instead.
|
|
80
|
+
*
|
|
81
|
+
* @example
|
|
82
|
+
* ```typescript
|
|
83
|
+
* ddl.dropEnum('public', 'user_type');
|
|
84
|
+
* // DROP TYPE "public"."user_type";
|
|
85
|
+
* ```
|
|
86
|
+
*/
|
|
87
|
+
declare class DropEnumStatement extends DDLStatement {
|
|
88
|
+
private readonly schema;
|
|
89
|
+
private readonly name;
|
|
90
|
+
readonly type: "dropEnum";
|
|
91
|
+
/**
|
|
92
|
+
* @param schema - The schema the enum belongs to.
|
|
93
|
+
* @param name - The enum type name to drop.
|
|
94
|
+
*/
|
|
95
|
+
constructor(schema: string, name: string);
|
|
96
|
+
toSQL(): string;
|
|
97
|
+
applyToSnapshot(snapshot: Snapshot): void;
|
|
98
|
+
}
|
|
99
|
+
//#endregion
|
|
100
|
+
export { AlterEnumAddValueStatement, CreateEnumStatement, DropEnumStatement };
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import { DDLStatement } from "./statement.mjs";
|
|
2
|
+
//#region src/migration/ddl/enum.ts
|
|
3
|
+
/**
 * DDL statement that creates a new PostgreSQL enum type.
 *
 * Generates: `CREATE TYPE "schema"."name" AS ENUM('val1', 'val2', ...);`
 *
 * @deprecated Use {@link CreateTypeStatement} via `ddl.createType()` instead.
 *
 * @example
 * ```typescript
 * ddl.createEnum('public', 'user_type', ['admin', 'user', 'guest']);
 * // CREATE TYPE "public"."user_type" AS ENUM('admin', 'user', 'guest');
 * ```
 */
var CreateEnumStatement = class extends DDLStatement {
  type = "createEnum";
  /**
   * @param schema - The schema the enum belongs to.
   * @param name - The name of the enum type to create.
   * @param values - Ordered list of allowed enum values.
   */
  constructor(schema, name, values) {
    super();
    this.schema = schema;
    this.name = name;
    this.values = values;
  }
  toSQL() {
    // Double any embedded single quote per SQL string-literal rules so a
    // value like "it's" cannot break (or inject into) the statement.
    const literals = this.values.map((v) => `'${v.replaceAll("'", "''")}'`);
    return `CREATE TYPE "${this.schema}"."${this.name}" AS ENUM(${literals.join(", ")});`;
  }
  applyToSnapshot(snapshot) {
    const key = `${this.schema}.${this.name}`;
    // Copy the values array so later mutations of this statement's input
    // cannot alias into the snapshot.
    snapshot.enums[key] = {
      schema: this.schema,
      name: this.name,
      values: [...this.values]
    };
  }
};
|
|
41
|
+
/**
 * DDL statement that adds a value to an existing PostgreSQL enum type.
 *
 * Generates: `ALTER TYPE "schema"."name" ADD VALUE IF NOT EXISTS 'value' [AFTER|BEFORE 'ref'];`
 *
 * @remarks
 * PostgreSQL does not support removing values from an enum.
 * Values can only be added, optionally positioned relative to an existing value.
 *
 * @deprecated Use {@link AlterTypeBuilder} via `ddl.alterType()` instead.
 *
 * @example
 * ```typescript
 * // Append a value
 * ddl.alterEnumAddValue('public', 'user_type', 'moderator');
 *
 * // Insert after a specific value
 * ddl.alterEnumAddValue('public', 'user_type', 'moderator', { after: 'admin' });
 *
 * // Insert before a specific value
 * ddl.alterEnumAddValue('public', 'user_type', 'moderator', { before: 'user' });
 * ```
 */
var AlterEnumAddValueStatement = class extends DDLStatement {
  type = "alterEnum";
  /**
   * @param schema - The schema the enum belongs to.
   * @param name - The enum type name.
   * @param value - The new value to add.
   * @param position - Optional positioning: `{ after: 'val' }` or `{ before: 'val' }`.
   */
  constructor(schema, name, value, position) {
    super();
    this.schema = schema;
    this.name = name;
    this.value = value;
    this.position = position;
  }
  toSQL() {
    // Double embedded single quotes so user-supplied values cannot break
    // out of the SQL string literal.
    const quote = (s) => `'${s.replaceAll("'", "''")}'`;
    let sql = `ALTER TYPE "${this.schema}"."${this.name}" ADD VALUE IF NOT EXISTS ${quote(this.value)}`;
    if (this.position?.after) sql += ` AFTER ${quote(this.position.after)}`;
    else if (this.position?.before) sql += ` BEFORE ${quote(this.position.before)}`;
    return `${sql};`;
  }
  applyToSnapshot(snapshot) {
    const key = `${this.schema}.${this.name}`;
    const enm = snapshot.enums[key];
    // Enum unknown to the snapshot — nothing to update.
    if (!enm) return;
    if (this.position?.after) {
      const idx = enm.values.indexOf(this.position.after);
      if (idx !== -1) {
        enm.values.splice(idx + 1, 0, this.value);
        return;
      }
    } else if (this.position?.before) {
      const idx = enm.values.indexOf(this.position.before);
      if (idx !== -1) {
        enm.values.splice(idx, 0, this.value);
        return;
      }
    }
    // No position given, or the reference value was not found: append at
    // the end, matching PostgreSQL's default ADD VALUE placement.
    enm.values.push(this.value);
  }
};
|
|
105
|
+
/**
 * DDL statement that removes a PostgreSQL enum type.
 *
 * Generates: `DROP TYPE "schema"."name";`
 *
 * @deprecated Use {@link DropTypeStatement} via `ddl.dropType()` instead.
 *
 * @example
 * ```typescript
 * ddl.dropEnum('public', 'user_type');
 * // DROP TYPE "public"."user_type";
 * ```
 */
var DropEnumStatement = class extends DDLStatement {
  type = "dropEnum";
  /**
   * @param schema - The schema the enum belongs to.
   * @param name - The enum type name to drop.
   */
  constructor(schema, name) {
    super();
    this.schema = schema;
    this.name = name;
  }
  toSQL() {
    const qualified = `"${this.schema}"."${this.name}"`;
    return `DROP TYPE ${qualified};`;
  }
  applyToSnapshot(snapshot) {
    // Forget the enum entirely; keyed by its qualified name.
    delete snapshot.enums[`${this.schema}.${this.name}`];
  }
};
|
|
137
|
+
//#endregion
|
|
138
|
+
export { AlterEnumAddValueStatement, CreateEnumStatement, DropEnumStatement };
|