@prisma-next/target-postgres 0.4.1 → 0.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/codec-ids-CojIXVf9.mjs +29 -0
- package/dist/codec-ids-CojIXVf9.mjs.map +1 -0
- package/dist/codec-ids.d.mts +28 -0
- package/dist/codec-ids.d.mts.map +1 -0
- package/dist/codec-ids.mjs +3 -0
- package/dist/codec-types.d.mts +42 -0
- package/dist/codec-types.d.mts.map +1 -0
- package/dist/codec-types.mjs +3 -0
- package/dist/codecs-CE5EUsNM.d.mts +323 -0
- package/dist/codecs-CE5EUsNM.d.mts.map +1 -0
- package/dist/codecs-dzZ_dMpK.mjs +290 -0
- package/dist/codecs-dzZ_dMpK.mjs.map +1 -0
- package/dist/codecs.d.mts +2 -0
- package/dist/codecs.mjs +3 -0
- package/dist/control.d.mts +1 -1
- package/dist/control.mjs +24 -1989
- package/dist/control.mjs.map +1 -1
- package/dist/data-transform-C83dy0vk.mjs +41 -0
- package/dist/data-transform-C83dy0vk.mjs.map +1 -0
- package/dist/data-transform-D8x5m1YV.d.mts +38 -0
- package/dist/data-transform-D8x5m1YV.d.mts.map +1 -0
- package/dist/data-transform.d.mts +2 -0
- package/dist/data-transform.mjs +3 -0
- package/dist/default-normalizer-DNOpRoOF.mjs +131 -0
- package/dist/default-normalizer-DNOpRoOF.mjs.map +1 -0
- package/dist/default-normalizer.d.mts +19 -0
- package/dist/default-normalizer.d.mts.map +1 -0
- package/dist/default-normalizer.mjs +3 -0
- package/dist/{descriptor-meta-DkvCmY98.mjs → descriptor-meta-BVoVtyp-.mjs} +1 -1
- package/dist/{descriptor-meta-DkvCmY98.mjs.map → descriptor-meta-BVoVtyp-.mjs.map} +1 -1
- package/dist/errors-AFvEPZ1R.mjs +34 -0
- package/dist/errors-AFvEPZ1R.mjs.map +1 -0
- package/dist/errors.d.mts +27 -0
- package/dist/errors.d.mts.map +1 -0
- package/dist/errors.mjs +3 -0
- package/dist/issue-planner-CFjB0_oO.mjs +879 -0
- package/dist/issue-planner-CFjB0_oO.mjs.map +1 -0
- package/dist/issue-planner.d.mts +85 -0
- package/dist/issue-planner.d.mts.map +1 -0
- package/dist/issue-planner.mjs +3 -0
- package/dist/migration.d.mts +5 -79
- package/dist/migration.d.mts.map +1 -1
- package/dist/migration.mjs +6 -428
- package/dist/migration.mjs.map +1 -1
- package/dist/native-type-normalizer-CInai_oY.mjs +38 -0
- package/dist/native-type-normalizer-CInai_oY.mjs.map +1 -0
- package/dist/native-type-normalizer.d.mts +18 -0
- package/dist/native-type-normalizer.d.mts.map +1 -0
- package/dist/native-type-normalizer.mjs +3 -0
- package/dist/op-factory-call-BKlruaiC.mjs +605 -0
- package/dist/op-factory-call-BKlruaiC.mjs.map +1 -0
- package/dist/op-factory-call-C3bWXKSP.d.mts +304 -0
- package/dist/op-factory-call-C3bWXKSP.d.mts.map +1 -0
- package/dist/op-factory-call.d.mts +3 -0
- package/dist/op-factory-call.mjs +3 -0
- package/dist/pack.d.mts +1 -1
- package/dist/pack.mjs +1 -1
- package/dist/planner-B4ZSLHRI.mjs +98 -0
- package/dist/planner-B4ZSLHRI.mjs.map +1 -0
- package/dist/planner-ddl-builders-Dxvw1LHw.mjs +132 -0
- package/dist/planner-ddl-builders-Dxvw1LHw.mjs.map +1 -0
- package/dist/planner-ddl-builders.d.mts +22 -0
- package/dist/planner-ddl-builders.d.mts.map +1 -0
- package/dist/planner-ddl-builders.mjs +3 -0
- package/dist/planner-identity-values-Dju-o5GF.mjs +91 -0
- package/dist/planner-identity-values-Dju-o5GF.mjs.map +1 -0
- package/dist/planner-identity-values.d.mts +20 -0
- package/dist/planner-identity-values.d.mts.map +1 -0
- package/dist/planner-identity-values.mjs +3 -0
- package/dist/planner-produced-postgres-migration-C0GNhHGw.mjs +32 -0
- package/dist/planner-produced-postgres-migration-C0GNhHGw.mjs.map +1 -0
- package/dist/planner-produced-postgres-migration-Dw_mPMKt.d.mts +20 -0
- package/dist/planner-produced-postgres-migration-Dw_mPMKt.d.mts.map +1 -0
- package/dist/planner-produced-postgres-migration.d.mts +5 -0
- package/dist/planner-produced-postgres-migration.mjs +3 -0
- package/dist/planner-schema-lookup-B7lkypwn.mjs +29 -0
- package/dist/planner-schema-lookup-B7lkypwn.mjs.map +1 -0
- package/dist/planner-schema-lookup.d.mts +22 -0
- package/dist/planner-schema-lookup.d.mts.map +1 -0
- package/dist/planner-schema-lookup.mjs +3 -0
- package/dist/planner-sql-checks-7jkgm9TX.mjs +241 -0
- package/dist/planner-sql-checks-7jkgm9TX.mjs.map +1 -0
- package/dist/planner-sql-checks.d.mts +55 -0
- package/dist/planner-sql-checks.d.mts.map +1 -0
- package/dist/planner-sql-checks.mjs +3 -0
- package/dist/{planner-target-details-MXb3oeul.d.mts → planner-target-details-DH-azLu-.d.mts} +1 -1
- package/dist/{planner-target-details-MXb3oeul.d.mts.map → planner-target-details-DH-azLu-.d.mts.map} +1 -1
- package/dist/planner-target-details.d.mts +2 -0
- package/dist/planner-target-details.mjs +1 -0
- package/dist/planner.d.mts +74 -0
- package/dist/planner.d.mts.map +1 -0
- package/dist/planner.mjs +4 -0
- package/dist/postgres-migration-DcfWGqhe.d.mts +50 -0
- package/dist/postgres-migration-DcfWGqhe.d.mts.map +1 -0
- package/dist/postgres-migration-EGSlO4jO.mjs +52 -0
- package/dist/postgres-migration-EGSlO4jO.mjs.map +1 -0
- package/dist/render-ops-D6_DHdOK.mjs +8 -0
- package/dist/render-ops-D6_DHdOK.mjs.map +1 -0
- package/dist/render-ops.d.mts +11 -0
- package/dist/render-ops.d.mts.map +1 -0
- package/dist/render-ops.mjs +3 -0
- package/dist/render-typescript-Co3Emwgz.mjs +84 -0
- package/dist/render-typescript-Co3Emwgz.mjs.map +1 -0
- package/dist/render-typescript.d.mts +14 -0
- package/dist/render-typescript.d.mts.map +1 -0
- package/dist/render-typescript.mjs +3 -0
- package/dist/runtime.d.mts +15 -3
- package/dist/runtime.d.mts.map +1 -1
- package/dist/runtime.mjs +10 -1
- package/dist/runtime.mjs.map +1 -1
- package/dist/shared-Bxkt8pNO.d.mts +41 -0
- package/dist/shared-Bxkt8pNO.d.mts.map +1 -0
- package/dist/sql-utils-r-Lw535w.mjs +76 -0
- package/dist/sql-utils-r-Lw535w.mjs.map +1 -0
- package/dist/sql-utils.d.mts +59 -0
- package/dist/sql-utils.d.mts.map +1 -0
- package/dist/sql-utils.mjs +3 -0
- package/dist/statement-builders-CHqCtSfe.mjs +121 -0
- package/dist/statement-builders-CHqCtSfe.mjs.map +1 -0
- package/dist/statement-builders.d.mts +30 -0
- package/dist/statement-builders.d.mts.map +1 -0
- package/dist/statement-builders.mjs +3 -0
- package/dist/tables-BmdW_FWO.mjs +477 -0
- package/dist/tables-BmdW_FWO.mjs.map +1 -0
- package/dist/types-ClK03Ojd.d.mts +10 -0
- package/dist/types-ClK03Ojd.d.mts.map +1 -0
- package/dist/types.d.mts +2 -0
- package/dist/types.mjs +1 -0
- package/package.json +40 -20
- package/src/core/codec-ids.ts +30 -0
- package/src/core/codecs.ts +622 -0
- package/src/core/default-normalizer.ts +131 -0
- package/src/core/descriptor-meta.ts +1 -1
- package/src/core/errors.ts +33 -0
- package/src/core/migrations/op-factory-call.ts +1 -5
- package/src/core/migrations/operations/columns.ts +1 -1
- package/src/core/migrations/operations/constraints.ts +1 -1
- package/src/core/migrations/operations/data-transform.ts +35 -21
- package/src/core/migrations/operations/dependencies.ts +1 -1
- package/src/core/migrations/operations/enums.ts +1 -1
- package/src/core/migrations/operations/indexes.ts +1 -1
- package/src/core/migrations/operations/shared.ts +1 -1
- package/src/core/migrations/operations/tables.ts +1 -1
- package/src/core/migrations/planner-ddl-builders.ts +1 -1
- package/src/core/migrations/planner-produced-postgres-migration.ts +0 -1
- package/src/core/migrations/planner-recipes.ts +1 -1
- package/src/core/migrations/planner-sql-checks.ts +1 -1
- package/src/core/migrations/planner.ts +19 -15
- package/src/core/migrations/postgres-migration.ts +54 -1
- package/src/core/migrations/render-typescript.ts +23 -17
- package/src/core/migrations/runner.ts +47 -13
- package/src/core/migrations/statement-builders.ts +22 -6
- package/src/core/native-type-normalizer.ts +49 -0
- package/src/core/sql-utils.ts +104 -0
- package/src/exports/codec-ids.ts +1 -0
- package/src/exports/codec-types.ts +51 -0
- package/src/exports/codecs.ts +2 -0
- package/src/exports/data-transform.ts +1 -0
- package/src/exports/default-normalizer.ts +1 -0
- package/src/exports/errors.ts +1 -0
- package/src/exports/issue-planner.ts +1 -0
- package/src/exports/migration.ts +6 -0
- package/src/exports/native-type-normalizer.ts +1 -0
- package/src/exports/op-factory-call.ts +25 -0
- package/src/exports/planner-ddl-builders.ts +8 -0
- package/src/exports/planner-identity-values.ts +1 -0
- package/src/exports/planner-produced-postgres-migration.ts +1 -0
- package/src/exports/planner-schema-lookup.ts +6 -0
- package/src/exports/planner-sql-checks.ts +11 -0
- package/src/exports/planner-target-details.ts +1 -0
- package/src/exports/planner.ts +1 -0
- package/src/exports/render-ops.ts +1 -0
- package/src/exports/render-typescript.ts +1 -0
- package/src/exports/runtime.ts +19 -4
- package/src/exports/sql-utils.ts +7 -0
- package/src/exports/statement-builders.ts +7 -0
- package/src/exports/types.ts +1 -0
- package/dist/postgres-migration-BsHJHV9O.mjs +0 -2793
- package/dist/postgres-migration-BsHJHV9O.mjs.map +0 -1
package/dist/control.mjs
CHANGED
|
@@ -1,1990 +1,16 @@
|
|
|
1
|
-
import { t as postgresTargetDescriptorMeta } from "./descriptor-meta-
|
|
2
|
-
import {
|
|
3
|
-
import {
|
|
1
|
+
import { t as postgresTargetDescriptorMeta } from "./descriptor-meta-BVoVtyp-.mjs";
|
|
2
|
+
import { t as parsePostgresDefault } from "./default-normalizer-DNOpRoOF.mjs";
|
|
3
|
+
import { t as normalizeSchemaNativeType } from "./native-type-normalizer-CInai_oY.mjs";
|
|
4
|
+
import { o as renderDefaultLiteral } from "./planner-ddl-builders-Dxvw1LHw.mjs";
|
|
5
|
+
import "./issue-planner-CFjB0_oO.mjs";
|
|
6
|
+
import { t as createPostgresMigrationPlanner } from "./planner-B4ZSLHRI.mjs";
|
|
7
|
+
import { a as ensurePrismaContractSchemaStatement, i as ensureMarkerTableStatement, n as buildMergeMarkerStatements, r as ensureLedgerTableStatement, t as buildLedgerInsertStatement } from "./statement-builders-CHqCtSfe.mjs";
|
|
4
8
|
import { ifDefined } from "@prisma-next/utils/defined";
|
|
9
|
+
import { contractToSchemaIR, extractCodecControlHooks, runnerFailure, runnerSuccess } from "@prisma-next/family-sql/control";
|
|
5
10
|
import { verifySqlSchema } from "@prisma-next/family-sql/schema-verify";
|
|
6
|
-
import {
|
|
7
|
-
import { errorUnfilledPlaceholder } from "@prisma-next/errors/migration";
|
|
8
|
-
import { TsExpression, jsonToTsSource, renderImports } from "@prisma-next/ts-render";
|
|
9
|
-
import { detectScaffoldRuntime, shebangLineFor } from "@prisma-next/migration-tools/migration-ts";
|
|
10
|
-
import { readMarker } from "@prisma-next/family-sql/verify";
|
|
11
|
+
import { ok, okVoid } from "@prisma-next/utils/result";
|
|
11
12
|
import { SqlQueryError } from "@prisma-next/sql-errors";
|
|
12
13
|
|
|
13
|
-
//#region src/core/migrations/op-factory-call.ts
|
|
14
|
-
/**
|
|
15
|
-
* Postgres migration IR: one concrete `*Call` class per pure factory under
|
|
16
|
-
* `operations/`, plus a shared `PostgresOpFactoryCallNode` abstract base.
|
|
17
|
-
*
|
|
18
|
-
* Every call class carries the literal arguments its backing factory would
|
|
19
|
-
* receive, computes a human-readable `label` in its constructor, and
|
|
20
|
-
* implements two polymorphic hooks:
|
|
21
|
-
*
|
|
22
|
-
* - `toOp()` — converts the IR node to a runtime
|
|
23
|
-
* `SqlMigrationPlanOperation` by delegating to the matching pure factory
|
|
24
|
-
* under `operations/`. `DataTransformCall.toOp()` always throws
|
|
25
|
-
* `PN-MIG-2001` because a planner-generated data transform is an
|
|
26
|
-
* unfilled authoring stub by construction.
|
|
27
|
-
* - `renderTypeScript()` / `importRequirements()` — inherited from
|
|
28
|
-
* `TsExpression`. Used by `renderCallsToTypeScript` to emit the call as
|
|
29
|
-
* a TypeScript expression inside the scaffolded `migration.ts`.
|
|
30
|
-
*
|
|
31
|
-
* The abstract base and all concrete classes are package-private. External
|
|
32
|
-
* consumers see only the framework-level `OpFactoryCall` interface and the
|
|
33
|
-
* `PostgresOpFactoryCall` union.
|
|
34
|
-
*/
|
|
35
|
-
const TARGET_MIGRATION_MODULE = "@prisma-next/target-postgres/migration";
|
|
36
|
-
var PostgresOpFactoryCallNode = class extends TsExpression {
|
|
37
|
-
importRequirements() {
|
|
38
|
-
return [{
|
|
39
|
-
moduleSpecifier: TARGET_MIGRATION_MODULE,
|
|
40
|
-
symbol: this.factoryName
|
|
41
|
-
}];
|
|
42
|
-
}
|
|
43
|
-
freeze() {
|
|
44
|
-
Object.freeze(this);
|
|
45
|
-
}
|
|
46
|
-
};
|
|
47
|
-
var CreateTableCall = class extends PostgresOpFactoryCallNode {
|
|
48
|
-
factoryName = "createTable";
|
|
49
|
-
operationClass = "additive";
|
|
50
|
-
schemaName;
|
|
51
|
-
tableName;
|
|
52
|
-
columns;
|
|
53
|
-
primaryKey;
|
|
54
|
-
label;
|
|
55
|
-
constructor(schemaName, tableName, columns, primaryKey) {
|
|
56
|
-
super();
|
|
57
|
-
this.schemaName = schemaName;
|
|
58
|
-
this.tableName = tableName;
|
|
59
|
-
this.columns = columns;
|
|
60
|
-
this.primaryKey = primaryKey;
|
|
61
|
-
this.label = `Create table "${tableName}"`;
|
|
62
|
-
this.freeze();
|
|
63
|
-
}
|
|
64
|
-
toOp() {
|
|
65
|
-
return createTable(this.schemaName, this.tableName, this.columns, this.primaryKey);
|
|
66
|
-
}
|
|
67
|
-
renderTypeScript() {
|
|
68
|
-
const args = [
|
|
69
|
-
jsonToTsSource(this.schemaName),
|
|
70
|
-
jsonToTsSource(this.tableName),
|
|
71
|
-
jsonToTsSource(this.columns)
|
|
72
|
-
];
|
|
73
|
-
if (this.primaryKey) args.push(jsonToTsSource(this.primaryKey));
|
|
74
|
-
return `createTable(${args.join(", ")})`;
|
|
75
|
-
}
|
|
76
|
-
};
|
|
77
|
-
var DropTableCall = class extends PostgresOpFactoryCallNode {
|
|
78
|
-
factoryName = "dropTable";
|
|
79
|
-
operationClass = "destructive";
|
|
80
|
-
schemaName;
|
|
81
|
-
tableName;
|
|
82
|
-
label;
|
|
83
|
-
constructor(schemaName, tableName) {
|
|
84
|
-
super();
|
|
85
|
-
this.schemaName = schemaName;
|
|
86
|
-
this.tableName = tableName;
|
|
87
|
-
this.label = `Drop table "${tableName}"`;
|
|
88
|
-
this.freeze();
|
|
89
|
-
}
|
|
90
|
-
toOp() {
|
|
91
|
-
return dropTable(this.schemaName, this.tableName);
|
|
92
|
-
}
|
|
93
|
-
renderTypeScript() {
|
|
94
|
-
return `dropTable(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)})`;
|
|
95
|
-
}
|
|
96
|
-
};
|
|
97
|
-
var AddColumnCall = class extends PostgresOpFactoryCallNode {
|
|
98
|
-
factoryName = "addColumn";
|
|
99
|
-
operationClass = "additive";
|
|
100
|
-
schemaName;
|
|
101
|
-
tableName;
|
|
102
|
-
column;
|
|
103
|
-
label;
|
|
104
|
-
constructor(schemaName, tableName, column) {
|
|
105
|
-
super();
|
|
106
|
-
this.schemaName = schemaName;
|
|
107
|
-
this.tableName = tableName;
|
|
108
|
-
this.column = column;
|
|
109
|
-
this.label = `Add column "${column.name}" to "${tableName}"`;
|
|
110
|
-
this.freeze();
|
|
111
|
-
}
|
|
112
|
-
toOp() {
|
|
113
|
-
return addColumn(this.schemaName, this.tableName, this.column);
|
|
114
|
-
}
|
|
115
|
-
renderTypeScript() {
|
|
116
|
-
return `addColumn(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.column)})`;
|
|
117
|
-
}
|
|
118
|
-
};
|
|
119
|
-
var DropColumnCall = class extends PostgresOpFactoryCallNode {
|
|
120
|
-
factoryName = "dropColumn";
|
|
121
|
-
operationClass = "destructive";
|
|
122
|
-
schemaName;
|
|
123
|
-
tableName;
|
|
124
|
-
columnName;
|
|
125
|
-
label;
|
|
126
|
-
constructor(schemaName, tableName, columnName) {
|
|
127
|
-
super();
|
|
128
|
-
this.schemaName = schemaName;
|
|
129
|
-
this.tableName = tableName;
|
|
130
|
-
this.columnName = columnName;
|
|
131
|
-
this.label = `Drop column "${columnName}" from "${tableName}"`;
|
|
132
|
-
this.freeze();
|
|
133
|
-
}
|
|
134
|
-
toOp() {
|
|
135
|
-
return dropColumn(this.schemaName, this.tableName, this.columnName);
|
|
136
|
-
}
|
|
137
|
-
renderTypeScript() {
|
|
138
|
-
return `dropColumn(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.columnName)})`;
|
|
139
|
-
}
|
|
140
|
-
};
|
|
141
|
-
var AlterColumnTypeCall = class extends PostgresOpFactoryCallNode {
|
|
142
|
-
factoryName = "alterColumnType";
|
|
143
|
-
operationClass = "destructive";
|
|
144
|
-
schemaName;
|
|
145
|
-
tableName;
|
|
146
|
-
columnName;
|
|
147
|
-
options;
|
|
148
|
-
label;
|
|
149
|
-
constructor(schemaName, tableName, columnName, options) {
|
|
150
|
-
super();
|
|
151
|
-
this.schemaName = schemaName;
|
|
152
|
-
this.tableName = tableName;
|
|
153
|
-
this.columnName = columnName;
|
|
154
|
-
this.options = options;
|
|
155
|
-
this.label = `Alter type of "${tableName}"."${columnName}" to ${options.rawTargetTypeForLabel}`;
|
|
156
|
-
this.freeze();
|
|
157
|
-
}
|
|
158
|
-
toOp() {
|
|
159
|
-
return alterColumnType(this.schemaName, this.tableName, this.columnName, this.options);
|
|
160
|
-
}
|
|
161
|
-
renderTypeScript() {
|
|
162
|
-
return `alterColumnType(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.columnName)}, ${jsonToTsSource(this.options)})`;
|
|
163
|
-
}
|
|
164
|
-
};
|
|
165
|
-
var SetNotNullCall = class extends PostgresOpFactoryCallNode {
|
|
166
|
-
factoryName = "setNotNull";
|
|
167
|
-
operationClass = "destructive";
|
|
168
|
-
schemaName;
|
|
169
|
-
tableName;
|
|
170
|
-
columnName;
|
|
171
|
-
label;
|
|
172
|
-
constructor(schemaName, tableName, columnName) {
|
|
173
|
-
super();
|
|
174
|
-
this.schemaName = schemaName;
|
|
175
|
-
this.tableName = tableName;
|
|
176
|
-
this.columnName = columnName;
|
|
177
|
-
this.label = `Set NOT NULL on "${tableName}"."${columnName}"`;
|
|
178
|
-
this.freeze();
|
|
179
|
-
}
|
|
180
|
-
toOp() {
|
|
181
|
-
return setNotNull(this.schemaName, this.tableName, this.columnName);
|
|
182
|
-
}
|
|
183
|
-
renderTypeScript() {
|
|
184
|
-
return `setNotNull(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.columnName)})`;
|
|
185
|
-
}
|
|
186
|
-
};
|
|
187
|
-
var DropNotNullCall = class extends PostgresOpFactoryCallNode {
|
|
188
|
-
factoryName = "dropNotNull";
|
|
189
|
-
operationClass = "widening";
|
|
190
|
-
schemaName;
|
|
191
|
-
tableName;
|
|
192
|
-
columnName;
|
|
193
|
-
label;
|
|
194
|
-
constructor(schemaName, tableName, columnName) {
|
|
195
|
-
super();
|
|
196
|
-
this.schemaName = schemaName;
|
|
197
|
-
this.tableName = tableName;
|
|
198
|
-
this.columnName = columnName;
|
|
199
|
-
this.label = `Drop NOT NULL on "${tableName}"."${columnName}"`;
|
|
200
|
-
this.freeze();
|
|
201
|
-
}
|
|
202
|
-
toOp() {
|
|
203
|
-
return dropNotNull(this.schemaName, this.tableName, this.columnName);
|
|
204
|
-
}
|
|
205
|
-
renderTypeScript() {
|
|
206
|
-
return `dropNotNull(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.columnName)})`;
|
|
207
|
-
}
|
|
208
|
-
};
|
|
209
|
-
var SetDefaultCall = class extends PostgresOpFactoryCallNode {
|
|
210
|
-
factoryName = "setDefault";
|
|
211
|
-
operationClass;
|
|
212
|
-
schemaName;
|
|
213
|
-
tableName;
|
|
214
|
-
columnName;
|
|
215
|
-
defaultSql;
|
|
216
|
-
label;
|
|
217
|
-
constructor(schemaName, tableName, columnName, defaultSql, operationClass = "additive") {
|
|
218
|
-
super();
|
|
219
|
-
this.schemaName = schemaName;
|
|
220
|
-
this.tableName = tableName;
|
|
221
|
-
this.columnName = columnName;
|
|
222
|
-
this.defaultSql = defaultSql;
|
|
223
|
-
this.operationClass = operationClass;
|
|
224
|
-
this.label = `Set default on "${tableName}"."${columnName}"`;
|
|
225
|
-
this.freeze();
|
|
226
|
-
}
|
|
227
|
-
toOp() {
|
|
228
|
-
return setDefault(this.schemaName, this.tableName, this.columnName, this.defaultSql, this.operationClass);
|
|
229
|
-
}
|
|
230
|
-
renderTypeScript() {
|
|
231
|
-
const args = [
|
|
232
|
-
jsonToTsSource(this.schemaName),
|
|
233
|
-
jsonToTsSource(this.tableName),
|
|
234
|
-
jsonToTsSource(this.columnName),
|
|
235
|
-
jsonToTsSource(this.defaultSql)
|
|
236
|
-
];
|
|
237
|
-
if (this.operationClass !== "additive") args.push(jsonToTsSource(this.operationClass));
|
|
238
|
-
return `setDefault(${args.join(", ")})`;
|
|
239
|
-
}
|
|
240
|
-
};
|
|
241
|
-
var DropDefaultCall = class extends PostgresOpFactoryCallNode {
|
|
242
|
-
factoryName = "dropDefault";
|
|
243
|
-
operationClass = "destructive";
|
|
244
|
-
schemaName;
|
|
245
|
-
tableName;
|
|
246
|
-
columnName;
|
|
247
|
-
label;
|
|
248
|
-
constructor(schemaName, tableName, columnName) {
|
|
249
|
-
super();
|
|
250
|
-
this.schemaName = schemaName;
|
|
251
|
-
this.tableName = tableName;
|
|
252
|
-
this.columnName = columnName;
|
|
253
|
-
this.label = `Drop default on "${tableName}"."${columnName}"`;
|
|
254
|
-
this.freeze();
|
|
255
|
-
}
|
|
256
|
-
toOp() {
|
|
257
|
-
return dropDefault(this.schemaName, this.tableName, this.columnName);
|
|
258
|
-
}
|
|
259
|
-
renderTypeScript() {
|
|
260
|
-
return `dropDefault(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.columnName)})`;
|
|
261
|
-
}
|
|
262
|
-
};
|
|
263
|
-
var AddPrimaryKeyCall = class extends PostgresOpFactoryCallNode {
|
|
264
|
-
factoryName = "addPrimaryKey";
|
|
265
|
-
operationClass = "additive";
|
|
266
|
-
schemaName;
|
|
267
|
-
tableName;
|
|
268
|
-
constraintName;
|
|
269
|
-
columns;
|
|
270
|
-
label;
|
|
271
|
-
constructor(schemaName, tableName, constraintName, columns) {
|
|
272
|
-
super();
|
|
273
|
-
this.schemaName = schemaName;
|
|
274
|
-
this.tableName = tableName;
|
|
275
|
-
this.constraintName = constraintName;
|
|
276
|
-
this.columns = columns;
|
|
277
|
-
this.label = `Add primary key on "${tableName}"`;
|
|
278
|
-
this.freeze();
|
|
279
|
-
}
|
|
280
|
-
toOp() {
|
|
281
|
-
return addPrimaryKey(this.schemaName, this.tableName, this.constraintName, this.columns);
|
|
282
|
-
}
|
|
283
|
-
renderTypeScript() {
|
|
284
|
-
return `addPrimaryKey(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.constraintName)}, ${jsonToTsSource(this.columns)})`;
|
|
285
|
-
}
|
|
286
|
-
};
|
|
287
|
-
var AddUniqueCall = class extends PostgresOpFactoryCallNode {
|
|
288
|
-
factoryName = "addUnique";
|
|
289
|
-
operationClass = "additive";
|
|
290
|
-
schemaName;
|
|
291
|
-
tableName;
|
|
292
|
-
constraintName;
|
|
293
|
-
columns;
|
|
294
|
-
label;
|
|
295
|
-
constructor(schemaName, tableName, constraintName, columns) {
|
|
296
|
-
super();
|
|
297
|
-
this.schemaName = schemaName;
|
|
298
|
-
this.tableName = tableName;
|
|
299
|
-
this.constraintName = constraintName;
|
|
300
|
-
this.columns = columns;
|
|
301
|
-
this.label = `Add unique constraint on "${tableName}" (${columns.join(", ")})`;
|
|
302
|
-
this.freeze();
|
|
303
|
-
}
|
|
304
|
-
toOp() {
|
|
305
|
-
return addUnique(this.schemaName, this.tableName, this.constraintName, this.columns);
|
|
306
|
-
}
|
|
307
|
-
renderTypeScript() {
|
|
308
|
-
return `addUnique(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.constraintName)}, ${jsonToTsSource(this.columns)})`;
|
|
309
|
-
}
|
|
310
|
-
};
|
|
311
|
-
var AddForeignKeyCall = class extends PostgresOpFactoryCallNode {
|
|
312
|
-
factoryName = "addForeignKey";
|
|
313
|
-
operationClass = "additive";
|
|
314
|
-
schemaName;
|
|
315
|
-
tableName;
|
|
316
|
-
fk;
|
|
317
|
-
label;
|
|
318
|
-
constructor(schemaName, tableName, fk) {
|
|
319
|
-
super();
|
|
320
|
-
this.schemaName = schemaName;
|
|
321
|
-
this.tableName = tableName;
|
|
322
|
-
this.fk = fk;
|
|
323
|
-
this.label = `Add foreign key "${fk.name}" on "${tableName}"`;
|
|
324
|
-
this.freeze();
|
|
325
|
-
}
|
|
326
|
-
toOp() {
|
|
327
|
-
return addForeignKey(this.schemaName, this.tableName, this.fk);
|
|
328
|
-
}
|
|
329
|
-
renderTypeScript() {
|
|
330
|
-
return `addForeignKey(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.fk)})`;
|
|
331
|
-
}
|
|
332
|
-
};
|
|
333
|
-
var DropConstraintCall = class extends PostgresOpFactoryCallNode {
|
|
334
|
-
factoryName = "dropConstraint";
|
|
335
|
-
operationClass = "destructive";
|
|
336
|
-
schemaName;
|
|
337
|
-
tableName;
|
|
338
|
-
constraintName;
|
|
339
|
-
kind;
|
|
340
|
-
label;
|
|
341
|
-
constructor(schemaName, tableName, constraintName, kind = "unique") {
|
|
342
|
-
super();
|
|
343
|
-
this.schemaName = schemaName;
|
|
344
|
-
this.tableName = tableName;
|
|
345
|
-
this.constraintName = constraintName;
|
|
346
|
-
this.kind = kind;
|
|
347
|
-
this.label = `Drop constraint "${constraintName}" on "${tableName}"`;
|
|
348
|
-
this.freeze();
|
|
349
|
-
}
|
|
350
|
-
toOp() {
|
|
351
|
-
return dropConstraint(this.schemaName, this.tableName, this.constraintName, this.kind);
|
|
352
|
-
}
|
|
353
|
-
renderTypeScript() {
|
|
354
|
-
const args = [
|
|
355
|
-
jsonToTsSource(this.schemaName),
|
|
356
|
-
jsonToTsSource(this.tableName),
|
|
357
|
-
jsonToTsSource(this.constraintName)
|
|
358
|
-
];
|
|
359
|
-
if (this.kind !== "unique") args.push(jsonToTsSource(this.kind));
|
|
360
|
-
return `dropConstraint(${args.join(", ")})`;
|
|
361
|
-
}
|
|
362
|
-
};
|
|
363
|
-
var CreateIndexCall = class extends PostgresOpFactoryCallNode {
|
|
364
|
-
factoryName = "createIndex";
|
|
365
|
-
operationClass = "additive";
|
|
366
|
-
schemaName;
|
|
367
|
-
tableName;
|
|
368
|
-
indexName;
|
|
369
|
-
columns;
|
|
370
|
-
label;
|
|
371
|
-
constructor(schemaName, tableName, indexName, columns) {
|
|
372
|
-
super();
|
|
373
|
-
this.schemaName = schemaName;
|
|
374
|
-
this.tableName = tableName;
|
|
375
|
-
this.indexName = indexName;
|
|
376
|
-
this.columns = columns;
|
|
377
|
-
this.label = `Create index "${indexName}" on "${tableName}"`;
|
|
378
|
-
this.freeze();
|
|
379
|
-
}
|
|
380
|
-
toOp() {
|
|
381
|
-
return createIndex(this.schemaName, this.tableName, this.indexName, this.columns);
|
|
382
|
-
}
|
|
383
|
-
renderTypeScript() {
|
|
384
|
-
return `createIndex(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.indexName)}, ${jsonToTsSource(this.columns)})`;
|
|
385
|
-
}
|
|
386
|
-
};
|
|
387
|
-
var DropIndexCall = class extends PostgresOpFactoryCallNode {
|
|
388
|
-
factoryName = "dropIndex";
|
|
389
|
-
operationClass = "destructive";
|
|
390
|
-
schemaName;
|
|
391
|
-
tableName;
|
|
392
|
-
indexName;
|
|
393
|
-
label;
|
|
394
|
-
constructor(schemaName, tableName, indexName) {
|
|
395
|
-
super();
|
|
396
|
-
this.schemaName = schemaName;
|
|
397
|
-
this.tableName = tableName;
|
|
398
|
-
this.indexName = indexName;
|
|
399
|
-
this.label = `Drop index "${indexName}"`;
|
|
400
|
-
this.freeze();
|
|
401
|
-
}
|
|
402
|
-
toOp() {
|
|
403
|
-
return dropIndex(this.schemaName, this.tableName, this.indexName);
|
|
404
|
-
}
|
|
405
|
-
renderTypeScript() {
|
|
406
|
-
return `dropIndex(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.tableName)}, ${jsonToTsSource(this.indexName)})`;
|
|
407
|
-
}
|
|
408
|
-
};
|
|
409
|
-
var CreateEnumTypeCall = class extends PostgresOpFactoryCallNode {
|
|
410
|
-
factoryName = "createEnumType";
|
|
411
|
-
operationClass = "additive";
|
|
412
|
-
schemaName;
|
|
413
|
-
typeName;
|
|
414
|
-
values;
|
|
415
|
-
label;
|
|
416
|
-
constructor(schemaName, typeName, values) {
|
|
417
|
-
super();
|
|
418
|
-
this.schemaName = schemaName;
|
|
419
|
-
this.typeName = typeName;
|
|
420
|
-
this.values = values;
|
|
421
|
-
this.label = `Create enum type "${typeName}"`;
|
|
422
|
-
this.freeze();
|
|
423
|
-
}
|
|
424
|
-
toOp() {
|
|
425
|
-
return createEnumType(this.schemaName, this.typeName, this.values);
|
|
426
|
-
}
|
|
427
|
-
renderTypeScript() {
|
|
428
|
-
return `createEnumType(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.typeName)}, ${jsonToTsSource(this.values)})`;
|
|
429
|
-
}
|
|
430
|
-
};
|
|
431
|
-
var AddEnumValuesCall = class extends PostgresOpFactoryCallNode {
|
|
432
|
-
factoryName = "addEnumValues";
|
|
433
|
-
operationClass = "additive";
|
|
434
|
-
schemaName;
|
|
435
|
-
typeName;
|
|
436
|
-
nativeType;
|
|
437
|
-
values;
|
|
438
|
-
label;
|
|
439
|
-
constructor(schemaName, typeName, nativeType, values) {
|
|
440
|
-
super();
|
|
441
|
-
this.schemaName = schemaName;
|
|
442
|
-
this.typeName = typeName;
|
|
443
|
-
this.nativeType = nativeType;
|
|
444
|
-
this.values = values;
|
|
445
|
-
this.label = `Add values to enum type "${typeName}": ${values.join(", ")}`;
|
|
446
|
-
this.freeze();
|
|
447
|
-
}
|
|
448
|
-
toOp() {
|
|
449
|
-
return addEnumValues(this.schemaName, this.typeName, this.nativeType, this.values);
|
|
450
|
-
}
|
|
451
|
-
renderTypeScript() {
|
|
452
|
-
return `addEnumValues(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.typeName)}, ${jsonToTsSource(this.nativeType)}, ${jsonToTsSource(this.values)})`;
|
|
453
|
-
}
|
|
454
|
-
};
|
|
455
|
-
var DropEnumTypeCall = class extends PostgresOpFactoryCallNode {
|
|
456
|
-
factoryName = "dropEnumType";
|
|
457
|
-
operationClass = "destructive";
|
|
458
|
-
schemaName;
|
|
459
|
-
typeName;
|
|
460
|
-
label;
|
|
461
|
-
constructor(schemaName, typeName) {
|
|
462
|
-
super();
|
|
463
|
-
this.schemaName = schemaName;
|
|
464
|
-
this.typeName = typeName;
|
|
465
|
-
this.label = `Drop enum type "${typeName}"`;
|
|
466
|
-
this.freeze();
|
|
467
|
-
}
|
|
468
|
-
toOp() {
|
|
469
|
-
return dropEnumType(this.schemaName, this.typeName);
|
|
470
|
-
}
|
|
471
|
-
renderTypeScript() {
|
|
472
|
-
return `dropEnumType(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.typeName)})`;
|
|
473
|
-
}
|
|
474
|
-
};
|
|
475
|
-
var RenameTypeCall = class extends PostgresOpFactoryCallNode {
|
|
476
|
-
factoryName = "renameType";
|
|
477
|
-
operationClass = "destructive";
|
|
478
|
-
schemaName;
|
|
479
|
-
fromName;
|
|
480
|
-
toName;
|
|
481
|
-
label;
|
|
482
|
-
constructor(schemaName, fromName, toName) {
|
|
483
|
-
super();
|
|
484
|
-
this.schemaName = schemaName;
|
|
485
|
-
this.fromName = fromName;
|
|
486
|
-
this.toName = toName;
|
|
487
|
-
this.label = `Rename type "${fromName}" to "${toName}"`;
|
|
488
|
-
this.freeze();
|
|
489
|
-
}
|
|
490
|
-
toOp() {
|
|
491
|
-
return renameType(this.schemaName, this.fromName, this.toName);
|
|
492
|
-
}
|
|
493
|
-
renderTypeScript() {
|
|
494
|
-
return `renameType(${jsonToTsSource(this.schemaName)}, ${jsonToTsSource(this.fromName)}, ${jsonToTsSource(this.toName)})`;
|
|
495
|
-
}
|
|
496
|
-
};
|
|
497
|
-
/**
|
|
498
|
-
* Laundered pre-built operation.
|
|
499
|
-
*
|
|
500
|
-
* Wraps an already-materialized `SqlMigrationPlanOperation` — typically one
|
|
501
|
-
* produced by a SQL-family method, a codec control hook, or a component
|
|
502
|
-
* `databaseDependencies.init` declaration — so the planner can carry it
|
|
503
|
-
* alongside IR nodes without reverse-engineering it into a
|
|
504
|
-
* structured call class. Doubles as the user-facing escape hatch for raw
|
|
505
|
-
* migrations: authors can pass a full op shape to `rawSql({...})`.
|
|
506
|
-
*
|
|
507
|
-
* `toOp()` returns the stored op unchanged. `renderTypeScript()` emits
|
|
508
|
-
* `rawSql({...})` with the op serialized as a JSON literal — round-tripping
|
|
509
|
-
* requires every field on the op to be JSON-serializable (no closures).
|
|
510
|
-
*/
|
|
511
|
-
var RawSqlCall = class extends PostgresOpFactoryCallNode {
|
|
512
|
-
factoryName = "rawSql";
|
|
513
|
-
operationClass;
|
|
514
|
-
label;
|
|
515
|
-
op;
|
|
516
|
-
constructor(op) {
|
|
517
|
-
super();
|
|
518
|
-
this.op = op;
|
|
519
|
-
this.label = op.label;
|
|
520
|
-
this.operationClass = op.operationClass;
|
|
521
|
-
this.freeze();
|
|
522
|
-
}
|
|
523
|
-
toOp() {
|
|
524
|
-
return this.op;
|
|
525
|
-
}
|
|
526
|
-
renderTypeScript() {
|
|
527
|
-
return `rawSql(${jsonToTsSource(this.op)})`;
|
|
528
|
-
}
|
|
529
|
-
};
|
|
530
|
-
var CreateExtensionCall = class extends PostgresOpFactoryCallNode {
|
|
531
|
-
factoryName = "createExtension";
|
|
532
|
-
operationClass = "additive";
|
|
533
|
-
extensionName;
|
|
534
|
-
label;
|
|
535
|
-
constructor(extensionName) {
|
|
536
|
-
super();
|
|
537
|
-
this.extensionName = extensionName;
|
|
538
|
-
this.label = `Create extension "${extensionName}"`;
|
|
539
|
-
this.freeze();
|
|
540
|
-
}
|
|
541
|
-
toOp() {
|
|
542
|
-
return createExtension(this.extensionName);
|
|
543
|
-
}
|
|
544
|
-
renderTypeScript() {
|
|
545
|
-
return `createExtension(${jsonToTsSource(this.extensionName)})`;
|
|
546
|
-
}
|
|
547
|
-
};
|
|
548
|
-
var CreateSchemaCall = class extends PostgresOpFactoryCallNode {
|
|
549
|
-
factoryName = "createSchema";
|
|
550
|
-
operationClass = "additive";
|
|
551
|
-
schemaName;
|
|
552
|
-
label;
|
|
553
|
-
constructor(schemaName) {
|
|
554
|
-
super();
|
|
555
|
-
this.schemaName = schemaName;
|
|
556
|
-
this.label = `Create schema "${schemaName}"`;
|
|
557
|
-
this.freeze();
|
|
558
|
-
}
|
|
559
|
-
toOp() {
|
|
560
|
-
return createSchema(this.schemaName);
|
|
561
|
-
}
|
|
562
|
-
renderTypeScript() {
|
|
563
|
-
return `createSchema(${jsonToTsSource(this.schemaName)})`;
|
|
564
|
-
}
|
|
565
|
-
};
|
|
566
|
-
/**
|
|
567
|
-
* A planner-generated data-transform stub. `checkSlot` and `runSlot` name
|
|
568
|
-
* the unfilled authoring slots that the rendered `migration.ts` will expose
|
|
569
|
-
* to the user via `placeholder("…")` calls. `toOp()` always throws
|
|
570
|
-
* `PN-MIG-2001`: the planner cannot lower a stubbed transform to a runtime
|
|
571
|
-
* op — the user must fill the rendered `migration.ts` and re-emit.
|
|
572
|
-
*/
|
|
573
|
-
var DataTransformCall = class extends PostgresOpFactoryCallNode {
|
|
574
|
-
factoryName = "dataTransform";
|
|
575
|
-
operationClass;
|
|
576
|
-
label;
|
|
577
|
-
checkSlot;
|
|
578
|
-
runSlot;
|
|
579
|
-
constructor(label, checkSlot, runSlot, operationClass = "data") {
|
|
580
|
-
super();
|
|
581
|
-
this.label = label;
|
|
582
|
-
this.checkSlot = checkSlot;
|
|
583
|
-
this.runSlot = runSlot;
|
|
584
|
-
this.operationClass = operationClass;
|
|
585
|
-
this.freeze();
|
|
586
|
-
}
|
|
587
|
-
toOp() {
|
|
588
|
-
throw errorUnfilledPlaceholder(this.label);
|
|
589
|
-
}
|
|
590
|
-
renderTypeScript() {
|
|
591
|
-
return [
|
|
592
|
-
`dataTransform(endContract, ${jsonToTsSource(this.label)}, {`,
|
|
593
|
-
` check: () => placeholder(${jsonToTsSource(this.checkSlot)}),`,
|
|
594
|
-
` run: () => placeholder(${jsonToTsSource(this.runSlot)}),`,
|
|
595
|
-
"})"
|
|
596
|
-
].join("\n");
|
|
597
|
-
}
|
|
598
|
-
importRequirements() {
|
|
599
|
-
return [
|
|
600
|
-
{
|
|
601
|
-
moduleSpecifier: TARGET_MIGRATION_MODULE,
|
|
602
|
-
symbol: this.factoryName
|
|
603
|
-
},
|
|
604
|
-
{
|
|
605
|
-
moduleSpecifier: TARGET_MIGRATION_MODULE,
|
|
606
|
-
symbol: "placeholder"
|
|
607
|
-
},
|
|
608
|
-
{
|
|
609
|
-
moduleSpecifier: "./end-contract.json",
|
|
610
|
-
symbol: "endContract",
|
|
611
|
-
kind: "default",
|
|
612
|
-
attributes: { type: "json" }
|
|
613
|
-
}
|
|
614
|
-
];
|
|
615
|
-
}
|
|
616
|
-
};
|
|
617
|
-
|
|
618
|
-
//#endregion
|
|
619
|
-
//#region src/core/migrations/planner-ddl-builders.ts
|
|
620
|
-
/**
|
|
621
|
-
* Pattern for safe PostgreSQL type names.
|
|
622
|
-
* Allows letters, digits, underscores, spaces (for "double precision", "character varying"),
|
|
623
|
-
* and trailing [] for array types.
|
|
624
|
-
*/
|
|
625
|
-
const SAFE_NATIVE_TYPE_PATTERN = /^[a-zA-Z][a-zA-Z0-9_ ]*(\[\])?$/;
|
|
626
|
-
function assertSafeNativeType(nativeType) {
|
|
627
|
-
if (!SAFE_NATIVE_TYPE_PATTERN.test(nativeType)) throw new Error(`Unsafe native type name in contract: "${nativeType}". Native type names must match /^[a-zA-Z][a-zA-Z0-9_ ]*(\\[\\])?\$/`);
|
|
628
|
-
}
|
|
629
|
-
/**
|
|
630
|
-
* Sanity check against accidental SQL injection from malformed contract files.
|
|
631
|
-
* Rejects semicolons, SQL comment tokens, and dollar-quoting.
|
|
632
|
-
* Not a comprehensive security boundary — the contract is developer-authored.
|
|
633
|
-
*/
|
|
634
|
-
function assertSafeDefaultExpression(expression) {
|
|
635
|
-
if (expression.includes(";") || /--|\/\*|\$\$|\bSELECT\b/i.test(expression)) throw new Error(`Unsafe default expression in contract: "${expression}". Default expressions must not contain semicolons, SQL comment tokens, dollar-quoting, or subqueries.`);
|
|
636
|
-
}
|
|
637
|
-
/**
|
|
638
|
-
* Renders the SQL type for a column in DDL context.
|
|
639
|
-
*
|
|
640
|
-
* @param allowPseudoTypes - When true (default), autoincrement integer columns
|
|
641
|
-
* produce SERIAL/BIGSERIAL/SMALLSERIAL pseudo-types. Set to false for contexts
|
|
642
|
-
* like ALTER COLUMN TYPE where pseudo-types are invalid.
|
|
643
|
-
*/
|
|
644
|
-
function buildColumnTypeSql(column, codecHooks, storageTypes = {}, allowPseudoTypes = true) {
|
|
645
|
-
const resolved = resolveColumnTypeMetadata(column, storageTypes);
|
|
646
|
-
if (allowPseudoTypes) {
|
|
647
|
-
const columnDefault = column.default;
|
|
648
|
-
if (columnDefault?.kind === "function" && columnDefault.expression === "autoincrement()") {
|
|
649
|
-
if (resolved.nativeType === "int4" || resolved.nativeType === "integer") return "SERIAL";
|
|
650
|
-
if (resolved.nativeType === "int8" || resolved.nativeType === "bigint") return "BIGSERIAL";
|
|
651
|
-
if (resolved.nativeType === "int2" || resolved.nativeType === "smallint") return "SMALLSERIAL";
|
|
652
|
-
}
|
|
653
|
-
}
|
|
654
|
-
const expanded = expandParameterizedTypeSql(resolved, codecHooks);
|
|
655
|
-
if (expanded !== null) return expanded;
|
|
656
|
-
if (column.typeRef) return quoteIdentifier(resolved.nativeType);
|
|
657
|
-
assertSafeNativeType(resolved.nativeType);
|
|
658
|
-
return resolved.nativeType;
|
|
659
|
-
}
|
|
660
|
-
function expandParameterizedTypeSql(column, codecHooks) {
|
|
661
|
-
if (!column.typeParams) return null;
|
|
662
|
-
if (!column.codecId) throw new Error(`Column declares typeParams for nativeType "${column.nativeType}" but has no codecId. Ensure the column is associated with a codec.`);
|
|
663
|
-
const hooks = codecHooks.get(column.codecId);
|
|
664
|
-
if (!hooks?.expandNativeType) {
|
|
665
|
-
if (hooks?.planTypeOperations) return null;
|
|
666
|
-
throw new Error(`Column declares typeParams for nativeType "${column.nativeType}" but no expandNativeType hook is registered for codecId "${column.codecId}". Ensure the extension providing this codec is included in extensionPacks.`);
|
|
667
|
-
}
|
|
668
|
-
const expanded = hooks.expandNativeType({
|
|
669
|
-
nativeType: column.nativeType,
|
|
670
|
-
codecId: column.codecId,
|
|
671
|
-
typeParams: column.typeParams
|
|
672
|
-
});
|
|
673
|
-
return expanded !== column.nativeType ? expanded : null;
|
|
674
|
-
}
|
|
675
|
-
/** Autoincrement columns use SERIAL types, so this returns empty for them. */
|
|
676
|
-
function buildColumnDefaultSql(columnDefault, column) {
|
|
677
|
-
if (!columnDefault) return "";
|
|
678
|
-
switch (columnDefault.kind) {
|
|
679
|
-
case "literal": return `DEFAULT ${renderDefaultLiteral(columnDefault.value, column)}`;
|
|
680
|
-
case "function":
|
|
681
|
-
if (columnDefault.expression === "autoincrement()") return "";
|
|
682
|
-
assertSafeDefaultExpression(columnDefault.expression);
|
|
683
|
-
return `DEFAULT (${columnDefault.expression})`;
|
|
684
|
-
case "sequence": return `DEFAULT nextval('${escapeLiteral(quoteIdentifier(columnDefault.name))}'::regclass)`;
|
|
685
|
-
}
|
|
686
|
-
}
|
|
687
|
-
function renderDefaultLiteral(value, column) {
|
|
688
|
-
const isJsonColumn = column?.nativeType === "json" || column?.nativeType === "jsonb";
|
|
689
|
-
if (value instanceof Date) return `'${escapeLiteral(value.toISOString())}'`;
|
|
690
|
-
if (typeof value === "string") return `'${escapeLiteral(value)}'`;
|
|
691
|
-
if (typeof value === "number" || typeof value === "boolean") return String(value);
|
|
692
|
-
if (value === null) return "NULL";
|
|
693
|
-
const json = JSON.stringify(value);
|
|
694
|
-
if (isJsonColumn) return `'${escapeLiteral(json)}'::${column.nativeType}`;
|
|
695
|
-
return `'${escapeLiteral(json)}'`;
|
|
696
|
-
}
|
|
697
|
-
function buildAddColumnSql(qualifiedTableName, columnName, column, codecHooks, temporaryDefault, storageTypes = {}) {
|
|
698
|
-
const typeSql = buildColumnTypeSql(column, codecHooks, storageTypes);
|
|
699
|
-
const defaultSql = buildColumnDefaultSql(column.default, column) || (temporaryDefault ? `DEFAULT ${temporaryDefault}` : "");
|
|
700
|
-
return [
|
|
701
|
-
`ALTER TABLE ${qualifiedTableName}`,
|
|
702
|
-
`ADD COLUMN ${quoteIdentifier(columnName)} ${typeSql}`,
|
|
703
|
-
defaultSql,
|
|
704
|
-
column.nullable ? "" : "NOT NULL"
|
|
705
|
-
].filter(Boolean).join(" ");
|
|
706
|
-
}
|
|
707
|
-
|
|
708
|
-
//#endregion
|
|
709
|
-
//#region src/core/migrations/planner-identity-values.ts
|
|
710
|
-
/**
|
|
711
|
-
* Resolves the identity value (monoid neutral element) as a SQL literal for a column's type.
|
|
712
|
-
* Checks codec hooks first (extensions can provide type-specific identity values),
|
|
713
|
-
* then falls back to the built-in map.
|
|
714
|
-
*/
|
|
715
|
-
function resolveIdentityValue(column, codecHooks, storageTypes = {}) {
|
|
716
|
-
const referencedType = column.typeRef ? storageTypes[column.typeRef] : void 0;
|
|
717
|
-
const codecId = referencedType?.codecId ?? column.codecId;
|
|
718
|
-
const nativeType = referencedType?.nativeType ?? column.nativeType;
|
|
719
|
-
const typeParams = referencedType?.typeParams ?? column.typeParams;
|
|
720
|
-
if (codecId) {
|
|
721
|
-
const hookDefault = codecHooks.get(codecId)?.resolveIdentityValue?.({
|
|
722
|
-
nativeType,
|
|
723
|
-
codecId,
|
|
724
|
-
...ifDefined("typeParams", typeParams)
|
|
725
|
-
});
|
|
726
|
-
if (hookDefault !== void 0) return hookDefault;
|
|
727
|
-
}
|
|
728
|
-
return buildBuiltinIdentityValue(nativeType, typeParams);
|
|
729
|
-
}
|
|
730
|
-
/**
|
|
731
|
-
* Returns the built-in identity value (monoid neutral element) as a SQL literal for the given
|
|
732
|
-
* PostgreSQL native type — e.g. 0 for integers, '' for text, false for booleans.
|
|
733
|
-
*
|
|
734
|
-
* This is the planner's fallback when no codec hook provides a type-specific identity value.
|
|
735
|
-
*
|
|
736
|
-
* Returns null for unrecognized types (for example enums and extension-owned types without a
|
|
737
|
-
* hook), which causes the planner to fall back to the empty-table precheck.
|
|
738
|
-
*
|
|
739
|
-
* @internal Exported for testing only.
|
|
740
|
-
*/
|
|
741
|
-
function buildBuiltinIdentityValue(nativeType, typeParams) {
|
|
742
|
-
const normalizedNativeType = normalizeIdentityValueNativeType(nativeType);
|
|
743
|
-
if (normalizedNativeType.endsWith("[]")) return "'{}'";
|
|
744
|
-
switch (normalizedNativeType) {
|
|
745
|
-
case "text":
|
|
746
|
-
case "character":
|
|
747
|
-
case "bpchar":
|
|
748
|
-
case "character varying":
|
|
749
|
-
case "varchar": return "''";
|
|
750
|
-
case "int2":
|
|
751
|
-
case "int4":
|
|
752
|
-
case "int8":
|
|
753
|
-
case "integer":
|
|
754
|
-
case "bigint":
|
|
755
|
-
case "smallint":
|
|
756
|
-
case "float4":
|
|
757
|
-
case "float8":
|
|
758
|
-
case "real":
|
|
759
|
-
case "double precision":
|
|
760
|
-
case "numeric":
|
|
761
|
-
case "decimal": return "0";
|
|
762
|
-
case "bool":
|
|
763
|
-
case "boolean": return "false";
|
|
764
|
-
case "uuid": return "'00000000-0000-0000-0000-000000000000'";
|
|
765
|
-
case "json": return "'{}'::json";
|
|
766
|
-
case "jsonb": return "'{}'::jsonb";
|
|
767
|
-
case "date":
|
|
768
|
-
case "timestamp":
|
|
769
|
-
case "timestamptz":
|
|
770
|
-
case "timestamp with time zone":
|
|
771
|
-
case "timestamp without time zone": return "'epoch'";
|
|
772
|
-
case "time":
|
|
773
|
-
case "time without time zone": return "'00:00:00'";
|
|
774
|
-
case "timetz":
|
|
775
|
-
case "time with time zone": return "'00:00:00+00'";
|
|
776
|
-
case "interval": return "'0'";
|
|
777
|
-
case "bytea": return "''::bytea";
|
|
778
|
-
case "tsvector": return "''::tsvector";
|
|
779
|
-
case "bit": return buildBitIdentityValue(typeParams);
|
|
780
|
-
case "bit varying":
|
|
781
|
-
case "varbit": return "B''";
|
|
782
|
-
default: return null;
|
|
783
|
-
}
|
|
784
|
-
}
|
|
785
|
-
function normalizeIdentityValueNativeType(nativeType) {
|
|
786
|
-
return nativeType.trim().toLowerCase().replace(/\s+/g, " ");
|
|
787
|
-
}
|
|
788
|
-
function buildBitIdentityValue(typeParams) {
|
|
789
|
-
const length = typeParams?.["length"];
|
|
790
|
-
if (length === void 0) return "B'0'";
|
|
791
|
-
if (typeof length !== "number" || !Number.isInteger(length) || length <= 0) return null;
|
|
792
|
-
return `B'${"0".repeat(length)}'`;
|
|
793
|
-
}
|
|
794
|
-
|
|
795
|
-
//#endregion
|
|
796
|
-
//#region src/core/migrations/planner-target-details.ts
|
|
797
|
-
function buildTargetDetails(objectType, name, schema, table) {
|
|
798
|
-
return {
|
|
799
|
-
schema,
|
|
800
|
-
objectType,
|
|
801
|
-
name,
|
|
802
|
-
...ifDefined("table", table)
|
|
803
|
-
};
|
|
804
|
-
}
|
|
805
|
-
|
|
806
|
-
//#endregion
|
|
807
|
-
//#region src/core/migrations/planner-recipes.ts
|
|
808
|
-
function buildAddColumnOperationIdentity(schema, tableName, columnName) {
|
|
809
|
-
return {
|
|
810
|
-
id: `column.${tableName}.${columnName}`,
|
|
811
|
-
label: `Add column ${columnName} to ${tableName}`,
|
|
812
|
-
summary: `Adds column ${columnName} to table ${tableName}`,
|
|
813
|
-
target: {
|
|
814
|
-
id: "postgres",
|
|
815
|
-
details: buildTargetDetails("table", tableName, schema)
|
|
816
|
-
}
|
|
817
|
-
};
|
|
818
|
-
}
|
|
819
|
-
function buildAddNotNullColumnWithTemporaryDefaultOperation(options) {
|
|
820
|
-
const { schema, tableName, columnName, column, codecHooks, storageTypes, temporaryDefault } = options;
|
|
821
|
-
const qualified = qualifyTableName(schema, tableName);
|
|
822
|
-
return {
|
|
823
|
-
...buildAddColumnOperationIdentity(schema, tableName, columnName),
|
|
824
|
-
operationClass: "additive",
|
|
825
|
-
precheck: [{
|
|
826
|
-
description: `ensure column "${columnName}" is missing`,
|
|
827
|
-
sql: columnExistsCheck({
|
|
828
|
-
schema,
|
|
829
|
-
table: tableName,
|
|
830
|
-
column: columnName,
|
|
831
|
-
exists: false
|
|
832
|
-
})
|
|
833
|
-
}],
|
|
834
|
-
execute: [{
|
|
835
|
-
description: `add column "${columnName}"`,
|
|
836
|
-
sql: buildAddColumnSql(qualified, columnName, column, codecHooks, temporaryDefault, storageTypes)
|
|
837
|
-
}, {
|
|
838
|
-
description: `drop temporary default from column "${columnName}"`,
|
|
839
|
-
sql: `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} DROP DEFAULT`
|
|
840
|
-
}],
|
|
841
|
-
postcheck: [
|
|
842
|
-
{
|
|
843
|
-
description: `verify column "${columnName}" exists`,
|
|
844
|
-
sql: columnExistsCheck({
|
|
845
|
-
schema,
|
|
846
|
-
table: tableName,
|
|
847
|
-
column: columnName
|
|
848
|
-
})
|
|
849
|
-
},
|
|
850
|
-
{
|
|
851
|
-
description: `verify column "${columnName}" is NOT NULL`,
|
|
852
|
-
sql: columnNullabilityCheck({
|
|
853
|
-
schema,
|
|
854
|
-
table: tableName,
|
|
855
|
-
column: columnName,
|
|
856
|
-
nullable: false
|
|
857
|
-
})
|
|
858
|
-
},
|
|
859
|
-
{
|
|
860
|
-
description: `verify column "${columnName}" has no default after temporary default removal`,
|
|
861
|
-
sql: columnHasNoDefaultCheck({
|
|
862
|
-
schema,
|
|
863
|
-
table: tableName,
|
|
864
|
-
column: columnName
|
|
865
|
-
})
|
|
866
|
-
}
|
|
867
|
-
]
|
|
868
|
-
};
|
|
869
|
-
}
|
|
870
|
-
|
|
871
|
-
//#endregion
|
|
872
|
-
//#region src/core/migrations/planner-schema-lookup.ts
|
|
873
|
-
function buildSchemaLookupMap(schema) {
|
|
874
|
-
const map = /* @__PURE__ */ new Map();
|
|
875
|
-
for (const [tableName, table] of Object.entries(schema.tables)) map.set(tableName, buildSchemaTableLookup(table));
|
|
876
|
-
return map;
|
|
877
|
-
}
|
|
878
|
-
function buildSchemaTableLookup(table) {
|
|
879
|
-
return {
|
|
880
|
-
uniqueKeys: new Set(table.uniques.map((u) => u.columns.join(","))),
|
|
881
|
-
indexKeys: new Set(table.indexes.map((i) => i.columns.join(","))),
|
|
882
|
-
uniqueIndexKeys: new Set(table.indexes.filter((i) => i.unique).map((i) => i.columns.join(","))),
|
|
883
|
-
fkKeys: new Set(table.foreignKeys.map((fk) => `${fk.columns.join(",")}|${fk.referencedTable}|${fk.referencedColumns.join(",")}`))
|
|
884
|
-
};
|
|
885
|
-
}
|
|
886
|
-
function hasUniqueConstraint(lookup, columns) {
|
|
887
|
-
const key = columns.join(",");
|
|
888
|
-
return lookup.uniqueKeys.has(key) || lookup.uniqueIndexKeys.has(key);
|
|
889
|
-
}
|
|
890
|
-
function hasForeignKey(lookup, fk) {
|
|
891
|
-
return lookup.fkKeys.has(`${fk.columns.join(",")}|${fk.references.table}|${fk.references.columns.join(",")}`);
|
|
892
|
-
}
|
|
893
|
-
|
|
894
|
-
//#endregion
|
|
895
|
-
//#region src/core/migrations/planner-strategies.ts
|
|
896
|
-
const REBUILD_SUFFIX = "__prisma_next_new";
|
|
897
|
-
function buildColumnSpec(table, column, ctx, overrides) {
|
|
898
|
-
const col = ctx.toContract.storage.tables[table]?.columns[column];
|
|
899
|
-
if (!col) throw new Error(`Column "${table}"."${column}" not found in destination contract`);
|
|
900
|
-
const mutableHooks = ctx.codecHooks;
|
|
901
|
-
const mutableTypes = ctx.storageTypes;
|
|
902
|
-
return {
|
|
903
|
-
name: column,
|
|
904
|
-
typeSql: buildColumnTypeSql(col, mutableHooks, mutableTypes),
|
|
905
|
-
defaultSql: buildColumnDefaultSql(col.default, col),
|
|
906
|
-
nullable: overrides?.nullable ?? col.nullable
|
|
907
|
-
};
|
|
908
|
-
}
|
|
909
|
-
function buildAlterTypeOptions(table, column, ctx, using) {
|
|
910
|
-
const col = ctx.toContract.storage.tables[table]?.columns[column];
|
|
911
|
-
if (!col) throw new Error(`Column "${table}"."${column}" not found in destination contract`);
|
|
912
|
-
const mutableHooks = ctx.codecHooks;
|
|
913
|
-
const mutableTypes = ctx.storageTypes;
|
|
914
|
-
const qualifiedTargetType = buildColumnTypeSql(col, mutableHooks, mutableTypes, false);
|
|
915
|
-
return {
|
|
916
|
-
qualifiedTargetType,
|
|
917
|
-
formatTypeExpected: buildExpectedFormatType(col, mutableHooks, mutableTypes),
|
|
918
|
-
rawTargetTypeForLabel: qualifiedTargetType,
|
|
919
|
-
...using !== void 0 ? { using } : {}
|
|
920
|
-
};
|
|
921
|
-
}
|
|
922
|
-
const notNullBackfillCallStrategy = (issues, ctx) => {
|
|
923
|
-
if (!ctx.policy.allowedOperationClasses.includes("data")) return { kind: "no_match" };
|
|
924
|
-
const matched = [];
|
|
925
|
-
const calls = [];
|
|
926
|
-
for (const issue of issues) {
|
|
927
|
-
if (issue.kind !== "missing_column" || !issue.table || !issue.column) continue;
|
|
928
|
-
const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
|
|
929
|
-
if (!column) continue;
|
|
930
|
-
if (column.nullable === true || column.default !== void 0) continue;
|
|
931
|
-
matched.push(issue);
|
|
932
|
-
const spec = buildColumnSpec(issue.table, issue.column, ctx, { nullable: true });
|
|
933
|
-
calls.push(new AddColumnCall(ctx.schemaName, issue.table, spec), new DataTransformCall(`backfill-${issue.table}-${issue.column}`, `backfill-${issue.table}-${issue.column}:check`, `backfill-${issue.table}-${issue.column}:run`), new SetNotNullCall(ctx.schemaName, issue.table, issue.column));
|
|
934
|
-
}
|
|
935
|
-
if (matched.length === 0) return { kind: "no_match" };
|
|
936
|
-
return {
|
|
937
|
-
kind: "match",
|
|
938
|
-
issues: issues.filter((i) => !matched.includes(i)),
|
|
939
|
-
calls,
|
|
940
|
-
recipe: true
|
|
941
|
-
};
|
|
942
|
-
};
|
|
943
|
-
const SAFE_WIDENINGS = new Set([
|
|
944
|
-
"int2→int4",
|
|
945
|
-
"int2→int8",
|
|
946
|
-
"int4→int8",
|
|
947
|
-
"float4→float8"
|
|
948
|
-
]);
|
|
949
|
-
const typeChangeCallStrategy = (issues, ctx) => {
|
|
950
|
-
const dataAllowed = ctx.policy.allowedOperationClasses.includes("data");
|
|
951
|
-
const matched = [];
|
|
952
|
-
const calls = [];
|
|
953
|
-
for (const issue of issues) {
|
|
954
|
-
if (issue.kind !== "type_mismatch") continue;
|
|
955
|
-
if (!issue.table || !issue.column) continue;
|
|
956
|
-
const fromColumn = ctx.fromContract?.storage.tables[issue.table]?.columns[issue.column];
|
|
957
|
-
const toColumn = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
|
|
958
|
-
if (!fromColumn || !toColumn) continue;
|
|
959
|
-
const fromType = fromColumn.nativeType;
|
|
960
|
-
const toType = toColumn.nativeType;
|
|
961
|
-
if (fromType === toType) continue;
|
|
962
|
-
const isSafeWidening = SAFE_WIDENINGS.has(`${fromType}→${toType}`);
|
|
963
|
-
if (!isSafeWidening && !dataAllowed) continue;
|
|
964
|
-
matched.push(issue);
|
|
965
|
-
const alterOpts = buildAlterTypeOptions(issue.table, issue.column, ctx);
|
|
966
|
-
if (isSafeWidening) calls.push(new AlterColumnTypeCall(ctx.schemaName, issue.table, issue.column, alterOpts));
|
|
967
|
-
else calls.push(new DataTransformCall(`typechange-${issue.table}-${issue.column}`, `typechange-${issue.table}-${issue.column}:check`, `typechange-${issue.table}-${issue.column}:run`), new AlterColumnTypeCall(ctx.schemaName, issue.table, issue.column, alterOpts));
|
|
968
|
-
}
|
|
969
|
-
if (matched.length === 0) return { kind: "no_match" };
|
|
970
|
-
return {
|
|
971
|
-
kind: "match",
|
|
972
|
-
issues: issues.filter((i) => !matched.includes(i)),
|
|
973
|
-
calls,
|
|
974
|
-
recipe: true
|
|
975
|
-
};
|
|
976
|
-
};
|
|
977
|
-
const nullableTighteningCallStrategy = (issues, ctx) => {
|
|
978
|
-
if (!ctx.policy.allowedOperationClasses.includes("data")) return { kind: "no_match" };
|
|
979
|
-
const matched = [];
|
|
980
|
-
const calls = [];
|
|
981
|
-
for (const issue of issues) {
|
|
982
|
-
if (issue.kind !== "nullability_mismatch" || !issue.table || !issue.column) continue;
|
|
983
|
-
const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
|
|
984
|
-
if (!column) continue;
|
|
985
|
-
if (column.nullable === true) continue;
|
|
986
|
-
matched.push(issue);
|
|
987
|
-
calls.push(new DataTransformCall(`handle-nulls-${issue.table}-${issue.column}`, `handle-nulls-${issue.table}-${issue.column}:check`, `handle-nulls-${issue.table}-${issue.column}:run`), new SetNotNullCall(ctx.schemaName, issue.table, issue.column));
|
|
988
|
-
}
|
|
989
|
-
if (matched.length === 0) return { kind: "no_match" };
|
|
990
|
-
return {
|
|
991
|
-
kind: "match",
|
|
992
|
-
issues: issues.filter((i) => !matched.includes(i)),
|
|
993
|
-
calls,
|
|
994
|
-
recipe: true
|
|
995
|
-
};
|
|
996
|
-
};
|
|
997
|
-
function enumRebuildCallRecipe(typeName, ctx) {
|
|
998
|
-
const toType = ctx.toContract.storage.types?.[typeName];
|
|
999
|
-
if (!toType) return [];
|
|
1000
|
-
const nativeType = toType.nativeType;
|
|
1001
|
-
const desiredValues = toType.typeParams["values"] ?? [];
|
|
1002
|
-
const tempName = `${nativeType}${REBUILD_SUFFIX}`;
|
|
1003
|
-
const columnRefs = [];
|
|
1004
|
-
for (const [tableName, table] of Object.entries(ctx.toContract.storage.tables)) for (const [columnName, column] of Object.entries(table.columns)) if (column.typeRef === typeName) columnRefs.push({
|
|
1005
|
-
table: tableName,
|
|
1006
|
-
column: columnName
|
|
1007
|
-
});
|
|
1008
|
-
return [
|
|
1009
|
-
new CreateEnumTypeCall(ctx.schemaName, tempName, desiredValues),
|
|
1010
|
-
...columnRefs.map((ref) => {
|
|
1011
|
-
const using = `${ref.column}::text::${tempName}`;
|
|
1012
|
-
return new AlterColumnTypeCall(ctx.schemaName, ref.table, ref.column, {
|
|
1013
|
-
qualifiedTargetType: tempName,
|
|
1014
|
-
formatTypeExpected: tempName,
|
|
1015
|
-
rawTargetTypeForLabel: tempName,
|
|
1016
|
-
using
|
|
1017
|
-
});
|
|
1018
|
-
}),
|
|
1019
|
-
new DropEnumTypeCall(ctx.schemaName, nativeType),
|
|
1020
|
-
new RenameTypeCall(ctx.schemaName, tempName, nativeType)
|
|
1021
|
-
];
|
|
1022
|
-
}
|
|
1023
|
-
const enumChangeCallStrategy = (issues, ctx) => {
|
|
1024
|
-
if (!ctx.policy.allowedOperationClasses.includes("data")) return { kind: "no_match" };
|
|
1025
|
-
const matched = [];
|
|
1026
|
-
const calls = [];
|
|
1027
|
-
for (const issue of issues) {
|
|
1028
|
-
if (issue.kind !== "enum_values_changed") continue;
|
|
1029
|
-
matched.push(issue);
|
|
1030
|
-
if (issue.removedValues.length > 0) calls.push(new DataTransformCall(`migrate-${issue.typeName}-values`, `migrate-${issue.typeName}-values:check`, `migrate-${issue.typeName}-values:run`), ...enumRebuildCallRecipe(issue.typeName, ctx));
|
|
1031
|
-
else if (issue.addedValues.length === 0) calls.push(...enumRebuildCallRecipe(issue.typeName, ctx));
|
|
1032
|
-
else {
|
|
1033
|
-
const toType = ctx.toContract.storage.types?.[issue.typeName];
|
|
1034
|
-
if (toType) calls.push(new AddEnumValuesCall(ctx.schemaName, issue.typeName, toType.nativeType, issue.addedValues));
|
|
1035
|
-
}
|
|
1036
|
-
}
|
|
1037
|
-
if (matched.length === 0) return { kind: "no_match" };
|
|
1038
|
-
return {
|
|
1039
|
-
kind: "match",
|
|
1040
|
-
issues: issues.filter((i) => !matched.includes(i)),
|
|
1041
|
-
calls,
|
|
1042
|
-
recipe: true
|
|
1043
|
-
};
|
|
1044
|
-
};
|
|
1045
|
-
/**
|
|
1046
|
-
* Dispatches storage types through their codec's `planTypeOperations` hook.
|
|
1047
|
-
* Replaces the walk-schema `buildStorageTypeOperations` path: the hook is
|
|
1048
|
-
* the authoritative source for codec-driven DDL (enum create/rebuild/add-
|
|
1049
|
-
* value, custom type creation, etc.).
|
|
1050
|
-
*
|
|
1051
|
-
* Runs after `enumChangeCallStrategy` so the structured enum path (value
|
|
1052
|
-
* add, rebuild recipe) gets first pick at `enum_values_changed` issues;
|
|
1053
|
-
* this strategy then handles remaining `type_missing` / `enum_values_changed`
|
|
1054
|
-
* issues for types whose hook produced at least one op.
|
|
1055
|
-
*/
|
|
1056
|
-
const storageTypePlanCallStrategy = (issues, ctx) => {
|
|
1057
|
-
const storageTypes = ctx.toContract.storage.types ?? {};
|
|
1058
|
-
if (Object.keys(storageTypes).length === 0) return { kind: "no_match" };
|
|
1059
|
-
const calls = [];
|
|
1060
|
-
const handledTypeNames = /* @__PURE__ */ new Set();
|
|
1061
|
-
for (const [typeName, typeInstance] of Object.entries(storageTypes).sort(([a], [b]) => a.localeCompare(b))) {
|
|
1062
|
-
const hook = ctx.codecHooks.get(typeInstance.codecId);
|
|
1063
|
-
if (!hook?.planTypeOperations) continue;
|
|
1064
|
-
const planResult = hook.planTypeOperations({
|
|
1065
|
-
typeName,
|
|
1066
|
-
typeInstance,
|
|
1067
|
-
contract: ctx.toContract,
|
|
1068
|
-
schema: ctx.schema,
|
|
1069
|
-
schemaName: ctx.schemaName,
|
|
1070
|
-
policy: ctx.policy
|
|
1071
|
-
});
|
|
1072
|
-
if (!planResult) continue;
|
|
1073
|
-
if (planResult.operations.length === 0) {
|
|
1074
|
-
handledTypeNames.add(typeName);
|
|
1075
|
-
continue;
|
|
1076
|
-
}
|
|
1077
|
-
handledTypeNames.add(typeName);
|
|
1078
|
-
for (const op of planResult.operations) calls.push(new RawSqlCall({
|
|
1079
|
-
...op,
|
|
1080
|
-
target: {
|
|
1081
|
-
id: op.target.id,
|
|
1082
|
-
details: buildTargetDetails("type", typeName, ctx.schemaName)
|
|
1083
|
-
}
|
|
1084
|
-
}));
|
|
1085
|
-
}
|
|
1086
|
-
const remaining = issues.filter((issue) => !((issue.kind === "type_missing" || issue.kind === "enum_values_changed") && issue.typeName && handledTypeNames.has(issue.typeName)));
|
|
1087
|
-
if (calls.length === 0 && remaining.length === issues.length) return { kind: "no_match" };
|
|
1088
|
-
return {
|
|
1089
|
-
kind: "match",
|
|
1090
|
-
issues: remaining,
|
|
1091
|
-
calls
|
|
1092
|
-
};
|
|
1093
|
-
};
|
|
1094
|
-
/**
|
|
1095
|
-
* Dispatches component-declared database dependencies. Replaces the
|
|
1096
|
-
* walk-schema `buildDatabaseDependencyOperations` path. Rather than consuming
|
|
1097
|
-
* `dependency_missing` issues (which only carry the id), this strategy
|
|
1098
|
-
* re-invokes `collectInitDependencies(frameworkComponents)` at plan time so
|
|
1099
|
-
* the handler has access to the structured `install` ops each component
|
|
1100
|
-
* declared — including arbitrary SQL launders — and dedupes by dependency id
|
|
1101
|
-
* plus per-op id.
|
|
1102
|
-
*/
|
|
1103
|
-
const dependencyInstallCallStrategy = (issues, ctx) => {
|
|
1104
|
-
const installedIds = new Set(ctx.schema.dependencies.map((d) => d.id));
|
|
1105
|
-
const dependencies = sortDependencies(collectInitDependencies(ctx.frameworkComponents).filter(isPostgresPlannerDependency));
|
|
1106
|
-
const calls = [];
|
|
1107
|
-
const handledDependencyIds = /* @__PURE__ */ new Set();
|
|
1108
|
-
const seenOperationIds = /* @__PURE__ */ new Set();
|
|
1109
|
-
for (const dep of dependencies) {
|
|
1110
|
-
handledDependencyIds.add(dep.id);
|
|
1111
|
-
if (installedIds.has(dep.id)) continue;
|
|
1112
|
-
for (const installOp of dep.install) {
|
|
1113
|
-
if (seenOperationIds.has(installOp.id)) continue;
|
|
1114
|
-
seenOperationIds.add(installOp.id);
|
|
1115
|
-
calls.push(liftInstallOpToCall(installOp));
|
|
1116
|
-
}
|
|
1117
|
-
}
|
|
1118
|
-
const remaining = issues.filter((issue) => issue.kind !== "dependency_missing");
|
|
1119
|
-
if (calls.length === 0 && remaining.length === issues.length) return { kind: "no_match" };
|
|
1120
|
-
return {
|
|
1121
|
-
kind: "match",
|
|
1122
|
-
issues: remaining,
|
|
1123
|
-
calls
|
|
1124
|
-
};
|
|
1125
|
-
};
|
|
1126
|
-
/**
|
|
1127
|
-
* Handles `missing_column` issues for NOT NULL columns without a contract
|
|
1128
|
-
* default. Replaces the walk-schema `buildAddColumnItem` non-default branches.
|
|
1129
|
-
*
|
|
1130
|
-
* Two shapes:
|
|
1131
|
-
* - Shared-temp-default safe: emit a single atomic composite op (add
|
|
1132
|
-
* nullable → backfill identity value → `SET NOT NULL` → `DROP DEFAULT`).
|
|
1133
|
-
* - Empty-table guarded: emit a hand-built op with a `tableIsEmptyCheck`
|
|
1134
|
-
* precheck so the failure message is "table is not empty" rather than the
|
|
1135
|
-
* raw PG NOT NULL violation.
|
|
1136
|
-
*
|
|
1137
|
-
* "Normal" missing_column cases (nullable or has a contract default) are left
|
|
1138
|
-
* for `mapIssueToCall`'s default `AddColumnCall` emission.
|
|
1139
|
-
*/
|
|
1140
|
-
const notNullAddColumnCallStrategy = (issues, ctx) => {
	const matched = [];
	const calls = [];
	const schemaLookups = buildSchemaLookupMap(ctx.schema);
	const mutableCodecHooks = ctx.codecHooks;
	const mutableStorageTypes = ctx.storageTypes;
	for (const issue of issues) {
		// Only handle missing columns that are NOT NULL without a contract default.
		if (issue.kind !== "missing_column" || !issue.table || !issue.column) continue;
		const contractTable = ctx.toContract.storage.tables[issue.table];
		const column = contractTable?.columns[issue.column];
		if (!column) continue;
		const notNull = column.nullable !== true;
		const hasDefault = column.default !== void 0;
		if (!notNull || hasDefault) continue;
		// Without a live-schema snapshot of this table, safety cannot be judged
		// here; leave the issue for downstream handling.
		const schemaTable = ctx.schema.tables[issue.table];
		if (!schemaTable) continue;
		const temporaryDefault = resolveIdentityValue(column, mutableCodecHooks, mutableStorageTypes);
		const schemaLookup = schemaLookups.get(issue.table);
		const canUseSharedTempDefault = temporaryDefault !== null && canUseSharedTemporaryDefaultStrategy({
			table: contractTable,
			schemaTable,
			schemaLookup,
			columnName: issue.column
		});
		matched.push(issue);
		if (canUseSharedTempDefault && temporaryDefault !== null) {
			// Preferred path: add the column with a temporary identity-value
			// default so existing rows satisfy NOT NULL.
			calls.push(new RawSqlCall(buildAddNotNullColumnWithTemporaryDefaultOperation({
				schema: ctx.schemaName,
				tableName: issue.table,
				columnName: issue.column,
				column,
				codecHooks: mutableCodecHooks,
				storageTypes: mutableStorageTypes,
				temporaryDefault
			})));
			continue;
		}
		// Fallback path: a plain ADD COLUMN, guarded by prechecks that the
		// column is absent AND the table is empty — a populated table would
		// reject a NOT NULL column that has no default.
		const qualified = qualifyTableName(ctx.schemaName, issue.table);
		calls.push(new RawSqlCall({
			...buildAddColumnOperationIdentity(ctx.schemaName, issue.table, issue.column),
			operationClass: "additive",
			precheck: [{
				description: `ensure column "${issue.column}" is missing`,
				sql: columnExistsCheck({
					schema: ctx.schemaName,
					table: issue.table,
					column: issue.column,
					exists: false
				})
			}, {
				description: `ensure table "${issue.table}" is empty before adding NOT NULL column without default`,
				sql: tableIsEmptyCheck(qualified)
			}],
			execute: [{
				description: `add column "${issue.column}"`,
				sql: buildAddColumnSql(qualified, issue.column, column, mutableCodecHooks, void 0, mutableStorageTypes)
			}],
			postcheck: [{
				description: `verify column "${issue.column}" exists`,
				sql: columnExistsCheck({
					schema: ctx.schemaName,
					table: issue.table,
					column: issue.column
				})
			}, {
				description: `verify column "${issue.column}" is NOT NULL`,
				sql: columnNullabilityCheck({
					schema: ctx.schemaName,
					table: issue.table,
					column: issue.column,
					nullable: false
				})
			}]
		}));
	}
	if (matched.length === 0) return { kind: "no_match" };
	// Set membership check avoids the O(issues × matched) cost of the previous
	// Array#includes scan inside filter; identity semantics are unchanged.
	const matchedSet = new Set(matched);
	return {
		kind: "match",
		issues: issues.filter((i) => !matchedSet.has(i)),
		calls
	};
};
|
|
1222
|
-
/**
 * Decide whether a NOT NULL column may be added via the shared
 * temporary-default trick. The column is disqualified when it participates in
 * a contract primary key the live table does not yet have, or in any contract
 * unique / enforced foreign-key constraint that is not already present in the
 * live schema lookup.
 */
function canUseSharedTemporaryDefaultStrategy(options) {
	const { table, schemaTable, schemaLookup, columnName } = options;
	// PK member: the live table must already carry a primary key.
	if (table.primaryKey?.columns.includes(columnName) && !schemaTable.primaryKey) return false;
	const covers = (cols) => cols.includes(columnName);
	// Every unique constraint covering the column must already exist live.
	const uniquesSatisfied = table.uniques
		.filter((unique) => covers(unique.columns))
		.every((unique) => Boolean(schemaLookup) && hasUniqueConstraint(schemaLookup, unique.columns));
	if (!uniquesSatisfied) return false;
	// Likewise for enforced (constraint !== false) foreign keys on the column.
	return table.foreignKeys
		.filter((foreignKey) => foreignKey.constraint !== false && covers(foreignKey.columns))
		.every((foreignKey) => Boolean(schemaLookup) && hasForeignKey(schemaLookup, foreignKey));
}
|
|
1235
|
-
/** True when every install operation of `dependency` targets Postgres. */
function isPostgresPlannerDependency(dependency) {
	for (const operation of dependency.install) {
		if (operation.target.id !== "postgres") return false;
	}
	return true;
}
|
|
1238
|
-
/** Return a copy of `dependencies` ordered by `id`; the input is not mutated. */
function sortDependencies(dependencies) {
	const copy = Array.from(dependencies);
	copy.sort((left, right) => left.id.localeCompare(right.id));
	return copy;
}
|
|
1241
|
-
/**
|
|
1242
|
-
* Lift a component install op into migration IR. Structured shapes — extension
|
|
1243
|
-
* and schema installs with predictable SQL — collapse to typed `*Call`
|
|
1244
|
-
* subclasses so the scaffolded migration authoring surface stays readable.
|
|
1245
|
-
* Everything else (arbitrary SQL) falls through to `RawSqlCall` as an escape
|
|
1246
|
-
* hatch.
|
|
1247
|
-
*/
|
|
1248
|
-
/**
|
|
1249
|
-
* Component-declared install ops are wrapped as `RawSqlCall` so the
|
|
1250
|
-
* component's original `label`, `precheck`, `execute`, `postcheck`, and op
|
|
1251
|
-
* id are preserved verbatim. Structured conversion (to e.g.
|
|
1252
|
-
* `CreateExtensionCall`) would drop the precheck/postcheck pair and
|
|
1253
|
-
* change the DDL label, breaking walk-schema output parity. Classification
|
|
1254
|
-
* as `'dep'` happens in `classifyCall` via the underlying op's id prefix.
|
|
1255
|
-
*/
|
|
1256
|
-
/**
 * Wrap a component-declared install op verbatim in a `RawSqlCall`; the op's
 * own id, label, and check phases ride along untouched.
 */
function liftInstallOpToCall(op) {
	const call = new RawSqlCall(op);
	return call;
}
|
|
1259
|
-
/**
|
|
1260
|
-
* Ordered list of Postgres planner strategies, shared by `migration plan`
|
|
1261
|
-
* and `db update` / `db init`. The issue planner runs each strategy in
|
|
1262
|
-
* order, letting it consume any issues it handles, and routes whatever's
|
|
1263
|
-
* left through `mapIssueToCall`. Behavior diverges purely on
|
|
1264
|
-
* `policy.allowedOperationClasses`:
|
|
1265
|
-
*
|
|
1266
|
-
* - When `'data'` is allowed (`migration plan`), the data-safe strategies
|
|
1267
|
-
* (`enumChangeCallStrategy`, `notNullBackfillCallStrategy`,
|
|
1268
|
-
* `typeChangeCallStrategy`, `nullableTighteningCallStrategy`) consume their
|
|
1269
|
-
* matching issues and emit `DataTransformCall` placeholders or recipe ops.
|
|
1270
|
-
*
|
|
1271
|
-
* - When `'data'` is not allowed (`db update` / `db init`), each data-safe
|
|
1272
|
-
* strategy short-circuits to `no_match`, leaving the issue for the
|
|
1273
|
-
* downstream walk-schema strategies (`storageTypePlanCallStrategy`,
|
|
1274
|
-
* `dependencyInstallCallStrategy`, `notNullAddColumnCallStrategy`) or the
|
|
1275
|
-
* `mapIssueToCall` default to handle with direct DDL.
|
|
1276
|
-
*
|
|
1277
|
-
* Order matters: data-safe strategies must run before the walk-schema
|
|
1278
|
-
* strategies on overlapping issue kinds (e.g. `enum_values_changed`,
|
|
1279
|
-
* `missing_column` for NOT NULL) so they take priority when active.
|
|
1280
|
-
*/
|
|
1281
|
-
const postgresPlannerStrategies = [
	// Data-safe strategies — per the comment above, these only consume issues
	// when the policy allows the 'data' operation class:
	enumChangeCallStrategy,
	notNullBackfillCallStrategy,
	typeChangeCallStrategy,
	nullableTighteningCallStrategy,
	// Walk-schema strategies — direct DDL; order after the data-safe group is
	// significant for overlapping issue kinds:
	storageTypePlanCallStrategy,
	dependencyInstallCallStrategy,
	notNullAddColumnCallStrategy
];
|
|
1290
|
-
|
|
1291
|
-
//#endregion
|
|
1292
|
-
//#region src/core/migrations/issue-planner.ts
|
|
1293
|
-
/**
 * Relative ordering bucket per issue kind; lower-numbered kinds are planned
 * first. Values group roughly as: dependencies/types (1-3), drops (10-16),
 * table/column additions (20-30), column alters (40-43), constraints and
 * indexes (50-60).
 */
const ISSUE_KIND_ORDER = {
	dependency_missing: 1,
	type_missing: 2,
	type_values_mismatch: 3,
	enum_values_changed: 3,
	extra_foreign_key: 10,
	extra_unique_constraint: 11,
	extra_primary_key: 12,
	extra_index: 13,
	extra_default: 14,
	extra_column: 15,
	extra_table: 16,
	missing_table: 20,
	missing_column: 30,
	type_mismatch: 40,
	nullability_mismatch: 41,
	default_missing: 42,
	default_mismatch: 43,
	primary_key_mismatch: 50,
	unique_constraint_mismatch: 51,
	index_mismatch: 52,
	foreign_key_mismatch: 60
};
/** Sort rank for an issue; kinds not in the table sink to the end (99). */
function issueOrder(issue) {
	const rank = ISSUE_KIND_ORDER[issue.kind];
	return rank === undefined ? 99 : rank;
}
|
|
1319
|
-
/**
 * Build a planner conflict record for an issue that cannot be auto-planned.
 * `location` is attached only when truthy.
 */
function issueConflict(kind, summary, location) {
	const conflict = {
		kind,
		summary,
		why: "Use `migration new` to author a custom migration for this change."
	};
	if (location) conflict.location = location;
	return conflict;
}
|
|
1327
|
-
/**
 * True when the issue reports something absent on the live side (`actual`
 * unset). Enum-change issues are never treated as "missing".
 */
function isMissing(issue) {
	if (issue.kind === "enum_values_changed") return false;
	return typeof issue.actual === "undefined";
}
|
|
1331
|
-
/** Translate a contract column into the spec shape consumed by column calls. */
function toColumnSpec(name, column, codecHooks, storageTypes) {
	const typeSql = buildColumnTypeSql(column, codecHooks, storageTypes);
	const defaultSql = buildColumnDefaultSql(column.default, column);
	return {
		name,
		typeSql,
		defaultSql,
		nullable: column.nullable
	};
}
|
|
1339
|
-
/**
 * Default issue → call mapping, applied to whatever the planner strategies
 * left unconsumed. Returns `ok(calls)` (possibly empty) or
 * `notOk(issueConflict(...))` when the issue cannot be resolved with direct
 * DDL. Additions read the definition from `ctx.toContract`; drops need only
 * the fields on the issue itself.
 */
function mapIssueToCall(issue, ctx) {
	const { schemaName, codecHooks, storageTypes } = ctx;
	switch (issue.kind) {
		// Create the table plus its indexes, enforced foreign keys (and their
		// implied indexes, unless an explicit index already covers the same
		// columns), and unique constraints from the destination contract.
		case "missing_table": {
			if (!issue.table) return notOk(issueConflict("unsupportedOperation", "Missing table issue has no table name"));
			const contractTable = ctx.toContract.storage.tables[issue.table];
			if (!contractTable) return notOk(issueConflict("unsupportedOperation", `Table "${issue.table}" reported missing but not found in destination contract`));
			const columns = Object.entries(contractTable.columns).map(([name, column]) => toColumnSpec(name, column, codecHooks, storageTypes));
			const primaryKey = contractTable.primaryKey ? { columns: contractTable.primaryKey.columns } : void 0;
			const calls = [new CreateTableCall(schemaName, issue.table, columns, primaryKey)];
			for (const index of contractTable.indexes) {
				const indexName = index.name ?? `${issue.table}_${index.columns.join("_")}_idx`;
				calls.push(new CreateIndexCall(schemaName, issue.table, indexName, [...index.columns]));
			}
			// Column sets already covered by explicit indexes, so FK-implied
			// indexes are not emitted twice.
			const explicitIndexColumnSets = new Set(contractTable.indexes.map((idx) => idx.columns.join(",")));
			for (const fk of contractTable.foreignKeys) {
				if (fk.constraint) {
					const fkSpec = {
						name: fk.name ?? `${issue.table}_${fk.columns.join("_")}_fkey`,
						columns: fk.columns,
						references: {
							table: fk.references.table,
							columns: fk.references.columns
						},
						...fk.onDelete !== void 0 && { onDelete: fk.onDelete },
						...fk.onUpdate !== void 0 && { onUpdate: fk.onUpdate }
					};
					calls.push(new AddForeignKeyCall(schemaName, issue.table, fkSpec));
				}
				if (fk.index && !explicitIndexColumnSets.has(fk.columns.join(","))) {
					const indexName = `${issue.table}_${fk.columns.join("_")}_idx`;
					calls.push(new CreateIndexCall(schemaName, issue.table, indexName, [...fk.columns]));
				}
			}
			for (const unique of contractTable.uniques) {
				const constraintName = unique.name ?? `${issue.table}_${unique.columns.join("_")}_key`;
				calls.push(new AddUniqueCall(schemaName, issue.table, constraintName, [...unique.columns]));
			}
			return ok(calls);
		}
		// Plain column addition from the contract definition. NOT NULL columns
		// without a default were already consumed by
		// notNullAddColumnCallStrategy before reaching this default path.
		case "missing_column":
			if (!issue.table || !issue.column) return notOk(issueConflict("unsupportedOperation", "Missing column issue has no table/column name"));
			{
				const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
				if (!column) return notOk(issueConflict("unsupportedOperation", `Column "${issue.table}"."${issue.column}" not in destination contract`));
				return ok([new AddColumnCall(schemaName, issue.table, toColumnSpec(issue.column, column, codecHooks, storageTypes))]);
			}
		case "default_missing":
			if (!issue.table || !issue.column) return notOk(issueConflict("unsupportedOperation", "Default missing issue has no table/column name"));
			{
				const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
				if (!column?.default) return notOk(issueConflict("unsupportedOperation", `Column "${issue.table}"."${issue.column}" has no default in contract`));
				const defaultSql = buildColumnDefaultSql(column.default, column);
				// A default with no SQL rendering needs no call at all.
				if (!defaultSql) return ok([]);
				return ok([new SetDefaultCall(schemaName, issue.table, issue.column, defaultSql)]);
			}
		// "extra_*" kinds: something exists live that the contract lacks — drop it.
		case "extra_table":
			if (!issue.table) return notOk(issueConflict("unsupportedOperation", "Extra table issue has no table name"));
			return ok([new DropTableCall(schemaName, issue.table)]);
		case "extra_column":
			if (!issue.table || !issue.column) return notOk(issueConflict("unsupportedOperation", "Extra column issue has no table/column name"));
			return ok([new DropColumnCall(schemaName, issue.table, issue.column)]);
		case "extra_index":
			if (!issue.table || !issue.indexOrConstraint) return notOk(issueConflict("unsupportedOperation", "Extra index issue has no table/index name"));
			return ok([new DropIndexCall(schemaName, issue.table, issue.indexOrConstraint)]);
		case "extra_unique_constraint":
		case "extra_foreign_key":
		case "extra_primary_key": {
			if (!issue.table) return notOk(issueConflict("unsupportedOperation", "Extra constraint issue has no table/constraint name"));
			// Primary keys fall back to the conventional `<table>_pkey` name when
			// the issue does not carry a constraint name.
			const constraintName = issue.indexOrConstraint ?? (issue.kind === "extra_primary_key" ? `${issue.table}_pkey` : void 0);
			if (!constraintName) return notOk(issueConflict("unsupportedOperation", "Extra constraint issue has no table/constraint name"));
			return ok([new DropConstraintCall(schemaName, issue.table, constraintName, {
				extra_unique_constraint: "unique",
				extra_foreign_key: "foreignKey",
				extra_primary_key: "primaryKey"
			}[issue.kind])]);
		}
		case "extra_default":
			if (!issue.table || !issue.column) return notOk(issueConflict("unsupportedOperation", "Extra default issue has no table/column name"));
			return ok([new DropDefaultCall(schemaName, issue.table, issue.column)]);
		// The contract's nullability wins: drop or set NOT NULL accordingly.
		case "nullability_mismatch": {
			if (!issue.table || !issue.column) return notOk(issueConflict("nullabilityConflict", "Nullability mismatch has no table/column name"));
			const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
			if (!column) return notOk(issueConflict("nullabilityConflict", `Column "${issue.table}"."${issue.column}" not found in destination contract`));
			return ok(column.nullable ? [new DropNotNullCall(schemaName, issue.table, issue.column)] : [new SetNotNullCall(schemaName, issue.table, issue.column)]);
		}
		case "type_mismatch":
			if (!issue.table || !issue.column) return notOk(issueConflict("typeMismatch", "Type mismatch has no table/column name"));
			{
				const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
				if (!column) return notOk(issueConflict("typeMismatch", `Column "${issue.table}"."${issue.column}" not in destination contract`));
				const hooksMap = codecHooks;
				const typesMap = storageTypes;
				const qualifiedTargetType = buildColumnTypeSql(column, hooksMap, typesMap, false);
				const formatTypeExpected = buildExpectedFormatType(column, hooksMap, typesMap);
				return ok([new AlterColumnTypeCall(schemaName, issue.table, issue.column, {
					qualifiedTargetType,
					formatTypeExpected,
					rawTargetTypeForLabel: qualifiedTargetType
				})]);
			}
		case "default_mismatch":
			if (!issue.table || !issue.column) return notOk(issueConflict("unsupportedOperation", "Default mismatch has no table/column name"));
			{
				const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
				// A mismatch where the contract has no (renderable) default is a
				// no-op rather than a conflict.
				if (!column?.default) return ok([]);
				const defaultSql = buildColumnDefaultSql(column.default, column);
				if (!defaultSql) return ok([]);
				return ok([new SetDefaultCall(schemaName, issue.table, issue.column, defaultSql, "widening")]);
			}
		// Constraint/index mismatches: additions (missing live side) are
		// plannable; genuine differences become conflicts.
		case "primary_key_mismatch":
			if (!issue.table) return notOk(issueConflict("indexIncompatible", "Primary key issue has no table name"));
			if (isMissing(issue)) {
				const pk = ctx.toContract.storage.tables[issue.table]?.primaryKey;
				if (!pk) return notOk(issueConflict("indexIncompatible", `No primary key in contract for "${issue.table}"`));
				const constraintName = pk.name ?? `${issue.table}_pkey`;
				return ok([new AddPrimaryKeyCall(schemaName, issue.table, constraintName, pk.columns)]);
			}
			return notOk(issueConflict("indexIncompatible", `Primary key on "${issue.table}" has different columns (expected: ${issue.expected}, actual: ${issue.actual})`, { table: issue.table }));
		case "unique_constraint_mismatch":
			if (!issue.table) return notOk(issueConflict("indexIncompatible", "Unique constraint issue has no table name"));
			if (isMissing(issue) && issue.expected) {
				// `expected` is a ", "-joined column list — see the split below.
				const columns = issue.expected.split(", ");
				const constraintName = `${issue.table}_${columns.join("_")}_key`;
				return ok([new AddUniqueCall(schemaName, issue.table, constraintName, columns)]);
			}
			return notOk(issueConflict("indexIncompatible", `Unique constraint on "${issue.table}" differs (expected: ${issue.expected}, actual: ${issue.actual})`, { table: issue.table }));
		case "index_mismatch":
			if (!issue.table) return notOk(issueConflict("indexIncompatible", "Index issue has no table name"));
			if (isMissing(issue) && issue.expected) {
				const columns = issue.expected.split(", ");
				const indexName = `${issue.table}_${columns.join("_")}_idx`;
				return ok([new CreateIndexCall(schemaName, issue.table, indexName, columns)]);
			}
			return notOk(issueConflict("indexIncompatible", `Index on "${issue.table}" differs (expected: ${issue.expected}, actual: ${issue.actual})`, { table: issue.table }));
		case "foreign_key_mismatch":
			if (!issue.table) return notOk(issueConflict("foreignKeyConflict", "Foreign key issue has no table name"));
			if (isMissing(issue) && issue.expected) {
				// `expected` looks like "col1, col2 -> target(...)"; the part
				// before the arrow is the local column list.
				const arrowIdx = issue.expected.indexOf(" -> ");
				if (arrowIdx >= 0) {
					const columns = issue.expected.slice(0, arrowIdx).split(", ");
					const fkName = `${issue.table}_${columns.join("_")}_fkey`;
					const fk = ctx.toContract.storage.tables[issue.table]?.foreignKeys.find((k) => k.columns.join(", ") === columns.join(", "));
					if (fk) {
						const fkSpec = {
							name: fkName,
							columns: fk.columns,
							references: {
								table: fk.references.table,
								columns: fk.references.columns
							},
							...fk.onDelete !== void 0 && { onDelete: fk.onDelete },
							...fk.onUpdate !== void 0 && { onUpdate: fk.onUpdate }
						};
						return ok([new AddForeignKeyCall(schemaName, issue.table, fkSpec)]);
					}
					return notOk(issueConflict("foreignKeyConflict", `Foreign key on "${issue.table}" (${columns.join(", ")}) not found in destination contract`, { table: issue.table }));
				}
			}
			return notOk(issueConflict("foreignKeyConflict", `Foreign key on "${issue.table}" differs (expected: ${issue.expected}, actual: ${issue.actual})`, { table: issue.table }));
		// Custom types: only enum creation is supported here.
		case "type_missing": {
			if (!issue.typeName) return notOk(issueConflict("unsupportedOperation", "Type missing issue has no typeName"));
			const typeInstance = ctx.toContract.storage.types?.[issue.typeName];
			if (!typeInstance) return notOk(issueConflict("unsupportedOperation", `Type "${issue.typeName}" reported missing but not found in destination contract`));
			if (typeInstance.codecId.startsWith("pg/enum")) {
				const values = typeInstance.typeParams["values"] ?? [];
				return ok([new CreateEnumTypeCall(schemaName, typeInstance.nativeType, values)]);
			}
			return notOk(issueConflict("unsupportedOperation", `Type "${issue.typeName}" uses codec "${typeInstance.codecId}" — only enum types are supported`));
		}
		case "type_values_mismatch": return notOk(issueConflict("unsupportedOperation", `Type "${issue.typeName ?? "unknown"}" values differ — type alteration not yet supported`));
		// Dependencies are encoded as "ext:<name>" or "schema:<name>" ids.
		case "dependency_missing":
			if (!issue.dependencyId) return notOk(issueConflict("unsupportedOperation", "Dependency missing issue has no dependencyId"));
			if (issue.dependencyId.startsWith("ext:")) return ok([new CreateExtensionCall(issue.dependencyId.slice(4))]);
			if (issue.dependencyId.startsWith("schema:")) return ok([new CreateSchemaCall(issue.dependencyId.slice(7))]);
			return notOk(issueConflict("unsupportedOperation", `Unknown dependency type: ${issue.dependencyId}`));
		default: return notOk(issueConflict("unsupportedOperation", `Unhandled issue kind: ${issue.kind}`));
	}
}
|
|
1518
|
-
/**
|
|
1519
|
-
* Classifies calls into DDL sequencing buckets. The order matches the
|
|
1520
|
-
* legacy walk-schema planner's emission order so `db init` and `db update`
|
|
1521
|
-
* produce byte-identical plans for the shared shape (deps → drops → tables
|
|
1522
|
-
* → columns → alters → PKs → uniques → indexes → FKs).
|
|
1523
|
-
*/
|
|
1524
|
-
function classifyCall(call) {
	// Raw SQL calls are classified by the underlying op's identity.
	if (call.factoryName === "rawSql") {
		const op = call.op;
		if (op?.target?.details?.objectType === "type") return "dep";
		const id = typeof op?.id === "string" ? op.id : "";
		if (id.startsWith("extension.") || id.startsWith("schema.")) return "dep";
		return "alter";
	}
	// Everything else maps directly from the factory name to a bucket.
	const bucketByFactory = {
		createExtension: "dep",
		createSchema: "dep",
		createEnumType: "dep",
		addEnumValues: "dep",
		dropEnumType: "dep",
		renameType: "dep",
		dropTable: "drop",
		dropColumn: "drop",
		dropConstraint: "drop",
		dropIndex: "drop",
		dropDefault: "drop",
		createTable: "table",
		addColumn: "column",
		alterColumnType: "alter",
		setNotNull: "alter",
		dropNotNull: "alter",
		setDefault: "alter",
		addPrimaryKey: "primaryKey",
		addUnique: "unique",
		createIndex: "index",
		addForeignKey: "foreignKey"
	};
	// Own-key guard so inherited names (e.g. "toString") still fall to "alter".
	return Object.hasOwn(bucketByFactory, call.factoryName) ? bucketByFactory[call.factoryName] : "alter";
}
|
|
1557
|
-
/** Stable lexical key used to order issues within the same kind bucket. */
|
|
1558
|
-
/** Stable lexical key (table, column, index/constraint; NUL-joined) used to order issues within the same kind bucket. */
function issueKey(issue) {
	const part = (field) => (field in issue && typeof issue[field] === "string" ? issue[field] : "");
	return [part("table"), part("column"), part("indexOrConstraint")].join("\u0000");
}
|
|
1561
|
-
// Permissive fallback policy used when `planIssues` is called without one:
// every operation class is allowed, so no call is gated into a conflict.
const DEFAULT_POLICY = { allowedOperationClasses: [
	"additive",
	"widening",
	"destructive",
	"data"
] };
|
|
1567
|
-
/** Fresh, empty live-schema IR for callers that supply no schema snapshot. */
function emptySchemaIR() {
	const blank = { tables: {}, dependencies: [] };
	return blank;
}
|
|
1573
|
-
/** Conflict kind reported when a call of this factory is gated by policy. */
function conflictKindForCall(call) {
	const kindByFactory = {
		alterColumnType: "typeMismatch",
		setNotNull: "nullabilityConflict",
		dropNotNull: "nullabilityConflict",
		addForeignKey: "foreignKeyConflict",
		dropConstraint: "foreignKeyConflict",
		createIndex: "indexIncompatible",
		dropIndex: "indexIncompatible"
	};
	return Object.hasOwn(kindByFactory, call.factoryName) ? kindByFactory[call.factoryName] : "missingButNonAdditive";
}
|
|
1585
|
-
/**
 * Best-effort conflict location assembled from whichever name fields the call
 * happens to carry; `undefined` when none are set (empty strings excluded).
 */
function locationForCall(call) {
	const anyCall = call;
	const candidates = [
		["table", anyCall.tableName],
		["column", anyCall.columnName],
		["index", anyCall.indexName],
		["constraint", anyCall.constraintName],
		["type", anyCall.typeName]
	];
	const location = {};
	for (const [key, value] of candidates) {
		if (value) location[key] = value;
	}
	return Object.keys(location).length > 0 ? location : void 0;
}
|
|
1595
|
-
/** Conflict explaining that `call` needs an operation class the policy excludes. */
function conflictForDisallowedCall(call, allowed) {
	const conflict = {
		kind: conflictKindForCall(call),
		summary: `Operation "${call.label}" requires class "${call.operationClass}", but policy allows only: ${allowed.join(", ")}`,
		why: "Use `migration new` to author a custom migration for this change."
	};
	const location = locationForCall(call);
	if (location) conflict.location = location;
	return conflict;
}
|
|
1605
|
-
/**
 * Core issue planner: run the strategy pipeline over `options.issues`, map
 * whatever remains through `mapIssueToCall`, apply operation-class policy
 * gating, and emit one ordered call list (deps → drops → tables → columns →
 * recipe calls → alters → PKs → uniques → indexes → FKs). Returns
 * `notOk(conflicts)` when any issue or gated call cannot be planned.
 */
function planIssues(options) {
	// Gating below only runs when the caller explicitly provided a policy;
	// the default policy allows everything anyway.
	const policyProvided = options.policy !== void 0;
	const policy = options.policy ?? DEFAULT_POLICY;
	const schema = options.schema ?? emptySchemaIR();
	const frameworkComponents = options.frameworkComponents ?? [];
	const context = {
		toContract: options.toContract,
		fromContract: options.fromContract,
		schemaName: options.schemaName,
		codecHooks: options.codecHooks,
		storageTypes: options.storageTypes,
		schema,
		policy,
		frameworkComponents
	};
	const strategies = options.strategies ?? postgresPlannerStrategies;
	let remaining = options.issues;
	const recipeCalls = [];
	const bucketablePatternCalls = [];
	// Each matching strategy consumes some issues and returns the rest;
	// recipe-flagged calls are kept apart so they can slot into a fixed
	// position in the final ordering.
	for (const strategy of strategies) {
		const result = strategy(remaining, context);
		if (result.kind === "match") {
			remaining = result.issues;
			if (result.recipe) recipeCalls.push(...result.calls);
			else bucketablePatternCalls.push(...result.calls);
		}
	}
	// Deterministic ordering of leftovers: kind bucket first, then the
	// lexical table/column/constraint key.
	const sorted = [...remaining].sort((a, b) => {
		const kindDelta = issueOrder(a) - issueOrder(b);
		if (kindDelta !== 0) return kindDelta;
		const keyA = issueKey(a);
		const keyB = issueKey(b);
		return keyA < keyB ? -1 : keyA > keyB ? 1 : 0;
	});
	const defaultCalls = [];
	const conflicts = [];
	for (const issue of sorted) {
		const result = mapIssueToCall(issue, context);
		if (result.ok) defaultCalls.push(...result.value);
		else conflicts.push(result.failure);
	}
	const allowed = policy.allowedOperationClasses;
	let gatedDefault = defaultCalls;
	let gatedRecipe = recipeCalls;
	let gatedBucketable = bucketablePatternCalls;
	if (policyProvided) {
		// Calls whose operation class the policy excludes become conflicts
		// instead of being silently dropped.
		const keepIfAllowed = (bucket) => (call) => {
			if (allowed.includes(call.operationClass)) {
				bucket.push(call);
				return;
			}
			conflicts.push(conflictForDisallowedCall(call, allowed));
		};
		const gatedDefaultBucket = [];
		const gatedRecipeBucket = [];
		const gatedBucketableBucket = [];
		defaultCalls.forEach(keepIfAllowed(gatedDefaultBucket));
		recipeCalls.forEach(keepIfAllowed(gatedRecipeBucket));
		bucketablePatternCalls.forEach(keepIfAllowed(gatedBucketableBucket));
		gatedDefault = gatedDefaultBucket;
		gatedRecipe = gatedRecipeBucket;
		gatedBucketable = gatedBucketableBucket;
	}
	// Any conflict aborts the whole plan; no partial call list is returned.
	if (conflicts.length > 0) return notOk(conflicts);
	// Bucket everything into the legacy walk-schema emission order (see
	// classifyCall); recipe calls sit between column additions and alters.
	const combinedBucketable = [...gatedDefault, ...gatedBucketable];
	const byCategory = (cat) => combinedBucketable.filter((c) => classifyCall(c) === cat);
	return ok({ calls: [
		...byCategory("dep"),
		...byCategory("drop"),
		...byCategory("table"),
		...byCategory("column"),
		...gatedRecipe,
		...byCategory("alter"),
		...byCategory("primaryKey"),
		...byCategory("unique"),
		...byCategory("index"),
		...byCategory("foreignKey")
	] });
}
|
|
1684
|
-
|
|
1685
|
-
//#endregion
|
|
1686
|
-
//#region src/core/migrations/render-ops.ts
|
|
1687
|
-
/** Materialize runtime ops from the call list, preserving order. */
function renderOps(calls) {
	const ops = [];
	for (const call of calls) ops.push(call.toOp());
	return ops;
}
|
|
1690
|
-
|
|
1691
|
-
//#endregion
|
|
1692
|
-
//#region src/core/migrations/render-typescript.ts
|
|
1693
|
-
/**
|
|
1694
|
-
* Polymorphic TypeScript emitter for the Postgres migration IR.
|
|
1695
|
-
*
|
|
1696
|
-
* Each `PostgresOpFactoryCall` renders itself via `renderTypeScript()` and
|
|
1697
|
-
* declares its own `importRequirements()`; this file just composes the module
|
|
1698
|
-
* source around those contributions. The design mirrors the Mongo target's
|
|
1699
|
-
* `render-typescript.ts` deliberately — byte-for-byte alignment isn't required
|
|
1700
|
-
* (different factory module specifiers, different base-class name) but the
|
|
1701
|
-
* shape is, so future consolidation to a framework-level helper is mechanical.
|
|
1702
|
-
*/
|
|
1703
|
-
/**
|
|
1704
|
-
* Always-present base import — the rendered scaffold always extends the
|
|
1705
|
-
* target-owned `Migration` (i.e. `PostgresMigration`) re-exported from
|
|
1706
|
-
* `@prisma-next/target-postgres/migration`. That re-export fixes the
|
|
1707
|
-
* `SqlMigration` generic to `PostgresPlanTargetDetails` and the abstract
|
|
1708
|
-
* `targetId` to `'postgres'`, so user-authored migrations don't need to
|
|
1709
|
-
* thread target-details or redeclare `targetId`.
|
|
1710
|
-
*/
|
|
1711
|
-
// Import requirement present in every rendered scaffold: the target-owned
// `Migration` base class that the generated module extends.
const BASE_IMPORT = {
	moduleSpecifier: "@prisma-next/target-postgres/migration",
	symbol: "Migration"
};
|
|
1715
|
-
/**
 * Render a complete migration module around the calls' own TypeScript
 * contributions: shebang, imports, a `Migration` subclass with `describe()`
 * and `operations`, and the self-run footer.
 */
function renderCallsToTypeScript(calls, meta) {
	const operationsBody = calls.map((c) => c.renderTypeScript()).join(",\n");
	const header = [shebangLineFor(detectScaffoldRuntime()), buildImports(calls), ""];
	const classBody = [
		"export default class M extends Migration {",
		buildDescribeMethod(meta),
		"  override get operations() {",
		"    return [",
		indent(operationsBody, 6),
		"    ];",
		"  }",
		"}"
	];
	const footer = ["", "Migration.run(import.meta.url, M);", ""];
	return [...header, ...classBody, ...footer].join("\n");
}
|
|
1735
|
-
/** Gather the base import plus every call's declared requirements, rendered. */
function buildImports(calls) {
	const requirements = [BASE_IMPORT, ...calls.flatMap((call) => call.importRequirements())];
	return renderImports(requirements);
}
|
|
1740
|
-
/** Emit the `describe()` override: from/to always, kind/labels only when set. */
function buildDescribeMethod(meta) {
	const lines = ["  override describe() {", "    return {"];
	lines.push(`      from: ${JSON.stringify(meta.from)},`);
	lines.push(`      to: ${JSON.stringify(meta.to)},`);
	if (meta.kind) lines.push(`      kind: ${JSON.stringify(meta.kind)},`);
	if (meta.labels && meta.labels.length > 0) lines.push(`      labels: ${jsonToTsSource(meta.labels)},`);
	lines.push("    };", "  }", "");
	return lines.join("\n");
}
|
|
1753
|
-
/** Prefix every non-blank line of `text` with `spaces` spaces; blank/whitespace-only lines pass through untouched. */
function indent(text, spaces) {
	const pad = " ".repeat(spaces);
	const lines = text.split("\n");
	return lines.map((line) => (line.trim() === "" ? line : pad + line)).join("\n");
}
|
|
1757
|
-
|
|
1758
|
-
//#endregion
|
|
1759
|
-
//#region src/core/migrations/planner-produced-postgres-migration.ts
|
|
1760
|
-
/**
 * Planner-produced migration: holds the planned call list plus describe
 * metadata, and can surface itself either as runtime ops (`operations`) or as
 * TypeScript scaffold source (`renderTypeScript`).
 */
var TypeScriptRenderablePostgresMigration = class extends PostgresMigration {
	#calls;
	#meta;
	constructor(calls, meta) {
		super();
		this.#calls = calls;
		this.#meta = meta;
	}
	// Runtime view: each call rendered to its op.
	get operations() {
		return renderOps(this.#calls);
	}
	describe() {
		return this.#meta;
	}
	// Source view: full module text; optional meta fields are only forwarded
	// when defined.
	renderTypeScript() {
		const { from, to, kind, labels } = this.#meta;
		return renderCallsToTypeScript(this.#calls, {
			from,
			to,
			...ifDefined("kind", kind),
			...ifDefined("labels", labels)
		});
	}
};
|
|
1783
|
-
|
|
1784
|
-
//#endregion
|
|
1785
|
-
//#region src/core/migrations/planner.ts
|
|
1786
|
-
const DEFAULT_PLANNER_CONFIG = { defaultSchema: "public" };
/**
 * Create a `PostgresMigrationPlanner` with `config` layered over the
 * defaults (currently just `defaultSchema: "public"`).
 */
function createPostgresMigrationPlanner(config = {}) {
  const merged = Object.assign({}, DEFAULT_PLANNER_CONFIG, config);
  return new PostgresMigrationPlanner(merged);
}
|
|
1793
|
-
/**
 * Postgres migration planner — a thin wrapper over `planIssues`.
 *
 * `plan()` verifies the live schema against the target contract (producing
 * `SchemaIssue[]`) and hands the result to `planIssues` together with the
 * unified `postgresPlannerStrategies` list: enum-change, NOT-NULL backfill,
 * type-change, nullable-tightening, codec-hook storage types,
 * component-declared dependency installs, and shared-temp-default /
 * empty-table-guarded NOT-NULL add-column. The identical strategy list runs
 * for `migration plan`, `db update`, and `db init`; behavior diverges purely
 * on `policy.allowedOperationClasses` (the data-safe strategies
 * short-circuit when `'data'` is excluded). The issue planner applies
 * operation-class policy gates and emits a single `PostgresOpFactoryCall[]`
 * that drives both the runtime-ops view (via `renderOps`) and the
 * `renderTypeScript()` authoring surface.
 */
var PostgresMigrationPlanner = class {
  constructor(config) {
    this.config = config;
  }
  /** Plan from `options.fromHash` (defaulting to the empty string). */
  plan(options) {
    const fromHash = options.fromHash ?? "";
    return this.planSql(options, fromHash);
  }
  /** Produce a migration with no operations for the given from/to edge. */
  emptyMigration(context) {
    const meta = { from: context.fromHash, to: context.toHash };
    return new TypeScriptRenderablePostgresMigration([], meta);
  }
  planSql(options, fromHash) {
    // Fail fast before doing any verification work if the policy cannot
    // even express additive changes.
    const policyFailure = this.ensureAdditivePolicy(options.policy);
    if (policyFailure) return policyFailure;
    const schemaName = options.schemaName ?? this.config.defaultSchema;
    const issues = this.collectSchemaIssues(options);
    const planResult = planIssues({
      issues,
      toContract: options.contract,
      fromContract: options.fromContract ?? null,
      schemaName,
      codecHooks: extractCodecControlHooks(options.frameworkComponents),
      storageTypes: options.contract.storage.types ?? {},
      schema: options.schema,
      policy: options.policy,
      frameworkComponents: options.frameworkComponents,
      strategies: postgresPlannerStrategies
    });
    if (!planResult.ok) return plannerFailure(planResult.failure);
    const plan = new TypeScriptRenderablePostgresMigration(planResult.value.calls, {
      from: fromHash,
      to: options.contract.storage.storageHash
    });
    return Object.freeze({ kind: "success", plan });
  }
  /** Return a planner failure unless the "additive" class is allowed. */
  ensureAdditivePolicy(policy) {
    if (policy.allowedOperationClasses.includes("additive")) return null;
    return plannerFailure([{
      kind: "unsupportedOperation",
      summary: "Migration planner requires additive operations be allowed",
      why: 'The planner requires the "additive" operation class to be allowed in the policy.'
    }]);
  }
  /** Verify the live schema against the contract and return its issues. */
  collectSchemaIssues(options) {
    const allowed = options.policy.allowedOperationClasses;
    // Strict verification only matters when the policy could act on the
    // extra findings (widening/destructive changes).
    const strict = allowed.includes("widening") || allowed.includes("destructive");
    const verification = verifySqlSchema({
      contract: options.contract,
      schema: options.schema,
      strict,
      typeMetadataRegistry: new Map(),
      frameworkComponents: options.frameworkComponents,
      normalizeDefault: parsePostgresDefault,
      normalizeNativeType: normalizeSchemaNativeType
    });
    return verification.schema.issues;
  }
};
|
|
1872
|
-
|
|
1873
|
-
//#endregion
|
|
1874
|
-
//#region src/core/migrations/statement-builders.ts
|
|
1875
|
-
// Bootstrap DDL for the control-plane schema. All three statements are
// idempotent ("if not exists") so the runner can issue them on every apply.
const ensurePrismaContractSchemaStatement = {
  sql: "create schema if not exists prisma_contract",
  params: []
};
// Singleton marker table (one row, id = 1) recording the currently-applied
// contract: its core/profile hashes, serialized contract JSON, optional
// canonical version / app tag, and free-form jsonb metadata.
const ensureMarkerTableStatement = {
  sql: `create table if not exists prisma_contract.marker (
id smallint primary key default 1,
core_hash text not null,
profile_hash text not null,
contract_json jsonb,
canonical_version int,
updated_at timestamptz not null default now(),
app_tag text,
meta jsonb not null default '{}'
)`,
  params: []
};
// Append-only ledger of applied migrations: one row per edge, capturing the
// origin/destination hashes, before/after contract snapshots, and the
// executed operations as jsonb.
const ensureLedgerTableStatement = {
  sql: `create table if not exists prisma_contract.ledger (
id bigserial primary key,
created_at timestamptz not null default now(),
origin_core_hash text,
origin_profile_hash text,
destination_core_hash text not null,
destination_profile_hash text,
contract_json_before jsonb,
contract_json_after jsonb,
operations jsonb not null
)`,
  params: []
};
|
|
1906
|
-
/**
 * Build the insert and update statements that persist the contract marker
 * row. Both statements share one positional-parameter array ($1..$7); the
 * caller picks `insert` or `update` depending on whether a marker row
 * already exists.
 */
function buildWriteMarkerStatements(input) {
  const params = [
    1,
    input.storageHash,
    input.profileHash,
    jsonParam(input.contractJson),
    input.canonicalVersion ?? null,
    input.appTag ?? null,
    jsonParam(input.meta ?? {})
  ];
  const insertSql = `insert into prisma_contract.marker (
id,
core_hash,
profile_hash,
contract_json,
canonical_version,
updated_at,
app_tag,
meta
) values (
$1,
$2,
$3,
$4::jsonb,
$5,
now(),
$6,
$7::jsonb
)`;
  const updateSql = `update prisma_contract.marker set
core_hash = $2,
profile_hash = $3,
contract_json = $4::jsonb,
canonical_version = $5,
updated_at = now(),
app_tag = $6,
meta = $7::jsonb
where id = $1`;
  return {
    insert: { sql: insertSql, params },
    update: { sql: updateSql, params }
  };
}
|
|
1953
|
-
/**
 * Build the insert statement that appends one row to the migration ledger.
 * Missing origin/destination-profile values are written as SQL NULL; the
 * jsonb columns go through `jsonParam` serialization.
 */
function buildLedgerInsertStatement(input) {
  const params = [
    input.originStorageHash ?? null,
    input.originProfileHash ?? null,
    input.destinationStorageHash,
    input.destinationProfileHash ?? null,
    jsonParam(input.contractJsonBefore),
    jsonParam(input.contractJsonAfter),
    jsonParam(input.operations)
  ];
  const sql = `insert into prisma_contract.ledger (
origin_core_hash,
origin_profile_hash,
destination_core_hash,
destination_profile_hash,
contract_json_before,
contract_json_after,
operations
) values (
$1,
$2,
$3,
$4,
$5::jsonb,
$6::jsonb,
$7::jsonb
)`;
  return { sql, params };
}
|
|
1983
|
-
/**
 * Serialize a value for a jsonb statement parameter; `undefined` and `null`
 * both become the JSON text "null".
 */
function jsonParam(value) {
  if (value === undefined || value === null) {
    return JSON.stringify(null);
  }
  return JSON.stringify(value);
}
|
|
1986
|
-
|
|
1987
|
-
//#endregion
|
|
1988
14
|
//#region src/core/migrations/runner.ts
// Fallback runner configuration used when the caller supplies none.
const DEFAULT_CONFIG = { defaultSchema: "public" };
// Lock-domain string for contract-marker updates — presumably feeds the
// runner's advisory-lock key (see acquireLock); confirm against the lock
// helper's implementation.
const LOCK_DOMAIN = "prisma_next.contract.marker";
|
|
@@ -2027,10 +53,12 @@ var PostgresMigrationRunner = class {
|
|
|
2027
53
|
try {
|
|
2028
54
|
await this.acquireLock(driver, lockKey);
|
|
2029
55
|
await this.ensureControlTables(driver);
|
|
2030
|
-
const existingMarker = await readMarker(driver);
|
|
56
|
+
const existingMarker = await this.family.readMarker({ driver });
|
|
2031
57
|
const markerCheck = this.ensureMarkerCompatibility(existingMarker, options.plan);
|
|
2032
58
|
if (!markerCheck.ok) return markerCheck;
|
|
2033
|
-
const
|
|
59
|
+
const markerAtDestination = this.markerMatchesDestination(existingMarker, options.plan);
|
|
60
|
+
const isSelfEdge = options.plan.origin?.storageHash === options.plan.destination.storageHash;
|
|
61
|
+
const skipOperations = markerAtDestination && options.plan.origin != null && !isSelfEdge;
|
|
2034
62
|
let applyValue;
|
|
2035
63
|
if (skipOperations) applyValue = {
|
|
2036
64
|
operationsExecuted: 0,
|
|
@@ -2059,8 +87,13 @@ var PostgresMigrationRunner = class {
|
|
|
2059
87
|
why: "The resulting database schema does not satisfy the destination contract.",
|
|
2060
88
|
meta: { issues: schemaVerifyResult.schema.issues }
|
|
2061
89
|
});
|
|
2062
|
-
|
|
2063
|
-
|
|
90
|
+
const incomingInvariants = options.plan.providedInvariants;
|
|
91
|
+
const existingInvariants = new Set(existingMarker?.invariants ?? []);
|
|
92
|
+
const incomingIsSubsetOfExisting = incomingInvariants.every((id) => existingInvariants.has(id));
|
|
93
|
+
if (!(isSelfEdge && applyValue.operationsExecuted === 0 && incomingIsSubsetOfExisting)) {
|
|
94
|
+
await this.upsertMarker(driver, options, existingMarker);
|
|
95
|
+
await this.recordLedgerEntry(driver, options, existingMarker, applyValue.executedOperations);
|
|
96
|
+
}
|
|
2064
97
|
await this.commitTransaction(driver);
|
|
2065
98
|
committed = true;
|
|
2066
99
|
return runnerSuccess({
|
|
@@ -2281,12 +314,14 @@ var PostgresMigrationRunner = class {
|
|
|
2281
314
|
return okVoid();
|
|
2282
315
|
}
|
|
2283
316
|
async upsertMarker(driver, options, existingMarker) {
|
|
2284
|
-
const
|
|
317
|
+
const incomingInvariants = options.plan.providedInvariants;
|
|
318
|
+
const writeStatements = buildMergeMarkerStatements({
|
|
2285
319
|
storageHash: options.plan.destination.storageHash,
|
|
2286
320
|
profileHash: options.plan.destination.profileHash ?? options.destinationContract.profileHash ?? options.plan.destination.storageHash,
|
|
2287
321
|
contractJson: options.destinationContract,
|
|
2288
322
|
canonicalVersion: null,
|
|
2289
|
-
meta: {}
|
|
323
|
+
meta: {},
|
|
324
|
+
invariants: incomingInvariants
|
|
2290
325
|
});
|
|
2291
326
|
const statement = existingMarker ? writeStatements.update : writeStatements.insert;
|
|
2292
327
|
await this.executeStatement(driver, statement);
|