@prisma-next/target-postgres 0.4.0-dev.8 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44):
  1. package/dist/control.d.mts +1 -9
  2. package/dist/control.d.mts.map +1 -1
  3. package/dist/control.mjs +1693 -4798
  4. package/dist/control.mjs.map +1 -1
  5. package/dist/migration.d.mts +164 -0
  6. package/dist/migration.d.mts.map +1 -0
  7. package/dist/migration.mjs +446 -0
  8. package/dist/migration.mjs.map +1 -0
  9. package/dist/planner-target-details-MXb3oeul.d.mts +11 -0
  10. package/dist/planner-target-details-MXb3oeul.d.mts.map +1 -0
  11. package/dist/postgres-migration-BsHJHV9O.mjs +2793 -0
  12. package/dist/postgres-migration-BsHJHV9O.mjs.map +1 -0
  13. package/package.json +21 -19
  14. package/src/core/migrations/issue-planner.ts +832 -0
  15. package/src/core/migrations/op-factory-call.ts +862 -0
  16. package/src/core/migrations/operations/columns.ts +285 -0
  17. package/src/core/migrations/operations/constraints.ts +191 -0
  18. package/src/core/migrations/operations/data-transform.ts +113 -0
  19. package/src/core/migrations/operations/dependencies.ts +36 -0
  20. package/src/core/migrations/operations/enums.ts +113 -0
  21. package/src/core/migrations/operations/indexes.ts +61 -0
  22. package/src/core/migrations/operations/raw.ts +15 -0
  23. package/src/core/migrations/operations/shared.ts +67 -0
  24. package/src/core/migrations/operations/tables.ts +63 -0
  25. package/src/core/migrations/planner-produced-postgres-migration.ts +67 -0
  26. package/src/core/migrations/planner-strategies.ts +592 -151
  27. package/src/core/migrations/planner-target-details.ts +0 -6
  28. package/src/core/migrations/planner.ts +63 -781
  29. package/src/core/migrations/postgres-migration.ts +20 -0
  30. package/src/core/migrations/render-ops.ts +9 -0
  31. package/src/core/migrations/render-typescript.ts +95 -0
  32. package/src/exports/control.ts +9 -142
  33. package/src/exports/migration.ts +40 -0
  34. package/dist/migration-builders.d.mts +0 -88
  35. package/dist/migration-builders.d.mts.map +0 -1
  36. package/dist/migration-builders.mjs +0 -3
  37. package/dist/operation-descriptors-CxymFSgK.mjs +0 -52
  38. package/dist/operation-descriptors-CxymFSgK.mjs.map +0 -1
  39. package/src/core/migrations/descriptor-planner.ts +0 -464
  40. package/src/core/migrations/operation-descriptors.ts +0 -166
  41. package/src/core/migrations/operation-resolver.ts +0 -929
  42. package/src/core/migrations/planner-reconciliation.ts +0 -798
  43. package/src/core/migrations/scaffolding.ts +0 -140
  44. package/src/exports/migration-builders.ts +0 -56
@@ -0,0 +1,285 @@
1
+ import { quoteIdentifier } from '@prisma-next/adapter-postgres/control';
2
+ import {
3
+ columnDefaultExistsCheck,
4
+ columnExistsCheck,
5
+ columnNullabilityCheck,
6
+ columnTypeCheck,
7
+ qualifyTableName,
8
+ } from '../planner-sql-checks';
9
+ import { type ColumnSpec, type Op, step, targetDetails } from './shared';
10
+
11
+ export function addColumn(schemaName: string, tableName: string, column: ColumnSpec): Op {
12
+ const qualified = qualifyTableName(schemaName, tableName);
13
+ const parts = [
14
+ `ALTER TABLE ${qualified}`,
15
+ `ADD COLUMN ${quoteIdentifier(column.name)} ${column.typeSql}`,
16
+ column.defaultSql,
17
+ column.nullable ? '' : 'NOT NULL',
18
+ ].filter(Boolean);
19
+ const addSql = parts.join(' ');
20
+
21
+ return {
22
+ id: `column.${tableName}.${column.name}`,
23
+ label: `Add column "${column.name}" to "${tableName}"`,
24
+ operationClass: 'additive',
25
+ target: targetDetails('column', column.name, schemaName, tableName),
26
+ precheck: [
27
+ step(
28
+ `ensure column "${column.name}" is missing`,
29
+ columnExistsCheck({
30
+ schema: schemaName,
31
+ table: tableName,
32
+ column: column.name,
33
+ exists: false,
34
+ }),
35
+ ),
36
+ ],
37
+ execute: [step(`add column "${column.name}"`, addSql)],
38
+ postcheck: [
39
+ step(
40
+ `verify column "${column.name}" exists`,
41
+ columnExistsCheck({ schema: schemaName, table: tableName, column: column.name }),
42
+ ),
43
+ ],
44
+ };
45
+ }
46
+
47
+ export function dropColumn(schemaName: string, tableName: string, columnName: string): Op {
48
+ const qualified = qualifyTableName(schemaName, tableName);
49
+ return {
50
+ id: `dropColumn.${tableName}.${columnName}`,
51
+ label: `Drop column "${columnName}" from "${tableName}"`,
52
+ operationClass: 'destructive',
53
+ target: targetDetails('column', columnName, schemaName, tableName),
54
+ precheck: [
55
+ step(
56
+ `ensure column "${columnName}" exists`,
57
+ columnExistsCheck({ schema: schemaName, table: tableName, column: columnName }),
58
+ ),
59
+ ],
60
+ execute: [
61
+ step(
62
+ `drop column "${columnName}"`,
63
+ `ALTER TABLE ${qualified} DROP COLUMN ${quoteIdentifier(columnName)}`,
64
+ ),
65
+ ],
66
+ postcheck: [
67
+ step(
68
+ `verify column "${columnName}" does not exist`,
69
+ columnExistsCheck({
70
+ schema: schemaName,
71
+ table: tableName,
72
+ column: columnName,
73
+ exists: false,
74
+ }),
75
+ ),
76
+ ],
77
+ };
78
+ }
79
+
80
+ /**
81
+ * `qualifiedTargetType` is the new column type as it appears in the
82
+ * `ALTER COLUMN TYPE` clause (schema-qualified for user-defined types, raw
83
+ * native name for built-ins). `formatTypeExpected` is the unqualified
84
+ * `format_type` form used in the postcheck. `rawTargetTypeForLabel` is the
85
+ * string appearing in the human-readable label (typically `toType` when
86
+ * explicit, else the column's native type).
87
+ */
88
+ export function alterColumnType(
89
+ schemaName: string,
90
+ tableName: string,
91
+ columnName: string,
92
+ options: {
93
+ readonly qualifiedTargetType: string;
94
+ readonly formatTypeExpected: string;
95
+ readonly rawTargetTypeForLabel: string;
96
+ readonly using?: string;
97
+ },
98
+ ): Op {
99
+ const qualified = qualifyTableName(schemaName, tableName);
100
+ const usingClause = options.using
101
+ ? ` USING ${options.using}`
102
+ : ` USING ${quoteIdentifier(columnName)}::${options.qualifiedTargetType}`;
103
+ return {
104
+ id: `alterType.${tableName}.${columnName}`,
105
+ label: `Alter type of "${tableName}"."${columnName}" to ${options.rawTargetTypeForLabel}`,
106
+ operationClass: 'destructive',
107
+ target: targetDetails('column', columnName, schemaName, tableName),
108
+ precheck: [
109
+ step(
110
+ `ensure column "${columnName}" exists`,
111
+ columnExistsCheck({ schema: schemaName, table: tableName, column: columnName }),
112
+ ),
113
+ ],
114
+ execute: [
115
+ step(
116
+ `alter type of "${columnName}"`,
117
+ `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} TYPE ${options.qualifiedTargetType}${usingClause}`,
118
+ ),
119
+ ],
120
+ postcheck: [
121
+ step(
122
+ `verify column "${columnName}" has type "${options.formatTypeExpected}"`,
123
+ columnTypeCheck({
124
+ schema: schemaName,
125
+ table: tableName,
126
+ column: columnName,
127
+ expectedType: options.formatTypeExpected,
128
+ }),
129
+ ),
130
+ ],
131
+ meta: { warning: 'TABLE_REWRITE' },
132
+ };
133
+ }
134
+
135
+ export function setNotNull(schemaName: string, tableName: string, columnName: string): Op {
136
+ const qualified = qualifyTableName(schemaName, tableName);
137
+ return {
138
+ id: `alterNullability.setNotNull.${tableName}.${columnName}`,
139
+ label: `Set NOT NULL on "${tableName}"."${columnName}"`,
140
+ operationClass: 'destructive',
141
+ target: targetDetails('column', columnName, schemaName, tableName),
142
+ precheck: [
143
+ step(
144
+ `ensure column "${columnName}" exists`,
145
+ columnExistsCheck({ schema: schemaName, table: tableName, column: columnName }),
146
+ ),
147
+ step(
148
+ `ensure no NULL values in "${columnName}"`,
149
+ `SELECT NOT EXISTS (SELECT 1 FROM ${qualified} WHERE ${quoteIdentifier(columnName)} IS NULL)`,
150
+ ),
151
+ ],
152
+ execute: [
153
+ step(
154
+ `set NOT NULL on "${columnName}"`,
155
+ `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} SET NOT NULL`,
156
+ ),
157
+ ],
158
+ postcheck: [
159
+ step(
160
+ `verify column "${columnName}" is NOT NULL`,
161
+ columnNullabilityCheck({
162
+ schema: schemaName,
163
+ table: tableName,
164
+ column: columnName,
165
+ nullable: false,
166
+ }),
167
+ ),
168
+ ],
169
+ };
170
+ }
171
+
172
+ export function dropNotNull(schemaName: string, tableName: string, columnName: string): Op {
173
+ const qualified = qualifyTableName(schemaName, tableName);
174
+ return {
175
+ id: `alterNullability.dropNotNull.${tableName}.${columnName}`,
176
+ label: `Drop NOT NULL on "${tableName}"."${columnName}"`,
177
+ operationClass: 'widening',
178
+ target: targetDetails('column', columnName, schemaName, tableName),
179
+ precheck: [
180
+ step(
181
+ `ensure column "${columnName}" exists`,
182
+ columnExistsCheck({ schema: schemaName, table: tableName, column: columnName }),
183
+ ),
184
+ ],
185
+ execute: [
186
+ step(
187
+ `drop NOT NULL on "${columnName}"`,
188
+ `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} DROP NOT NULL`,
189
+ ),
190
+ ],
191
+ postcheck: [
192
+ step(
193
+ `verify column "${columnName}" is nullable`,
194
+ columnNullabilityCheck({
195
+ schema: schemaName,
196
+ table: tableName,
197
+ column: columnName,
198
+ nullable: true,
199
+ }),
200
+ ),
201
+ ],
202
+ };
203
+ }
204
+
205
+ /**
206
+ * `defaultSql` is the full `DEFAULT …` clause as produced by
207
+ * `buildColumnDefaultSql` — e.g. `"DEFAULT 42"`,
208
+ * `"DEFAULT (CURRENT_TIMESTAMP)"`, or `"DEFAULT nextval('seq'::regclass)"`.
209
+ *
210
+ * `operationClass` defaults to `'additive'` (setting a default on a column
211
+ * that currently has none). The reconciliation planner passes `'widening'`
212
+ * when the column already has a different default — policy enforcement
213
+ * treats that as a widening change rather than an additive one.
214
+ */
215
+ export function setDefault(
216
+ schemaName: string,
217
+ tableName: string,
218
+ columnName: string,
219
+ defaultSql: string,
220
+ operationClass: 'additive' | 'widening' = 'additive',
221
+ ): Op {
222
+ const qualified = qualifyTableName(schemaName, tableName);
223
+ return {
224
+ id: `setDefault.${tableName}.${columnName}`,
225
+ label: `Set default on "${tableName}"."${columnName}"`,
226
+ operationClass,
227
+ target: targetDetails('column', columnName, schemaName, tableName),
228
+ precheck: [
229
+ step(
230
+ `ensure column "${columnName}" exists`,
231
+ columnExistsCheck({ schema: schemaName, table: tableName, column: columnName }),
232
+ ),
233
+ ],
234
+ execute: [
235
+ step(
236
+ `set default on "${columnName}"`,
237
+ `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} SET ${defaultSql}`,
238
+ ),
239
+ ],
240
+ postcheck: [
241
+ step(
242
+ `verify column "${columnName}" has a default`,
243
+ columnDefaultExistsCheck({
244
+ schema: schemaName,
245
+ table: tableName,
246
+ column: columnName,
247
+ exists: true,
248
+ }),
249
+ ),
250
+ ],
251
+ };
252
+ }
253
+
254
+ export function dropDefault(schemaName: string, tableName: string, columnName: string): Op {
255
+ const qualified = qualifyTableName(schemaName, tableName);
256
+ return {
257
+ id: `dropDefault.${tableName}.${columnName}`,
258
+ label: `Drop default on "${tableName}"."${columnName}"`,
259
+ operationClass: 'destructive',
260
+ target: targetDetails('column', columnName, schemaName, tableName),
261
+ precheck: [
262
+ step(
263
+ `ensure column "${columnName}" exists`,
264
+ columnExistsCheck({ schema: schemaName, table: tableName, column: columnName }),
265
+ ),
266
+ ],
267
+ execute: [
268
+ step(
269
+ `drop default on "${columnName}"`,
270
+ `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} DROP DEFAULT`,
271
+ ),
272
+ ],
273
+ postcheck: [
274
+ step(
275
+ `verify column "${columnName}" has no default`,
276
+ columnDefaultExistsCheck({
277
+ schema: schemaName,
278
+ table: tableName,
279
+ column: columnName,
280
+ exists: false,
281
+ }),
282
+ ),
283
+ ],
284
+ };
285
+ }
@@ -0,0 +1,191 @@
1
+ import { quoteIdentifier } from '@prisma-next/adapter-postgres/control';
2
+ import type { ReferentialAction } from '@prisma-next/sql-contract/types';
3
+ import { constraintExistsCheck, qualifyTableName } from '../planner-sql-checks';
4
+ import { type ForeignKeySpec, type Op, step, targetDetails } from './shared';
5
+
6
+ const REFERENTIAL_ACTION_SQL: Record<ReferentialAction, string> = {
7
+ noAction: 'NO ACTION',
8
+ restrict: 'RESTRICT',
9
+ cascade: 'CASCADE',
10
+ setNull: 'SET NULL',
11
+ setDefault: 'SET DEFAULT',
12
+ };
13
+
14
+ function renderForeignKeySql(schemaName: string, tableName: string, fk: ForeignKeySpec): string {
15
+ let sql = `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
16
+ ADD CONSTRAINT ${quoteIdentifier(fk.name)}
17
+ FOREIGN KEY (${fk.columns.map(quoteIdentifier).join(', ')})
18
+ REFERENCES ${qualifyTableName(schemaName, fk.references.table)} (${fk.references.columns
19
+ .map(quoteIdentifier)
20
+ .join(', ')})`;
21
+
22
+ if (fk.onDelete !== undefined) {
23
+ const action = REFERENTIAL_ACTION_SQL[fk.onDelete];
24
+ if (!action) {
25
+ throw new Error(`Unknown referential action for onDelete: ${String(fk.onDelete)}`);
26
+ }
27
+ sql += `\nON DELETE ${action}`;
28
+ }
29
+ if (fk.onUpdate !== undefined) {
30
+ const action = REFERENTIAL_ACTION_SQL[fk.onUpdate];
31
+ if (!action) {
32
+ throw new Error(`Unknown referential action for onUpdate: ${String(fk.onUpdate)}`);
33
+ }
34
+ sql += `\nON UPDATE ${action}`;
35
+ }
36
+ return sql;
37
+ }
38
+
39
+ export function addPrimaryKey(
40
+ schemaName: string,
41
+ tableName: string,
42
+ constraintName: string,
43
+ columns: readonly string[],
44
+ ): Op {
45
+ const qualified = qualifyTableName(schemaName, tableName);
46
+ const columnList = columns.map(quoteIdentifier).join(', ');
47
+ return {
48
+ id: `primaryKey.${tableName}.${constraintName}`,
49
+ label: `Add primary key on "${tableName}"`,
50
+ operationClass: 'additive',
51
+ target: targetDetails('primaryKey', constraintName, schemaName, tableName),
52
+ precheck: [
53
+ step(
54
+ `ensure primary key "${constraintName}" does not exist`,
55
+ constraintExistsCheck({
56
+ constraintName,
57
+ schema: schemaName,
58
+ table: tableName,
59
+ exists: false,
60
+ }),
61
+ ),
62
+ ],
63
+ execute: [
64
+ step(
65
+ `add primary key "${constraintName}"`,
66
+ `ALTER TABLE ${qualified} ADD CONSTRAINT ${quoteIdentifier(constraintName)} PRIMARY KEY (${columnList})`,
67
+ ),
68
+ ],
69
+ postcheck: [
70
+ step(
71
+ `verify primary key "${constraintName}" exists`,
72
+ constraintExistsCheck({ constraintName, schema: schemaName, table: tableName }),
73
+ ),
74
+ ],
75
+ };
76
+ }
77
+
78
+ export function addUnique(
79
+ schemaName: string,
80
+ tableName: string,
81
+ constraintName: string,
82
+ columns: readonly string[],
83
+ ): Op {
84
+ const qualified = qualifyTableName(schemaName, tableName);
85
+ const columnList = columns.map(quoteIdentifier).join(', ');
86
+ return {
87
+ id: `unique.${tableName}.${constraintName}`,
88
+ label: `Add unique constraint on "${tableName}" (${columns.join(', ')})`,
89
+ operationClass: 'additive',
90
+ target: targetDetails('unique', constraintName, schemaName, tableName),
91
+ precheck: [
92
+ step(
93
+ `ensure constraint "${constraintName}" does not exist`,
94
+ constraintExistsCheck({
95
+ constraintName,
96
+ schema: schemaName,
97
+ table: tableName,
98
+ exists: false,
99
+ }),
100
+ ),
101
+ ],
102
+ execute: [
103
+ step(
104
+ `add unique constraint "${constraintName}"`,
105
+ `ALTER TABLE ${qualified} ADD CONSTRAINT ${quoteIdentifier(constraintName)} UNIQUE (${columnList})`,
106
+ ),
107
+ ],
108
+ postcheck: [
109
+ step(
110
+ `verify constraint "${constraintName}" exists`,
111
+ constraintExistsCheck({ constraintName, schema: schemaName, table: tableName }),
112
+ ),
113
+ ],
114
+ };
115
+ }
116
+
117
+ export function addForeignKey(schemaName: string, tableName: string, fk: ForeignKeySpec): Op {
118
+ return {
119
+ id: `foreignKey.${tableName}.${fk.name}`,
120
+ label: `Add foreign key "${fk.name}" on "${tableName}"`,
121
+ operationClass: 'additive',
122
+ target: targetDetails('foreignKey', fk.name, schemaName, tableName),
123
+ precheck: [
124
+ step(
125
+ `ensure FK "${fk.name}" does not exist`,
126
+ constraintExistsCheck({
127
+ constraintName: fk.name,
128
+ schema: schemaName,
129
+ table: tableName,
130
+ exists: false,
131
+ }),
132
+ ),
133
+ ],
134
+ execute: [step(`add FK "${fk.name}"`, renderForeignKeySql(schemaName, tableName, fk))],
135
+ postcheck: [
136
+ step(
137
+ `verify FK "${fk.name}" exists`,
138
+ constraintExistsCheck({
139
+ constraintName: fk.name,
140
+ schema: schemaName,
141
+ table: tableName,
142
+ }),
143
+ ),
144
+ ],
145
+ };
146
+ }
147
+
148
+ /**
149
+ * `kind` feeds the operation's `target.details.objectType`. Descriptor-flow
150
+ * does not carry kind information in its drop-constraint descriptor, so the
151
+ * default is `'unique'`. The reconciliation planner passes the correct kind
152
+ * (`'foreignKey'`, `'primaryKey'`, or `'unique'`) based on the `SchemaIssue`
153
+ * that produced the drop.
154
+ */
155
+ export function dropConstraint(
156
+ schemaName: string,
157
+ tableName: string,
158
+ constraintName: string,
159
+ kind: 'foreignKey' | 'unique' | 'primaryKey' = 'unique',
160
+ ): Op {
161
+ const qualified = qualifyTableName(schemaName, tableName);
162
+ return {
163
+ id: `dropConstraint.${tableName}.${constraintName}`,
164
+ label: `Drop constraint "${constraintName}" on "${tableName}"`,
165
+ operationClass: 'destructive',
166
+ target: targetDetails(kind, constraintName, schemaName, tableName),
167
+ precheck: [
168
+ step(
169
+ `ensure constraint "${constraintName}" exists`,
170
+ constraintExistsCheck({ constraintName, schema: schemaName, table: tableName }),
171
+ ),
172
+ ],
173
+ execute: [
174
+ step(
175
+ `drop constraint "${constraintName}"`,
176
+ `ALTER TABLE ${qualified} DROP CONSTRAINT ${quoteIdentifier(constraintName)}`,
177
+ ),
178
+ ],
179
+ postcheck: [
180
+ step(
181
+ `verify constraint "${constraintName}" does not exist`,
182
+ constraintExistsCheck({
183
+ constraintName,
184
+ schema: schemaName,
185
+ table: tableName,
186
+ exists: false,
187
+ }),
188
+ ),
189
+ ],
190
+ };
191
+ }
@@ -0,0 +1,113 @@
1
+ /**
2
+ * User-facing `dataTransform` factory for the Postgres migration authoring
3
+ * surface. Invoked directly inside a `migration.ts` file:
4
+ *
5
+ * ```ts
6
+ * import endContract from './end-contract.json' with { type: 'json' };
7
+ * import { dataTransform } from '@prisma-next/target-postgres/migration';
8
+ *
9
+ * dataTransform(endContract, 'backfill emails', {
10
+ * check: () => db.users.count().where(({ email }) => email.isNull()),
11
+ * run: () => db.users.update({ email: '' }).where(({ email }) => email.isNull()),
12
+ * });
13
+ * ```
14
+ *
15
+ * The factory accepts lazy closures (`() => SqlQueryPlan | Buildable`),
16
+ * invokes each one, asserts that its `meta.storageHash` matches the
17
+ * `contract` it was handed (→ `PN-MIG-2005` on mismatch), and lowers the
18
+ * plan via the Postgres adapter to a serialized `{sql, params}` payload
19
+ * for `ops.json`.
20
+ */
21
+
22
+ import { createPostgresAdapter } from '@prisma-next/adapter-postgres/adapter';
23
+ import type { Contract } from '@prisma-next/contract/types';
24
+ import { errorDataTransformContractMismatch } from '@prisma-next/errors/migration';
25
+ import type {
26
+ DataTransformOperation,
27
+ SerializedQueryPlan,
28
+ } from '@prisma-next/framework-components/control';
29
+ import type { SqlStorage } from '@prisma-next/sql-contract/types';
30
+ import type { SqlQueryPlan } from '@prisma-next/sql-relational-core/plan';
31
+ import { lowerSqlPlan } from '@prisma-next/sql-runtime';
32
+
33
+ interface Buildable<R = unknown> {
34
+ build(): SqlQueryPlan<R>;
35
+ }
36
+
37
+ /**
38
+ * A single-closure producer of a SQL query plan. Shared between
39
+ * `check` and each `run` entry.
40
+ */
41
+ export type DataTransformClosure = () => SqlQueryPlan | Buildable;
42
+
43
+ export interface DataTransformOptions {
44
+ /** Optional pre-flight query. `undefined` means "no check". */
45
+ readonly check?: DataTransformClosure;
46
+ /** One or more mutation queries to execute. */
47
+ readonly run: DataTransformClosure | readonly DataTransformClosure[];
48
+ }
49
+
50
+ /** Single shared adapter for apply/CLI; sufficient for single-threaded migration execution. */
51
+ let adapterSingleton: ReturnType<typeof createPostgresAdapter> | null = null;
52
+ function getAdapter(): ReturnType<typeof createPostgresAdapter> {
53
+ if (adapterSingleton === null) {
54
+ adapterSingleton = createPostgresAdapter();
55
+ }
56
+ return adapterSingleton;
57
+ }
58
+
59
+ export function dataTransform<TContract extends Contract<SqlStorage>>(
60
+ contract: TContract,
61
+ name: string,
62
+ options: DataTransformOptions,
63
+ ): DataTransformOperation {
64
+ const adapter = getAdapter();
65
+ const runClosures: readonly DataTransformClosure[] = Array.isArray(options.run)
66
+ ? options.run
67
+ : [options.run as DataTransformClosure];
68
+ return {
69
+ id: `data_migration.${name}`,
70
+ label: `Data transform: ${name}`,
71
+ operationClass: 'data',
72
+ name,
73
+ source: 'migration.ts',
74
+ check: options.check ? invokeAndLower(options.check, contract, adapter, name) : null,
75
+ run: runClosures.map((closure) => invokeAndLower(closure, contract, adapter, name)),
76
+ };
77
+ }
78
+
79
+ function invokeAndLower(
80
+ closure: DataTransformClosure,
81
+ contract: Contract<SqlStorage>,
82
+ adapter: ReturnType<typeof createPostgresAdapter>,
83
+ name: string,
84
+ ): SerializedQueryPlan {
85
+ const result = closure();
86
+ const plan = isBuildable(result) ? result.build() : result;
87
+ assertContractMatches(plan, contract, name);
88
+ const lowered = lowerSqlPlan(adapter, contract, plan);
89
+ return { sql: lowered.sql, params: lowered.params };
90
+ }
91
+
92
+ function isBuildable(value: unknown): value is Buildable {
93
+ return (
94
+ typeof value === 'object' &&
95
+ value !== null &&
96
+ 'build' in value &&
97
+ typeof (value as { build: unknown }).build === 'function'
98
+ );
99
+ }
100
+
101
+ function assertContractMatches(
102
+ plan: SqlQueryPlan,
103
+ contract: Contract<SqlStorage>,
104
+ name: string,
105
+ ): void {
106
+ if (plan.meta.storageHash !== contract.storage.storageHash) {
107
+ throw errorDataTransformContractMismatch({
108
+ dataTransformName: name,
109
+ expected: contract.storage.storageHash,
110
+ actual: plan.meta.storageHash,
111
+ });
112
+ }
113
+ }
@@ -0,0 +1,36 @@
1
+ import { quoteIdentifier } from '@prisma-next/adapter-postgres/control';
2
+ import { type Op, step } from './shared';
3
+
4
+ export function createExtension(extensionName: string): Op {
5
+ return {
6
+ id: `extension.${extensionName}`,
7
+ label: `Create extension "${extensionName}"`,
8
+ operationClass: 'additive',
9
+ target: { id: 'postgres' },
10
+ precheck: [],
11
+ execute: [
12
+ step(
13
+ `Create extension "${extensionName}"`,
14
+ `CREATE EXTENSION IF NOT EXISTS ${quoteIdentifier(extensionName)}`,
15
+ ),
16
+ ],
17
+ postcheck: [],
18
+ };
19
+ }
20
+
21
+ export function createSchema(schemaName: string): Op {
22
+ return {
23
+ id: `schema.${schemaName}`,
24
+ label: `Create schema "${schemaName}"`,
25
+ operationClass: 'additive',
26
+ target: { id: 'postgres' },
27
+ precheck: [],
28
+ execute: [
29
+ step(
30
+ `Create schema "${schemaName}"`,
31
+ `CREATE SCHEMA IF NOT EXISTS ${quoteIdentifier(schemaName)}`,
32
+ ),
33
+ ],
34
+ postcheck: [],
35
+ };
36
+ }