@prisma-next/target-postgres 0.3.0-pr.99.6 → 0.4.0-dev.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +17 -8
- package/dist/control.d.mts +19 -0
- package/dist/control.d.mts.map +1 -0
- package/dist/control.mjs +5382 -0
- package/dist/control.mjs.map +1 -0
- package/dist/descriptor-meta-CAf16lsJ.mjs +32 -0
- package/dist/descriptor-meta-CAf16lsJ.mjs.map +1 -0
- package/dist/migration-builders.d.mts +88 -0
- package/dist/migration-builders.d.mts.map +1 -0
- package/dist/migration-builders.mjs +3 -0
- package/dist/operation-descriptors-CxymFSgK.mjs +52 -0
- package/dist/operation-descriptors-CxymFSgK.mjs.map +1 -0
- package/dist/pack.d.mts +45 -0
- package/dist/pack.d.mts.map +1 -0
- package/dist/pack.mjs +9 -0
- package/dist/pack.mjs.map +1 -0
- package/dist/runtime.d.mts +9 -0
- package/dist/runtime.d.mts.map +1 -0
- package/dist/runtime.mjs +20 -0
- package/dist/runtime.mjs.map +1 -0
- package/package.json +31 -29
- package/src/core/authoring.ts +15 -0
- package/src/core/descriptor-meta.ts +5 -0
- package/src/core/migrations/descriptor-planner.ts +466 -0
- package/src/core/migrations/operation-descriptors.ts +166 -0
- package/src/core/migrations/operation-resolver.ts +929 -0
- package/src/core/migrations/planner-ddl-builders.ts +256 -0
- package/src/core/migrations/planner-identity-values.ts +135 -0
- package/src/core/migrations/planner-recipes.ts +91 -0
- package/src/core/migrations/planner-reconciliation.ts +798 -0
- package/src/core/migrations/planner-schema-lookup.ts +54 -0
- package/src/core/migrations/planner-sql-checks.ts +322 -0
- package/src/core/migrations/planner-strategies.ts +262 -0
- package/src/core/migrations/planner-target-details.ts +38 -0
- package/src/core/migrations/planner-type-resolution.ts +26 -0
- package/src/core/migrations/planner.ts +410 -460
- package/src/core/migrations/runner.ts +134 -38
- package/src/core/migrations/statement-builders.ts +6 -6
- package/src/core/types.ts +5 -0
- package/src/exports/control.ts +182 -12
- package/src/exports/migration-builders.ts +56 -0
- package/src/exports/pack.ts +7 -3
- package/src/exports/runtime.ts +6 -12
- package/dist/chunk-RKEXRSSI.js +0 -14
- package/dist/chunk-RKEXRSSI.js.map +0 -1
- package/dist/core/descriptor-meta.d.ts +0 -9
- package/dist/core/descriptor-meta.d.ts.map +0 -1
- package/dist/core/migrations/planner.d.ts +0 -14
- package/dist/core/migrations/planner.d.ts.map +0 -1
- package/dist/core/migrations/runner.d.ts +0 -8
- package/dist/core/migrations/runner.d.ts.map +0 -1
- package/dist/core/migrations/statement-builders.d.ts +0 -30
- package/dist/core/migrations/statement-builders.d.ts.map +0 -1
- package/dist/exports/control.d.ts +0 -8
- package/dist/exports/control.d.ts.map +0 -1
- package/dist/exports/control.js +0 -1260
- package/dist/exports/control.js.map +0 -1
- package/dist/exports/pack.d.ts +0 -4
- package/dist/exports/pack.d.ts.map +0 -1
- package/dist/exports/pack.js +0 -11
- package/dist/exports/pack.js.map +0 -1
- package/dist/exports/runtime.d.ts +0 -12
- package/dist/exports/runtime.d.ts.map +0 -1
- package/dist/exports/runtime.js +0 -19
- package/dist/exports/runtime.js.map +0 -1
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import type { ForeignKey } from '@prisma-next/sql-contract/types';
|
|
2
|
+
import type { SqlSchemaIR } from '@prisma-next/sql-schema-ir/types';
|
|
3
|
+
|
|
4
|
+
/**
 * Pre-computed lookup sets for a schema table's constraints.
 * Converts O(n*m) linear scans to O(1) Set lookups per constraint check.
 *
 * Built by `buildSchemaTableLookup`; consumed by the `has*` predicates below.
 */
export interface SchemaTableLookup {
  // Comma-joined column lists of the table's UNIQUE constraints.
  readonly uniqueKeys: Set<string>;
  // Comma-joined column lists of all indexes (unique and non-unique alike).
  readonly indexKeys: Set<string>;
  // Comma-joined column lists of only the indexes flagged `unique`.
  readonly uniqueIndexKeys: Set<string>;
  // Foreign-key identity keys: `columns|referencedTable|referencedColumns`.
  readonly fkKeys: Set<string>;
}
|
|
14
|
+
|
|
15
|
+
export function buildSchemaLookupMap(schema: SqlSchemaIR): ReadonlyMap<string, SchemaTableLookup> {
|
|
16
|
+
const map = new Map<string, SchemaTableLookup>();
|
|
17
|
+
for (const [tableName, table] of Object.entries(schema.tables)) {
|
|
18
|
+
map.set(tableName, buildSchemaTableLookup(table));
|
|
19
|
+
}
|
|
20
|
+
return map;
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
function buildSchemaTableLookup(table: SqlSchemaIR['tables'][string]): SchemaTableLookup {
|
|
24
|
+
const uniqueKeys = new Set(table.uniques.map((u) => u.columns.join(',')));
|
|
25
|
+
const indexKeys = new Set(table.indexes.map((i) => i.columns.join(',')));
|
|
26
|
+
const uniqueIndexKeys = new Set(
|
|
27
|
+
table.indexes.filter((i) => i.unique).map((i) => i.columns.join(',')),
|
|
28
|
+
);
|
|
29
|
+
const fkKeys = new Set(
|
|
30
|
+
table.foreignKeys.map(
|
|
31
|
+
(fk) => `${fk.columns.join(',')}|${fk.referencedTable}|${fk.referencedColumns.join(',')}`,
|
|
32
|
+
),
|
|
33
|
+
);
|
|
34
|
+
return { uniqueKeys, indexKeys, uniqueIndexKeys, fkKeys };
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
export function hasUniqueConstraint(
|
|
38
|
+
lookup: SchemaTableLookup,
|
|
39
|
+
columns: readonly string[],
|
|
40
|
+
): boolean {
|
|
41
|
+
const key = columns.join(',');
|
|
42
|
+
return lookup.uniqueKeys.has(key) || lookup.uniqueIndexKeys.has(key);
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
export function hasIndex(lookup: SchemaTableLookup, columns: readonly string[]): boolean {
|
|
46
|
+
const key = columns.join(',');
|
|
47
|
+
return lookup.indexKeys.has(key) || lookup.uniqueKeys.has(key);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
export function hasForeignKey(lookup: SchemaTableLookup, fk: ForeignKey): boolean {
|
|
51
|
+
return lookup.fkKeys.has(
|
|
52
|
+
`${fk.columns.join(',')}|${fk.references.table}|${fk.references.columns.join(',')}`,
|
|
53
|
+
);
|
|
54
|
+
}
|
|
@@ -0,0 +1,322 @@
|
|
|
1
|
+
import { escapeLiteral, quoteIdentifier } from '@prisma-next/adapter-postgres/control';
|
|
2
|
+
import type { CodecControlHooks } from '@prisma-next/family-sql/control';
|
|
3
|
+
import type { StorageColumn, StorageTypeInstance } from '@prisma-next/sql-contract/types';
|
|
4
|
+
import { resolveColumnTypeMetadata } from './planner-type-resolution';
|
|
5
|
+
|
|
6
|
+
export function qualifyTableName(schema: string, table: string): string {
|
|
7
|
+
return `${quoteIdentifier(schema)}.${quoteIdentifier(table)}`;
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
export function toRegclassLiteral(schema: string, name: string): string {
|
|
11
|
+
const regclass = `${quoteIdentifier(schema)}.${quoteIdentifier(name)}`;
|
|
12
|
+
return `'${escapeLiteral(regclass)}'`;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
/**
 * Builds a boolean SQL probe against `pg_constraint` for a named constraint.
 *
 * When `table` is omitted the check matches by name + schema across all tables.
 * Pass `table` to scope the check to a single table (prevents false matches on
 * identically-named constraints in different tables).
 *
 * @param constraintName - Constraint name to look up (escaped into the literal).
 * @param schema - Schema (namespace) the constraint must live in.
 * @param table - Optional table scope, compared via `to_regclass`.
 * @param exists - When false, the probe asserts the constraint is absent.
 * @returns A `SELECT EXISTS(...)`/`SELECT NOT EXISTS(...)` statement.
 */
export function constraintExistsCheck({
  constraintName,
  schema,
  table,
  exists = true,
}: {
  constraintName: string;
  schema: string;
  table?: string;
  exists?: boolean;
}): string {
  const existsClause = exists ? 'EXISTS' : 'NOT EXISTS';
  // Scope by the table's regclass OID when a table was given; to_regclass
  // returns NULL for a missing table, so the filter then matches nothing.
  const tableFilter = table
    ? `AND c.conrelid = to_regclass(${toRegclassLiteral(schema, table)})`
    : '';
  return `SELECT ${existsClause} (
    SELECT 1 FROM pg_constraint c
    JOIN pg_namespace n ON c.connamespace = n.oid
    WHERE c.conname = '${escapeLiteral(constraintName)}'
    AND n.nspname = '${escapeLiteral(schema)}'
    ${tableFilter}
  )`;
}
|
|
43
|
+
|
|
44
|
+
export function columnExistsCheck({
|
|
45
|
+
schema,
|
|
46
|
+
table,
|
|
47
|
+
column,
|
|
48
|
+
exists = true,
|
|
49
|
+
}: {
|
|
50
|
+
schema: string;
|
|
51
|
+
table: string;
|
|
52
|
+
column: string;
|
|
53
|
+
exists?: boolean;
|
|
54
|
+
}): string {
|
|
55
|
+
const existsClause = exists ? '' : 'NOT ';
|
|
56
|
+
return `SELECT ${existsClause}EXISTS (
|
|
57
|
+
SELECT 1
|
|
58
|
+
FROM information_schema.columns
|
|
59
|
+
WHERE table_schema = '${escapeLiteral(schema)}'
|
|
60
|
+
AND table_name = '${escapeLiteral(table)}'
|
|
61
|
+
AND column_name = '${escapeLiteral(column)}'
|
|
62
|
+
)`;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
export function columnNullabilityCheck({
|
|
66
|
+
schema,
|
|
67
|
+
table,
|
|
68
|
+
column,
|
|
69
|
+
nullable,
|
|
70
|
+
}: {
|
|
71
|
+
schema: string;
|
|
72
|
+
table: string;
|
|
73
|
+
column: string;
|
|
74
|
+
nullable: boolean;
|
|
75
|
+
}): string {
|
|
76
|
+
const expected = nullable ? 'YES' : 'NO';
|
|
77
|
+
return `SELECT EXISTS (
|
|
78
|
+
SELECT 1
|
|
79
|
+
FROM information_schema.columns
|
|
80
|
+
WHERE table_schema = '${escapeLiteral(schema)}'
|
|
81
|
+
AND table_name = '${escapeLiteral(table)}'
|
|
82
|
+
AND column_name = '${escapeLiteral(column)}'
|
|
83
|
+
AND is_nullable = '${expected}'
|
|
84
|
+
)`;
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
export function tableIsEmptyCheck(qualifiedTableName: string): string {
|
|
88
|
+
return `SELECT NOT EXISTS (SELECT 1 FROM ${qualifiedTableName} LIMIT 1)`;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
export function columnHasNoDefaultCheck(opts: {
|
|
92
|
+
schema: string;
|
|
93
|
+
table: string;
|
|
94
|
+
column: string;
|
|
95
|
+
}): string {
|
|
96
|
+
return `SELECT NOT EXISTS (
|
|
97
|
+
SELECT 1
|
|
98
|
+
FROM information_schema.columns
|
|
99
|
+
WHERE table_schema = '${escapeLiteral(opts.schema)}'
|
|
100
|
+
AND table_name = '${escapeLiteral(opts.table)}'
|
|
101
|
+
AND column_name = '${escapeLiteral(opts.column)}'
|
|
102
|
+
AND column_default IS NOT NULL
|
|
103
|
+
)`;
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
/**
 * Maps internal Postgres type names to the display names `format_type()`
 * returns (e.g. `int4` → `integer`). Used by `buildExpectedFormatType`;
 * types not listed here fall through under their internal name.
 */
const FORMAT_TYPE_DISPLAY: ReadonlyMap<string, string> = new Map([
  ['int2', 'smallint'],
  ['int4', 'integer'],
  ['int8', 'bigint'],
  ['float4', 'real'],
  ['float8', 'double precision'],
  ['bool', 'boolean'],
  ['timestamp', 'timestamp without time zone'],
  ['timestamptz', 'timestamp with time zone'],
  ['time', 'time without time zone'],
  ['timetz', 'time with time zone'],
]);
|
|
118
|
+
|
|
119
|
+
// Identifiers matching this pattern may appear unquoted in Postgres DDL:
// lowercase/underscore start, then lowercase letters, digits, '_' or '$'.
const UNQUOTED_POSTGRES_IDENTIFIER_PATTERN = /^[a-z_][a-z0-9_$]*$/;

// Words that must still be quoted even when they match the pattern above.
// NOTE(review): appears to be the Postgres reserved-keyword list — confirm it
// matches the target server version (pg_get_keywords()) before extending.
const POSTGRES_RESERVED_IDENTIFIER_WORDS = new Set([
  'all',
  'analyse',
  'analyze',
  'and',
  'any',
  'array',
  'as',
  'asc',
  'asymmetric',
  'authorization',
  'between',
  'binary',
  'both',
  'case',
  'cast',
  'check',
  'collate',
  'column',
  'constraint',
  'create',
  'current_catalog',
  'current_date',
  'current_role',
  'current_time',
  'current_timestamp',
  'current_user',
  'default',
  'deferrable',
  'desc',
  'distinct',
  'do',
  'else',
  'end',
  'except',
  'false',
  'fetch',
  'for',
  'foreign',
  'freeze',
  'from',
  'full',
  'grant',
  'group',
  'having',
  'ilike',
  'in',
  'initially',
  'inner',
  'intersect',
  'into',
  'is',
  'isnull',
  'join',
  'lateral',
  'leading',
  'left',
  'like',
  'limit',
  'localtime',
  'localtimestamp',
  'natural',
  'not',
  'notnull',
  'null',
  'offset',
  'on',
  'only',
  'or',
  'order',
  'outer',
  'overlaps',
  'placing',
  'primary',
  'references',
  'right',
  'select',
  'session_user',
  'similar',
  'some',
  'symmetric',
  'table',
  'then',
  'to',
  'trailing',
  'true',
  'union',
  'unique',
  'user',
  'using',
  'variadic',
  'verbose',
  'when',
  'where',
  'window',
  'with',
]);
|
|
218
|
+
|
|
219
|
+
function formatUserDefinedTypeName(identifier: string): string {
|
|
220
|
+
if (
|
|
221
|
+
UNQUOTED_POSTGRES_IDENTIFIER_PATTERN.test(identifier) &&
|
|
222
|
+
!POSTGRES_RESERVED_IDENTIFIER_WORDS.has(identifier)
|
|
223
|
+
) {
|
|
224
|
+
return identifier;
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
return quoteIdentifier(identifier);
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
/**
 * Computes the type string that `format_type(atttypid, atttypmod)` is
 * expected to report for a contract column (compared in `columnTypeCheck`).
 *
 * Resolution order:
 * 1. Parameterized codec types — delegated to the codec's `expandNativeType`
 *    hook (presumably renders e.g. length/precision into the display form —
 *    confirm against the codec hook contract).
 * 2. Columns referencing a user-defined type — formatted by identifier rules.
 * 3. Built-in types — translated via FORMAT_TYPE_DISPLAY, else passed through.
 *
 * @param column - Contract column whose expected display type is needed.
 * @param codecHooks - Per-codec control hooks, keyed by codec id.
 * @param storageTypes - Named storage types for resolving `typeRef` indirection.
 */
export function buildExpectedFormatType(
  column: StorageColumn,
  codecHooks: Map<string, CodecControlHooks>,
  storageTypes: Record<string, StorageTypeInstance> = {},
): string {
  const resolved = resolveColumnTypeMetadata(column, storageTypes);

  // Codec-expanded display form takes precedence when both typeParams and a
  // codec with an expandNativeType hook are available.
  if (resolved.typeParams && resolved.codecId) {
    const hooks = codecHooks.get(resolved.codecId);
    if (hooks?.expandNativeType) {
      return hooks.expandNativeType({
        nativeType: resolved.nativeType,
        codecId: resolved.codecId,
        typeParams: resolved.typeParams,
      });
    }
  }

  // User-defined types (typeRef set) display under their (possibly quoted) name.
  if (column.typeRef) {
    return formatUserDefinedTypeName(resolved.nativeType);
  }

  return FORMAT_TYPE_DISPLAY.get(resolved.nativeType) ?? resolved.nativeType;
}
|
|
254
|
+
|
|
255
|
+
export function columnTypeCheck({
|
|
256
|
+
schema,
|
|
257
|
+
table,
|
|
258
|
+
column,
|
|
259
|
+
expectedType,
|
|
260
|
+
}: {
|
|
261
|
+
schema: string;
|
|
262
|
+
table: string;
|
|
263
|
+
column: string;
|
|
264
|
+
expectedType: string;
|
|
265
|
+
}): string {
|
|
266
|
+
return `SELECT EXISTS (
|
|
267
|
+
SELECT 1
|
|
268
|
+
FROM pg_attribute a
|
|
269
|
+
JOIN pg_class c ON c.oid = a.attrelid
|
|
270
|
+
JOIN pg_namespace n ON n.oid = c.relnamespace
|
|
271
|
+
WHERE n.nspname = '${escapeLiteral(schema)}'
|
|
272
|
+
AND c.relname = '${escapeLiteral(table)}'
|
|
273
|
+
AND a.attname = '${escapeLiteral(column)}'
|
|
274
|
+
AND format_type(a.atttypid, a.atttypmod) = '${escapeLiteral(expectedType)}'
|
|
275
|
+
AND NOT a.attisdropped
|
|
276
|
+
)`;
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
export function columnDefaultExistsCheck({
|
|
280
|
+
schema,
|
|
281
|
+
table,
|
|
282
|
+
column,
|
|
283
|
+
exists = true,
|
|
284
|
+
}: {
|
|
285
|
+
schema: string;
|
|
286
|
+
table: string;
|
|
287
|
+
column: string;
|
|
288
|
+
exists?: boolean;
|
|
289
|
+
}): string {
|
|
290
|
+
const nullCheck = exists ? 'IS NOT NULL' : 'IS NULL';
|
|
291
|
+
return `SELECT EXISTS (
|
|
292
|
+
SELECT 1
|
|
293
|
+
FROM information_schema.columns
|
|
294
|
+
WHERE table_schema = '${escapeLiteral(schema)}'
|
|
295
|
+
AND table_name = '${escapeLiteral(table)}'
|
|
296
|
+
AND column_name = '${escapeLiteral(column)}'
|
|
297
|
+
AND column_default ${nullCheck}
|
|
298
|
+
)`;
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
/**
 * Builds a boolean SQL probe for a table's primary key via `pg_index`.
 *
 * @param schema - Schema the table lives in.
 * @param table - Table to inspect.
 * @param exists - When false, asserts the primary key is absent.
 * @param constraintName - Optionally require the PK's index relation to carry
 *   this name (the PK constraint and its backing index share a name).
 */
export function tableHasPrimaryKeyCheck(
  schema: string,
  table: string,
  exists: boolean,
  constraintName?: string,
): string {
  const comparison = exists ? '' : 'NOT ';
  // c2 is the index relation backing the PK; filtering on c2.relname pins the
  // expected constraint/index name when one was supplied.
  const constraintFilter = constraintName
    ? `AND c2.relname = '${escapeLiteral(constraintName)}'`
    : '';
  return `SELECT ${comparison}EXISTS (
    SELECT 1
    FROM pg_index i
    JOIN pg_class c ON c.oid = i.indrelid
    JOIN pg_namespace n ON n.oid = c.relnamespace
    LEFT JOIN pg_class c2 ON c2.oid = i.indexrelid
    WHERE n.nspname = '${escapeLiteral(schema)}'
    AND c.relname = '${escapeLiteral(table)}'
    AND i.indisprimary
    ${constraintFilter}
  )`;
}
|
|
@@ -0,0 +1,262 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Migration strategies for the descriptor-based planner.
|
|
3
|
+
*
|
|
4
|
+
* Each strategy examines the issue list, consumes issues it handles,
|
|
5
|
+
* and returns the ops to handle them. The planner chains strategies,
|
|
6
|
+
* then handles whatever's left with default issue-to-descriptor mapping.
|
|
7
|
+
*
|
|
8
|
+
* Different strategy sets are used for different contexts:
|
|
9
|
+
* - `migration plan`: data-safe strategies (dataTransform for NOT NULL, type changes, etc.)
|
|
10
|
+
* - `db update`: dev-push strategies (temp defaults, destructive type changes, no data transforms)
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
import type { Contract } from '@prisma-next/contract/types';
|
|
14
|
+
import type { SchemaIssue } from '@prisma-next/framework-components/control';
|
|
15
|
+
import type { SqlStorage } from '@prisma-next/sql-contract/types';
|
|
16
|
+
import {
|
|
17
|
+
addColumn,
|
|
18
|
+
addEnumValues,
|
|
19
|
+
alterColumnType,
|
|
20
|
+
createEnumType,
|
|
21
|
+
dataTransform,
|
|
22
|
+
dropEnumType,
|
|
23
|
+
type PostgresMigrationOpDescriptor,
|
|
24
|
+
renameType,
|
|
25
|
+
setNotNull,
|
|
26
|
+
TODO,
|
|
27
|
+
} from './operation-descriptors';
|
|
28
|
+
|
|
29
|
+
// ============================================================================
|
|
30
|
+
// Strategy types
|
|
31
|
+
// ============================================================================
|
|
32
|
+
|
|
33
|
+
/** Context passed to each migration strategy — the from/to contracts for the migration. */
export interface StrategyContext {
  // Destination contract the migration moves the database toward.
  readonly toContract: Contract<SqlStorage>;
  // Prior contract, or null when there is none to diff against — TODO confirm with planner call sites.
  readonly fromContract: Contract<SqlStorage> | null;
}

/**
 * A migration strategy examines schema issues, consumes the ones it handles,
 * and returns the descriptor ops to address them. Returns `'no_match'` if
 * none of the issues are relevant. The planner chains strategies in order —
 * earlier strategies consume issues before later ones see them.
 *
 * On a match, `issues` is the remaining (unconsumed) list to pass onward.
 */
export type MigrationStrategy = (
  issues: readonly SchemaIssue[],
  context: StrategyContext,
) =>
  | { kind: 'match'; issues: readonly SchemaIssue[]; ops: readonly PostgresMigrationOpDescriptor[] }
  | { kind: 'no_match' };
|
|
51
|
+
|
|
52
|
+
// ============================================================================
|
|
53
|
+
// Recipes
|
|
54
|
+
// ============================================================================
|
|
55
|
+
|
|
56
|
+
const REBUILD_SUFFIX = '__prisma_next_new';
|
|
57
|
+
|
|
58
|
+
/**
|
|
59
|
+
* Produces the descriptor sequence for rebuilding a Postgres enum type:
|
|
60
|
+
* createEnumType(temp, values) → alterColumnType(USING cast) per column → dropEnumType(old) → renameType(temp, old)
|
|
61
|
+
*
|
|
62
|
+
* Used by the enum change strategy for value removal and reorder scenarios.
|
|
63
|
+
* Finds all columns referencing the enum via `typeRef` in the destination contract.
|
|
64
|
+
*/
|
|
65
|
+
function enumRebuildRecipe(
|
|
66
|
+
typeName: string,
|
|
67
|
+
ctx: StrategyContext,
|
|
68
|
+
): readonly PostgresMigrationOpDescriptor[] {
|
|
69
|
+
const toType = ctx.toContract.storage.types?.[typeName];
|
|
70
|
+
if (!toType) return [];
|
|
71
|
+
const nativeType = toType.nativeType;
|
|
72
|
+
const desiredValues = (toType.typeParams['values'] ?? []) as readonly string[];
|
|
73
|
+
const tempName = `${nativeType}${REBUILD_SUFFIX}`;
|
|
74
|
+
|
|
75
|
+
const columnRefs: { table: string; column: string }[] = [];
|
|
76
|
+
for (const [tableName, table] of Object.entries(ctx.toContract.storage.tables)) {
|
|
77
|
+
for (const [columnName, column] of Object.entries(table.columns)) {
|
|
78
|
+
if (column.typeRef === typeName) {
|
|
79
|
+
columnRefs.push({ table: tableName, column: columnName });
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
return [
|
|
85
|
+
createEnumType(tempName, desiredValues),
|
|
86
|
+
...columnRefs.map((ref) =>
|
|
87
|
+
alterColumnType(ref.table, ref.column, {
|
|
88
|
+
toType: tempName,
|
|
89
|
+
using: `${ref.column}::text::${tempName}`,
|
|
90
|
+
}),
|
|
91
|
+
),
|
|
92
|
+
dropEnumType(nativeType),
|
|
93
|
+
renameType(tempName, nativeType),
|
|
94
|
+
];
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
// ============================================================================
|
|
98
|
+
// Data-safe strategies (for `migration plan`)
|
|
99
|
+
// ============================================================================
|
|
100
|
+
|
|
101
|
+
/**
|
|
102
|
+
* NOT NULL backfill strategy.
|
|
103
|
+
*
|
|
104
|
+
* When a missing column is NOT NULL without a default, the planner can't just
|
|
105
|
+
* add it — existing rows would violate the constraint. Instead, emit:
|
|
106
|
+
* addColumn(nullable) → dataTransform (user fills in backfill) → setNotNull
|
|
107
|
+
*/
|
|
108
|
+
export const notNullBackfillStrategy: MigrationStrategy = (issues, ctx) => {
|
|
109
|
+
const matched: SchemaIssue[] = [];
|
|
110
|
+
const ops: PostgresMigrationOpDescriptor[] = [];
|
|
111
|
+
|
|
112
|
+
for (const issue of issues) {
|
|
113
|
+
if (issue.kind !== 'missing_column' || !issue.table || !issue.column) continue;
|
|
114
|
+
|
|
115
|
+
const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
|
|
116
|
+
if (!column) continue;
|
|
117
|
+
if (column.nullable === true || column.default !== undefined) continue;
|
|
118
|
+
|
|
119
|
+
matched.push(issue);
|
|
120
|
+
ops.push(
|
|
121
|
+
addColumn(issue.table, issue.column, { nullable: true }),
|
|
122
|
+
dataTransform(`backfill-${issue.table}-${issue.column}`, {
|
|
123
|
+
check: TODO,
|
|
124
|
+
run: TODO,
|
|
125
|
+
}),
|
|
126
|
+
setNotNull(issue.table, issue.column),
|
|
127
|
+
);
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
if (matched.length === 0) return { kind: 'no_match' };
|
|
131
|
+
return {
|
|
132
|
+
kind: 'match',
|
|
133
|
+
issues: issues.filter((i) => !matched.includes(i)),
|
|
134
|
+
ops,
|
|
135
|
+
};
|
|
136
|
+
};
|
|
137
|
+
|
|
138
|
+
/**
|
|
139
|
+
* Unsafe type change strategy.
|
|
140
|
+
*
|
|
141
|
+
* Safe widenings (int4 → int8) emit alterColumnType directly.
|
|
142
|
+
* Unsafe changes emit dataTransform for user to handle conversion.
|
|
143
|
+
*/
|
|
144
|
+
export const typeChangeStrategy: MigrationStrategy = (issues, ctx) => {
|
|
145
|
+
const matched: SchemaIssue[] = [];
|
|
146
|
+
const ops: PostgresMigrationOpDescriptor[] = [];
|
|
147
|
+
|
|
148
|
+
const SAFE_WIDENINGS = new Set(['int2→int4', 'int2→int8', 'int4→int8', 'float4→float8']);
|
|
149
|
+
function isSafeWidening(fromType: string, toType: string): boolean {
|
|
150
|
+
return SAFE_WIDENINGS.has(`${fromType}→${toType}`);
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
for (const issue of issues) {
|
|
154
|
+
if (issue.kind !== 'type_mismatch') continue;
|
|
155
|
+
if (!issue.table || !issue.column) continue;
|
|
156
|
+
const fromColumn = ctx.fromContract?.storage.tables[issue.table]?.columns[issue.column];
|
|
157
|
+
const toColumn = ctx.toContract?.storage.tables[issue.table]?.columns[issue.column];
|
|
158
|
+
if (!fromColumn || !toColumn) continue;
|
|
159
|
+
const fromType = fromColumn.nativeType;
|
|
160
|
+
const toType = toColumn.nativeType;
|
|
161
|
+
if (fromType === toType) continue;
|
|
162
|
+
matched.push(issue);
|
|
163
|
+
if (isSafeWidening(fromType, toType)) {
|
|
164
|
+
ops.push(alterColumnType(issue.table, issue.column));
|
|
165
|
+
} else {
|
|
166
|
+
ops.push(
|
|
167
|
+
dataTransform(`typechange-${issue.table}-${issue.column}`, {
|
|
168
|
+
check: TODO,
|
|
169
|
+
run: TODO,
|
|
170
|
+
}),
|
|
171
|
+
alterColumnType(issue.table, issue.column),
|
|
172
|
+
);
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
if (matched.length === 0) return { kind: 'no_match' };
|
|
176
|
+
return {
|
|
177
|
+
kind: 'match',
|
|
178
|
+
issues: issues.filter((i) => !matched.includes(i)),
|
|
179
|
+
ops,
|
|
180
|
+
};
|
|
181
|
+
};
|
|
182
|
+
|
|
183
|
+
/**
|
|
184
|
+
* Nullable → NOT NULL tightening strategy.
|
|
185
|
+
*
|
|
186
|
+
* When an existing column changes from nullable to NOT NULL, existing rows
|
|
187
|
+
* may have NULLs that violate the constraint. Emit:
|
|
188
|
+
* dataTransform (user fills in NULL handling) → setNotNull
|
|
189
|
+
*/
|
|
190
|
+
export const nullableTighteningStrategy: MigrationStrategy = (issues, ctx) => {
|
|
191
|
+
const matched: SchemaIssue[] = [];
|
|
192
|
+
const ops: PostgresMigrationOpDescriptor[] = [];
|
|
193
|
+
|
|
194
|
+
for (const issue of issues) {
|
|
195
|
+
if (issue.kind !== 'nullability_mismatch' || !issue.table || !issue.column) continue;
|
|
196
|
+
|
|
197
|
+
const column = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
|
|
198
|
+
if (!column) continue;
|
|
199
|
+
if (column.nullable === true) continue;
|
|
200
|
+
|
|
201
|
+
matched.push(issue);
|
|
202
|
+
ops.push(
|
|
203
|
+
dataTransform(`handle-nulls-${issue.table}-${issue.column}`, {
|
|
204
|
+
check: TODO,
|
|
205
|
+
run: TODO,
|
|
206
|
+
}),
|
|
207
|
+
setNotNull(issue.table, issue.column),
|
|
208
|
+
);
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
if (matched.length === 0) return { kind: 'no_match' };
|
|
212
|
+
return {
|
|
213
|
+
kind: 'match',
|
|
214
|
+
issues: issues.filter((i) => !matched.includes(i)),
|
|
215
|
+
ops,
|
|
216
|
+
};
|
|
217
|
+
};
|
|
218
|
+
|
|
219
|
+
/**
|
|
220
|
+
* Enum value change strategy.
|
|
221
|
+
*
|
|
222
|
+
* When enum values change between contracts:
|
|
223
|
+
* - Add only → addEnumValues
|
|
224
|
+
* - Reorder (same values, different order) → rebuild recipe (no data transform)
|
|
225
|
+
* - Removal → dataTransform (user migrates rows) + rebuild recipe
|
|
226
|
+
*/
|
|
227
|
+
export const enumChangeStrategy: MigrationStrategy = (issues, ctx) => {
|
|
228
|
+
const matched: SchemaIssue[] = [];
|
|
229
|
+
const ops: PostgresMigrationOpDescriptor[] = [];
|
|
230
|
+
|
|
231
|
+
for (const issue of issues) {
|
|
232
|
+
if (issue.kind !== 'enum_values_changed') continue;
|
|
233
|
+
matched.push(issue);
|
|
234
|
+
|
|
235
|
+
if (issue.removedValues.length > 0) {
|
|
236
|
+
ops.push(
|
|
237
|
+
dataTransform(`migrate-${issue.typeName}-values`, { check: TODO, run: TODO }),
|
|
238
|
+
...enumRebuildRecipe(issue.typeName, ctx),
|
|
239
|
+
);
|
|
240
|
+
} else if (issue.addedValues.length === 0) {
|
|
241
|
+
// Reorder only — rebuild without data transform
|
|
242
|
+
ops.push(...enumRebuildRecipe(issue.typeName, ctx));
|
|
243
|
+
} else {
|
|
244
|
+
ops.push(addEnumValues(issue.typeName, issue.addedValues));
|
|
245
|
+
}
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
if (matched.length === 0) return { kind: 'no_match' };
|
|
249
|
+
return {
|
|
250
|
+
kind: 'match',
|
|
251
|
+
issues: issues.filter((i) => !matched.includes(i)),
|
|
252
|
+
ops,
|
|
253
|
+
};
|
|
254
|
+
};
|
|
255
|
+
|
|
256
|
+
/** Default strategy set for `migration plan` — data-safe, requires user input for destructive changes. */
export const migrationPlanStrategies: readonly MigrationStrategy[] = [
  // Order matters: earlier strategies consume issues before later ones see them.
  enumChangeStrategy,
  notNullBackfillStrategy,
  typeChangeStrategy,
  nullableTighteningStrategy,
];
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { ifDefined } from '@prisma-next/utils/defined';
|
|
2
|
+
|
|
3
|
+
/** Kinds of database objects a planned operation can target. */
export type OperationClass =
  | 'dependency'
  | 'type'
  | 'table'
  | 'column'
  | 'primaryKey'
  | 'unique'
  | 'index'
  | 'foreignKey';

/** Identifies the database object a planned Postgres operation acts on. */
export interface PostgresPlanTargetDetails {
  readonly schema: string;
  readonly objectType: OperationClass;
  readonly name: string;
  // Only present when the object is table-scoped (see buildTargetDetails).
  readonly table?: string;
}

// Flags controlling planner behavior. Exact semantics of each flag are
// determined by the planner's call sites — NOTE(review): confirm there.
export interface PlanningMode {
  readonly includeExtraObjects: boolean;
  readonly allowWidening: boolean;
  readonly allowDestructive: boolean;
}
|
|
25
|
+
|
|
26
|
+
export function buildTargetDetails(
|
|
27
|
+
objectType: OperationClass,
|
|
28
|
+
name: string,
|
|
29
|
+
schema: string,
|
|
30
|
+
table?: string,
|
|
31
|
+
): PostgresPlanTargetDetails {
|
|
32
|
+
return {
|
|
33
|
+
schema,
|
|
34
|
+
objectType,
|
|
35
|
+
name,
|
|
36
|
+
...ifDefined('table', table),
|
|
37
|
+
};
|
|
38
|
+
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import type { StorageColumn, StorageTypeInstance } from '@prisma-next/sql-contract/types';
|
|
2
|
+
|
|
3
|
+
/**
 * The subset of column metadata that describes a column's concrete type,
 * after any `typeRef` indirection has been resolved
 * (see `resolveColumnTypeMetadata`).
 */
export type ResolvedColumnTypeMetadata = Pick<
  StorageColumn,
  'nativeType' | 'codecId' | 'typeParams'
>;
|
|
7
|
+
|
|
8
|
+
export function resolveColumnTypeMetadata(
|
|
9
|
+
column: StorageColumn,
|
|
10
|
+
storageTypes: Record<string, StorageTypeInstance>,
|
|
11
|
+
): ResolvedColumnTypeMetadata {
|
|
12
|
+
if (!column.typeRef) {
|
|
13
|
+
return column;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
const referencedType = storageTypes[column.typeRef];
|
|
17
|
+
if (!referencedType) {
|
|
18
|
+
return column;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
return {
|
|
22
|
+
codecId: referencedType.codecId,
|
|
23
|
+
nativeType: referencedType.nativeType,
|
|
24
|
+
typeParams: referencedType.typeParams,
|
|
25
|
+
};
|
|
26
|
+
}
|