@prisma-next/target-postgres 0.4.0-dev.8 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. package/dist/control.d.mts +1 -9
  2. package/dist/control.d.mts.map +1 -1
  3. package/dist/control.mjs +1693 -4798
  4. package/dist/control.mjs.map +1 -1
  5. package/dist/migration.d.mts +164 -0
  6. package/dist/migration.d.mts.map +1 -0
  7. package/dist/migration.mjs +446 -0
  8. package/dist/migration.mjs.map +1 -0
  9. package/dist/planner-target-details-MXb3oeul.d.mts +11 -0
  10. package/dist/planner-target-details-MXb3oeul.d.mts.map +1 -0
  11. package/dist/postgres-migration-BsHJHV9O.mjs +2793 -0
  12. package/dist/postgres-migration-BsHJHV9O.mjs.map +1 -0
  13. package/package.json +21 -19
  14. package/src/core/migrations/issue-planner.ts +832 -0
  15. package/src/core/migrations/op-factory-call.ts +862 -0
  16. package/src/core/migrations/operations/columns.ts +285 -0
  17. package/src/core/migrations/operations/constraints.ts +191 -0
  18. package/src/core/migrations/operations/data-transform.ts +113 -0
  19. package/src/core/migrations/operations/dependencies.ts +36 -0
  20. package/src/core/migrations/operations/enums.ts +113 -0
  21. package/src/core/migrations/operations/indexes.ts +61 -0
  22. package/src/core/migrations/operations/raw.ts +15 -0
  23. package/src/core/migrations/operations/shared.ts +67 -0
  24. package/src/core/migrations/operations/tables.ts +63 -0
  25. package/src/core/migrations/planner-produced-postgres-migration.ts +67 -0
  26. package/src/core/migrations/planner-strategies.ts +592 -151
  27. package/src/core/migrations/planner-target-details.ts +0 -6
  28. package/src/core/migrations/planner.ts +63 -781
  29. package/src/core/migrations/postgres-migration.ts +20 -0
  30. package/src/core/migrations/render-ops.ts +9 -0
  31. package/src/core/migrations/render-typescript.ts +95 -0
  32. package/src/exports/control.ts +9 -142
  33. package/src/exports/migration.ts +40 -0
  34. package/dist/migration-builders.d.mts +0 -88
  35. package/dist/migration-builders.d.mts.map +0 -1
  36. package/dist/migration-builders.mjs +0 -3
  37. package/dist/operation-descriptors-CxymFSgK.mjs +0 -52
  38. package/dist/operation-descriptors-CxymFSgK.mjs.map +0 -1
  39. package/src/core/migrations/descriptor-planner.ts +0 -464
  40. package/src/core/migrations/operation-descriptors.ts +0 -166
  41. package/src/core/migrations/operation-resolver.ts +0 -929
  42. package/src/core/migrations/planner-reconciliation.ts +0 -798
  43. package/src/core/migrations/scaffolding.ts +0 -140
  44. package/src/exports/migration-builders.ts +0 -56
@@ -1,113 +1,164 @@
1
1
  /**
2
- * Migration strategies for the descriptor-based planner.
2
+ * Migration strategies.
3
3
  *
4
- * Each strategy examines the issue list, consumes issues it handles,
5
- * and returns the ops to handle them. The planner chains strategies,
6
- * then handles whatever's left with default issue-to-descriptor mapping.
4
+ * Each strategy examines the issue list, consumes issues it handles, and
5
+ * returns the `PostgresOpFactoryCall[]` to address them. The issue planner
6
+ * runs each strategy in order and routes whatever's left through
7
+ * `mapIssueToCall`.
7
8
  *
8
- * Different strategy sets are used for different contexts:
9
- * - `migration plan`: data-safe strategies (dataTransform for NOT NULL, type changes, etc.)
10
- * - `db update`: dev-push strategies (temp defaults, destructive type changes, no data transforms)
9
+ * The full ordered list is exported as `postgresPlannerStrategies` and is
10
+ * used unchanged by both `migration plan` and `db update` / `db init`. The
11
+ * two journeys differ only in `policy.allowedOperationClasses`:
12
+ *
13
+ * - When `'data'` is in the policy, data-safe strategies (NOT NULL backfill,
14
+ * nullability tightening, unsafe type changes, enum shrink/rebuild) emit
15
+ * `DataTransformCall` placeholders that the user fills in.
16
+ * - When `'data'` is excluded, those strategies short-circuit so the
17
+ * downstream walk-schema strategies (codec-hook type ops, dependency
18
+ * installs, temp-default backfill) and `mapIssueToCall` defaults emit
19
+ * direct DDL instead.
11
20
  */
12
21
 
13
22
  import type { Contract } from '@prisma-next/contract/types';
23
+ import type {
24
+ CodecControlHooks,
25
+ ComponentDatabaseDependency,
26
+ MigrationOperationPolicy,
27
+ SqlMigrationPlanOperation,
28
+ } from '@prisma-next/family-sql/control';
29
+ import { collectInitDependencies } from '@prisma-next/family-sql/control';
30
+ import type { TargetBoundComponentDescriptor } from '@prisma-next/framework-components/components';
14
31
  import type { SchemaIssue } from '@prisma-next/framework-components/control';
15
- import type { SqlStorage } from '@prisma-next/sql-contract/types';
32
+ import type { SqlStorage, StorageTypeInstance } from '@prisma-next/sql-contract/types';
33
+ import type { SqlSchemaIR } from '@prisma-next/sql-schema-ir/types';
34
+ import {
35
+ AddColumnCall,
36
+ AddEnumValuesCall,
37
+ AlterColumnTypeCall,
38
+ CreateEnumTypeCall,
39
+ DataTransformCall,
40
+ DropEnumTypeCall,
41
+ type PostgresOpFactoryCall,
42
+ RawSqlCall,
43
+ RenameTypeCall,
44
+ SetNotNullCall,
45
+ } from './op-factory-call';
46
+ import {
47
+ buildAddColumnSql,
48
+ buildColumnDefaultSql,
49
+ buildColumnTypeSql,
50
+ } from './planner-ddl-builders';
51
+ import { resolveIdentityValue } from './planner-identity-values';
16
52
  import {
17
- addColumn,
18
- addEnumValues,
19
- alterColumnType,
20
- createEnumType,
21
- dataTransform,
22
- dropEnumType,
23
- type PostgresMigrationOpDescriptor,
24
- renameType,
25
- setNotNull,
26
- TODO,
27
- } from './operation-descriptors';
53
+ buildAddColumnOperationIdentity,
54
+ buildAddNotNullColumnWithTemporaryDefaultOperation,
55
+ } from './planner-recipes';
56
+ import { buildSchemaLookupMap, hasForeignKey, hasUniqueConstraint } from './planner-schema-lookup';
57
+ import {
58
+ buildExpectedFormatType,
59
+ columnExistsCheck,
60
+ columnNullabilityCheck,
61
+ qualifyTableName,
62
+ tableIsEmptyCheck,
63
+ } from './planner-sql-checks';
64
+ import { buildTargetDetails, type PostgresPlanTargetDetails } from './planner-target-details';
65
+
66
+ const REBUILD_SUFFIX = '__prisma_next_new';
28
67
 
29
68
  // ============================================================================
30
69
  // Strategy types
31
70
  // ============================================================================
32
71
 
33
- /** Context passed to each migration strategy — the from/to contracts for the migration. */
72
+ /**
73
+ * Context passed to each migration strategy.
74
+ *
75
+ * Strategies read the source (`fromContract`), target (`toContract`), current
76
+ * database state (`schema`), operation policy (`policy`), and component list
77
+ * (`frameworkComponents`) to make planning decisions. `fromContract` is null
78
+ * when no prior contract is available (e.g. `db update`, where the current
79
+ * DB state is approximated via `schema`).
80
+ */
34
81
  export interface StrategyContext {
35
82
  readonly toContract: Contract<SqlStorage>;
36
83
  readonly fromContract: Contract<SqlStorage> | null;
84
+ readonly schemaName: string;
85
+ readonly codecHooks: ReadonlyMap<string, CodecControlHooks>;
86
+ readonly storageTypes: Readonly<Record<string, StorageTypeInstance>>;
87
+ readonly schema: SqlSchemaIR;
88
+ readonly policy: MigrationOperationPolicy;
89
+ readonly frameworkComponents: ReadonlyArray<TargetBoundComponentDescriptor<'sql', string>>;
37
90
  }
38
91
 
39
- /**
40
- * A migration strategy examines schema issues, consumes the ones it handles,
41
- * and returns the descriptor ops to address them. Returns `'no_match'` if
42
- * none of the issues are relevant. The planner chains strategies in order —
43
- * earlier strategies consume issues before later ones see them.
44
- */
45
- export type MigrationStrategy = (
92
+ // ============================================================================
93
+ // Call strategies (for issue planner)
94
+ // ============================================================================
95
+
96
+ export type CallMigrationStrategy = (
46
97
  issues: readonly SchemaIssue[],
47
98
  context: StrategyContext,
48
99
  ) =>
49
- | { kind: 'match'; issues: readonly SchemaIssue[]; ops: readonly PostgresMigrationOpDescriptor[] }
100
+ | {
101
+ kind: 'match';
102
+ issues: readonly SchemaIssue[];
103
+ calls: readonly PostgresOpFactoryCall[];
104
+ /**
105
+ * `true` for strategies that emit cohesive sequential recipes whose
106
+ * calls must stay contiguous and in the returned order — e.g.
107
+ * `enumChangeCallStrategy` (dataTransform → createEnumType →
108
+ * dropEnumType), `notNullBackfillCallStrategy` (addColumn →
109
+ * dataTransform → setNotNull). Defaults to `false`, which lets
110
+ * `planIssues` hoist individual calls into their DDL sequencing bucket.
111
+ */
112
+ recipe?: boolean;
113
+ }
50
114
  | { kind: 'no_match' };
51
115
 
52
- // ============================================================================
53
- // Recipes
54
- // ============================================================================
55
-
56
- const REBUILD_SUFFIX = '__prisma_next_new';
57
-
58
- /**
59
- * Produces the descriptor sequence for rebuilding a Postgres enum type:
60
- * createEnumType(temp, values) → alterColumnType(USING cast) per column → dropEnumType(old) → renameType(temp, old)
61
- *
62
- * Used by the enum change strategy for value removal and reorder scenarios.
63
- * Finds all columns referencing the enum via `typeRef` in the destination contract.
64
- */
65
- function enumRebuildRecipe(
66
- typeName: string,
116
+ function buildColumnSpec(
117
+ table: string,
118
+ column: string,
67
119
  ctx: StrategyContext,
68
- ): readonly PostgresMigrationOpDescriptor[] {
69
- const toType = ctx.toContract.storage.types?.[typeName];
70
- if (!toType) return [];
71
- const nativeType = toType.nativeType;
72
- const desiredValues = (toType.typeParams['values'] ?? []) as readonly string[];
73
- const tempName = `${nativeType}${REBUILD_SUFFIX}`;
74
-
75
- const columnRefs: { table: string; column: string }[] = [];
76
- for (const [tableName, table] of Object.entries(ctx.toContract.storage.tables)) {
77
- for (const [columnName, column] of Object.entries(table.columns)) {
78
- if (column.typeRef === typeName) {
79
- columnRefs.push({ table: tableName, column: columnName });
80
- }
81
- }
82
- }
120
+ overrides?: { nullable?: boolean },
121
+ ) {
122
+ const col = ctx.toContract.storage.tables[table]?.columns[column];
123
+ if (!col) throw new Error(`Column "${table}"."${column}" not found in destination contract`);
124
+ const mutableHooks = ctx.codecHooks as Map<string, CodecControlHooks>;
125
+ const mutableTypes = ctx.storageTypes as Record<string, StorageTypeInstance>;
126
+ return {
127
+ name: column,
128
+ typeSql: buildColumnTypeSql(col, mutableHooks, mutableTypes),
129
+ defaultSql: buildColumnDefaultSql(col.default, col),
130
+ nullable: overrides?.nullable ?? col.nullable,
131
+ };
132
+ }
83
133
 
84
- return [
85
- createEnumType(tempName, desiredValues),
86
- ...columnRefs.map((ref) =>
87
- alterColumnType(ref.table, ref.column, {
88
- toType: tempName,
89
- using: `${ref.column}::text::${tempName}`,
90
- }),
91
- ),
92
- dropEnumType(nativeType),
93
- renameType(tempName, nativeType),
94
- ];
134
+ function buildAlterTypeOptions(
135
+ table: string,
136
+ column: string,
137
+ ctx: StrategyContext,
138
+ using?: string,
139
+ ) {
140
+ const col = ctx.toContract.storage.tables[table]?.columns[column];
141
+ if (!col) throw new Error(`Column "${table}"."${column}" not found in destination contract`);
142
+ const mutableHooks = ctx.codecHooks as Map<string, CodecControlHooks>;
143
+ const mutableTypes = ctx.storageTypes as Record<string, StorageTypeInstance>;
144
+ const qualifiedTargetType = buildColumnTypeSql(col, mutableHooks, mutableTypes, false);
145
+ const formatTypeExpected = buildExpectedFormatType(col, mutableHooks, mutableTypes);
146
+ return {
147
+ qualifiedTargetType,
148
+ formatTypeExpected,
149
+ rawTargetTypeForLabel: qualifiedTargetType,
150
+ ...(using !== undefined ? { using } : {}),
151
+ };
95
152
  }
96
153
 
97
- // ============================================================================
98
- // Data-safe strategies (for `migration plan`)
99
- // ============================================================================
154
+ export const notNullBackfillCallStrategy: CallMigrationStrategy = (issues, ctx) => {
155
+ // `DataTransformCall` is operation class `'data'`. When the policy excludes
156
+ // it (`db update` / `db init`), skip so `notNullAddColumnCallStrategy`
157
+ // (temp-default backfill) or `mapIssueToCall` can take the issue.
158
+ if (!ctx.policy.allowedOperationClasses.includes('data')) return { kind: 'no_match' };
100
159
 
101
- /**
102
- * NOT NULL backfill strategy.
103
- *
104
- * When a missing column is NOT NULL without a default, the planner can't just
105
- * add it — existing rows would violate the constraint. Instead, emit:
106
- * addColumn(nullable) → dataTransform (user fills in backfill) → setNotNull
107
- */
108
- export const notNullBackfillStrategy: MigrationStrategy = (issues, ctx) => {
109
160
  const matched: SchemaIssue[] = [];
110
- const ops: PostgresMigrationOpDescriptor[] = [];
161
+ const calls: PostgresOpFactoryCall[] = [];
111
162
 
112
163
  for (const issue of issues) {
113
164
  if (issue.kind !== 'missing_column' || !issue.table || !issue.column) continue;
@@ -117,13 +168,15 @@ export const notNullBackfillStrategy: MigrationStrategy = (issues, ctx) => {
117
168
  if (column.nullable === true || column.default !== undefined) continue;
118
169
 
119
170
  matched.push(issue);
120
- ops.push(
121
- addColumn(issue.table, issue.column, { nullable: true }),
122
- dataTransform(`backfill-${issue.table}-${issue.column}`, {
123
- check: TODO,
124
- run: TODO,
125
- }),
126
- setNotNull(issue.table, issue.column),
171
+ const spec = buildColumnSpec(issue.table, issue.column, ctx, { nullable: true });
172
+ calls.push(
173
+ new AddColumnCall(ctx.schemaName, issue.table, spec),
174
+ new DataTransformCall(
175
+ `backfill-${issue.table}-${issue.column}`,
176
+ `backfill-${issue.table}-${issue.column}:check`,
177
+ `backfill-${issue.table}-${issue.column}:run`,
178
+ ),
179
+ new SetNotNullCall(ctx.schemaName, issue.table, issue.column),
127
180
  );
128
181
  }
129
182
 
@@ -131,44 +184,47 @@ export const notNullBackfillStrategy: MigrationStrategy = (issues, ctx) => {
131
184
  return {
132
185
  kind: 'match',
133
186
  issues: issues.filter((i) => !matched.includes(i)),
134
- ops,
187
+ calls,
188
+ recipe: true,
135
189
  };
136
190
  };
137
191
 
138
- /**
139
- * Unsafe type change strategy.
140
- *
141
- * Safe widenings (int4 → int8) emit alterColumnType directly.
142
- * Unsafe changes emit dataTransform for user to handle conversion.
143
- */
144
- export const typeChangeStrategy: MigrationStrategy = (issues, ctx) => {
145
- const matched: SchemaIssue[] = [];
146
- const ops: PostgresMigrationOpDescriptor[] = [];
192
+ const SAFE_WIDENINGS = new Set(['int2→int4', 'int2→int8', 'int4→int8', 'float4→float8']);
147
193
 
148
- const SAFE_WIDENINGS = new Set(['int2→int4', 'int2→int8', 'int4→int8', 'float4→float8']);
149
- function isSafeWidening(fromType: string, toType: string): boolean {
150
- return SAFE_WIDENINGS.has(`${fromType}→${toType}`);
151
- }
194
+ export const typeChangeCallStrategy: CallMigrationStrategy = (issues, ctx) => {
195
+ // For unsafe widenings this strategy emits a `DataTransformCall` placeholder
196
+ // (operation class `'data'`); when the policy excludes `'data'`
197
+ // (`db update` / `db init`), skip those issues so `mapIssueToCall` can
198
+ // emit a direct `ALTER COLUMN TYPE`. Safe widenings still flow through
199
+ // here because the resulting `AlterColumnTypeCall` is `widening`-class.
200
+ const dataAllowed = ctx.policy.allowedOperationClasses.includes('data');
201
+
202
+ const matched: SchemaIssue[] = [];
203
+ const calls: PostgresOpFactoryCall[] = [];
152
204
 
153
205
  for (const issue of issues) {
154
206
  if (issue.kind !== 'type_mismatch') continue;
155
207
  if (!issue.table || !issue.column) continue;
156
208
  const fromColumn = ctx.fromContract?.storage.tables[issue.table]?.columns[issue.column];
157
- const toColumn = ctx.toContract?.storage.tables[issue.table]?.columns[issue.column];
209
+ const toColumn = ctx.toContract.storage.tables[issue.table]?.columns[issue.column];
158
210
  if (!fromColumn || !toColumn) continue;
159
211
  const fromType = fromColumn.nativeType;
160
212
  const toType = toColumn.nativeType;
161
213
  if (fromType === toType) continue;
214
+ const isSafeWidening = SAFE_WIDENINGS.has(`${fromType}→${toType}`);
215
+ if (!isSafeWidening && !dataAllowed) continue;
162
216
  matched.push(issue);
163
- if (isSafeWidening(fromType, toType)) {
164
- ops.push(alterColumnType(issue.table, issue.column));
217
+ const alterOpts = buildAlterTypeOptions(issue.table, issue.column, ctx);
218
+ if (isSafeWidening) {
219
+ calls.push(new AlterColumnTypeCall(ctx.schemaName, issue.table, issue.column, alterOpts));
165
220
  } else {
166
- ops.push(
167
- dataTransform(`typechange-${issue.table}-${issue.column}`, {
168
- check: TODO,
169
- run: TODO,
170
- }),
171
- alterColumnType(issue.table, issue.column),
221
+ calls.push(
222
+ new DataTransformCall(
223
+ `typechange-${issue.table}-${issue.column}`,
224
+ `typechange-${issue.table}-${issue.column}:check`,
225
+ `typechange-${issue.table}-${issue.column}:run`,
226
+ ),
227
+ new AlterColumnTypeCall(ctx.schemaName, issue.table, issue.column, alterOpts),
172
228
  );
173
229
  }
174
230
  }
@@ -176,20 +232,19 @@ export const typeChangeStrategy: MigrationStrategy = (issues, ctx) => {
176
232
  return {
177
233
  kind: 'match',
178
234
  issues: issues.filter((i) => !matched.includes(i)),
179
- ops,
235
+ calls,
236
+ recipe: true,
180
237
  };
181
238
  };
182
239
 
183
- /**
184
- * Nullable NOT NULL tightening strategy.
185
- *
186
- * When an existing column changes from nullable to NOT NULL, existing rows
187
- * may have NULLs that violate the constraint. Emit:
188
- * dataTransform (user fills in NULL handling) → setNotNull
189
- */
190
- export const nullableTighteningStrategy: MigrationStrategy = (issues, ctx) => {
240
+ export const nullableTighteningCallStrategy: CallMigrationStrategy = (issues, ctx) => {
241
+ // `DataTransformCall` is operation class `'data'`. When the policy excludes
242
+ // it (`db update` / `db init`), skip so `mapIssueToCall` emits a direct
243
+ // `SET NOT NULL` instead.
244
+ if (!ctx.policy.allowedOperationClasses.includes('data')) return { kind: 'no_match' };
245
+
191
246
  const matched: SchemaIssue[] = [];
192
- const ops: PostgresMigrationOpDescriptor[] = [];
247
+ const calls: PostgresOpFactoryCall[] = [];
193
248
 
194
249
  for (const issue of issues) {
195
250
  if (issue.kind !== 'nullability_mismatch' || !issue.table || !issue.column) continue;
@@ -199,12 +254,13 @@ export const nullableTighteningStrategy: MigrationStrategy = (issues, ctx) => {
199
254
  if (column.nullable === true) continue;
200
255
 
201
256
  matched.push(issue);
202
- ops.push(
203
- dataTransform(`handle-nulls-${issue.table}-${issue.column}`, {
204
- check: TODO,
205
- run: TODO,
206
- }),
207
- setNotNull(issue.table, issue.column),
257
+ calls.push(
258
+ new DataTransformCall(
259
+ `handle-nulls-${issue.table}-${issue.column}`,
260
+ `handle-nulls-${issue.table}-${issue.column}:check`,
261
+ `handle-nulls-${issue.table}-${issue.column}:run`,
262
+ ),
263
+ new SetNotNullCall(ctx.schemaName, issue.table, issue.column),
208
264
  );
209
265
  }
210
266
 
@@ -212,51 +268,436 @@ export const nullableTighteningStrategy: MigrationStrategy = (issues, ctx) => {
212
268
  return {
213
269
  kind: 'match',
214
270
  issues: issues.filter((i) => !matched.includes(i)),
215
- ops,
271
+ calls,
272
+ recipe: true,
216
273
  };
217
274
  };
218
275
 
219
- /**
220
- * Enum value change strategy.
221
- *
222
- * When enum values change between contracts:
223
- * - Add only → addEnumValues
224
- * - Reorder (same values, different order) rebuild recipe (no data transform)
225
- * - Removal → dataTransform (user migrates rows) + rebuild recipe
226
- */
227
- export const enumChangeStrategy: MigrationStrategy = (issues, ctx) => {
276
+ function enumRebuildCallRecipe(
277
+ typeName: string,
278
+ ctx: StrategyContext,
279
+ ): readonly PostgresOpFactoryCall[] {
280
+ const toType = ctx.toContract.storage.types?.[typeName];
281
+ if (!toType) return [];
282
+ const nativeType = toType.nativeType;
283
+ const desiredValues = (toType.typeParams['values'] ?? []) as readonly string[];
284
+ const tempName = `${nativeType}${REBUILD_SUFFIX}`;
285
+
286
+ const columnRefs: { table: string; column: string }[] = [];
287
+ for (const [tableName, table] of Object.entries(ctx.toContract.storage.tables)) {
288
+ for (const [columnName, column] of Object.entries(table.columns)) {
289
+ if (column.typeRef === typeName) {
290
+ columnRefs.push({ table: tableName, column: columnName });
291
+ }
292
+ }
293
+ }
294
+
295
+ return [
296
+ new CreateEnumTypeCall(ctx.schemaName, tempName, desiredValues),
297
+ ...columnRefs.map((ref) => {
298
+ const using = `${ref.column}::text::${tempName}`;
299
+ return new AlterColumnTypeCall(ctx.schemaName, ref.table, ref.column, {
300
+ qualifiedTargetType: tempName,
301
+ formatTypeExpected: tempName,
302
+ rawTargetTypeForLabel: tempName,
303
+ using,
304
+ });
305
+ }),
306
+ new DropEnumTypeCall(ctx.schemaName, nativeType),
307
+ new RenameTypeCall(ctx.schemaName, tempName, nativeType),
308
+ ];
309
+ }
310
+
311
+ export const enumChangeCallStrategy: CallMigrationStrategy = (issues, ctx) => {
312
+ // The shrink/rebuild branches emit a `DataTransformCall` placeholder or a
313
+ // destructive rebuild that should be authored explicitly. When the policy
314
+ // excludes `'data'` (`db update` / `db init`), skip the entire strategy so
315
+ // `storageTypePlanCallStrategy` (codec-hook driven) takes over with the
316
+ // dev-push enum behavior.
317
+ if (!ctx.policy.allowedOperationClasses.includes('data')) return { kind: 'no_match' };
318
+
228
319
  const matched: SchemaIssue[] = [];
229
- const ops: PostgresMigrationOpDescriptor[] = [];
320
+ const calls: PostgresOpFactoryCall[] = [];
230
321
 
231
322
  for (const issue of issues) {
232
323
  if (issue.kind !== 'enum_values_changed') continue;
233
324
  matched.push(issue);
234
325
 
235
326
  if (issue.removedValues.length > 0) {
236
- ops.push(
237
- dataTransform(`migrate-${issue.typeName}-values`, { check: TODO, run: TODO }),
238
- ...enumRebuildRecipe(issue.typeName, ctx),
327
+ calls.push(
328
+ new DataTransformCall(
329
+ `migrate-${issue.typeName}-values`,
330
+ `migrate-${issue.typeName}-values:check`,
331
+ `migrate-${issue.typeName}-values:run`,
332
+ ),
333
+ ...enumRebuildCallRecipe(issue.typeName, ctx),
239
334
  );
240
335
  } else if (issue.addedValues.length === 0) {
241
- // Reorder only — rebuild without data transform
242
- ops.push(...enumRebuildRecipe(issue.typeName, ctx));
336
+ calls.push(...enumRebuildCallRecipe(issue.typeName, ctx));
243
337
  } else {
244
- ops.push(addEnumValues(issue.typeName, issue.addedValues));
338
+ const toType = ctx.toContract.storage.types?.[issue.typeName];
339
+ if (toType) {
340
+ calls.push(
341
+ new AddEnumValuesCall(
342
+ ctx.schemaName,
343
+ issue.typeName,
344
+ toType.nativeType,
345
+ issue.addedValues,
346
+ ),
347
+ );
348
+ }
349
+ }
350
+ }
351
+
352
+ if (matched.length === 0) return { kind: 'no_match' };
353
+ return {
354
+ kind: 'match',
355
+ issues: issues.filter((i) => !matched.includes(i)),
356
+ calls,
357
+ recipe: true,
358
+ };
359
+ };
360
+
361
+ // ============================================================================
362
+ // Walk-schema strategies (absorbed from the legacy planner)
363
+ // ============================================================================
364
+
365
+ /**
366
+ * Dispatches storage types through their codec's `planTypeOperations` hook.
367
+ * Replaces the walk-schema `buildStorageTypeOperations` path: the hook is
368
+ * the authoritative source for codec-driven DDL (enum create/rebuild/add-
369
+ * value, custom type creation, etc.).
370
+ *
371
+ * Runs after `enumChangeCallStrategy` so the structured enum path (value
372
+ * add, rebuild recipe) gets first pick at `enum_values_changed` issues;
373
+ * this strategy then handles remaining `type_missing` / `enum_values_changed`
374
+ * issues for types whose hook produced at least one op.
375
+ */
376
+ export const storageTypePlanCallStrategy: CallMigrationStrategy = (issues, ctx) => {
377
+ const storageTypes = ctx.toContract.storage.types ?? {};
378
+ if (Object.keys(storageTypes).length === 0) return { kind: 'no_match' };
379
+
380
+ const calls: PostgresOpFactoryCall[] = [];
381
+ const handledTypeNames = new Set<string>();
382
+
383
+ for (const [typeName, typeInstance] of Object.entries(storageTypes).sort(([a], [b]) =>
384
+ a.localeCompare(b),
385
+ )) {
386
+ const hook = ctx.codecHooks.get(typeInstance.codecId);
387
+ if (!hook?.planTypeOperations) continue;
388
+ const planResult = hook.planTypeOperations({
389
+ typeName,
390
+ typeInstance,
391
+ contract: ctx.toContract,
392
+ schema: ctx.schema,
393
+ schemaName: ctx.schemaName,
394
+ policy: ctx.policy,
395
+ });
396
+ if (!planResult) continue;
397
+ if (planResult.operations.length === 0) {
398
+ handledTypeNames.add(typeName);
399
+ continue;
400
+ }
401
+ handledTypeNames.add(typeName);
402
+ for (const op of planResult.operations) {
403
+ calls.push(
404
+ new RawSqlCall({
405
+ ...op,
406
+ target: {
407
+ id: op.target.id,
408
+ details: buildTargetDetails('type', typeName, ctx.schemaName),
409
+ },
410
+ } as SqlMigrationPlanOperation<PostgresPlanTargetDetails>),
411
+ );
412
+ }
413
+ }
414
+
415
+ const remaining = issues.filter(
416
+ (issue) =>
417
+ !(
418
+ (issue.kind === 'type_missing' || issue.kind === 'enum_values_changed') &&
419
+ issue.typeName &&
420
+ handledTypeNames.has(issue.typeName)
421
+ ),
422
+ );
423
+
424
+ if (calls.length === 0 && remaining.length === issues.length) {
425
+ return { kind: 'no_match' };
426
+ }
427
+
428
+ return { kind: 'match', issues: remaining, calls };
429
+ };
430
+
431
+ /**
432
+ * Dispatches component-declared database dependencies. Replaces the
433
+ * walk-schema `buildDatabaseDependencyOperations` path. Rather than consuming
434
+ * `dependency_missing` issues (which only carry the id), this strategy
435
+ * re-invokes `collectInitDependencies(frameworkComponents)` at plan time so
436
+ * the handler has access to the structured `install` ops each component
437
+ * declared — including arbitrary SQL launders — and dedupes by dependency id
438
+ * plus per-op id.
439
+ */
440
+ export const dependencyInstallCallStrategy: CallMigrationStrategy = (issues, ctx) => {
441
+ const installedIds = new Set(ctx.schema.dependencies.map((d) => d.id));
442
+ const dependencies = sortDependencies(
443
+ collectInitDependencies(ctx.frameworkComponents).filter(isPostgresPlannerDependency),
444
+ );
445
+
446
+ const calls: PostgresOpFactoryCall[] = [];
447
+ const handledDependencyIds = new Set<string>();
448
+ const seenOperationIds = new Set<string>();
449
+
450
+ for (const dep of dependencies) {
451
+ handledDependencyIds.add(dep.id);
452
+ if (installedIds.has(dep.id)) continue;
453
+ for (const installOp of dep.install) {
454
+ if (seenOperationIds.has(installOp.id)) continue;
455
+ seenOperationIds.add(installOp.id);
456
+ calls.push(liftInstallOpToCall(installOp));
457
+ }
458
+ }
459
+
460
+ // Consume ALL `dependency_missing` issues — even non-postgres ones. The
461
+ // walk-schema predecessor silently skipped non-postgres deps; leaving those
462
+ // issues in the stream would let `mapIssueToCall` reject them as
463
+ // "Unknown dependency type".
464
+ const remaining = issues.filter((issue) => issue.kind !== 'dependency_missing');
465
+
466
+ if (calls.length === 0 && remaining.length === issues.length) {
467
+ return { kind: 'no_match' };
468
+ }
469
+ return { kind: 'match', issues: remaining, calls };
470
+ };
471
+
472
+ /**
473
+ * Handles `missing_column` issues for NOT NULL columns without a contract
474
+ * default. Replaces the walk-schema `buildAddColumnItem` non-default branches.
475
+ *
476
+ * Two shapes:
477
+ * - Shared-temp-default safe: emit a single atomic composite op (add
478
+ * nullable → backfill identity value → `SET NOT NULL` → `DROP DEFAULT`).
479
+ * - Empty-table guarded: emit a hand-built op with a `tableIsEmptyCheck`
480
+ * precheck so the failure message is "table is not empty" rather than the
481
+ * raw PG NOT NULL violation.
482
+ *
483
+ * "Normal" missing_column cases (nullable or has a contract default) are left
484
+ * for `mapIssueToCall`'s default `AddColumnCall` emission.
485
+ */
486
+ export const notNullAddColumnCallStrategy: CallMigrationStrategy = (issues, ctx) => {
487
+ const matched: SchemaIssue[] = [];
488
+ const calls: PostgresOpFactoryCall[] = [];
489
+
490
+ const schemaLookups = buildSchemaLookupMap(ctx.schema);
491
+
492
+ const mutableCodecHooks = ctx.codecHooks as Map<string, CodecControlHooks>;
493
+ const mutableStorageTypes = ctx.storageTypes as Record<string, StorageTypeInstance>;
494
+
495
+ for (const issue of issues) {
496
+ if (issue.kind !== 'missing_column' || !issue.table || !issue.column) continue;
497
+ const contractTable = ctx.toContract.storage.tables[issue.table];
498
+ const column = contractTable?.columns[issue.column];
499
+ if (!column) continue;
500
+
501
+ const notNull = column.nullable !== true;
502
+ const hasDefault = column.default !== undefined;
503
+ if (!notNull || hasDefault) continue;
504
+
505
+ const schemaTable = ctx.schema.tables[issue.table];
506
+ if (!schemaTable) continue;
507
+
508
+ const temporaryDefault = resolveIdentityValue(column, mutableCodecHooks, mutableStorageTypes);
509
+ const schemaLookup = schemaLookups.get(issue.table);
510
+ const canUseSharedTempDefault =
511
+ temporaryDefault !== null &&
512
+ canUseSharedTemporaryDefaultStrategy({
513
+ table: contractTable,
514
+ schemaTable,
515
+ schemaLookup,
516
+ columnName: issue.column,
517
+ });
518
+
519
+ matched.push(issue);
520
+
521
+ if (canUseSharedTempDefault && temporaryDefault !== null) {
522
+ calls.push(
523
+ new RawSqlCall(
524
+ buildAddNotNullColumnWithTemporaryDefaultOperation({
525
+ schema: ctx.schemaName,
526
+ tableName: issue.table,
527
+ columnName: issue.column,
528
+ column,
529
+ codecHooks: mutableCodecHooks,
530
+ storageTypes: mutableStorageTypes,
531
+ temporaryDefault,
532
+ }),
533
+ ),
534
+ );
535
+ continue;
245
536
  }
537
+
538
+ const qualified = qualifyTableName(ctx.schemaName, issue.table);
539
+ calls.push(
540
+ new RawSqlCall({
541
+ ...buildAddColumnOperationIdentity(ctx.schemaName, issue.table, issue.column),
542
+ operationClass: 'additive',
543
+ precheck: [
544
+ {
545
+ description: `ensure column "${issue.column}" is missing`,
546
+ sql: columnExistsCheck({
547
+ schema: ctx.schemaName,
548
+ table: issue.table,
549
+ column: issue.column,
550
+ exists: false,
551
+ }),
552
+ },
553
+ {
554
+ description: `ensure table "${issue.table}" is empty before adding NOT NULL column without default`,
555
+ sql: tableIsEmptyCheck(qualified),
556
+ },
557
+ ],
558
+ execute: [
559
+ {
560
+ description: `add column "${issue.column}"`,
561
+ sql: buildAddColumnSql(
562
+ qualified,
563
+ issue.column,
564
+ column,
565
+ mutableCodecHooks,
566
+ undefined,
567
+ mutableStorageTypes,
568
+ ),
569
+ },
570
+ ],
571
+ postcheck: [
572
+ {
573
+ description: `verify column "${issue.column}" exists`,
574
+ sql: columnExistsCheck({
575
+ schema: ctx.schemaName,
576
+ table: issue.table,
577
+ column: issue.column,
578
+ }),
579
+ },
580
+ {
581
+ description: `verify column "${issue.column}" is NOT NULL`,
582
+ sql: columnNullabilityCheck({
583
+ schema: ctx.schemaName,
584
+ table: issue.table,
585
+ column: issue.column,
586
+ nullable: false,
587
+ }),
588
+ },
589
+ ],
590
+ }),
591
+ );
246
592
  }
247
593
 
248
594
  if (matched.length === 0) return { kind: 'no_match' };
249
595
  return {
250
596
  kind: 'match',
251
597
  issues: issues.filter((i) => !matched.includes(i)),
252
- ops,
598
+ calls,
253
599
  };
254
600
  };
255
601
 
256
- /** Default strategy set for `migration plan` — data-safe, requires user input for destructive changes. */
257
- export const migrationPlanStrategies: readonly MigrationStrategy[] = [
258
- enumChangeStrategy,
259
- notNullBackfillStrategy,
260
- typeChangeStrategy,
261
- nullableTighteningStrategy,
602
+ // ============================================================================
603
+ // Strategy helpers
604
+ // ============================================================================
605
+
606
+ function canUseSharedTemporaryDefaultStrategy(options: {
607
+ readonly table: NonNullable<Contract<SqlStorage>['storage']['tables'][string]>;
608
+ readonly schemaTable: SqlSchemaIR['tables'][string];
609
+ readonly schemaLookup: ReturnType<typeof buildSchemaLookupMap> extends ReadonlyMap<
610
+ string,
611
+ infer V
612
+ >
613
+ ? V | undefined
614
+ : never;
615
+ readonly columnName: string;
616
+ }): boolean {
617
+ const { table, schemaTable, schemaLookup, columnName } = options;
618
+
619
+ if (table.primaryKey?.columns.includes(columnName) && !schemaTable.primaryKey) {
620
+ return false;
621
+ }
622
+
623
+ for (const unique of table.uniques) {
624
+ if (!unique.columns.includes(columnName)) continue;
625
+ if (!schemaLookup || !hasUniqueConstraint(schemaLookup, unique.columns)) return false;
626
+ }
627
+
628
+ for (const foreignKey of table.foreignKeys) {
629
+ if (foreignKey.constraint === false || !foreignKey.columns.includes(columnName)) continue;
630
+ if (!schemaLookup || !hasForeignKey(schemaLookup, foreignKey)) return false;
631
+ }
632
+
633
+ return true;
634
+ }
635
+
636
/**
 * A component database dependency whose install operations are narrowed to
 * Postgres-targeted plan operations. Produced by the
 * `isPostgresPlannerDependency` type guard.
 */
type PlannerDatabaseDependency = ComponentDatabaseDependency<unknown> & {
  readonly install: readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[];
};
639
+
640
+ function isPostgresPlannerDependency(
641
+ dependency: ComponentDatabaseDependency<unknown>,
642
+ ): dependency is PlannerDatabaseDependency {
643
+ return dependency.install.every((operation) => operation.target.id === 'postgres');
644
+ }
645
+
646
+ function sortDependencies(
647
+ dependencies: ReadonlyArray<PlannerDatabaseDependency>,
648
+ ): ReadonlyArray<PlannerDatabaseDependency> {
649
+ return [...dependencies].sort((a, b) => a.id.localeCompare(b.id));
650
+ }
651
+
652
+ /**
653
+ * Lift a component install op into migration IR. Structured shapes — extension
654
+ * and schema installs with predictable SQL — collapse to typed `*Call`
655
+ * subclasses so the scaffolded migration authoring surface stays readable.
656
+ * Everything else (arbitrary SQL) falls through to `RawSqlCall` as an escape
657
+ * hatch.
658
+ */
659
+ /**
660
+ * Component-declared install ops are wrapped as `RawSqlCall` so the
661
+ * component's original `label`, `precheck`, `execute`, `postcheck`, and op
662
+ * id are preserved verbatim. Structured conversion (to e.g.
663
+ * `CreateExtensionCall`) would drop the precheck/postcheck pair and
664
+ * change the DDL label, breaking walk-schema output parity. Classification
665
+ * as `'dep'` happens in `classifyCall` via the underlying op's id prefix.
666
+ */
667
+ function liftInstallOpToCall(
668
+ op: SqlMigrationPlanOperation<PostgresPlanTargetDetails>,
669
+ ): PostgresOpFactoryCall {
670
+ return new RawSqlCall(op);
671
+ }
672
+
673
/**
 * Ordered list of Postgres planner strategies, shared by `migration plan`
 * and `db update` / `db init`. The issue planner runs each strategy in
 * order, letting it consume any issues it handles, and routes whatever's
 * left through `mapIssueToCall`. Behavior diverges purely on
 * `policy.allowedOperationClasses`:
 *
 * - When `'data'` is allowed (`migration plan`), the data-safe strategies
 *   (`enumChangeCallStrategy`, `notNullBackfillCallStrategy`,
 *   `typeChangeCallStrategy`, `nullableTighteningCallStrategy`) consume their
 *   matching issues and emit `DataTransformCall` placeholders or recipe ops.
 *
 * - When `'data'` is not allowed (`db update` / `db init`), each data-safe
 *   strategy short-circuits to `no_match`, leaving the issue for the
 *   downstream walk-schema strategies (`storageTypePlanCallStrategy`,
 *   `dependencyInstallCallStrategy`, `notNullAddColumnCallStrategy`) or the
 *   `mapIssueToCall` default to handle with direct DDL.
 *
 * Order matters: data-safe strategies must run before the walk-schema
 * strategies on overlapping issue kinds (e.g. `enum_values_changed`,
 * `missing_column` for NOT NULL) so they take priority when active.
 */
export const postgresPlannerStrategies: readonly CallMigrationStrategy[] = [
  // Data-safe strategies — active only when policy allows 'data' operations;
  // must stay ahead of the walk-schema strategies below (see doc comment).
  enumChangeCallStrategy,
  notNullBackfillCallStrategy,
  typeChangeCallStrategy,
  nullableTighteningCallStrategy,
  // Walk-schema strategies — handle remaining issues with direct DDL.
  storageTypePlanCallStrategy,
  dependencyInstallCallStrategy,
  notNullAddColumnCallStrategy,
];