@prisma-next/target-postgres 0.3.0-dev.6 → 0.3.0-dev.64

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. package/LICENSE +201 -0
  2. package/README.md +8 -1
  3. package/dist/control.d.mts +16 -0
  4. package/dist/control.d.mts.map +1 -0
  5. package/dist/control.mjs +2947 -0
  6. package/dist/control.mjs.map +1 -0
  7. package/dist/descriptor-meta-DxB8oZzB.mjs +13 -0
  8. package/dist/descriptor-meta-DxB8oZzB.mjs.map +1 -0
  9. package/dist/pack.d.mts +7 -0
  10. package/dist/pack.d.mts.map +1 -0
  11. package/dist/pack.mjs +9 -0
  12. package/dist/pack.mjs.map +1 -0
  13. package/dist/runtime.d.mts +9 -0
  14. package/dist/runtime.d.mts.map +1 -0
  15. package/dist/runtime.mjs +21 -0
  16. package/dist/runtime.mjs.map +1 -0
  17. package/package.json +32 -32
  18. package/src/core/migrations/planner-reconciliation.ts +602 -0
  19. package/src/core/migrations/planner.ts +476 -215
  20. package/src/core/migrations/runner.ts +29 -34
  21. package/src/core/migrations/statement-builders.ts +9 -7
  22. package/src/core/types.ts +5 -0
  23. package/src/exports/control.ts +9 -8
  24. package/src/exports/runtime.ts +7 -12
  25. package/dist/chunk-RKEXRSSI.js +0 -14
  26. package/dist/chunk-RKEXRSSI.js.map +0 -1
  27. package/dist/core/descriptor-meta.d.ts +0 -9
  28. package/dist/core/descriptor-meta.d.ts.map +0 -1
  29. package/dist/core/migrations/planner.d.ts +0 -14
  30. package/dist/core/migrations/planner.d.ts.map +0 -1
  31. package/dist/core/migrations/runner.d.ts +0 -8
  32. package/dist/core/migrations/runner.d.ts.map +0 -1
  33. package/dist/core/migrations/statement-builders.d.ts +0 -30
  34. package/dist/core/migrations/statement-builders.d.ts.map +0 -1
  35. package/dist/exports/control.d.ts +0 -8
  36. package/dist/exports/control.d.ts.map +0 -1
  37. package/dist/exports/control.js +0 -1255
  38. package/dist/exports/control.js.map +0 -1
  39. package/dist/exports/pack.d.ts +0 -4
  40. package/dist/exports/pack.d.ts.map +0 -1
  41. package/dist/exports/pack.js +0 -11
  42. package/dist/exports/pack.js.map +0 -1
  43. package/dist/exports/runtime.d.ts +0 -12
  44. package/dist/exports/runtime.d.ts.map +0 -1
  45. package/dist/exports/runtime.js +0 -19
  46. package/dist/exports/runtime.js.map +0 -1
@@ -1,5 +1,14 @@
1
+ import {
2
+ escapeLiteral,
3
+ expandParameterizedNativeType,
4
+ normalizeSchemaNativeType,
5
+ parsePostgresDefault,
6
+ quoteIdentifier,
7
+ } from '@prisma-next/adapter-postgres/control';
8
+ import { isTaggedBigInt } from '@prisma-next/contract/types';
1
9
  import type { SchemaIssue } from '@prisma-next/core-control-plane/types';
2
10
  import type {
11
+ CodecControlHooks,
3
12
  MigrationOperationPolicy,
4
13
  SqlMigrationPlanner,
5
14
  SqlMigrationPlannerPlanOptions,
@@ -8,20 +17,31 @@ import type {
8
17
  } from '@prisma-next/family-sql/control';
9
18
  import {
10
19
  createMigrationPlan,
20
+ extractCodecControlHooks,
11
21
  plannerFailure,
12
22
  plannerSuccess,
13
23
  } from '@prisma-next/family-sql/control';
14
- import { arraysEqual, verifySqlSchema } from '@prisma-next/family-sql/schema-verify';
24
+ import { verifySqlSchema } from '@prisma-next/family-sql/schema-verify';
15
25
  import type {
16
26
  ForeignKey,
17
- SqlContract,
18
- SqlStorage,
27
+ ReferentialAction,
19
28
  StorageColumn,
20
29
  StorageTable,
21
30
  } from '@prisma-next/sql-contract/types';
22
31
  import type { SqlSchemaIR } from '@prisma-next/sql-schema-ir/types';
32
+ import { ifDefined } from '@prisma-next/utils/defined';
33
+ import type { PostgresColumnDefault } from '../types';
34
+ import { buildReconciliationPlan } from './planner-reconciliation';
23
35
 
24
- type OperationClass = 'extension' | 'table' | 'unique' | 'index' | 'foreignKey';
36
+ export type OperationClass =
37
+ | 'extension'
38
+ | 'type'
39
+ | 'table'
40
+ | 'column'
41
+ | 'primaryKey'
42
+ | 'unique'
43
+ | 'index'
44
+ | 'foreignKey';
25
45
 
26
46
  type PlannerFrameworkComponents = SqlMigrationPlannerPlanOptions extends {
27
47
  readonly frameworkComponents: infer T;
@@ -55,6 +75,12 @@ interface PlannerConfig {
55
75
  readonly defaultSchema: string;
56
76
  }
57
77
 
78
+ export interface PlanningMode {
79
+ readonly includeExtraObjects: boolean;
80
+ readonly allowWidening: boolean;
81
+ readonly allowDestructive: boolean;
82
+ }
83
+
58
84
  const DEFAULT_PLANNER_CONFIG: PlannerConfig = {
59
85
  defaultSchema: 'public',
60
86
  };
@@ -78,38 +104,57 @@ class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTarget
78
104
  return policyResult;
79
105
  }
80
106
 
81
- const classification = this.classifySchema(options);
82
- if (classification.kind === 'conflict') {
83
- return plannerFailure(classification.conflicts);
84
- }
107
+ const planningMode = this.resolvePlanningMode(options.policy);
108
+ const schemaIssues = this.collectSchemaIssues(options, planningMode.includeExtraObjects);
109
+
110
+ // Extract codec control hooks once at entry point for reuse across all operations.
111
+ // This avoids repeated iteration over frameworkComponents for each method that needs hooks.
112
+ const codecHooks = extractCodecControlHooks(options.frameworkComponents);
85
113
 
86
114
  const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
87
115
 
116
+ const reconciliationPlan = buildReconciliationPlan({
117
+ contract: options.contract,
118
+ issues: schemaIssues,
119
+ schemaName,
120
+ mode: planningMode,
121
+ policy: options.policy,
122
+ });
123
+ if (reconciliationPlan.conflicts.length > 0) {
124
+ return plannerFailure(reconciliationPlan.conflicts);
125
+ }
126
+
127
+ const storageTypePlan = this.buildStorageTypeOperations(options, schemaName, codecHooks);
128
+ if (storageTypePlan.conflicts.length > 0) {
129
+ return plannerFailure(storageTypePlan.conflicts);
130
+ }
131
+
132
+ // Sort table entries once for reuse across all additive operation builders.
133
+ const sortedTables = sortedEntries(options.contract.storage.tables);
134
+
135
+ // Pre-compute constraint lookups once per schema table for O(1) checks across all builders.
136
+ const schemaLookups = buildSchemaLookupMap(options.schema);
137
+
88
138
  // Build extension operations from component-owned database dependencies
89
139
  operations.push(
90
140
  ...this.buildDatabaseDependencyOperations(options),
91
- ...this.buildTableOperations(options.contract.storage.tables, options.schema, schemaName),
92
- ...this.buildColumnOperations(options.contract.storage.tables, options.schema, schemaName),
93
- ...this.buildPrimaryKeyOperations(
94
- options.contract.storage.tables,
95
- options.schema,
96
- schemaName,
97
- ),
98
- ...this.buildUniqueOperations(options.contract.storage.tables, options.schema, schemaName),
99
- ...this.buildIndexOperations(options.contract.storage.tables, options.schema, schemaName),
100
- ...this.buildForeignKeyOperations(
101
- options.contract.storage.tables,
102
- options.schema,
103
- schemaName,
104
- ),
141
+ ...storageTypePlan.operations,
142
+ ...reconciliationPlan.operations,
143
+ ...this.buildTableOperations(sortedTables, options.schema, schemaName),
144
+ ...this.buildColumnOperations(sortedTables, options.schema, schemaName),
145
+ ...this.buildPrimaryKeyOperations(sortedTables, options.schema, schemaName),
146
+ ...this.buildUniqueOperations(sortedTables, schemaLookups, schemaName),
147
+ ...this.buildIndexOperations(sortedTables, schemaLookups, schemaName),
148
+ ...this.buildFkBackingIndexOperations(sortedTables, schemaLookups, schemaName),
149
+ ...this.buildForeignKeyOperations(sortedTables, schemaLookups, schemaName),
105
150
  );
106
151
 
107
152
  const plan = createMigrationPlan<PostgresPlanTargetDetails>({
108
153
  targetId: 'postgres',
109
154
  origin: null,
110
155
  destination: {
111
- coreHash: options.contract.coreHash,
112
- ...(options.contract.profileHash ? { profileHash: options.contract.profileHash } : {}),
156
+ storageHash: options.contract.storageHash,
157
+ ...ifDefined('profileHash', options.contract.profileHash),
113
158
  },
114
159
  operations,
115
160
  });
@@ -122,8 +167,8 @@ class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTarget
122
167
  return plannerFailure([
123
168
  {
124
169
  kind: 'unsupportedOperation',
125
- summary: 'Init planner requires additive operations be allowed',
126
- why: 'The init planner only emits additive operations. Update the policy to include "additive".',
170
+ summary: 'Migration planner requires additive operations be allowed',
171
+ why: 'The planner requires the "additive" operation class to be allowed in the policy.',
127
172
  },
128
173
  ]);
129
174
  }
@@ -166,6 +211,55 @@ class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTarget
166
211
 
167
212
  return operations;
168
213
  }
214
+
215
+ private buildStorageTypeOperations(
216
+ options: PlannerOptionsWithComponents,
217
+ schemaName: string,
218
+ codecHooks: Map<string, CodecControlHooks>,
219
+ ): {
220
+ readonly operations: readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[];
221
+ readonly conflicts: readonly SqlPlannerConflict[];
222
+ } {
223
+ const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
224
+ const conflicts: SqlPlannerConflict[] = [];
225
+ const storageTypes = options.contract.storage.types ?? {};
226
+
227
+ for (const [typeName, typeInstance] of sortedEntries(storageTypes)) {
228
+ const hook = codecHooks.get(typeInstance.codecId);
229
+ const planResult = hook?.planTypeOperations?.({
230
+ typeName,
231
+ typeInstance,
232
+ contract: options.contract,
233
+ schema: options.schema,
234
+ schemaName,
235
+ policy: options.policy,
236
+ });
237
+ if (!planResult) {
238
+ continue;
239
+ }
240
+ for (const operation of planResult.operations) {
241
+ if (!options.policy.allowedOperationClasses.includes(operation.operationClass)) {
242
+ conflicts.push({
243
+ kind: 'missingButNonAdditive',
244
+ summary: `Storage type "${typeName}" requires "${operation.operationClass}" operation "${operation.id}"`,
245
+ location: {
246
+ type: typeName,
247
+ },
248
+ });
249
+ continue;
250
+ }
251
+ operations.push({
252
+ ...operation,
253
+ target: {
254
+ id: operation.target.id,
255
+ details: this.buildTargetDetails('type', typeName, schemaName),
256
+ },
257
+ });
258
+ }
259
+ }
260
+
261
+ return { operations, conflicts };
262
+ }
169
263
  private collectDependencies(
170
264
  options: PlannerOptionsWithComponents,
171
265
  ): ReadonlyArray<PlannerDatabaseDependency> {
@@ -187,12 +281,12 @@ class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTarget
187
281
  }
188
282
 
189
283
  private buildTableOperations(
190
- tables: SqlContract<SqlStorage>['storage']['tables'],
284
+ tables: ReadonlyArray<[string, StorageTable]>,
191
285
  schema: SqlSchemaIR,
192
286
  schemaName: string,
193
287
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
194
288
  const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
195
- for (const [tableName, table] of sortedEntries(tables)) {
289
+ for (const [tableName, table] of tables) {
196
290
  if (schema.tables[tableName]) {
197
291
  continue;
198
292
  }
@@ -230,12 +324,12 @@ class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTarget
230
324
  }
231
325
 
232
326
  private buildColumnOperations(
233
- tables: SqlContract<SqlStorage>['storage']['tables'],
327
+ tables: ReadonlyArray<[string, StorageTable]>,
234
328
  schema: SqlSchemaIR,
235
329
  schemaName: string,
236
330
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
237
331
  const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
238
- for (const [tableName, table] of sortedEntries(tables)) {
332
+ for (const [tableName, table] of tables) {
239
333
  const schemaTable = schema.tables[tableName];
240
334
  if (!schemaTable) {
241
335
  continue;
@@ -258,15 +352,20 @@ class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTarget
258
352
  ): SqlMigrationPlanOperation<PostgresPlanTargetDetails> {
259
353
  const qualified = qualifyTableName(schema, tableName);
260
354
  const notNull = column.nullable === false;
355
+ const hasDefault = column.default !== undefined;
356
+ // Only require empty table for NOT NULL columns WITHOUT defaults.
357
+ // PostgreSQL allows adding NOT NULL columns with defaults to non-empty tables
358
+ // because the default value is applied to existing rows.
359
+ const requiresEmptyTable = notNull && !hasDefault;
261
360
  const precheck = [
262
361
  {
263
362
  description: `ensure column "${columnName}" is missing`,
264
363
  sql: columnExistsCheck({ schema, table: tableName, column: columnName, exists: false }),
265
364
  },
266
- ...(notNull
365
+ ...(requiresEmptyTable
267
366
  ? [
268
367
  {
269
- description: `ensure table "${tableName}" is empty before adding NOT NULL column`,
368
+ description: `ensure table "${tableName}" is empty before adding NOT NULL column without default`,
270
369
  sql: tableIsEmptyCheck(qualified),
271
370
  },
272
371
  ]
@@ -287,7 +386,12 @@ class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTarget
287
386
  ? [
288
387
  {
289
388
  description: `verify column "${columnName}" is NOT NULL`,
290
- sql: columnIsNotNullCheck({ schema, table: tableName, column: columnName }),
389
+ sql: columnNullabilityCheck({
390
+ schema,
391
+ table: tableName,
392
+ column: columnName,
393
+ nullable: false,
394
+ }),
291
395
  },
292
396
  ]
293
397
  : []),
@@ -309,12 +413,12 @@ class PostgresMigrationPlanner implements SqlMigrationPlanner<PostgresPlanTarget
309
413
  }
310
414
 
311
415
  private buildPrimaryKeyOperations(
312
- tables: SqlContract<SqlStorage>['storage']['tables'],
416
+ tables: ReadonlyArray<[string, StorageTable]>,
313
417
  schema: SqlSchemaIR,
314
418
  schemaName: string,
315
419
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
316
420
  const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
317
- for (const [tableName, table] of sortedEntries(tables)) {
421
+ for (const [tableName, table] of tables) {
318
422
  if (!table.primaryKey) {
319
423
  continue;
320
424
  }
@@ -358,15 +462,15 @@ PRIMARY KEY (${table.primaryKey.columns.map(quoteIdentifier).join(', ')})`,
358
462
  }
359
463
 
360
464
  private buildUniqueOperations(
361
- tables: SqlContract<SqlStorage>['storage']['tables'],
362
- schema: SqlSchemaIR,
465
+ tables: ReadonlyArray<[string, StorageTable]>,
466
+ schemaLookups: ReadonlyMap<string, SchemaTableLookup>,
363
467
  schemaName: string,
364
468
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
365
469
  const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
366
- for (const [tableName, table] of sortedEntries(tables)) {
367
- const schemaTable = schema.tables[tableName];
470
+ for (const [tableName, table] of tables) {
471
+ const lookup = schemaLookups.get(tableName);
368
472
  for (const unique of table.uniques) {
369
- if (schemaTable && hasUniqueConstraint(schemaTable, unique.columns)) {
473
+ if (lookup && hasUniqueConstraint(lookup, unique.columns)) {
370
474
  continue;
371
475
  }
372
476
  const constraintName = unique.name ?? `${tableName}_${unique.columns.join('_')}_key`;
@@ -406,15 +510,15 @@ UNIQUE (${unique.columns.map(quoteIdentifier).join(', ')})`,
406
510
  }
407
511
 
408
512
  private buildIndexOperations(
409
- tables: SqlContract<SqlStorage>['storage']['tables'],
410
- schema: SqlSchemaIR,
513
+ tables: ReadonlyArray<[string, StorageTable]>,
514
+ schemaLookups: ReadonlyMap<string, SchemaTableLookup>,
411
515
  schemaName: string,
412
516
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
413
517
  const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
414
- for (const [tableName, table] of sortedEntries(tables)) {
415
- const schemaTable = schema.tables[tableName];
518
+ for (const [tableName, table] of tables) {
519
+ const lookup = schemaLookups.get(tableName);
416
520
  for (const index of table.indexes) {
417
- if (schemaTable && hasIndex(schemaTable, index.columns)) {
521
+ if (lookup && hasIndex(lookup, index.columns)) {
418
522
  continue;
419
523
  }
420
524
  const indexName = index.name ?? `${tableName}_${index.columns.join('_')}_idx`;
@@ -454,16 +558,76 @@ UNIQUE (${unique.columns.map(quoteIdentifier).join(', ')})`,
454
558
  return operations;
455
559
  }
456
560
 
561
+ /**
562
+ * Generates FK-backing index operations for FKs with `index: true`,
563
+ * but only when no matching user-declared index exists in `contractTable.indexes`.
564
+ */
565
+ private buildFkBackingIndexOperations(
566
+ tables: ReadonlyArray<[string, StorageTable]>,
567
+ schemaLookups: ReadonlyMap<string, SchemaTableLookup>,
568
+ schemaName: string,
569
+ ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
570
+ const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
571
+ for (const [tableName, table] of tables) {
572
+ const lookup = schemaLookups.get(tableName);
573
+ // Collect column sets of user-declared indexes to avoid duplicates
574
+ const declaredIndexColumns = new Set(table.indexes.map((idx) => idx.columns.join(',')));
575
+
576
+ for (const fk of table.foreignKeys) {
577
+ if (fk.index === false) continue;
578
+ // Skip if user already declared an index with these columns
579
+ if (declaredIndexColumns.has(fk.columns.join(','))) continue;
580
+ // Skip if the index already exists in the database
581
+ if (lookup && hasIndex(lookup, fk.columns)) continue;
582
+
583
+ const indexName = `${tableName}_${fk.columns.join('_')}_idx`;
584
+ operations.push({
585
+ id: `index.${tableName}.${indexName}`,
586
+ label: `Create FK-backing index ${indexName} on ${tableName}`,
587
+ summary: `Creates FK-backing index ${indexName} on ${tableName}`,
588
+ operationClass: 'additive',
589
+ target: {
590
+ id: 'postgres',
591
+ details: this.buildTargetDetails('index', indexName, schemaName, tableName),
592
+ },
593
+ precheck: [
594
+ {
595
+ description: `ensure index "${indexName}" is missing`,
596
+ sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NULL`,
597
+ },
598
+ ],
599
+ execute: [
600
+ {
601
+ description: `create FK-backing index "${indexName}"`,
602
+ sql: `CREATE INDEX ${quoteIdentifier(indexName)} ON ${qualifyTableName(
603
+ schemaName,
604
+ tableName,
605
+ )} (${fk.columns.map(quoteIdentifier).join(', ')})`,
606
+ },
607
+ ],
608
+ postcheck: [
609
+ {
610
+ description: `verify index "${indexName}" exists`,
611
+ sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NOT NULL`,
612
+ },
613
+ ],
614
+ });
615
+ }
616
+ }
617
+ return operations;
618
+ }
619
+
457
620
  private buildForeignKeyOperations(
458
- tables: SqlContract<SqlStorage>['storage']['tables'],
459
- schema: SqlSchemaIR,
621
+ tables: ReadonlyArray<[string, StorageTable]>,
622
+ schemaLookups: ReadonlyMap<string, SchemaTableLookup>,
460
623
  schemaName: string,
461
624
  ): readonly SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] {
462
625
  const operations: SqlMigrationPlanOperation<PostgresPlanTargetDetails>[] = [];
463
- for (const [tableName, table] of sortedEntries(tables)) {
464
- const schemaTable = schema.tables[tableName];
626
+ for (const [tableName, table] of tables) {
627
+ const lookup = schemaLookups.get(tableName);
465
628
  for (const foreignKey of table.foreignKeys) {
466
- if (schemaTable && hasForeignKey(schemaTable, foreignKey)) {
629
+ if (foreignKey.constraint === false) continue;
630
+ if (lookup && hasForeignKey(lookup, foreignKey)) {
467
631
  continue;
468
632
  }
469
633
  const fkName = foreignKey.name ?? `${tableName}_${foreignKey.columns.join('_')}_fkey`;
@@ -489,12 +653,7 @@ UNIQUE (${unique.columns.map(quoteIdentifier).join(', ')})`,
489
653
  execute: [
490
654
  {
491
655
  description: `add foreign key "${fkName}"`,
492
- sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
493
- ADD CONSTRAINT ${quoteIdentifier(fkName)}
494
- FOREIGN KEY (${foreignKey.columns.map(quoteIdentifier).join(', ')})
495
- REFERENCES ${qualifyTableName(schemaName, foreignKey.references.table)} (${foreignKey.references.columns
496
- .map(quoteIdentifier)
497
- .join(', ')})`,
656
+ sql: buildForeignKeySql(schemaName, tableName, fkName, foreignKey),
498
657
  },
499
658
  ],
500
659
  postcheck: [
@@ -515,85 +674,33 @@ REFERENCES ${qualifyTableName(schemaName, foreignKey.references.table)} (${forei
515
674
  schema: string,
516
675
  table?: string,
517
676
  ): PostgresPlanTargetDetails {
518
- return {
519
- schema,
520
- objectType,
521
- name,
522
- ...(table ? { table } : {}),
523
- };
677
+ return buildTargetDetails(objectType, name, schema, table);
678
+ }
679
+
680
+ private resolvePlanningMode(policy: MigrationOperationPolicy): PlanningMode {
681
+ const allowWidening = policy.allowedOperationClasses.includes('widening');
682
+ const allowDestructive = policy.allowedOperationClasses.includes('destructive');
683
+ // `db init` uses additive-only policy and intentionally ignores extras.
684
+ // Any reconciliation-capable policy should inspect extras to reconcile strict equality.
685
+ const includeExtraObjects = allowWidening || allowDestructive;
686
+ return { includeExtraObjects, allowWidening, allowDestructive };
524
687
  }
525
688
 
526
- private classifySchema(options: PlannerOptionsWithComponents):
527
- | { kind: 'ok' }
528
- | {
529
- kind: 'conflict';
530
- conflicts: SqlPlannerConflict[];
531
- } {
689
+ private collectSchemaIssues(
690
+ options: PlannerOptionsWithComponents,
691
+ strict: boolean,
692
+ ): readonly SchemaIssue[] {
532
693
  const verifyOptions: VerifySqlSchemaOptionsWithComponents = {
533
694
  contract: options.contract,
534
695
  schema: options.schema,
535
- strict: false,
696
+ strict,
536
697
  typeMetadataRegistry: new Map(),
537
698
  frameworkComponents: options.frameworkComponents,
699
+ normalizeDefault: parsePostgresDefault,
700
+ normalizeNativeType: normalizeSchemaNativeType,
538
701
  };
539
702
  const verifyResult = verifySqlSchema(verifyOptions);
540
-
541
- const conflicts = this.extractConflicts(verifyResult.schema.issues);
542
- if (conflicts.length > 0) {
543
- return { kind: 'conflict', conflicts };
544
- }
545
- return { kind: 'ok' };
546
- }
547
-
548
- private extractConflicts(issues: readonly SchemaIssue[]): SqlPlannerConflict[] {
549
- const conflicts: SqlPlannerConflict[] = [];
550
- for (const issue of issues) {
551
- if (isAdditiveIssue(issue)) {
552
- continue;
553
- }
554
- const conflict = this.convertIssueToConflict(issue);
555
- if (conflict) {
556
- conflicts.push(conflict);
557
- }
558
- }
559
- return conflicts.sort(conflictComparator);
560
- }
561
-
562
- private convertIssueToConflict(issue: SchemaIssue): SqlPlannerConflict | null {
563
- switch (issue.kind) {
564
- case 'type_mismatch':
565
- return this.buildConflict('typeMismatch', issue);
566
- case 'nullability_mismatch':
567
- return this.buildConflict('nullabilityConflict', issue);
568
- case 'primary_key_mismatch':
569
- return this.buildConflict('indexIncompatible', issue);
570
- case 'unique_constraint_mismatch':
571
- return this.buildConflict('indexIncompatible', issue);
572
- case 'index_mismatch':
573
- return this.buildConflict('indexIncompatible', issue);
574
- case 'foreign_key_mismatch':
575
- return this.buildConflict('foreignKeyConflict', issue);
576
- default:
577
- return null;
578
- }
579
- }
580
-
581
- private buildConflict(kind: SqlPlannerConflict['kind'], issue: SchemaIssue): SqlPlannerConflict {
582
- const location = buildConflictLocation(issue);
583
- const meta =
584
- issue.expected || issue.actual
585
- ? Object.freeze({
586
- ...(issue.expected ? { expected: issue.expected } : {}),
587
- ...(issue.actual ? { actual: issue.actual } : {}),
588
- })
589
- : undefined;
590
-
591
- return {
592
- kind,
593
- summary: issue.message,
594
- ...(location ? { location } : {}),
595
- ...(meta ? { meta } : {}),
596
- };
703
+ return verifyResult.schema.issues;
597
704
  }
598
705
  }
599
706
 
@@ -633,7 +740,8 @@ function buildCreateTableSql(qualifiedTableName: string, table: StorageTable): s
633
740
  ([columnName, column]: [string, StorageColumn]) => {
634
741
  const parts = [
635
742
  quoteIdentifier(columnName),
636
- column.nativeType,
743
+ buildColumnTypeSql(column),
744
+ buildColumnDefaultSql(column.default, column),
637
745
  column.nullable ? '' : 'NOT NULL',
638
746
  ].filter(Boolean);
639
747
  return parts.join(' ');
@@ -651,30 +759,178 @@ function buildCreateTableSql(qualifiedTableName: string, table: StorageTable): s
651
759
  return `CREATE TABLE ${qualifiedTableName} (\n ${allDefinitions.join(',\n ')}\n)`;
652
760
  }
653
761
 
654
- function qualifyTableName(schema: string, table: string): string {
655
- return `${quoteIdentifier(schema)}.${quoteIdentifier(table)}`;
762
+ /**
763
+ * Pattern for safe PostgreSQL type names.
764
+ * Allows letters, digits, underscores, spaces (for "double precision", "character varying"),
765
+ * and trailing [] for array types.
766
+ */
767
+ const SAFE_NATIVE_TYPE_PATTERN = /^[a-zA-Z][a-zA-Z0-9_ ]*(\[\])?$/;
768
+
769
+ function assertSafeNativeType(nativeType: string): void {
770
+ if (!SAFE_NATIVE_TYPE_PATTERN.test(nativeType)) {
771
+ throw new Error(
772
+ `Unsafe native type name in contract: "${nativeType}". ` +
773
+ 'Native type names must match /^[a-zA-Z][a-zA-Z0-9_ ]*(\\[\\])?$/',
774
+ );
775
+ }
656
776
  }
657
777
 
658
- function toRegclassLiteral(schema: string, name: string): string {
659
- const regclass = `${quoteIdentifier(schema)}.${quoteIdentifier(name)}`;
660
- return `'${escapeLiteral(regclass)}'`;
778
+ /**
779
+ * Sanity check against accidental SQL injection from malformed contract files.
780
+ * Rejects semicolons, SQL comment tokens, and dollar-quoting.
781
+ * Not a comprehensive security boundary — the contract is developer-authored.
782
+ */
783
+ function assertSafeDefaultExpression(expression: string): void {
784
+ if (expression.includes(';') || /--|\/\*|\$\$|\bSELECT\b/i.test(expression)) {
785
+ throw new Error(
786
+ `Unsafe default expression in contract: "${expression}". ` +
787
+ 'Default expressions must not contain semicolons, SQL comment tokens, dollar-quoting, or subqueries.',
788
+ );
789
+ }
790
+ }
791
+
792
+ /**
793
+ * Builds the column type SQL, handling autoincrement as a special case.
794
+ * For autoincrement on int4/int8, we use SERIAL/BIGSERIAL types.
795
+ */
796
+ export function buildColumnTypeSql(column: StorageColumn): string {
797
+ const columnDefault = column.default;
798
+
799
+ // For autoincrement, use SERIAL/BIGSERIAL types instead of int4/int8
800
+ if (columnDefault?.kind === 'function' && columnDefault.expression === 'autoincrement()') {
801
+ if (column.nativeType === 'int4' || column.nativeType === 'integer') {
802
+ return 'SERIAL';
803
+ }
804
+ if (column.nativeType === 'int8' || column.nativeType === 'bigint') {
805
+ return 'BIGSERIAL';
806
+ }
807
+ if (column.nativeType === 'int2' || column.nativeType === 'smallint') {
808
+ return 'SMALLSERIAL';
809
+ }
810
+ }
811
+
812
+ if (column.typeRef) {
813
+ return quoteIdentifier(column.nativeType);
814
+ }
815
+
816
+ // Validate nativeType before using it unquoted in DDL
817
+ assertSafeNativeType(column.nativeType);
818
+ return renderParameterizedTypeSql(column) ?? column.nativeType;
819
+ }
820
+
821
+ /**
822
+ * Renders parameterized type SQL for a column, returning null if no expansion is needed.
823
+ *
824
+ * Uses the shared expandParameterizedNativeType utility from the postgres adapter.
825
+ * Returns null when the column has no typeParams, allowing the caller to fall back
826
+ * to the base nativeType.
827
+ */
828
+ function renderParameterizedTypeSql(column: StorageColumn): string | null {
829
+ if (!column.typeParams) {
830
+ return null;
831
+ }
832
+
833
+ const expanded = expandParameterizedNativeType({
834
+ nativeType: column.nativeType,
835
+ codecId: column.codecId,
836
+ typeParams: column.typeParams,
837
+ });
838
+
839
+ // If no expansion happened (returned the same base type), return null
840
+ // so caller can decide whether to use nativeType directly
841
+ return expanded !== column.nativeType ? expanded : null;
842
+ }
843
+
844
+ /**
845
+ * Builds the DEFAULT clause for a column definition.
846
+ * Returns empty string if no default is defined.
847
+ *
848
+ * Note: autoincrement is handled specially via SERIAL types, so we skip it here.
849
+ */
850
+ function buildColumnDefaultSql(
851
+ columnDefault: PostgresColumnDefault | undefined,
852
+ column?: StorageColumn,
853
+ ): string {
854
+ if (!columnDefault) {
855
+ return '';
856
+ }
857
+
858
+ switch (columnDefault.kind) {
859
+ case 'literal':
860
+ return `DEFAULT ${renderDefaultLiteral(columnDefault.value, column)}`;
861
+ case 'function': {
862
+ // autoincrement is handled by SERIAL type, no explicit DEFAULT needed
863
+ if (columnDefault.expression === 'autoincrement()') {
864
+ return '';
865
+ }
866
+ assertSafeDefaultExpression(columnDefault.expression);
867
+ return `DEFAULT (${columnDefault.expression})`;
868
+ }
869
+ case 'sequence':
870
+ // Sequence names use quoteIdentifier for safe identifier handling
871
+ return `DEFAULT nextval(${quoteIdentifier(columnDefault.name)}::regclass)`;
872
+ }
661
873
  }
662
874
 
663
- /** Escapes and quotes a SQL identifier (table, column, schema name). */
664
- function quoteIdentifier(identifier: string): string {
665
- // TypeScript enforces string type - no runtime check needed for internal callers
666
- return `"${identifier.replace(/"/g, '""')}"`;
875
+ function renderDefaultLiteral(value: unknown, column?: StorageColumn): string {
876
+ const isJsonColumn = column?.nativeType === 'json' || column?.nativeType === 'jsonb';
877
+
878
+ if (value instanceof Date) {
879
+ return `'${escapeLiteral(value.toISOString())}'`;
880
+ }
881
+ if (!isJsonColumn && isTaggedBigInt(value)) {
882
+ if (!/^-?\d+$/.test(value.value)) {
883
+ throw new Error(`Invalid tagged bigint value: "${value.value}" is not a valid integer`);
884
+ }
885
+ return value.value;
886
+ }
887
+ if (typeof value === 'bigint') {
888
+ return value.toString();
889
+ }
890
+ if (typeof value === 'string') {
891
+ return `'${escapeLiteral(value)}'`;
892
+ }
893
+ if (typeof value === 'number' || typeof value === 'boolean') {
894
+ return String(value);
895
+ }
896
+ if (value === null) {
897
+ return 'NULL';
898
+ }
899
+ const json = JSON.stringify(value);
900
+ if (isJsonColumn) {
901
+ return `'${escapeLiteral(json)}'::${column.nativeType}`;
902
+ }
903
+ return `'${escapeLiteral(json)}'`;
904
+ }
905
+
906
+ export function buildTargetDetails(
907
+ objectType: OperationClass,
908
+ name: string,
909
+ schema: string,
910
+ table?: string,
911
+ ): PostgresPlanTargetDetails {
912
+ return {
913
+ schema,
914
+ objectType,
915
+ name,
916
+ ...ifDefined('table', table),
917
+ };
667
918
  }
668
919
 
669
- function escapeLiteral(value: string): string {
670
- return value.replace(/'/g, "''");
920
+ export function qualifyTableName(schema: string, table: string): string {
921
+ return `${quoteIdentifier(schema)}.${quoteIdentifier(table)}`;
922
+ }
923
+
924
+ export function toRegclassLiteral(schema: string, name: string): string {
925
+ const regclass = `${quoteIdentifier(schema)}.${quoteIdentifier(name)}`;
926
+ return `'${escapeLiteral(regclass)}'`;
671
927
  }
672
928
 
673
929
  function sortedEntries<V>(record: Readonly<Record<string, V>>): Array<[string, V]> {
674
930
  return Object.entries(record).sort(([a], [b]) => a.localeCompare(b)) as Array<[string, V]>;
675
931
  }
676
932
 
677
- function constraintExistsCheck({
933
+ export function constraintExistsCheck({
678
934
  constraintName,
679
935
  schema,
680
936
  exists = true,
@@ -692,7 +948,7 @@ function constraintExistsCheck({
692
948
  )`;
693
949
  }
694
950
 
695
- function columnExistsCheck({
951
+ export function columnExistsCheck({
696
952
  schema,
697
953
  table,
698
954
  column,
@@ -713,22 +969,25 @@ function columnExistsCheck({
713
969
  )`;
714
970
  }
715
971
 
716
/**
 * Builds an `EXISTS` probe against `information_schema.columns` that is
 * true iff the named column's nullability matches `nullable`.
 *
 * @param schema - Schema containing the table (escaped into the literal).
 * @param table - Table containing the column.
 * @param column - Column whose nullability is being verified.
 * @param nullable - Expected state; maps to information_schema's
 *   `is_nullable` values `'YES'` (nullable) / `'NO'` (not null).
 * @returns A complete SELECT statement returning a single boolean.
 */
export function columnNullabilityCheck({
  schema,
  table,
  column,
  nullable,
}: {
  schema: string;
  table: string;
  column: string;
  nullable: boolean;
}): string {
  // information_schema reports nullability as the strings 'YES' / 'NO'.
  const expected = nullable ? 'YES' : 'NO';
  return `SELECT EXISTS (
    SELECT 1
    FROM information_schema.columns
    WHERE table_schema = '${escapeLiteral(schema)}'
      AND table_name = '${escapeLiteral(table)}'
      AND column_name = '${escapeLiteral(column)}'
      AND is_nullable = '${expected}'
  )`;
}
734
993
 
@@ -741,9 +1000,12 @@ function buildAddColumnSql(
741
1000
  columnName: string,
742
1001
  column: StorageColumn,
743
1002
  ): string {
1003
+ const typeSql = buildColumnTypeSql(column);
1004
+ const defaultSql = buildColumnDefaultSql(column.default, column);
744
1005
  const parts = [
745
1006
  `ALTER TABLE ${qualifiedTableName}`,
746
- `ADD COLUMN ${quoteIdentifier(columnName)} ${column.nativeType}`,
1007
+ `ADD COLUMN ${quoteIdentifier(columnName)} ${typeSql}`,
1008
+ defaultSql,
747
1009
  column.nullable ? '' : 'NOT NULL',
748
1010
  ].filter(Boolean);
749
1011
  return parts.join(' ');
@@ -772,91 +1034,90 @@ function tableHasPrimaryKeyCheck(
772
1034
  )`;
773
1035
  }
774
1036
 
775
- function hasUniqueConstraint(
776
- table: SqlSchemaIR['tables'][string],
777
- columns: readonly string[],
778
- ): boolean {
779
- return table.uniques.some((unique) => arraysEqual(unique.columns, columns));
1037
/**
 * Pre-computed lookup sets for a schema table's constraints.
 * Converts O(n*m) linear scans to O(1) Set lookups per constraint check.
 *
 * Keys are built by joining column lists with ',' (and, for foreign keys,
 * joining the segments with '|') — see buildSchemaTableLookup.
 */
interface SchemaTableLookup {
  // Column-list keys of declared UNIQUE constraints.
  readonly uniqueKeys: Set<string>;
  // Column-list keys of all indexes (unique and non-unique).
  readonly indexKeys: Set<string>;
  // Column-list keys of indexes flagged unique only.
  readonly uniqueIndexKeys: Set<string>;
  // Composite keys 'cols|referencedTable|referencedCols' for foreign keys.
  readonly fkKeys: Set<string>;
}
781
1047
 
782
- function hasIndex(table: SqlSchemaIR['tables'][string], columns: readonly string[]): boolean {
783
- return table.indexes.some((index) => !index.unique && arraysEqual(index.columns, columns));
1048
+ function buildSchemaLookupMap(schema: SqlSchemaIR): ReadonlyMap<string, SchemaTableLookup> {
1049
+ const map = new Map<string, SchemaTableLookup>();
1050
+ for (const [tableName, table] of Object.entries(schema.tables)) {
1051
+ map.set(tableName, buildSchemaTableLookup(table));
1052
+ }
1053
+ return map;
784
1054
  }
785
1055
 
786
- function hasForeignKey(table: SqlSchemaIR['tables'][string], fk: ForeignKey): boolean {
787
- return table.foreignKeys.some(
788
- (candidate) =>
789
- arraysEqual(candidate.columns, fk.columns) &&
790
- candidate.referencedTable === fk.references.table &&
791
- arraysEqual(candidate.referencedColumns, fk.references.columns),
1056
+ function buildSchemaTableLookup(table: SqlSchemaIR['tables'][string]): SchemaTableLookup {
1057
+ const uniqueKeys = new Set(table.uniques.map((u) => u.columns.join(',')));
1058
+ const indexKeys = new Set(table.indexes.map((i) => i.columns.join(',')));
1059
+ const uniqueIndexKeys = new Set(
1060
+ table.indexes.filter((i) => i.unique).map((i) => i.columns.join(',')),
1061
+ );
1062
+ const fkKeys = new Set(
1063
+ table.foreignKeys.map(
1064
+ (fk) => `${fk.columns.join(',')}|${fk.referencedTable}|${fk.referencedColumns.join(',')}`,
1065
+ ),
792
1066
  );
1067
+ return { uniqueKeys, indexKeys, uniqueIndexKeys, fkKeys };
793
1068
  }
794
1069
 
795
- function isAdditiveIssue(issue: SchemaIssue): boolean {
796
- switch (issue.kind) {
797
- case 'missing_table':
798
- case 'missing_column':
799
- case 'extension_missing':
800
- return true;
801
- case 'primary_key_mismatch':
802
- return issue.actual === undefined;
803
- case 'unique_constraint_mismatch':
804
- case 'index_mismatch':
805
- case 'foreign_key_mismatch':
806
- return issue.indexOrConstraint === undefined;
807
- default:
808
- return false;
809
- }
1070
+ function hasUniqueConstraint(lookup: SchemaTableLookup, columns: readonly string[]): boolean {
1071
+ const key = columns.join(',');
1072
+ return lookup.uniqueKeys.has(key) || lookup.uniqueIndexKeys.has(key);
810
1073
  }
811
1074
 
812
- function buildConflictLocation(issue: SchemaIssue) {
813
- const location: {
814
- table?: string;
815
- column?: string;
816
- constraint?: string;
817
- } = {};
818
- if (issue.table) {
819
- location.table = issue.table;
820
- }
821
- if (issue.column) {
822
- location.column = issue.column;
823
- }
824
- if (issue.indexOrConstraint) {
825
- location.constraint = issue.indexOrConstraint;
826
- }
827
- return Object.keys(location).length > 0 ? location : undefined;
1075
+ function hasIndex(lookup: SchemaTableLookup, columns: readonly string[]): boolean {
1076
+ const key = columns.join(',');
1077
+ return lookup.indexKeys.has(key) || lookup.uniqueKeys.has(key);
828
1078
  }
829
1079
 
830
- function conflictComparator(a: SqlPlannerConflict, b: SqlPlannerConflict): number {
831
- if (a.kind !== b.kind) {
832
- return a.kind < b.kind ? -1 : 1;
833
- }
834
- const aLocation = a.location ?? {};
835
- const bLocation = b.location ?? {};
836
- const tableCompare = compareStrings(aLocation.table, bLocation.table);
837
- if (tableCompare !== 0) {
838
- return tableCompare;
839
- }
840
- const columnCompare = compareStrings(aLocation.column, bLocation.column);
841
- if (columnCompare !== 0) {
842
- return columnCompare;
843
- }
844
- const constraintCompare = compareStrings(aLocation.constraint, bLocation.constraint);
845
- if (constraintCompare !== 0) {
846
- return constraintCompare;
847
- }
848
- return compareStrings(a.summary, b.summary);
1080
+ function hasForeignKey(lookup: SchemaTableLookup, fk: ForeignKey): boolean {
1081
+ return lookup.fkKeys.has(
1082
+ `${fk.columns.join(',')}|${fk.references.table}|${fk.references.columns.join(',')}`,
1083
+ );
849
1084
  }
850
1085
 
851
- function compareStrings(a?: string, b?: string): number {
852
- if (a === b) {
853
- return 0;
854
- }
855
- if (a === undefined) {
856
- return -1;
1086
// Maps IR referential-action identifiers to their PostgreSQL keywords for
// ON DELETE / ON UPDATE clauses. Record<ReferentialAction, string> makes the
// mapping exhaustive at compile time.
const REFERENTIAL_ACTION_SQL: Record<ReferentialAction, string> = {
  noAction: 'NO ACTION',
  restrict: 'RESTRICT',
  cascade: 'CASCADE',
  setNull: 'SET NULL',
  setDefault: 'SET DEFAULT',
};
1093
+
1094
/**
 * Builds the `ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY` statement for
 * `foreignKey`, with optional ON DELETE / ON UPDATE clauses.
 *
 * Note: the template literal's line breaks are part of the emitted SQL;
 * continuation lines are deliberately unindented.
 *
 * @param schemaName - Schema containing both the table and the referenced table.
 * @param tableName - Table receiving the constraint.
 * @param fkName - Constraint name (identifier-quoted).
 * @param foreignKey - IR foreign-key description.
 * @throws If an onDelete/onUpdate value has no SQL mapping (defensive;
 *   the Record type should make this unreachable at compile time).
 */
function buildForeignKeySql(
  schemaName: string,
  tableName: string,
  fkName: string,
  foreignKey: ForeignKey,
): string {
  let sql = `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
ADD CONSTRAINT ${quoteIdentifier(fkName)}
FOREIGN KEY (${foreignKey.columns.map(quoteIdentifier).join(', ')})
REFERENCES ${qualifyTableName(schemaName, foreignKey.references.table)} (${foreignKey.references.columns
    .map(quoteIdentifier)
    .join(', ')})`;

  // Only emit action clauses the IR explicitly specifies; otherwise the
  // database default (NO ACTION) applies implicitly.
  if (foreignKey.onDelete !== undefined) {
    const action = REFERENTIAL_ACTION_SQL[foreignKey.onDelete];
    if (!action) {
      throw new Error(`Unknown referential action for onDelete: ${String(foreignKey.onDelete)}`);
    }
    sql += `\nON DELETE ${action}`;
  }
  if (foreignKey.onUpdate !== undefined) {
    const action = REFERENTIAL_ACTION_SQL[foreignKey.onUpdate];
    if (!action) {
      throw new Error(`Unknown referential action for onUpdate: ${String(foreignKey.onUpdate)}`);
    }
    sql += `\nON UPDATE ${action}`;
  }

  return sql;
}