@prisma-next/target-postgres 0.3.0-dev.12 → 0.3.0-dev.122

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/LICENSE +201 -0
  2. package/README.md +9 -2
  3. package/dist/control.d.mts +19 -0
  4. package/dist/control.d.mts.map +1 -0
  5. package/dist/control.mjs +3677 -0
  6. package/dist/control.mjs.map +1 -0
  7. package/dist/descriptor-meta-DxB8oZzB.mjs +13 -0
  8. package/dist/descriptor-meta-DxB8oZzB.mjs.map +1 -0
  9. package/dist/pack.d.mts +10 -0
  10. package/dist/pack.d.mts.map +1 -0
  11. package/dist/pack.mjs +9 -0
  12. package/dist/pack.mjs.map +1 -0
  13. package/dist/runtime.d.mts +9 -0
  14. package/dist/runtime.d.mts.map +1 -0
  15. package/dist/runtime.mjs +21 -0
  16. package/dist/runtime.mjs.map +1 -0
  17. package/package.json +30 -29
  18. package/src/core/migrations/planner-identity-values.ts +129 -0
  19. package/src/core/migrations/planner-recipes.ts +83 -0
  20. package/src/core/migrations/planner-reconciliation.ts +781 -0
  21. package/src/core/migrations/planner-sql.ts +437 -0
  22. package/src/core/migrations/planner-target-details.ts +16 -0
  23. package/src/core/migrations/planner.ts +424 -409
  24. package/src/core/migrations/runner.ts +32 -36
  25. package/src/core/migrations/statement-builders.ts +9 -7
  26. package/src/core/types.ts +5 -0
  27. package/src/exports/control.ts +56 -8
  28. package/src/exports/pack.ts +5 -2
  29. package/src/exports/runtime.ts +7 -12
  30. package/dist/chunk-RKEXRSSI.js +0 -14
  31. package/dist/chunk-RKEXRSSI.js.map +0 -1
  32. package/dist/core/descriptor-meta.d.ts +0 -9
  33. package/dist/core/descriptor-meta.d.ts.map +0 -1
  34. package/dist/core/migrations/planner.d.ts +0 -14
  35. package/dist/core/migrations/planner.d.ts.map +0 -1
  36. package/dist/core/migrations/runner.d.ts +0 -8
  37. package/dist/core/migrations/runner.d.ts.map +0 -1
  38. package/dist/core/migrations/statement-builders.d.ts +0 -30
  39. package/dist/core/migrations/statement-builders.d.ts.map +0 -1
  40. package/dist/exports/control.d.ts +0 -8
  41. package/dist/exports/control.d.ts.map +0 -1
  42. package/dist/exports/control.js +0 -1255
  43. package/dist/exports/control.js.map +0 -1
  44. package/dist/exports/pack.d.ts +0 -4
  45. package/dist/exports/pack.d.ts.map +0 -1
  46. package/dist/exports/pack.js +0 -11
  47. package/dist/exports/pack.js.map +0 -1
  48. package/dist/exports/runtime.d.ts +0 -12
  49. package/dist/exports/runtime.d.ts.map +0 -1
  50. package/dist/exports/runtime.js +0 -19
  51. package/dist/exports/runtime.js.map +0 -1
@@ -0,0 +1,3677 @@
1
+ import { t as postgresTargetDescriptorMeta } from "./descriptor-meta-DxB8oZzB.mjs";
2
+ import { collectInitDependencies, contractToSchemaIR, createMigrationPlan, extractCodecControlHooks, plannerFailure, plannerSuccess, runnerFailure, runnerSuccess } from "@prisma-next/family-sql/control";
3
+ import { ifDefined } from "@prisma-next/utils/defined";
4
+ import { SQL_CHAR_CODEC_ID, SQL_FLOAT_CODEC_ID, SQL_INT_CODEC_ID, SQL_VARCHAR_CODEC_ID } from "@prisma-next/sql-relational-core/ast";
5
+ import { arraysEqual, verifySqlSchema } from "@prisma-next/family-sql/schema-verify";
6
+ import { defaultIndexName } from "@prisma-next/sql-schema-ir/naming";
7
+ import { bigintJsonReplacer, isTaggedBigInt } from "@prisma-next/contract/types";
8
+ import { invariant } from "@prisma-next/utils/assertions";
9
+ import { readMarker } from "@prisma-next/family-sql/verify";
10
+ import { SqlQueryError } from "@prisma-next/sql-errors";
11
+ import { ok, okVoid } from "@prisma-next/utils/result";
12
+
13
+ //#region ../../6-adapters/postgres/dist/sql-utils-CSfAGEwF.mjs
14
+ /**
15
+ * Shared SQL utility functions for the Postgres adapter.
16
+ *
17
+ * These functions handle safe SQL identifier and literal escaping
18
+ * with security validations to prevent injection and encoding issues.
19
+ */
20
/**
 * Error raised when a value fails SQL identifier/literal validation.
 * Carries the offending value and whether it was being used as an
 * identifier or a literal; boundary layers map this to structured envelopes.
 */
var SqlEscapeError = class extends Error {
	constructor(message, value, kind) {
		super(message);
		this.name = "SqlEscapeError";
		this.value = value;
		this.kind = kind;
	}
};
32
+ /**
33
+ * Maximum length for PostgreSQL identifiers (NAMEDATALEN - 1).
34
+ */
35
+ const MAX_IDENTIFIER_LENGTH$1 = 63;
36
/**
 * Quotes a PostgreSQL identifier (table, column, type, or schema name) for
 * safe embedding in DDL, doubling any embedded double quotes.
 *
 * Validation:
 * - Empty identifiers and identifiers containing null bytes throw
 *   {@link SqlEscapeError}.
 * - Identifiers longer than PostgreSQL's 63-character NAMEDATALEN limit
 *   only trigger a console warning, since the server truncates them itself.
 *
 * @throws {SqlEscapeError} If the identifier contains null bytes or is empty
 */
function quoteIdentifier(identifier) {
	if (identifier.length === 0) {
		throw new SqlEscapeError("Identifier cannot be empty", identifier, "identifier");
	}
	if (identifier.includes("\0")) {
		throw new SqlEscapeError("Identifier cannot contain null bytes", identifier.replace(/\0/g, "\\0"), "identifier");
	}
	if (identifier.length > MAX_IDENTIFIER_LENGTH$1) {
		console.warn(`Identifier "${identifier.slice(0, 20)}..." exceeds PostgreSQL's ${MAX_IDENTIFIER_LENGTH$1}-character limit and will be truncated`);
	}
	const doubled = identifier.replace(/"/g, "\"\"");
	return `"${doubled}"`;
}
52
/**
 * Escapes a string for use inside a single-quoted SQL literal by doubling
 * single quotes.
 *
 * Assumes PostgreSQL's `standard_conforming_strings` is ON (the default
 * since PG 9.1), so backslashes are left untouched as literal characters.
 *
 * @throws {SqlEscapeError} If the value contains null bytes
 */
function escapeLiteral(value) {
	if (value.includes("\0")) {
		throw new SqlEscapeError("Literal value cannot contain null bytes", value.replace(/\0/g, "\\0"), "literal");
	}
	return value.replace(/'/g, "''");
}
67
/**
 * Produces a fully-qualified, quoted `"schema"."object"` reference.
 */
function qualifyName(schemaName, objectName) {
	const parts = [quoteIdentifier(schemaName), quoteIdentifier(objectName)];
	return parts.join(".");
}
73
/**
 * Ensures an enum label fits PostgreSQL's NAMEDATALEN-1 (63 by default)
 * label limit. Unlike identifiers, over-long enum labels make the server
 * error out rather than silently truncate, so we fail fast client-side.
 *
 * @param value - The enum label to validate
 * @param enumTypeName - Enum type name, used only for the error message
 * @throws {SqlEscapeError} If the label exceeds the maximum length
 */
function validateEnumValueLength(value, enumTypeName) {
	if (value.length <= MAX_IDENTIFIER_LENGTH$1) return;
	throw new SqlEscapeError(`Enum value "${value.slice(0, 20)}..." for type "${enumTypeName}" exceeds PostgreSQL's ${MAX_IDENTIFIER_LENGTH$1}-character label limit`, value, "literal");
}
87
+
88
+ //#endregion
89
+ //#region ../../6-adapters/postgres/dist/codec-ids-Bsm9c7ns.mjs
90
/** Stable codec identifiers (`pg/<type>@<version>`) for PostgreSQL storage types. */
const PG_TEXT_CODEC_ID = "pg/text@1";
const PG_ENUM_CODEC_ID = "pg/enum@1";
const PG_CHAR_CODEC_ID = "pg/char@1";
const PG_VARCHAR_CODEC_ID = "pg/varchar@1";
const PG_INT_CODEC_ID = "pg/int@1";
const PG_INT2_CODEC_ID = "pg/int2@1";
const PG_INT4_CODEC_ID = "pg/int4@1";
const PG_INT8_CODEC_ID = "pg/int8@1";
const PG_FLOAT_CODEC_ID = "pg/float@1";
const PG_FLOAT4_CODEC_ID = "pg/float4@1";
const PG_FLOAT8_CODEC_ID = "pg/float8@1";
const PG_NUMERIC_CODEC_ID = "pg/numeric@1";
const PG_BOOL_CODEC_ID = "pg/bool@1";
const PG_BIT_CODEC_ID = "pg/bit@1";
const PG_VARBIT_CODEC_ID = "pg/varbit@1";
const PG_TIMESTAMP_CODEC_ID = "pg/timestamp@1";
const PG_TIMESTAMPTZ_CODEC_ID = "pg/timestamptz@1";
const PG_TIME_CODEC_ID = "pg/time@1";
const PG_TIMETZ_CODEC_ID = "pg/timetz@1";
const PG_INTERVAL_CODEC_ID = "pg/interval@1";
const PG_JSON_CODEC_ID = "pg/json@1";
const PG_JSONB_CODEC_ID = "pg/jsonb@1";
112
+
113
+ //#endregion
114
+ //#region ../../6-adapters/postgres/dist/descriptor-meta-l_dv8Nnn.mjs
115
+ const ENUM_INTROSPECT_QUERY = `
116
+ SELECT
117
+ n.nspname AS schema_name,
118
+ t.typname AS type_name,
119
+ array_agg(e.enumlabel ORDER BY e.enumsortorder) AS values
120
+ FROM pg_type t
121
+ JOIN pg_namespace n ON t.typnamespace = n.oid
122
+ JOIN pg_enum e ON t.oid = e.enumtypid
123
+ WHERE n.nspname = $1
124
+ GROUP BY n.nspname, t.typname
125
+ ORDER BY n.nspname, t.typname
126
+ `;
127
/**
 * Runtime type guard: true when `value` is an array whose entries are all
 * strings. Used to validate introspected data before trusting it.
 */
function isStringArray(value) {
	if (!Array.isArray(value)) return false;
	for (const entry of value) {
		if (typeof entry !== "string") return false;
	}
	return true;
}
133
/**
 * Normalizes a value returned for a PostgreSQL array column into a string
 * array. The `pg` driver may hand back either a real JS array (when type
 * parsers are installed) or the raw array-literal text `{a,b,...}`.
 *
 * Quoting rules for literal elements are handled by parseArrayElements.
 *
 * @param value - Raw driver value (array or `{...}` literal string)
 * @returns The parsed string array, or null when the value is neither form
 */
function parsePostgresArray(value) {
	if (isStringArray(value)) return value;
	const looksLikeLiteral = typeof value === "string" && value.startsWith("{") && value.endsWith("}");
	if (!looksLikeLiteral) return null;
	const body = value.slice(1, -1);
	return body === "" ? [] : parseArrayElements(body);
}
156
/**
 * Tokenizes the interior of a PostgreSQL array literal (the text between
 * `{` and `}`) into its elements.
 *
 * Quoted elements honor backslash escapes (`\"` becomes `"`, `\\` becomes
 * `\`); unquoted elements are split on commas and trimmed of surrounding
 * whitespace.
 */
function parseArrayElements(input) {
	const elements = [];
	const length = input.length;
	let pos = 0;
	while (pos < length) {
		const ch = input[pos];
		if (ch === ",") {
			// Separator between elements.
			pos += 1;
		} else if (ch === "\"") {
			// Quoted element: scan to the closing quote, resolving escapes.
			pos += 1;
			let token = "";
			while (pos < length && input[pos] !== "\"") {
				if (input[pos] === "\\" && pos + 1 < length) pos += 1;
				token += input[pos];
				pos += 1;
			}
			pos += 1;
			elements.push(token);
		} else {
			// Unquoted element: runs until the next comma (or end of input).
			const comma = input.indexOf(",", pos);
			const end = comma === -1 ? length : comma;
			elements.push(input.slice(pos, end).trim());
			pos = end;
		}
	}
	return elements;
}
189
/**
 * Pulls the `values` type parameter off a StorageTypeInstance, returning
 * null when it is absent or not a string array.
 */
function getEnumValues(typeInstance) {
	const candidate = typeInstance.typeParams?.["values"];
	if (isStringArray(candidate)) return candidate;
	return null;
}
197
/**
 * Looks up the enum values currently recorded in the schema IR's `pg`
 * annotations for the given native type. Returns null when no entry
 * exists for the type or when the entry is not the enum codec.
 */
function readExistingEnumValues(schema, nativeType) {
	const storageTypes = schema.annotations?.["pg"]?.["storageTypes"];
	const entry = storageTypes?.[nativeType];
	if (!entry) return null;
	if (entry.codecId !== PG_ENUM_CODEC_ID) return null;
	return getEnumValues(entry);
}
206
/**
 * Classifies the change needed to move an enum from `existing` to
 * `desired` values.
 *
 * Returns one of:
 * - `unchanged`: values already match exactly
 * - `add_values`: only new values are needed, which PostgreSQL supports
 *   natively via `ALTER TYPE ... ADD VALUE`
 * - `rebuild`: values were removed or reordered — PostgreSQL cannot
 *   express either without recreating the type and migrating columns
 *
 * @param existing - Current enum values in the database
 * @param desired - Target enum values from the contract
 * @returns The kind of change required (with details for the planner)
 */
function determineEnumDiff(existing, desired) {
	if (arraysEqual(existing, desired)) return { kind: "unchanged" };
	const existingSet = new Set(existing);
	const desiredSet = new Set(desired);
	const missingValues = desired.filter((value) => !existingSet.has(value));
	const removedValues = existing.filter((value) => !desiredSet.has(value));
	// Past the early equality return above, identical membership with a
	// different sequence can only mean a reorder — no need to compare the
	// arrays a second time (the original redundantly re-ran arraysEqual here).
	const orderMismatch = missingValues.length === 0 && removedValues.length === 0;
	if (removedValues.length > 0 || orderMismatch) return {
		kind: "rebuild",
		removedValues
	};
	return {
		kind: "add_values",
		values: missingValues
	};
}
237
/**
 * Builds a SQL probe asserting that an enum type does (or, with
 * `exists = false`, does not) exist in the given schema.
 */
function enumTypeExistsCheck(schemaName, typeName, exists = true) {
	const predicate = exists ? "EXISTS" : "NOT EXISTS";
	return `SELECT ${predicate} (
	SELECT 1
	FROM pg_type t
	JOIN pg_namespace n ON t.typnamespace = n.oid
	WHERE n.nspname = '${escapeLiteral(schemaName)}'
	AND t.typname = '${escapeLiteral(typeName)}'
)`;
}
246
/**
 * Builds the migration operation that creates a brand-new enum type.
 *
 * Validates every label length up front, then emits a CREATE TYPE guarded
 * by a precheck (type must be absent) and a postcheck (type must exist).
 *
 * @param typeName - Contract-level type name (used in operation id/labels)
 * @param nativeType - PostgreSQL type name to create
 * @param schemaName - Target PostgreSQL schema
 * @param values - Ordered enum labels for the new type
 * @returns An additive migration operation
 */
function buildCreateEnumOperation(typeName, nativeType, schemaName, values) {
	// Fail fast on labels that would exceed PostgreSQL's label limit.
	for (const value of values) validateEnumValueLength(value, typeName);
	const literalValues = values.map((value) => `'${escapeLiteral(value)}'`).join(", ");
	const qualifiedType = qualifyName(schemaName, nativeType);
	return {
		id: `type.${typeName}`,
		label: `Create type ${typeName}`,
		summary: `Creates enum type ${typeName}`,
		operationClass: "additive",
		target: { id: "postgres" },
		precheck: [{
			description: `ensure type "${nativeType}" does not exist`,
			sql: enumTypeExistsCheck(schemaName, nativeType, false)
		}],
		execute: [{
			description: `create type "${nativeType}"`,
			sql: `CREATE TYPE ${qualifiedType} AS ENUM (${literalValues})`
		}],
		postcheck: [{
			description: `verify type "${nativeType}" exists`,
			sql: enumTypeExistsCheck(schemaName, nativeType)
		}]
	};
}
270
/**
 * Picks where a new enum value should be inserted so the live order tracks
 * the desired order, using ALTER TYPE's BEFORE/AFTER positioning.
 *
 * Preference order: AFTER the nearest already-present predecessor, else
 * BEFORE the nearest already-present successor, else append at the end.
 *
 * @param options.desired - Full target ordering of enum values
 * @param options.desiredIndex - Index of the value being inserted
 * @param options.current - Values present so far (built up incrementally)
 * @returns SQL position clause (e.g. ` AFTER 'x'`) and the index to record
 */
function computeInsertPosition(options) {
	const { desired, desiredIndex, current } = options;
	const present = new Set(current);
	// Nearest preceding value in the desired order that already exists.
	let previous;
	for (let i = desiredIndex - 1; i >= 0; i -= 1) {
		const candidate = desired[i];
		if (present.has(candidate)) {
			previous = candidate;
			break;
		}
	}
	if (previous) {
		return {
			clause: ` AFTER '${escapeLiteral(previous)}'`,
			insertAt: current.indexOf(previous) + 1
		};
	}
	// Fall back to the nearest following value that already exists.
	let next;
	for (let i = desiredIndex + 1; i < desired.length; i += 1) {
		const candidate = desired[i];
		if (present.has(candidate)) {
			next = candidate;
			break;
		}
	}
	if (next) {
		return {
			clause: ` BEFORE '${escapeLiteral(next)}'`,
			insertAt: current.indexOf(next)
		};
	}
	// No reference point: append at the end.
	return { clause: "", insertAt: current.length };
}
295
/**
 * Builds one migration operation per enum value that is missing from the
 * database, using `ALTER TYPE ... ADD VALUE IF NOT EXISTS` (idempotent)
 * with BEFORE/AFTER positioning so the final order matches the contract.
 *
 * This is a safe, non-destructive operation — existing labels and data are
 * not affected.
 *
 * @param options.typeName - Contract-level type name (e.g. 'Role')
 * @param options.nativeType - PostgreSQL type name (e.g. 'role')
 * @param options.schemaName - PostgreSQL schema (e.g. 'public')
 * @param options.desired - Target ordered list of all enum values
 * @param options.existing - Current enum values in the database
 * @returns One widening operation per missing value
 */
function buildAddValueOperations(options) {
	const { typeName, nativeType, schemaName } = options;
	// Mutable mirror of the database's value list; updated after each
	// planned insert so later position computations see earlier inserts.
	const current = [...options.existing];
	const currentSet = new Set(current);
	const operations = [];
	for (let index = 0; index < options.desired.length; index += 1) {
		const value = options.desired[index];
		if (value === void 0) continue;
		if (currentSet.has(value)) continue;
		validateEnumValueLength(value, typeName);
		const { clause, insertAt } = computeInsertPosition({
			desired: options.desired,
			desiredIndex: index,
			current
		});
		operations.push({
			id: `type.${typeName}.value.${value}`,
			label: `Add value ${value} to ${typeName}`,
			summary: `Adds enum value ${value} to ${typeName}`,
			operationClass: "widening",
			target: { id: "postgres" },
			precheck: [],
			execute: [{
				description: `add value "${value}" if not exists`,
				sql: `ALTER TYPE ${qualifyName(schemaName, nativeType)} ADD VALUE IF NOT EXISTS '${escapeLiteral(value)}'${clause}`
			}],
			postcheck: []
		});
		// Record the planned insert so subsequent values position correctly.
		current.splice(insertAt, 0, value);
		currentSet.add(value);
	}
	return operations;
}
344
/**
 * Finds contract (desired-state) columns that reference the enum, either
 * by explicit type reference or by matching native type with the enum
 * codec. Used for type-safe reference tracking.
 */
function collectEnumColumnsFromContract(contract, typeName, nativeType) {
	const refs = [];
	for (const [tableName, table] of Object.entries(contract.storage.tables)) {
		for (const [columnName, column] of Object.entries(table.columns)) {
			if (column.typeRef === typeName || (column.nativeType === nativeType && column.codecId === PG_ENUM_CODEC_ID)) {
				refs.push({ table: tableName, column: columnName });
			}
		}
	}
	return refs;
}
356
/**
 * Finds every live-database column whose native type matches the enum.
 * Scanning the schema IR (rather than the contract) catches columns added
 * outside the contract (e.g. via manual DDL), which matters for safe
 * enum rebuild operations.
 */
function collectEnumColumnsFromSchema(schema, nativeType) {
	const refs = [];
	for (const [tableName, table] of Object.entries(schema.tables)) {
		const matches = Object.entries(table.columns)
			.filter(([, column]) => column.nativeType === nativeType)
			.map(([columnName]) => ({ table: tableName, column: columnName }));
		refs.push(...matches);
	}
	return refs;
}
369
/**
 * Union of enum-using columns from the contract (desired state) and the
 * live schema, deduplicated and sorted by table then column.
 *
 * Including live-only columns is critical for data integrity: a column
 * created via manual DDL must still be migrated during a rebuild, or the
 * subsequent DROP TYPE would fail.
 */
function collectAllEnumColumns(contract, schema, typeName, nativeType) {
	const combined = [
		...collectEnumColumnsFromContract(contract, typeName, nativeType),
		...collectEnumColumnsFromSchema(schema, nativeType)
	];
	// Deduplicate on "table.column", keeping the first occurrence.
	const byKey = new Map();
	for (const ref of combined) {
		const key = `${ref.table}.${ref.column}`;
		if (!byKey.has(key)) byKey.set(key, ref);
	}
	return [...byKey.values()].sort((a, b) => a.table.localeCompare(b.table) || a.column.localeCompare(b.column));
}
394
/**
 * Builds a SQL probe verifying that a column's underlying type (udt_name)
 * matches the expected native type.
 */
function columnTypeCheck$1(options) {
	const { schemaName, tableName, columnName, expectedType } = options;
	return `SELECT EXISTS (
	SELECT 1
	FROM information_schema.columns
	WHERE table_schema = '${escapeLiteral(schemaName)}'
	AND table_name = '${escapeLiteral(tableName)}'
	AND column_name = '${escapeLiteral(columnName)}'
	AND udt_name = '${escapeLiteral(expectedType)}'
)`;
}
407
+ /** PostgreSQL maximum identifier length (NAMEDATALEN - 1) */
408
+ const MAX_IDENTIFIER_LENGTH = 63;
409
+ /** Suffix added to enum type names during rebuild operations */
410
+ const REBUILD_SUFFIX = "__pn_rebuild";
411
/**
 * Builds a SQL probe that passes (returns true) only when no row still
 * holds one of the enum values being removed — the data-loss guard run
 * before an enum rebuild.
 *
 * @param schemaName - PostgreSQL schema name
 * @param tableName - Table containing the enum column
 * @param columnName - Column using the enum type
 * @param removedValues - Labels slated for removal
 * @returns SQL query returning true when the rebuild is safe
 */
function noRemovedValuesExistCheck(schemaName, tableName, columnName, removedValues) {
	if (removedValues.length === 0) return "SELECT true";
	const valuesList = removedValues.map((v) => `'${escapeLiteral(v)}'`).join(", ");
	return `SELECT NOT EXISTS (
	SELECT 1 FROM ${qualifyName(schemaName, tableName)}
	WHERE ${quoteIdentifier(columnName)}::text IN (${valuesList})
	LIMIT 1
)`;
}
430
/**
 * Builds a migration operation to recreate a PostgreSQL enum type with updated values.
 *
 * This is required when:
 * - Enum values are removed (PostgreSQL doesn't support direct removal)
 * - Enum values are reordered (PostgreSQL doesn't support reordering)
 *
 * The operation:
 * 1. Creates a new enum type with the desired values (temp name)
 * 2. Migrates all columns to use the new type via text cast
 * 3. Drops the original type
 * 4. Renames the temp type to the original name
 *
 * IMPORTANT: If values are being removed and data exists using those values,
 * the operation will fail at the precheck stage with a clear error message.
 * This prevents silent data loss.
 *
 * @param options.typeName - Contract-level type name
 * @param options.nativeType - PostgreSQL type name
 * @param options.schemaName - PostgreSQL schema
 * @param options.values - Desired final enum values
 * @param options.removedValues - Values being removed (for data loss checks)
 * @param options.contract - Full contract for column discovery
 * @param options.schema - Current schema IR for column discovery
 * @returns Migration operation for full enum rebuild
 */
function buildRecreateEnumOperation(options) {
	const tempTypeName = `${options.nativeType}${REBUILD_SUFFIX}`;
	// The temp name must itself be a legal identifier; bail out early rather
	// than letting PostgreSQL truncate it. 12 is REBUILD_SUFFIX.length.
	if (tempTypeName.length > MAX_IDENTIFIER_LENGTH) {
		const maxBaseLength = MAX_IDENTIFIER_LENGTH - 12;
		throw new Error(`Enum type name "${options.nativeType}" is too long for rebuild operation. Maximum length is ${maxBaseLength} characters (type name + "${REBUILD_SUFFIX}" suffix must fit within PostgreSQL's ${MAX_IDENTIFIER_LENGTH}-character identifier limit).`);
	}
	const qualifiedOriginal = qualifyName(options.schemaName, options.nativeType);
	const qualifiedTemp = qualifyName(options.schemaName, tempTypeName);
	const literalValues = options.values.map((value) => `'${escapeLiteral(value)}'`).join(", ");
	// Columns from BOTH the contract and the live schema — a live-only
	// column left on the old type would make the DROP TYPE step fail.
	const columnRefs = collectAllEnumColumns(options.contract, options.schema, options.typeName, options.nativeType);
	// Re-point every dependent column at the temp type via a text round-trip cast.
	const alterColumns = columnRefs.map((ref) => ({
		description: `alter ${ref.table}.${ref.column} to ${tempTypeName}`,
		sql: `ALTER TABLE ${qualifyName(options.schemaName, ref.table)}
	ALTER COLUMN ${quoteIdentifier(ref.column)}
	TYPE ${qualifiedTemp}
	USING ${quoteIdentifier(ref.column)}::text::${qualifiedTemp}`
	}));
	const postchecks = [
		{
			description: `verify type "${options.nativeType}" exists`,
			sql: enumTypeExistsCheck(options.schemaName, options.nativeType)
		},
		{
			description: `verify temp type "${tempTypeName}" was removed`,
			sql: enumTypeExistsCheck(options.schemaName, tempTypeName, false)
		},
		...columnRefs.map((ref) => ({
			description: `verify ${ref.table}.${ref.column} uses type "${options.nativeType}"`,
			sql: columnTypeCheck$1({
				schemaName: options.schemaName,
				tableName: ref.table,
				columnName: ref.column,
				expectedType: options.nativeType
			})
		}))
	];
	return {
		id: `type.${options.typeName}.rebuild`,
		label: `Rebuild type ${options.typeName}`,
		summary: `Recreates enum type ${options.typeName} with updated values`,
		operationClass: "destructive",
		target: { id: "postgres" },
		// When values are removed, guard every dependent column against rows
		// that still hold a removed value (prevents silent data loss).
		precheck: [{
			description: `ensure type "${options.nativeType}" exists`,
			sql: enumTypeExistsCheck(options.schemaName, options.nativeType)
		}, ...options.removedValues.length > 0 ? columnRefs.map((ref) => ({
			description: `ensure no rows in ${ref.table}.${ref.column} contain removed values (${options.removedValues.join(", ")})`,
			sql: noRemovedValuesExistCheck(options.schemaName, ref.table, ref.column, options.removedValues)
		})) : []],
		execute: [
			{
				description: `drop orphaned temp type "${tempTypeName}" if exists`,
				sql: `DROP TYPE IF EXISTS ${qualifiedTemp}`
			},
			{
				description: `create temp type "${tempTypeName}"`,
				sql: `CREATE TYPE ${qualifiedTemp} AS ENUM (${literalValues})`
			},
			...alterColumns,
			{
				description: `drop type "${options.nativeType}"`,
				sql: `DROP TYPE ${qualifiedOriginal}`
			},
			{
				description: `rename type "${tempTypeName}" to "${options.nativeType}"`,
				sql: `ALTER TYPE ${qualifiedTemp} RENAME TO ${quoteIdentifier(options.nativeType)}`
			}
		],
		postcheck: postchecks
	};
}
527
/**
 * Postgres enum hooks for planning, verifying, and introspecting `storage.types`.
 *
 * - `planTypeOperations`: diffs contract enum values against the schema IR
 *   and emits create / add-value / rebuild operations as appropriate.
 * - `verifyType`: reports a missing type or a value-sequence mismatch.
 * - `introspectTypes`: queries pg_enum to reconstruct enum storage types.
 */
const pgEnumControlHooks = {
	planTypeOperations: ({ typeName, typeInstance, contract, schema, schemaName }) => {
		const desired = getEnumValues(typeInstance);
		// No declared values: nothing to plan for this type.
		if (!desired || desired.length === 0) return { operations: [] };
		const schemaNamespace = schemaName ?? "public";
		const existing = readExistingEnumValues(schema, typeInstance.nativeType);
		// Type absent from the database: plan a fresh CREATE TYPE.
		if (!existing) return { operations: [buildCreateEnumOperation(typeName, typeInstance.nativeType, schemaNamespace, desired)] };
		const diff = determineEnumDiff(existing, desired);
		if (diff.kind === "unchanged") return { operations: [] };
		// Value removal or reordering requires a destructive rebuild.
		if (diff.kind === "rebuild") return { operations: [buildRecreateEnumOperation({
			typeName,
			nativeType: typeInstance.nativeType,
			schemaName: schemaNamespace,
			values: desired,
			removedValues: diff.removedValues,
			contract,
			schema
		})] };
		// Otherwise only additions are needed (safe, widening).
		return { operations: buildAddValueOperations({
			typeName,
			nativeType: typeInstance.nativeType,
			schemaName: schemaNamespace,
			desired,
			existing
		}) };
	},
	verifyType: ({ typeName, typeInstance, schema }) => {
		const desired = getEnumValues(typeInstance);
		if (!desired) return [];
		const existing = readExistingEnumValues(schema, typeInstance.nativeType);
		if (!existing) return [{
			kind: "type_missing",
			typeName,
			message: `Type "${typeName}" is missing from database`
		}];
		// Order matters for enums, so compare full sequences, not just sets.
		if (!arraysEqual(existing, desired)) return [{
			kind: "type_values_mismatch",
			typeName,
			expected: desired.join(", "),
			actual: existing.join(", "),
			message: `Type "${typeName}" values do not match contract`
		}];
		return [];
	},
	introspectTypes: async ({ driver, schemaName }) => {
		const namespace = schemaName ?? "public";
		const result = await driver.query(ENUM_INTROSPECT_QUERY, [namespace]);
		const types = {};
		for (const row of result.rows) {
			// array_agg may arrive as a JS array or a `{...}` literal string
			// depending on the driver's type-parser configuration.
			const values = parsePostgresArray(row.values);
			if (!values) throw new Error(`Failed to parse enum values for type "${row.type_name}": unexpected format: ${JSON.stringify(row.values)}`);
			types[row.type_name] = {
				codecId: PG_ENUM_CODEC_ID,
				nativeType: row.type_name,
				typeParams: { values }
			};
		}
		return types;
	}
};
590
/** Recursion cap for JSON-schema rendering; deeper nodes degrade to JsonValue. */
const MAX_DEPTH = 32;
/** Type guard: non-null object (arrays included). */
function isRecord(value) {
	return value !== null && typeof value === "object";
}
594
/**
 * Escapes a string for embedding in a single-quoted TypeScript string
 * literal: backslashes, single quotes, and CR/LF are backslash-escaped.
 */
function escapeStringLiteral(str) {
	return str
		.replace(/\\/g, "\\\\")
		.replace(/'/g, "\\'")
		.replace(/\n/g, "\\n")
		.replace(/\r/g, "\\r");
}
597
/**
 * Renders an object property key for TypeScript output, quoting it only
 * when it is not a valid bare identifier.
 */
function quotePropertyKey(key) {
	const isBareIdentifier = /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(key);
	if (isBareIdentifier) return key;
	return `'${escapeStringLiteral(key)}'`;
}
600
/**
 * Renders a JSON literal (from `const`/`enum` schemas) as a TypeScript
 * literal type; unsupported values degrade to `unknown`.
 */
function renderLiteral(value) {
	switch (typeof value) {
		case "string":
			return `'${escapeStringLiteral(value)}'`;
		case "number":
		case "boolean":
			return String(value);
		default:
			return value === null ? "null" : "unknown";
	}
}
606
/** Joins schema alternatives into a TypeScript union type. */
function renderUnion(items, depth) {
	const rendered = [];
	for (const item of items) rendered.push(render(item, depth));
	return rendered.join(" | ");
}
609
/**
 * Renders a JSON-schema `object` as a TypeScript type: an inline object
 * literal when `properties` are declared, otherwise a `Record` keyed by
 * `additionalProperties`.
 */
function renderObjectType(schema, depth) {
	const properties = isRecord(schema["properties"]) ? schema["properties"] : {};
	const requiredKeys = Array.isArray(schema["required"])
		? new Set(schema["required"].filter((key) => typeof key === "string"))
		: new Set();
	const keys = Object.keys(properties).sort((left, right) => left.localeCompare(right));
	if (keys.length === 0) {
		const additional = schema["additionalProperties"];
		// An open object (additionalProperties true or omitted) is a plain record.
		if (additional === true || additional === void 0) return "Record<string, unknown>";
		return `Record<string, ${render(additional, depth)}>`;
	}
	const members = keys.map((key) => {
		const marker = requiredKeys.has(key) ? "" : "?";
		return `${quotePropertyKey(key)}${marker}: ${render(properties[key], depth)}`;
	});
	return `{ ${members.join("; ")} }`;
}
624
/**
 * Renders a JSON-schema `array` as a TypeScript type: a readonly tuple for
 * positional `items`, an element array otherwise, and `unknown[]` when
 * items are unspecified.
 */
function renderArrayType(schema, depth) {
	const items = schema["items"];
	if (Array.isArray(items)) {
		return `readonly [${items.map((item) => render(item, depth)).join(", ")}]`;
	}
	if (items === void 0) return "unknown[]";
	const itemType = render(items, depth);
	// Parenthesize unions/intersections so the [] suffix binds correctly.
	const needsParens = itemType.includes(" | ") || itemType.includes(" & ");
	return needsParens ? `(${itemType})[]` : `${itemType}[]`;
}
632
/**
 * Recursively renders a JSON-schema node as TypeScript type syntax.
 *
 * Handles `const`, `enum`, `oneOf`/`anyOf` (unions), `allOf`
 * (intersections), multi-entry `type` arrays, and the primitive, array,
 * and object types. Anything else — or anything nested deeper than
 * MAX_DEPTH — degrades to the opaque `JsonValue` type.
 */
function render(schema, depth) {
	if (depth > MAX_DEPTH || !isRecord(schema)) return "JsonValue";
	const nextDepth = depth + 1;
	if ("const" in schema) return renderLiteral(schema["const"]);
	if (Array.isArray(schema["enum"])) return schema["enum"].map((value) => renderLiteral(value)).join(" | ");
	if (Array.isArray(schema["oneOf"])) return renderUnion(schema["oneOf"], nextDepth);
	if (Array.isArray(schema["anyOf"])) return renderUnion(schema["anyOf"], nextDepth);
	if (Array.isArray(schema["allOf"])) return schema["allOf"].map((item) => render(item, nextDepth)).join(" & ");
	// A type array (e.g. ["string","null"]) renders as a union of the same
	// schema specialized to each member type.
	if (Array.isArray(schema["type"])) return schema["type"].map((item) => render({
		...schema,
		type: item
	}, nextDepth)).join(" | ");
	switch (schema["type"]) {
		case "string": return "string";
		case "number":
		case "integer": return "number";
		case "boolean": return "boolean";
		case "null": return "null";
		case "array": return renderArrayType(schema, nextDepth);
		case "object": return renderObjectType(schema, nextDepth);
		default: break;
	}
	return "JsonValue";
}
656
/** Entry point: renders a whole JSON Schema document as a TypeScript type expression. */
function renderTypeScriptTypeFromJsonSchema(schema) {
  const rootDepth = 0;
  return render(schema, rootDepth);
}
659
/** Creates a type import spec for codec types (named import, aliased to itself). */
const codecTypeImport = (named) => {
  return {
    package: "@prisma-next/adapter-postgres/codec-types",
    named,
    alias: named
  };
};
665
/** Creates a precision-based TypeScript type renderer for temporal types. */
const precisionRenderer = (typeName) => ({
  kind: "function",
  render(params) {
    const precision = params["precision"];
    if (typeof precision === "number") return `${typeName}<${precision}>`;
    // No numeric precision supplied: render the bare type name.
    return typeName;
  }
});
673
/**
 * Returns true when `value` is a positive integer (> 0).
 * Number.isInteger already rejects non-numbers, NaN, and ±Infinity, so the
 * previous typeof/Number.isFinite pre-checks were redundant and are dropped.
 */
function isPositiveInteger(value) {
  return Number.isInteger(value) && value > 0;
}
676
/**
 * Returns true when `value` is a non-negative integer (>= 0).
 * Number.isInteger already rejects non-numbers, NaN, and ±Infinity, so the
 * previous typeof/Number.isFinite pre-checks were redundant and are dropped.
 */
function isNonNegativeInteger(value) {
  return Number.isInteger(value) && value >= 0;
}
679
/**
 * Expands a native type with its "length" parameter, e.g. character -> character(10).
 * Returns the bare native type when no length is supplied; throws when a length
 * is present but is not a positive integer.
 */
function expandLength({ nativeType, typeParams }) {
  if (!typeParams) return nativeType;
  if (!("length" in typeParams)) return nativeType;
  const length = typeParams["length"];
  if (!isPositiveInteger(length)) {
    throw new Error(`Invalid "length" type parameter for "${nativeType}": expected a positive integer, got ${JSON.stringify(length)}`);
  }
  return `${nativeType}(${length})`;
}
685
/**
 * Expands a native type with its "precision" parameter, e.g. timestamp -> timestamp(3).
 * Returns the bare native type when no precision is supplied; throws when a
 * precision is present but is not a positive integer.
 */
function expandPrecision({ nativeType, typeParams }) {
  if (!typeParams) return nativeType;
  if (!("precision" in typeParams)) return nativeType;
  const precision = typeParams["precision"];
  if (!isPositiveInteger(precision)) {
    throw new Error(`Invalid "precision" type parameter for "${nativeType}": expected a positive integer, got ${JSON.stringify(precision)}`);
  }
  return `${nativeType}(${precision})`;
}
691
/**
 * Expands a numeric native type with optional precision/scale parameters,
 * e.g. numeric -> numeric(10,2). "scale" is only valid alongside "precision";
 * invalid combinations or values throw.
 */
function expandNumeric({ nativeType, typeParams }) {
  const hasPrecision = Boolean(typeParams && "precision" in typeParams);
  const hasScale = Boolean(typeParams && "scale" in typeParams);
  if (!hasPrecision) {
    // Scale alone is meaningless in SQL: numeric(,2) is not a valid type.
    if (hasScale) {
      throw new Error(`Invalid type parameters for "${nativeType}": "scale" requires "precision" to be specified`);
    }
    return nativeType;
  }
  const precision = typeParams["precision"];
  if (!isPositiveInteger(precision)) {
    throw new Error(`Invalid "precision" type parameter for "${nativeType}": expected a positive integer, got ${JSON.stringify(precision)}`);
  }
  if (!hasScale) return `${nativeType}(${precision})`;
  const scale = typeParams["scale"];
  if (!isNonNegativeInteger(scale)) {
    throw new Error(`Invalid "scale" type parameter for "${nativeType}": expected a non-negative integer, got ${JSON.stringify(scale)}`);
  }
  return `${nativeType}(${precision},${scale})`;
}
708
// Control-plane hook bundles shared by the codec registrations below: each one
// maps a codec's typeParams onto its native type via the matching expand* helper.
const lengthHooks = { expandNativeType: expandLength };
const precisionHooks = { expandNativeType: expandPrecision };
const numericHooks = { expandNativeType: expandNumeric };
// Identity hooks: the native type takes no parameters, so it passes through unchanged.
const identityHooks = { expandNativeType: ({ nativeType }) => nativeType };
712
/**
 * Validates that a type expression string is safe to embed in generated .d.ts files.
 * Rejects expressions containing patterns that could inject executable code
 * (dynamic import/require calls, declare/export statements, eval calls).
 */
function isSafeTypeExpression(expr) {
  const unsafePattern = /import\s*\(|require\s*\(|declare\s|export\s|eval\s*\(/;
  return unsafePattern.test(expr) === false;
}
719
/**
 * Resolves the TypeScript type expression for a json/jsonb codec: an explicit
 * non-empty "type" param wins, then a "schemaJson" object is rendered via the
 * JSON Schema renderer, and anything unsafe or missing falls back to "JsonValue".
 */
function renderJsonTypeExpression(params) {
  const explicitType = params["type"];
  if (typeof explicitType === "string") {
    const trimmed = explicitType.trim();
    if (trimmed.length > 0) {
      return isSafeTypeExpression(trimmed) ? trimmed : "JsonValue";
    }
  }
  const schema = params["schemaJson"];
  if (typeof schema === "object" && schema !== null) {
    const rendered = renderTypeScriptTypeFromJsonSchema(schema);
    return isSafeTypeExpression(rendered) ? rendered : "JsonValue";
  }
  return "JsonValue";
}
734
/**
 * Static adapter descriptor for the Postgres target. Bundles, declaratively:
 * - adapter identity (kind/familyId/targetId/id/version),
 * - capability flags,
 * - codegen type info: how each codec id renders as a TypeScript type
 *   ("{{param}}" templates or function renderers) plus the type imports those
 *   rendered types need,
 * - control-plane hooks that expand a codec's typeParams into the concrete
 *   native type (length/precision/numeric/identity variants),
 * - the storage table mapping each codec id to its Postgres native type.
 */
const postgresAdapterDescriptorMeta = {
  kind: "adapter",
  familyId: "sql",
  targetId: "postgres",
  id: "postgres",
  version: "0.0.1",
  capabilities: {
    // Postgres-specific query features.
    postgres: {
      orderBy: true,
      limit: true,
      lateral: true,
      jsonAgg: true,
      returning: true
    },
    // Generic SQL-family features.
    sql: { enums: true }
  },
  types: {
    codecTypes: {
      // Namespace import used when codec types are referenced as PgTypes.*.
      import: {
        package: "@prisma-next/adapter-postgres/codec-types",
        named: "CodecTypes",
        alias: "PgTypes"
      },
      // Codec id → TypeScript type. String values are templates whose
      // "{{name}}" placeholders come from typeParams; object values carry a
      // render function for types needing conditional logic.
      parameterized: {
        [SQL_CHAR_CODEC_ID]: "Char<{{length}}>",
        [SQL_VARCHAR_CODEC_ID]: "Varchar<{{length}}>",
        [PG_CHAR_CODEC_ID]: "Char<{{length}}>",
        [PG_VARCHAR_CODEC_ID]: "Varchar<{{length}}>",
        [PG_NUMERIC_CODEC_ID]: {
          kind: "function",
          // Numeric requires precision; scale is optional.
          render: (params) => {
            const precision = params["precision"];
            if (typeof precision !== "number") throw new Error("pg/numeric@1 renderer expects precision");
            const scale = params["scale"];
            return typeof scale === "number" ? `Numeric<${precision}, ${scale}>` : `Numeric<${precision}>`;
          }
        },
        [PG_BIT_CODEC_ID]: "Bit<{{length}}>",
        [PG_VARBIT_CODEC_ID]: "VarBit<{{length}}>",
        [PG_TIMESTAMP_CODEC_ID]: precisionRenderer("Timestamp"),
        [PG_TIMESTAMPTZ_CODEC_ID]: precisionRenderer("Timestamptz"),
        [PG_TIME_CODEC_ID]: precisionRenderer("Time"),
        [PG_TIMETZ_CODEC_ID]: precisionRenderer("Timetz"),
        [PG_INTERVAL_CODEC_ID]: precisionRenderer("Interval"),
        [PG_ENUM_CODEC_ID]: {
          kind: "function",
          // Enum renders as a string-literal union of its values.
          render: (params) => {
            const values = params["values"];
            if (!Array.isArray(values)) throw new Error("pg/enum@1 renderer expects values array");
            return values.map((value) => `'${String(value).replace(/'/g, "\\'")}'`).join(" | ");
          }
        },
        [PG_JSON_CODEC_ID]: {
          kind: "function",
          render: renderJsonTypeExpression
        },
        [PG_JSONB_CODEC_ID]: {
          kind: "function",
          render: renderJsonTypeExpression
        }
      },
      // Named type imports emitted into generated files that use the types above.
      typeImports: [
        {
          package: "@prisma-next/adapter-postgres/codec-types",
          named: "JsonValue",
          alias: "JsonValue"
        },
        codecTypeImport("Char"),
        codecTypeImport("Varchar"),
        codecTypeImport("Numeric"),
        codecTypeImport("Bit"),
        codecTypeImport("VarBit"),
        codecTypeImport("Timestamp"),
        codecTypeImport("Timestamptz"),
        codecTypeImport("Time"),
        codecTypeImport("Timetz"),
        codecTypeImport("Interval")
      ],
      // Codec id → hooks used by the control plane to expand typeParams into
      // the concrete native type string.
      controlPlaneHooks: {
        [SQL_CHAR_CODEC_ID]: lengthHooks,
        [SQL_VARCHAR_CODEC_ID]: lengthHooks,
        [PG_CHAR_CODEC_ID]: lengthHooks,
        [PG_VARCHAR_CODEC_ID]: lengthHooks,
        [PG_NUMERIC_CODEC_ID]: numericHooks,
        [PG_BIT_CODEC_ID]: lengthHooks,
        [PG_VARBIT_CODEC_ID]: lengthHooks,
        [PG_TIMESTAMP_CODEC_ID]: precisionHooks,
        [PG_TIMESTAMPTZ_CODEC_ID]: precisionHooks,
        [PG_TIME_CODEC_ID]: precisionHooks,
        [PG_TIMETZ_CODEC_ID]: precisionHooks,
        [PG_INTERVAL_CODEC_ID]: precisionHooks,
        [PG_ENUM_CODEC_ID]: pgEnumControlHooks,
        [PG_JSON_CODEC_ID]: identityHooks,
        [PG_JSONB_CODEC_ID]: identityHooks
      }
    },
    // Codec id → Postgres native storage type (base name; parameters are
    // applied separately by the controlPlaneHooks above).
    storage: [
      { typeId: PG_TEXT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "text" },
      { typeId: SQL_CHAR_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "character" },
      { typeId: SQL_VARCHAR_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "character varying" },
      { typeId: SQL_INT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int4" },
      { typeId: SQL_FLOAT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "float8" },
      { typeId: PG_CHAR_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "character" },
      { typeId: PG_VARCHAR_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "character varying" },
      { typeId: PG_INT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int4" },
      { typeId: PG_FLOAT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "float8" },
      { typeId: PG_INT4_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int4" },
      { typeId: PG_INT2_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int2" },
      { typeId: PG_INT8_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int8" },
      { typeId: PG_FLOAT4_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "float4" },
      { typeId: PG_FLOAT8_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "float8" },
      { typeId: PG_NUMERIC_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "numeric" },
      { typeId: PG_TIMESTAMP_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "timestamp" },
      { typeId: PG_TIMESTAMPTZ_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "timestamptz" },
      { typeId: PG_TIME_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "time" },
      { typeId: PG_TIMETZ_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "timetz" },
      { typeId: PG_BOOL_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "bool" },
      { typeId: PG_BIT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "bit" },
      { typeId: PG_VARBIT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "bit varying" },
      { typeId: PG_INTERVAL_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "interval" },
      { typeId: PG_JSON_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "json" },
      { typeId: PG_JSONB_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "jsonb" }
    ]
  }
};
984
+
985
+ //#endregion
986
+ //#region ../../../1-framework/2-authoring/ids/dist/index.mjs
987
// Identifiers of the ID generators shipped with the framework, in registry order.
const builtinGeneratorIds = [
  "ulid",
  "nanoid",
  "uuidv7",
  "uuidv4",
  "cuid2",
  "ksuid"
];
995
/**
 * Resolves the generated-column descriptor for the nanoid generator.
 * An optional "size" param (default 21) becomes the char column length.
 * Refactored to build the descriptor once instead of duplicating the literal
 * in both the default and custom-size branches.
 *
 * @param params - Optional generator params; only "size" is read.
 * @returns A sql/char@1 column descriptor with the resolved length.
 * @throws {Error} when "size" is present but not an integer in [2, 255].
 */
function resolveNanoidColumnDescriptor(params) {
  const rawSize = params?.["size"];
  if (rawSize !== void 0 && (typeof rawSize !== "number" || !Number.isInteger(rawSize) || rawSize < 2 || rawSize > 255)) {
    throw new Error("nanoid size must be an integer between 2 and 255");
  }
  return {
    type: {
      codecId: "sql/char@1",
      nativeType: "character"
    },
    // 21 is the default length when no size param is provided.
    typeParams: { length: rawSize ?? 21 }
  };
}
1013
/**
 * Per-generator metadata for the builtin id generators.
 * - applicableCodecIds: codecs whose columns may hold this generator's output.
 * - generatedColumnDescriptor: default column shape — a fixed-length
 *   "character" column sized to the generator's canonical output length
 *   (ulid 26, nanoid 21, uuidv7/uuidv4 36, cuid2 24, ksuid 27).
 * - resolveGeneratedColumnDescriptor (nanoid only): params-aware resolver
 *   honoring a caller-provided "size".
 */
const builtinGeneratorMetadataById = {
  ulid: {
    applicableCodecIds: ["pg/text@1", "sql/char@1"],
    generatedColumnDescriptor: {
      type: { codecId: "sql/char@1", nativeType: "character" },
      typeParams: { length: 26 }
    }
  },
  nanoid: {
    applicableCodecIds: ["pg/text@1", "sql/char@1"],
    generatedColumnDescriptor: {
      type: { codecId: "sql/char@1", nativeType: "character" },
      typeParams: { length: 21 }
    },
    // nanoid length is configurable, so it carries a dedicated resolver.
    resolveGeneratedColumnDescriptor: resolveNanoidColumnDescriptor
  },
  uuidv7: {
    applicableCodecIds: ["pg/text@1", "sql/char@1"],
    generatedColumnDescriptor: {
      type: { codecId: "sql/char@1", nativeType: "character" },
      typeParams: { length: 36 }
    }
  },
  uuidv4: {
    applicableCodecIds: ["pg/text@1", "sql/char@1"],
    generatedColumnDescriptor: {
      type: { codecId: "sql/char@1", nativeType: "character" },
      typeParams: { length: 36 }
    }
  },
  cuid2: {
    applicableCodecIds: ["pg/text@1", "sql/char@1"],
    generatedColumnDescriptor: {
      type: { codecId: "sql/char@1", nativeType: "character" },
      typeParams: { length: 24 }
    }
  },
  ksuid: {
    applicableCodecIds: ["pg/text@1", "sql/char@1"],
    generatedColumnDescriptor: {
      type: { codecId: "sql/char@1", nativeType: "character" },
      typeParams: { length: 27 }
    }
  }
};
1076
/** Registry-facing view of the builtin generators: id plus its applicable codec ids. */
const builtinGeneratorRegistryMetadata = builtinGeneratorIds.map((generatorId) => ({
  id: generatorId,
  applicableCodecIds: builtinGeneratorMetadataById[generatorId].applicableCodecIds
}));
1080
/**
 * Resolves the generated-column descriptor for a builtin generator, preferring
 * the generator's params-aware resolver over the static default descriptor.
 */
function resolveBuiltinGeneratedColumnDescriptor(input) {
  const { resolveGeneratedColumnDescriptor, generatedColumnDescriptor } = builtinGeneratorMetadataById[input.id];
  if (resolveGeneratedColumnDescriptor) {
    return resolveGeneratedColumnDescriptor(input.params);
  }
  return generatedColumnDescriptor;
}
1086
+
1087
+ //#endregion
1088
+ //#region ../../6-adapters/postgres/dist/control.mjs
1089
/**
 * Pre-compiled regex patterns for performance.
 * These are compiled once at module load time rather than on each function call.
 */
const NEXTVAL_PATTERN = /^nextval\s*\(/i;
const NOW_FUNCTION_PATTERN = /^(now\s*\(\s*\)|CURRENT_TIMESTAMP)$/i;
const CLOCK_TIMESTAMP_PATTERN = /^clock_timestamp\s*\(\s*\)$/i;
const TIMESTAMP_CAST_SUFFIX = /::timestamp(?:tz|\s+(?:with|without)\s+time\s+zone)?$/i;
const TEXT_CAST_SUFFIX = /::text$/i;
const NOW_LITERAL_PATTERN = /^'now'$/i;
const UUID_PATTERN = /^gen_random_uuid\s*\(\s*\)$/i;
const UUID_OSSP_PATTERN = /^uuid_generate_v4\s*\(\s*\)$/i;
const NULL_PATTERN = /^NULL(?:::.+)?$/i;
const TRUE_PATTERN = /^true$/i;
const FALSE_PATTERN = /^false$/i;
const NUMERIC_PATTERN = /^-?\d+(\.\d+)?$/;
const STRING_LITERAL_PATTERN = /^'((?:[^']|'')*)'(?:::(?:"[^"]+"|[\w\s]+)(?:\(\d+\))?)?$/;
/**
 * Returns the canonical expression for a timestamp default function, or undefined
 * if the expression is not a recognized timestamp default.
 *
 * Keeps now()/CURRENT_TIMESTAMP and clock_timestamp() distinct:
 * - now(), CURRENT_TIMESTAMP, ('now'::text)::timestamp... → 'now()'
 * - clock_timestamp(), clock_timestamp()::timestamptz → 'clock_timestamp()'
 *
 * These are semantically different in Postgres: now() is the transaction start
 * time (constant within a transaction) while clock_timestamp() is the actual
 * wall-clock time (can differ across rows in a single INSERT).
 */
function canonicalizeTimestampDefault(expr) {
  const classify = (candidate) => {
    if (NOW_FUNCTION_PATTERN.test(candidate)) return "now()";
    if (CLOCK_TIMESTAMP_PATTERN.test(candidate)) return "clock_timestamp()";
    return undefined;
  };
  const direct = classify(expr);
  if (direct !== undefined) return direct;
  if (!TIMESTAMP_CAST_SUFFIX.test(expr)) return undefined;
  // Peel the trailing ::timestamp[tz] cast, then optional parentheses.
  let inner = expr.replace(TIMESTAMP_CAST_SUFFIX, "").trim();
  if (inner.startsWith("(") && inner.endsWith(")")) inner = inner.slice(1, -1).trim();
  const nested = classify(inner);
  if (nested !== undefined) return nested;
  // Legacy spelling: ('now'::text)::timestamp... normalizes to now().
  inner = inner.replace(TEXT_CAST_SUFFIX, "").trim();
  if (NOW_LITERAL_PATTERN.test(inner)) return "now()";
  return undefined;
}
1129
/**
 * Parses a raw Postgres column default expression into a normalized ColumnDefault.
 * This enables semantic comparison between contract defaults and introspected schema defaults.
 *
 * Used by the migration diff layer to normalize raw database defaults during comparison,
 * keeping the introspection layer focused on faithful data capture.
 *
 * @param rawDefault - Raw default expression from information_schema.columns.column_default
 * @param nativeType - Native column type, used for type-aware parsing (bigint tagging, JSON detection)
 * @returns Normalized ColumnDefault or undefined if the expression cannot be parsed
 */
function parsePostgresDefault(rawDefault, nativeType) {
  const trimmed = rawDefault.trim();
  const normalizedType = nativeType?.toLowerCase();
  // bigint/int8 integer literals are tagged instead of parsed to Number,
  // avoiding precision loss beyond Number.MAX_SAFE_INTEGER.
  const isBigInt = normalizedType === "bigint" || normalizedType === "int8";
  // nextval(...) sequence defaults normalize to the canonical autoincrement().
  if (NEXTVAL_PATTERN.test(trimmed)) return {
    kind: "function",
    expression: "autoincrement()"
  };
  // now()/CURRENT_TIMESTAMP/clock_timestamp() in any cast spelling.
  const canonicalTimestamp = canonicalizeTimestampDefault(trimmed);
  if (canonicalTimestamp) return {
    kind: "function",
    expression: canonicalTimestamp
  };
  // Both UUID generator spellings normalize to gen_random_uuid().
  if (UUID_PATTERN.test(trimmed)) return {
    kind: "function",
    expression: "gen_random_uuid()"
  };
  if (UUID_OSSP_PATTERN.test(trimmed)) return {
    kind: "function",
    expression: "gen_random_uuid()"
  };
  // Literal defaults: NULL (optionally cast), booleans, then numbers.
  if (NULL_PATTERN.test(trimmed)) return {
    kind: "literal",
    value: null
  };
  if (TRUE_PATTERN.test(trimmed)) return {
    kind: "literal",
    value: true
  };
  if (FALSE_PATTERN.test(trimmed)) return {
    kind: "literal",
    value: false
  };
  if (NUMERIC_PATTERN.test(trimmed)) {
    if (isBigInt) return {
      kind: "literal",
      value: {
        $type: "bigint",
        value: trimmed
      }
    };
    const num = Number(trimmed);
    if (!Number.isFinite(num)) return void 0;
    return {
      kind: "literal",
      value: num
    };
  }
  // Quoted string literal with an optional ::type cast; '' unescapes to '.
  const stringMatch = trimmed.match(STRING_LITERAL_PATTERN);
  if (stringMatch?.[1] !== void 0) {
    const unescaped = stringMatch[1].replace(/''/g, "'");
    // json/jsonb string defaults are parsed so the comparison is structural,
    // falling back to the raw string when the payload is not valid JSON.
    if (normalizedType === "json" || normalizedType === "jsonb") try {
      return {
        kind: "literal",
        value: JSON.parse(unescaped)
      };
    } catch {}
    return {
      kind: "literal",
      value: unescaped
    };
  }
  // Anything unrecognized is preserved verbatim as an opaque function expression.
  return {
    kind: "function",
    expression: trimmed
  };
}
1207
/**
 * Postgres control plane adapter for control-plane operations like introspection.
 * Provides target-specific implementations for control-plane domain actions.
 */
var PostgresControlAdapter = class {
  familyId = "sql";
  targetId = "postgres";
  /**
   * @deprecated Use targetId instead
   */
  target = "postgres";
  /**
   * Target-specific normalizer for raw Postgres default expressions.
   * Used by schema verification to normalize raw defaults before comparison.
   */
  normalizeDefault = parsePostgresDefault;
  /**
   * Target-specific normalizer for Postgres schema native type names.
   * Used by schema verification to normalize introspected type names
   * before comparison with contract native types.
   */
  normalizeNativeType = normalizeSchemaNativeType;
  /**
   * Introspects a Postgres database schema and returns a raw SqlSchemaIR.
   *
   * This is a pure schema discovery operation that queries the Postgres catalog
   * and returns the schema structure without type mapping or contract enrichment.
   * Type mapping and enrichment are handled separately by enrichment helpers.
   *
   * Uses batched queries to minimize database round trips (7 queries instead of 5T+3).
   *
   * @param driver - ControlDriverInstance<'sql', 'postgres'> instance for executing queries
   * @param contractIR - Optional contract IR for contract-guided introspection (filtering, optimization)
   * @param schema - Schema name to introspect (defaults to 'public')
   * @returns Promise resolving to SqlSchemaIR representing the live database schema
   */
  async introspect(driver, _contractIR, schema = "public") {
    // The seven catalog queries are independent of each other, so they are
    // issued in parallel: tables, columns, PKs, FKs, uniques, indexes, extensions.
    const [tablesResult, columnsResult, pkResult, fkResult, uniqueResult, indexResult, extensionsResult] = await Promise.all([
      driver.query(`SELECT table_name
        FROM information_schema.tables
        WHERE table_schema = $1
        AND table_type = 'BASE TABLE'
        ORDER BY table_name`, [schema]),
      driver.query(`SELECT
        c.table_name,
        column_name,
        data_type,
        udt_name,
        is_nullable,
        character_maximum_length,
        numeric_precision,
        numeric_scale,
        column_default,
        format_type(a.atttypid, a.atttypmod) AS formatted_type
        FROM information_schema.columns c
        JOIN pg_catalog.pg_class cl
        ON cl.relname = c.table_name
        JOIN pg_catalog.pg_namespace ns
        ON ns.nspname = c.table_schema
        AND ns.oid = cl.relnamespace
        JOIN pg_catalog.pg_attribute a
        ON a.attrelid = cl.oid
        AND a.attname = c.column_name
        AND a.attnum > 0
        AND NOT a.attisdropped
        WHERE c.table_schema = $1
        ORDER BY c.table_name, c.ordinal_position`, [schema]),
      driver.query(`SELECT
        tc.table_name,
        tc.constraint_name,
        kcu.column_name,
        kcu.ordinal_position
        FROM information_schema.table_constraints tc
        JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
        AND tc.table_name = kcu.table_name
        WHERE tc.table_schema = $1
        AND tc.constraint_type = 'PRIMARY KEY'
        ORDER BY tc.table_name, kcu.ordinal_position`, [schema]),
      driver.query(`SELECT
        tc.table_name,
        tc.constraint_name,
        kcu.column_name,
        kcu.ordinal_position,
        ref_ns.nspname AS referenced_table_schema,
        ref_cl.relname AS referenced_table_name,
        ref_att.attname AS referenced_column_name,
        rc.delete_rule,
        rc.update_rule
        FROM information_schema.table_constraints tc
        JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
        AND tc.table_name = kcu.table_name
        JOIN pg_catalog.pg_constraint pgc
        ON pgc.conname = tc.constraint_name
        AND pgc.connamespace = (
        SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = tc.table_schema
        )
        JOIN pg_catalog.pg_class ref_cl
        ON ref_cl.oid = pgc.confrelid
        JOIN pg_catalog.pg_namespace ref_ns
        ON ref_ns.oid = ref_cl.relnamespace
        JOIN pg_catalog.pg_attribute ref_att
        ON ref_att.attrelid = pgc.confrelid
        AND ref_att.attnum = pgc.confkey[kcu.ordinal_position]
        JOIN information_schema.referential_constraints rc
        ON rc.constraint_name = tc.constraint_name
        AND rc.constraint_schema = tc.table_schema
        WHERE tc.table_schema = $1
        AND tc.constraint_type = 'FOREIGN KEY'
        ORDER BY tc.table_name, tc.constraint_name, kcu.ordinal_position`, [schema]),
      driver.query(`SELECT
        tc.table_name,
        tc.constraint_name,
        kcu.column_name,
        kcu.ordinal_position
        FROM information_schema.table_constraints tc
        JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
        AND tc.table_name = kcu.table_name
        WHERE tc.table_schema = $1
        AND tc.constraint_type = 'UNIQUE'
        ORDER BY tc.table_name, tc.constraint_name, kcu.ordinal_position`, [schema]),
      driver.query(`SELECT
        i.tablename,
        i.indexname,
        ix.indisunique,
        a.attname,
        a.attnum
        FROM pg_indexes i
        JOIN pg_class ic ON ic.relname = i.indexname
        JOIN pg_namespace ins ON ins.oid = ic.relnamespace AND ins.nspname = $1
        JOIN pg_index ix ON ix.indexrelid = ic.oid
        JOIN pg_class t ON t.oid = ix.indrelid
        JOIN pg_namespace tn ON tn.oid = t.relnamespace AND tn.nspname = $1
        LEFT JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey) AND a.attnum > 0
        WHERE i.schemaname = $1
        AND NOT EXISTS (
        SELECT 1
        FROM information_schema.table_constraints tc
        WHERE tc.table_schema = $1
        AND tc.table_name = i.tablename
        AND tc.constraint_name = i.indexname
        )
        ORDER BY i.tablename, i.indexname, a.attnum`, [schema]),
      driver.query(`SELECT extname
        FROM pg_extension
        ORDER BY extname`, [])
    ]);
    // Bucket each result set by table so per-table assembly below is O(1) lookup.
    const columnsByTable = groupBy(columnsResult.rows, "table_name");
    const pksByTable = groupBy(pkResult.rows, "table_name");
    const fksByTable = groupBy(fkResult.rows, "table_name");
    const uniquesByTable = groupBy(uniqueResult.rows, "table_name");
    const indexesByTable = groupBy(indexResult.rows, "tablename");
    // Track PK constraint names per table so they can be excluded from uniques.
    const pkConstraintsByTable = /* @__PURE__ */ new Map();
    for (const row of pkResult.rows) {
      let constraints = pkConstraintsByTable.get(row.table_name);
      if (!constraints) {
        constraints = /* @__PURE__ */ new Set();
        pkConstraintsByTable.set(row.table_name, constraints);
      }
      constraints.add(row.constraint_name);
    }
    const tables = {};
    for (const tableRow of tablesResult.rows) {
      const tableName = tableRow.table_name;
      const columns = {};
      for (const colRow of columnsByTable.get(tableName) ?? []) {
        // Prefer format_type() output (normalized), then reconstruct from
        // information_schema columns (length / precision+scale), then udt_name.
        let nativeType = colRow.udt_name;
        const formattedType = colRow.formatted_type ? normalizeFormattedType(colRow.formatted_type, colRow.data_type, colRow.udt_name) : null;
        if (formattedType) nativeType = formattedType;
        else if (colRow.data_type === "character varying" || colRow.data_type === "character") if (colRow.character_maximum_length) nativeType = `${colRow.data_type}(${colRow.character_maximum_length})`;
        else nativeType = colRow.data_type;
        else if (colRow.data_type === "numeric" || colRow.data_type === "decimal") if (colRow.numeric_precision && colRow.numeric_scale !== null) nativeType = `${colRow.data_type}(${colRow.numeric_precision},${colRow.numeric_scale})`;
        else if (colRow.numeric_precision) nativeType = `${colRow.data_type}(${colRow.numeric_precision})`;
        else nativeType = colRow.data_type;
        else nativeType = colRow.udt_name || colRow.data_type;
        columns[colRow.column_name] = {
          name: colRow.column_name,
          nativeType,
          nullable: colRow.is_nullable === "YES",
          ...ifDefined("default", colRow.column_default ?? void 0)
        };
      }
      // Primary key: columns in ordinal order; constraint name kept when present.
      const pkRows = [...pksByTable.get(tableName) ?? []];
      const primaryKeyColumns = pkRows.sort((a, b) => a.ordinal_position - b.ordinal_position).map((row) => row.column_name);
      const primaryKey = primaryKeyColumns.length > 0 ? {
        columns: primaryKeyColumns,
        ...pkRows[0]?.constraint_name ? { name: pkRows[0].constraint_name } : {}
      } : void 0;
      // Foreign keys: one row per column; fold multi-column FKs by constraint name.
      const foreignKeysMap = /* @__PURE__ */ new Map();
      for (const fkRow of fksByTable.get(tableName) ?? []) {
        const existing = foreignKeysMap.get(fkRow.constraint_name);
        if (existing) {
          existing.columns.push(fkRow.column_name);
          existing.referencedColumns.push(fkRow.referenced_column_name);
        } else foreignKeysMap.set(fkRow.constraint_name, {
          columns: [fkRow.column_name],
          referencedTable: fkRow.referenced_table_name,
          referencedColumns: [fkRow.referenced_column_name],
          name: fkRow.constraint_name,
          deleteRule: fkRow.delete_rule,
          updateRule: fkRow.update_rule
        });
      }
      const foreignKeys = Array.from(foreignKeysMap.values()).map((fk) => ({
        columns: Object.freeze([...fk.columns]),
        referencedTable: fk.referencedTable,
        referencedColumns: Object.freeze([...fk.referencedColumns]),
        name: fk.name,
        ...ifDefined("onDelete", mapReferentialAction(fk.deleteRule)),
        ...ifDefined("onUpdate", mapReferentialAction(fk.updateRule))
      }));
      // Unique constraints, excluding the PK constraint (reported as UNIQUE too).
      const pkConstraints = pkConstraintsByTable.get(tableName) ?? /* @__PURE__ */ new Set();
      const uniquesMap = /* @__PURE__ */ new Map();
      for (const uniqueRow of uniquesByTable.get(tableName) ?? []) {
        if (pkConstraints.has(uniqueRow.constraint_name)) continue;
        const existing = uniquesMap.get(uniqueRow.constraint_name);
        if (existing) existing.columns.push(uniqueRow.column_name);
        else uniquesMap.set(uniqueRow.constraint_name, {
          columns: [uniqueRow.column_name],
          name: uniqueRow.constraint_name
        });
      }
      const uniques = Array.from(uniquesMap.values()).map((uq) => ({
        columns: Object.freeze([...uq.columns]),
        name: uq.name
      }));
      // Plain indexes (constraint-backed indexes were excluded in the query).
      const indexesMap = /* @__PURE__ */ new Map();
      for (const idxRow of indexesByTable.get(tableName) ?? []) {
        // Expression indexes have no attname row; skip them.
        if (!idxRow.attname) continue;
        const existing = indexesMap.get(idxRow.indexname);
        if (existing) existing.columns.push(idxRow.attname);
        else indexesMap.set(idxRow.indexname, {
          columns: [idxRow.attname],
          name: idxRow.indexname,
          unique: idxRow.indisunique
        });
      }
      const indexes = Array.from(indexesMap.values()).map((idx) => ({
        columns: Object.freeze([...idx.columns]),
        name: idx.name,
        unique: idx.unique
      }));
      tables[tableName] = {
        name: tableName,
        columns,
        ...ifDefined("primaryKey", primaryKey),
        foreignKeys,
        uniques,
        indexes
      };
    }
    // Installed extensions become dependency ids; enum-like storage types come
    // from the pg/enum control hooks when available.
    const dependencies = extensionsResult.rows.map((row) => ({ id: `postgres.extension.${row.extname}` }));
    const storageTypes = await pgEnumControlHooks.introspectTypes?.({
      driver,
      schemaName: schema
    }) ?? {};
    return {
      tables,
      dependencies,
      annotations: { pg: {
        schema,
        version: await this.getPostgresVersion(driver),
        ...ifDefined("storageTypes", Object.keys(storageTypes).length > 0 ? storageTypes : void 0)
      } }
    };
  }
  /**
   * Gets the Postgres version from the database.
   * Extracts the "major.minor" pair from version() output, or "unknown".
   */
  async getPostgresVersion(driver) {
    return ((await driver.query("SELECT version() AS version", [])).rows[0]?.version ?? "").match(/PostgreSQL (\d+\.\d+)/)?.[1] ?? "unknown";
  }
};
1485
/**
 * Pre-computed lookup map for simple prefix-based type normalization.
 * Maps short Postgres type names to their canonical SQL names.
 */
const TYPE_PREFIX_MAP = new Map([
  ["varchar", "character varying"],
  ["bpchar", "character"],
  ["varbit", "bit varying"]
]);
/**
 * Normalizes a Postgres schema native type to its canonical comparison form:
 * short aliases are expanded via TYPE_PREFIX_MAP and "with/without time zone"
 * spellings collapse to timestamptz/timetz/timestamp/time (precision kept).
 */
function normalizeSchemaNativeType(nativeType) {
  const candidate = nativeType.trim();
  for (const [shortName, canonical] of TYPE_PREFIX_MAP) {
    if (candidate.startsWith(shortName)) {
      return canonical + candidate.slice(shortName.length);
    }
  }
  if (candidate.includes(" with time zone")) {
    if (candidate.startsWith("timestamp")) {
      return `timestamptz${candidate.slice("timestamp".length).replace(" with time zone", "")}`;
    }
    if (candidate.startsWith("time")) {
      return `timetz${candidate.slice("time".length).replace(" with time zone", "")}`;
    }
  }
  if (candidate.includes(" without time zone")) {
    return candidate.replace(" without time zone", "");
  }
  return candidate;
}
1511
/**
 * Normalizes a `format_type()`-style display name back to the short native
 * type form used by contracts (e.g. "integer" -> "int4"), including
 * parameterized varchar/bpchar/varbit and temporal types. Quoted names
 * (e.g. enum types) are unquoted.
 */
function normalizeFormattedType(formattedType, dataType, udtName) {
	switch (formattedType) {
		case "integer": return "int4";
		case "smallint": return "int2";
		case "bigint": return "int8";
		case "real": return "float4";
		case "double precision": return "float8";
		case "boolean": return "bool";
		default: break;
	}
	const prefixRewrites = [
		["varchar", "character varying"],
		["bpchar", "character"],
		["varbit", "bit varying"]
	];
	for (const [from, to] of prefixRewrites) {
		if (formattedType.startsWith(from)) return formattedType.replace(from, to);
	}
	// Temporal types: decide via data_type/udt_name, then strip the verbose
	// time-zone suffix. "timestamp" cases come before "time" on purpose.
	if (dataType === "timestamp with time zone" || udtName === "timestamptz") {
		return formattedType.replace("timestamp", "timestamptz").replace(" with time zone", "").trim();
	}
	if (dataType === "timestamp without time zone" || udtName === "timestamp") {
		return formattedType.replace(" without time zone", "").trim();
	}
	if (dataType === "time with time zone" || udtName === "timetz") {
		return formattedType.replace("time", "timetz").replace(" with time zone", "").trim();
	}
	if (dataType === "time without time zone" || udtName === "time") {
		return formattedType.replace(" without time zone", "").trim();
	}
	const isQuoted = formattedType.startsWith('"') && formattedType.endsWith('"');
	return isQuoted ? formattedType.slice(1, -1) : formattedType;
}
1528
const PG_REFERENTIAL_ACTION_MAP = {
	"NO ACTION": "noAction",
	RESTRICT: "restrict",
	CASCADE: "cascade",
	"SET NULL": "setNull",
	"SET DEFAULT": "setDefault"
};
/**
 * Maps a Postgres referential action rule to the canonical SqlReferentialAction.
 * Returns undefined for 'NO ACTION' (the database default) to keep the IR sparse.
 * Throws for unrecognized rules to prevent silent data loss.
 *
 * Fix: look up via hasOwnProperty so inherited Object.prototype keys (e.g. a
 * rule string of "toString") are rejected instead of returning an inherited
 * function value.
 */
function mapReferentialAction(rule) {
	if (!Object.prototype.hasOwnProperty.call(PG_REFERENTIAL_ACTION_MAP, rule)) {
		throw new Error(`Unknown PostgreSQL referential action rule: "${rule}". Expected one of: NO ACTION, RESTRICT, CASCADE, SET NULL, SET DEFAULT.`);
	}
	const mapped = PG_REFERENTIAL_ACTION_MAP[rule];
	return mapped === "noAction" ? void 0 : mapped;
}
1546
/**
 * Groups an array of objects by a specified key.
 * Returns a Map for O(1) lookup by group key.
 */
function groupBy(items, key) {
	const grouped = new Map();
	for (const item of items) {
		const bucketKey = item[key];
		const bucket = grouped.get(bucketKey);
		if (bucket) bucket.push(item);
		else grouped.set(bucketKey, [item]);
	}
	return grouped;
}
1563
/**
 * Builds a failed lowering result carrying a
 * PSL_INVALID_DEFAULT_FUNCTION_ARGUMENT diagnostic for the given span.
 */
function invalidArgumentDiagnostic(input) {
	const { message, span, context } = input;
	return {
		ok: false,
		diagnostic: {
			code: "PSL_INVALID_DEFAULT_FUNCTION_ARGUMENT",
			message,
			sourceId: context.sourceId,
			span
		}
	};
}
1574
/**
 * Builds a successful lowering result describing an execution-time generated
 * value produced by the named generator, with optional generator params.
 */
function executionGenerator(id, params) {
	const generated = { kind: "generator", id };
	if (params) generated.params = params;
	return {
		ok: true,
		value: { kind: "execution", generated }
	};
}
1587
/**
 * Returns undefined when the call has no arguments; otherwise returns an
 * invalid-argument diagnostic pointing at the call and suggesting `usage`.
 */
function expectNoArgs(input) {
	const { call, context, usage } = input;
	if (call.args.length > 0) {
		return invalidArgumentDiagnostic({
			context,
			span: call.span,
			message: `Default function "${call.name}" does not accept arguments. Use ${usage}.`
		});
	}
	return void 0;
}
1595
/**
 * Parses a raw default-function argument as a base-10 integer.
 *
 * Returns undefined for anything that is not a plain (optionally negative)
 * decimal integer literal. Fix: values whose magnitude exceeds
 * Number.MAX_SAFE_INTEGER are rejected as well — beyond that range Number()
 * silently loses precision, so returning the converted value would yield a
 * wrong integer. (Number.isSafeInteger subsumes the old Number.isInteger check.)
 */
function parseIntegerArgument(raw) {
	const trimmed = raw.trim();
	if (!/^-?\d+$/.test(trimmed)) return void 0;
	const value = Number(trimmed);
	if (!Number.isSafeInteger(value)) return void 0;
	return value;
}
1602
/**
 * Parses a raw argument as a single- or double-quoted string literal.
 * Returns the unquoted content (possibly ""), or undefined when the input is
 * not wrapped in a matching pair of quotes. Dotall: content may span lines.
 */
function parseStringLiteral(raw) {
	const literalPattern = /^(['"])(.*)\1$/s;
	const match = literalPattern.exec(raw.trim());
	if (match === null) return void 0;
	return match[2] ?? "";
}
1607
/** Builds a storage-level default backed by a SQL function expression. */
function storageFunctionDefault(expression) {
	return {
		ok: true,
		value: {
			kind: "storage",
			defaultValue: { kind: "function", expression }
		}
	};
}
/**
 * Lowers `autoincrement()` — rejects any arguments, then emits a storage
 * default carrying the literal expression "autoincrement()" (the DDL builder
 * later turns it into SERIAL/BIGSERIAL/SMALLSERIAL).
 */
function lowerAutoincrement(input) {
	const argError = expectNoArgs({
		call: input.call,
		context: input.context,
		usage: "`autoincrement()`"
	});
	return argError ?? storageFunctionDefault("autoincrement()");
}
/**
 * Lowers `now()` — rejects any arguments, then emits a storage default with
 * the SQL expression "now()".
 */
function lowerNow(input) {
	const argError = expectNoArgs({
		call: input.call,
		context: input.context,
		usage: "`now()`"
	});
	return argError ?? storageFunctionDefault("now()");
}
1643
/**
 * Lowers `uuid()` / `uuid(4)` / `uuid(7)` to an execution-time generator.
 * Bare `uuid()` defaults to v4; any other arity or version is a diagnostic.
 */
function lowerUuid(input) {
	const { call, context } = input;
	if (call.args.length === 0) return executionGenerator("uuidv4");
	if (call.args.length > 1) {
		return invalidArgumentDiagnostic({
			context,
			span: call.span,
			message: 'Default function "uuid" accepts at most one version argument: `uuid()`, `uuid(4)`, or `uuid(7)`.'
		});
	}
	const version = parseIntegerArgument(call.args[0]?.raw ?? "");
	switch (version) {
		case 4: return executionGenerator("uuidv4");
		case 7: return executionGenerator("uuidv7");
		default:
			return invalidArgumentDiagnostic({
				context,
				span: call.args[0]?.span ?? call.span,
				message: 'Default function "uuid" supports only `uuid()`, `uuid(4)`, or `uuid(7)` in SQL PSL provider v1.'
			});
	}
}
1659
/**
 * Lowers `cuid(2)` to the cuid2 execution generator. Bare `cuid()` is
 * explicitly unsupported (separate diagnostic code steering users to
 * `cuid(2)`); any other argument yields an invalid-argument diagnostic.
 */
function lowerCuid(input) {
	const { call, context } = input;
	if (call.args.length === 0) {
		return {
			ok: false,
			diagnostic: {
				code: "PSL_UNKNOWN_DEFAULT_FUNCTION",
				message: 'Default function "cuid()" is not supported in SQL PSL provider v1. Use `cuid(2)` instead.',
				sourceId: context.sourceId,
				span: call.span
			}
		};
	}
	if (call.args.length > 1) {
		return invalidArgumentDiagnostic({
			context,
			span: call.span,
			message: 'Default function "cuid" accepts exactly one version argument: `cuid(2)`.'
		});
	}
	const version = parseIntegerArgument(call.args[0]?.raw ?? "");
	if (version === 2) return executionGenerator("cuid2");
	return invalidArgumentDiagnostic({
		context,
		span: call.args[0]?.span ?? call.span,
		message: 'Default function "cuid" supports only `cuid(2)` in SQL PSL provider v1.'
	});
}
1681
/** Lowers `ulid()` — no arguments allowed — to the ulid execution generator. */
function lowerUlid(input) {
	const argError = expectNoArgs({
		call: input.call,
		context: input.context,
		usage: "`ulid()`"
	});
	if (argError !== void 0) return argError;
	return executionGenerator("ulid");
}
/**
 * Lowers `nanoid()` / `nanoid(size)` to the nanoid execution generator.
 * The optional size must be an integer in [2, 255].
 */
function lowerNanoid(input) {
	const { call, context } = input;
	if (call.args.length === 0) return executionGenerator("nanoid");
	if (call.args.length > 1) {
		return invalidArgumentDiagnostic({
			context,
			span: call.span,
			message: 'Default function "nanoid" accepts at most one size argument: `nanoid()` or `nanoid(<2-255>)`.'
		});
	}
	const size = parseIntegerArgument(call.args[0]?.raw ?? "");
	const isValidSize = size !== void 0 && size >= 2 && size <= 255;
	if (isValidSize) return executionGenerator("nanoid", { size });
	return invalidArgumentDiagnostic({
		context,
		span: call.args[0]?.span ?? call.span,
		message: 'Default function "nanoid" size argument must be an integer between 2 and 255.'
	});
}
1705
/**
 * Lowers `dbgenerated("<sql>")` to a storage default whose expression is the
 * raw SQL string. Requires exactly one non-empty string-literal argument.
 */
function lowerDbgenerated(input) {
	const { call, context } = input;
	if (call.args.length !== 1) {
		return invalidArgumentDiagnostic({
			context,
			span: call.span,
			message: 'Default function "dbgenerated" requires exactly one string argument: `dbgenerated("...")`.'
		});
	}
	const argSpan = call.args[0]?.span ?? call.span;
	const expression = parseStringLiteral(call.args[0]?.raw ?? "");
	if (expression === void 0) {
		return invalidArgumentDiagnostic({
			context,
			span: argSpan,
			message: 'Default function "dbgenerated" argument must be a string literal.'
		});
	}
	if (expression.trim().length === 0) {
		return invalidArgumentDiagnostic({
			context,
			span: argSpan,
			message: 'Default function "dbgenerated" argument cannot be empty.'
		});
	}
	return {
		ok: true,
		value: {
			kind: "storage",
			defaultValue: { kind: "function", expression }
		}
	};
}
1733
/**
 * Registry entries for the PSL default functions supported by the Postgres
 * target. Each entry pairs the PSL function name with its lowering routine
 * and the usage signatures surfaced in diagnostics.
 */
const postgresDefaultFunctionRegistryEntries = [
	["autoincrement", {
		lower: lowerAutoincrement,
		usageSignatures: ["autoincrement()"]
	}],
	["now", {
		lower: lowerNow,
		usageSignatures: ["now()"]
	}],
	["uuid", {
		lower: lowerUuid,
		usageSignatures: [
			"uuid()",
			"uuid(4)",
			"uuid(7)"
		]
	}],
	["cuid", {
		lower: lowerCuid,
		usageSignatures: ["cuid(2)"]
	}],
	["ulid", {
		lower: lowerUlid,
		usageSignatures: ["ulid()"]
	}],
	["nanoid", {
		lower: lowerNanoid,
		usageSignatures: ["nanoid()", "nanoid(<2-255>)"]
	}],
	["dbgenerated", {
		lower: lowerDbgenerated,
		usageSignatures: ["dbgenerated(\"...\")"]
	}]
];
1767
/**
 * Maps PSL scalar type names (String, Int, …) to the Postgres codec id and
 * native type used to store them.
 */
const postgresPslScalarTypeDescriptors = new Map([
	["String", {
		codecId: "pg/text@1",
		nativeType: "text"
	}],
	["Boolean", {
		codecId: "pg/bool@1",
		nativeType: "bool"
	}],
	["Int", {
		codecId: "pg/int4@1",
		nativeType: "int4"
	}],
	["BigInt", {
		codecId: "pg/int8@1",
		nativeType: "int8"
	}],
	["Float", {
		codecId: "pg/float8@1",
		nativeType: "float8"
	}],
	["Decimal", {
		codecId: "pg/numeric@1",
		nativeType: "numeric"
	}],
	["DateTime", {
		codecId: "pg/timestamptz@1",
		nativeType: "timestamptz"
	}],
	["Json", {
		codecId: "pg/jsonb@1",
		nativeType: "jsonb"
	}],
	["Bytes", {
		codecId: "pg/bytea@1",
		nativeType: "bytea"
	}]
]);
1805
/** Creates a fresh Map copy of the default-function registry entries. */
function createPostgresDefaultFunctionRegistry() {
	return new Map(postgresDefaultFunctionRegistryEntries);
}
/**
 * Builds generator descriptors for mutation-time defaults from the builtin
 * generator registry metadata. Each descriptor resolves a generated-column
 * descriptor only for its own generator id, forwarding optional params.
 */
function createPostgresMutationDefaultGeneratorDescriptors() {
	return builtinGeneratorRegistryMetadata.map(({ id, applicableCodecIds }) => ({
		id,
		applicableCodecIds,
		resolveGeneratedColumnDescriptor: ({ generated }) => {
			// Ignore requests for other generators or non-generator defaults.
			if (generated.kind !== "generator" || generated.id !== id) return;
			const descriptor = resolveBuiltinGeneratedColumnDescriptor({
				id,
				...generated.params ? { params: generated.params } : {}
			});
			// Optional fields are only spread in when present, keeping the
			// returned descriptor sparse.
			return {
				codecId: descriptor.type.codecId,
				nativeType: descriptor.type.nativeType,
				...descriptor.type.typeRef ? { typeRef: descriptor.type.typeRef } : {},
				...descriptor.typeParams ? { typeParams: descriptor.typeParams } : {}
			};
		}
	}));
}
/** Creates a fresh Map copy of the PSL scalar type descriptor table. */
function createPostgresPslScalarTypeDescriptors() {
	return new Map(postgresPslScalarTypeDescriptors);
}
1830
/**
 * Control-plane adapter descriptor for the Postgres target: combines the
 * shared adapter descriptor metadata with PSL type descriptors, the
 * mutation-default registries, and a factory for the control adapter.
 */
var control_default$1 = {
	...postgresAdapterDescriptorMeta,
	operationSignatures: () => [],
	pslTypeDescriptors: () => ({ scalarTypeDescriptors: createPostgresPslScalarTypeDescriptors() }),
	controlMutationDefaults: () => ({
		defaultFunctionRegistry: createPostgresDefaultFunctionRegistry(),
		generatorDescriptors: createPostgresMutationDefaultGeneratorDescriptors()
	}),
	create() {
		return new PostgresControlAdapter();
	}
};
1842
+
1843
+ //#endregion
1844
+ //#region src/core/migrations/planner-identity-values.ts
1845
/**
 * Resolves the identity value (monoid neutral element) as a SQL literal for a column's type.
 * Checks codec hooks first (extensions can provide type-specific identity values),
 * then falls back to the built-in map.
 */
function resolveIdentityValue(column, codecHooks) {
	const { codecId, nativeType, typeParams } = column;
	if (codecId) {
		const hook = codecHooks.get(codecId)?.resolveIdentityValue;
		const hookValue = hook?.({
			nativeType,
			codecId,
			...ifDefined("typeParams", typeParams)
		});
		if (hookValue !== void 0) return hookValue;
	}
	return buildBuiltinIdentityValue(nativeType, typeParams);
}
1861
/** Fixed identity literals keyed by normalized native type name. */
const BUILTIN_IDENTITY_LITERALS = new Map([
	["text", "''"],
	["character", "''"],
	["bpchar", "''"],
	["character varying", "''"],
	["varchar", "''"],
	["int2", "0"],
	["int4", "0"],
	["int8", "0"],
	["integer", "0"],
	["bigint", "0"],
	["smallint", "0"],
	["float4", "0"],
	["float8", "0"],
	["real", "0"],
	["double precision", "0"],
	["numeric", "0"],
	["decimal", "0"],
	["bool", "false"],
	["boolean", "false"],
	["uuid", "'00000000-0000-0000-0000-000000000000'"],
	["json", "'{}'::json"],
	["jsonb", "'{}'::jsonb"],
	["date", "'epoch'"],
	["timestamp", "'epoch'"],
	["timestamptz", "'epoch'"],
	["timestamp with time zone", "'epoch'"],
	["timestamp without time zone", "'epoch'"],
	["time", "'00:00:00'"],
	["time without time zone", "'00:00:00'"],
	["timetz", "'00:00:00+00'"],
	["time with time zone", "'00:00:00+00'"],
	["interval", "'0'"],
	["bytea", "''::bytea"],
	["tsvector", "''::tsvector"],
	["bit varying", "B''"],
	["varbit", "B''"]
]);
/**
 * Returns the built-in identity value (monoid neutral element) as a SQL literal for the given
 * PostgreSQL native type — e.g. 0 for integers, '' for text, false for booleans.
 *
 * This is the planner's fallback when no codec hook provides a type-specific identity value.
 *
 * Returns null for unrecognized types (for example enums and extension-owned types without a
 * hook), which causes the planner to fall back to the empty-table precheck.
 *
 * @internal Exported for testing only.
 */
function buildBuiltinIdentityValue(nativeType, typeParams) {
	const normalized = normalizeIdentityValueNativeType(nativeType);
	// Any array type starts from the empty-array literal.
	if (normalized.endsWith("[]")) return "'{}'";
	// Fixed-width bit strings need a typmod-aware literal.
	if (normalized === "bit") return buildBitIdentityValue(typeParams);
	return BUILTIN_IDENTITY_LITERALS.get(normalized) ?? null;
}
/** Trims, collapses internal whitespace, and lowercases for lookup. */
function normalizeIdentityValueNativeType(nativeType) {
	return nativeType.trim().replace(/\s+/g, " ").toLowerCase();
}
/**
 * Identity literal for fixed-width bit columns: a zero string of the declared
 * length (B'0' when no length typmod is present); null for invalid lengths.
 */
function buildBitIdentityValue(typeParams) {
	const length = typeParams?.["length"];
	if (length === void 0) return "B'0'";
	const isValidLength = typeof length === "number" && Number.isInteger(length) && length > 0;
	return isValidLength ? `B'${"0".repeat(length)}'` : null;
}
1925
+
1926
+ //#endregion
1927
+ //#region src/core/migrations/planner-sql.ts
1928
/**
 * Renders a CREATE TABLE statement for a contract table: one definition line
 * per column (type, default, nullability) plus a PRIMARY KEY constraint when
 * the contract declares one.
 */
function buildCreateTableSql(qualifiedTableName, table, codecHooks) {
	const definitions = [];
	for (const [columnName, column] of Object.entries(table.columns)) {
		const parts = [
			quoteIdentifier(columnName),
			buildColumnTypeSql(column, codecHooks),
			buildColumnDefaultSql(column.default, column),
			column.nullable ? "" : "NOT NULL"
		];
		definitions.push(parts.filter(Boolean).join(" "));
	}
	if (table.primaryKey) {
		const pkColumns = table.primaryKey.columns.map(quoteIdentifier).join(", ");
		definitions.push(`PRIMARY KEY (${pkColumns})`);
	}
	return `CREATE TABLE ${qualifiedTableName} (\n ${definitions.join(",\n ")}\n)`;
}
1941
/**
 * Pattern for safe PostgreSQL type names.
 * Allows letters, digits, underscores, spaces (for "double precision", "character varying"),
 * and trailing [] for array types.
 */
const SAFE_NATIVE_TYPE_PATTERN = /^[a-zA-Z][a-zA-Z0-9_ ]*(\[\])?$/;
/** Throws when a contract native type name falls outside the safe pattern. */
function assertSafeNativeType(nativeType) {
	if (SAFE_NATIVE_TYPE_PATTERN.test(nativeType)) return;
	throw new Error(`Unsafe native type name in contract: "${nativeType}". Native type names must match /^[a-zA-Z][a-zA-Z0-9_ ]*(\\[\\])?\$/`);
}
/**
 * Sanity check against accidental SQL injection from malformed contract files.
 * Rejects semicolons, SQL comment tokens, and dollar-quoting.
 * Not a comprehensive security boundary — the contract is developer-authored.
 */
function assertSafeDefaultExpression(expression) {
	const hasForbiddenToken = expression.includes(";") || /--|\/\*|\$\$|\bSELECT\b/i.test(expression);
	if (hasForbiddenToken) {
		throw new Error(`Unsafe default expression in contract: "${expression}". Default expressions must not contain semicolons, SQL comment tokens, dollar-quoting, or subqueries.`);
	}
}
/**
 * Renders the SQL type for a column definition. Integer columns defaulting to
 * autoincrement() become SERIAL pseudo-types; referenced (typeRef) types are
 * quoted identifiers; otherwise the validated native type, expanded through
 * the codec hook when typeParams are present.
 */
function buildColumnTypeSql(column, codecHooks) {
	const isAutoincrement = column.default?.kind === "function" && column.default.expression === "autoincrement()";
	if (isAutoincrement) {
		switch (column.nativeType) {
			case "int4":
			case "integer": return "SERIAL";
			case "int8":
			case "bigint": return "BIGSERIAL";
			case "int2":
			case "smallint": return "SMALLSERIAL";
			default: break;
		}
	}
	if (column.typeRef) return quoteIdentifier(column.nativeType);
	assertSafeNativeType(column.nativeType);
	return renderParameterizedTypeSql(column, codecHooks) ?? column.nativeType;
}
/**
 * Expands a parameterized native type (e.g. varchar + { length }) through the
 * codec's expandNativeType hook. Returns null when there is nothing to expand;
 * throws when typeParams are present but no codec or hook can expand them.
 */
function renderParameterizedTypeSql(column, codecHooks) {
	if (!column.typeParams) return null;
	if (!column.codecId) {
		throw new Error(`Column declares typeParams for nativeType "${column.nativeType}" but has no codecId. Ensure the column is associated with a codec.`);
	}
	const hooks = codecHooks.get(column.codecId);
	if (!hooks?.expandNativeType) {
		throw new Error(`Column declares typeParams for nativeType "${column.nativeType}" but no expandNativeType hook is registered for codecId "${column.codecId}". Ensure the extension providing this codec is included in extensionPacks.`);
	}
	const expanded = hooks.expandNativeType({
		nativeType: column.nativeType,
		codecId: column.codecId,
		typeParams: column.typeParams
	});
	return expanded === column.nativeType ? null : expanded;
}
1981
/**
 * Renders the DEFAULT clause for a column definition, or "" when there is
 * nothing to emit (no default, or autoincrement — which is expressed through
 * the SERIAL type instead of a DEFAULT clause).
 */
function buildColumnDefaultSql(columnDefault, column) {
	if (!columnDefault) return "";
	if (columnDefault.kind === "literal") {
		return `DEFAULT ${renderDefaultLiteral(columnDefault.value, column)}`;
	}
	if (columnDefault.kind === "function") {
		if (columnDefault.expression === "autoincrement()") return "";
		assertSafeDefaultExpression(columnDefault.expression);
		return `DEFAULT (${columnDefault.expression})`;
	}
	if (columnDefault.kind === "sequence") {
		return `DEFAULT nextval(${quoteIdentifier(columnDefault.name)}::regclass)`;
	}
}
/**
 * Renders a literal default value as a SQL literal. Dates become quoted ISO
 * strings, tagged/native bigints render as bare digits, strings are escaped
 * and quoted, numbers/booleans are stringified, null is NULL, and anything
 * else is JSON-stringified — with a ::json/::jsonb cast for JSON columns.
 */
function renderDefaultLiteral(value, column) {
	const jsonNativeType = column?.nativeType === "json" || column?.nativeType === "jsonb" ? column.nativeType : null;
	if (value instanceof Date) return `'${escapeLiteral(value.toISOString())}'`;
	// Tagged bigints on JSON columns fall through to JSON serialization.
	if (jsonNativeType === null && isTaggedBigInt(value)) {
		if (!/^-?\d+$/.test(value.value)) {
			throw new Error(`Invalid tagged bigint value: "${value.value}" is not a valid integer`);
		}
		return value.value;
	}
	switch (typeof value) {
		case "bigint": return value.toString();
		case "string": return `'${escapeLiteral(value)}'`;
		case "number":
		case "boolean": return String(value);
		default: break;
	}
	if (value === null) return "NULL";
	const json = JSON.stringify(value);
	return jsonNativeType !== null ? `'${escapeLiteral(json)}'::${jsonNativeType}` : `'${escapeLiteral(json)}'`;
}
2007
/** Renders a schema-qualified, identifier-quoted table reference. */
function qualifyTableName(schema, table) {
	const quotedSchema = quoteIdentifier(schema);
	const quotedTable = quoteIdentifier(table);
	return `${quotedSchema}.${quotedTable}`;
}
/** Renders a single-quoted literal of the qualified name, for to_regclass(). */
function toRegclassLiteral(schema, name) {
	const qualified = `${quoteIdentifier(schema)}.${quoteIdentifier(name)}`;
	return `'${escapeLiteral(qualified)}'`;
}
2013
/**
 * SQL probe: does a constraint with the given name exist (or, with
 * exists=false, not exist) on the table? Matches by constraint name,
 * schema, and owning relation via pg_constraint.
 */
function constraintExistsCheck({ constraintName, schema, table, exists = true }) {
	return `SELECT ${exists ? "EXISTS" : "NOT EXISTS"} (
SELECT 1 FROM pg_constraint c
JOIN pg_namespace n ON c.connamespace = n.oid
WHERE c.conname = '${escapeLiteral(constraintName)}'
AND n.nspname = '${escapeLiteral(schema)}'
AND c.conrelid = to_regclass(${toRegclassLiteral(schema, table)})
)`;
}
/**
 * SQL probe: does the column exist (or, with exists=false, not exist)
 * according to information_schema.columns?
 */
function columnExistsCheck({ schema, table, column, exists = true }) {
	return `SELECT ${exists ? "" : "NOT "}EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(schema)}'
AND table_name = '${escapeLiteral(table)}'
AND column_name = '${escapeLiteral(column)}'
)`;
}
/**
 * SQL probe: does the column's is_nullable flag ('YES'/'NO') match the
 * expected nullability?
 */
function columnNullabilityCheck({ schema, table, column, nullable }) {
	const expected = nullable ? "YES" : "NO";
	return `SELECT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(schema)}'
AND table_name = '${escapeLiteral(table)}'
AND column_name = '${escapeLiteral(column)}'
AND is_nullable = '${expected}'
)`;
}
2042
/**
 * Maps contract native type names to the display form returned by PostgreSQL's
 * `format_type()`. Base types use short names in the contract (e.g., `int4`)
 * but `format_type()` returns SQL-standard names (e.g., `integer`).
 *
 * NOTE: The inverse mapping lives in `normalizeFormattedType` in control-adapter.ts.
 * These two maps must stay in sync. A shared bidirectional map in
 * @prisma-next/adapter-postgres would eliminate the drift risk.
 */
const FORMAT_TYPE_DISPLAY = new Map([
	["int2", "smallint"],
	["int4", "integer"],
	["int8", "bigint"],
	["float4", "real"],
	["float8", "double precision"],
	["bool", "boolean"],
	["timestamp", "timestamp without time zone"],
	["timestamptz", "timestamp with time zone"],
	["time", "time without time zone"],
	["timetz", "time with time zone"]
]);
/**
 * Builds the string that `format_type(atttypid, atttypmod)` would return for a
 * contract column. Used for postchecks — separate from `buildColumnTypeSql`
 * which produces DDL-safe strings (e.g., quoted identifiers, SERIAL).
 */
function buildExpectedFormatType(column, codecHooks) {
	const { nativeType, codecId, typeParams, typeRef } = column;
	if (typeParams && codecId) {
		const expand = codecHooks.get(codecId)?.expandNativeType;
		if (expand) return expand({ nativeType, codecId, typeParams });
	}
	if (typeRef) {
		// format_type() double-quotes referenced type names with upper-case letters.
		const hasUpperCase = nativeType !== nativeType.toLowerCase();
		return hasUpperCase ? `"${nativeType}"` : nativeType;
	}
	return FORMAT_TYPE_DISPLAY.get(nativeType) ?? nativeType;
}
2080
/** Checks that the column's full type (including typmods) matches the expected type via `format_type()`. */
function columnTypeCheck({ schema, table, column, expectedType }) {
	return `SELECT EXISTS (
SELECT 1
FROM pg_attribute a
JOIN pg_class c ON c.oid = a.attrelid
JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE n.nspname = '${escapeLiteral(schema)}'
AND c.relname = '${escapeLiteral(table)}'
AND a.attname = '${escapeLiteral(column)}'
AND format_type(a.atttypid, a.atttypmod) = '${escapeLiteral(expectedType)}'
AND NOT a.attisdropped
)`;
}
/** Checks that a column default exists (or does not exist) via `information_schema.columns.column_default`. */
function columnDefaultExistsCheck({ schema, table, column, exists = true }) {
	const nullCheck = exists ? "IS NOT NULL" : "IS NULL";
	return `SELECT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(schema)}'
AND table_name = '${escapeLiteral(table)}'
AND column_name = '${escapeLiteral(column)}'
AND column_default ${nullCheck}
)`;
}
2106
/** SQL probe that returns true when the table currently has no rows. */
function tableIsEmptyCheck(qualifiedTableName) {
	const probe = `SELECT 1 FROM ${qualifiedTableName} LIMIT 1`;
	return `SELECT NOT EXISTS (${probe})`;
}
2109
/** SQL probe: true when the column has no default expression attached. */
function columnHasNoDefaultCheck(opts) {
	return `SELECT NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(opts.schema)}'
AND table_name = '${escapeLiteral(opts.table)}'
AND column_name = '${escapeLiteral(opts.column)}'
AND column_default IS NOT NULL
)`;
}
2119
/**
 * Renders ALTER TABLE … ADD COLUMN for a contract column. When the contract
 * itself defines no default, an optional defaultLiteral (e.g. a temporary
 * identity value used to backfill a NOT NULL column) is used instead.
 */
function buildAddColumnSql(qualifiedTableName, columnName, column, codecHooks, defaultLiteral) {
	const typeSql = buildColumnTypeSql(column, codecHooks);
	let defaultSql = buildColumnDefaultSql(column.default, column);
	if (!defaultSql && defaultLiteral != null) defaultSql = `DEFAULT ${defaultLiteral}`;
	const clauses = [
		`ALTER TABLE ${qualifiedTableName}`,
		`ADD COLUMN ${quoteIdentifier(columnName)} ${typeSql}`,
		defaultSql,
		column.nullable ? "" : "NOT NULL"
	];
	return clauses.filter(Boolean).join(" ");
}
2129
/** SQL keywords for each canonical referential action. */
const REFERENTIAL_ACTION_SQL = {
	noAction: "NO ACTION",
	restrict: "RESTRICT",
	cascade: "CASCADE",
	setNull: "SET NULL",
	setDefault: "SET DEFAULT"
};
/**
 * Renders ALTER TABLE … ADD CONSTRAINT … FOREIGN KEY for a contract foreign
 * key, appending ON DELETE / ON UPDATE clauses only when the contract
 * specifies them. Throws on an unrecognized referential action.
 */
function buildForeignKeySql(schemaName, tableName, fkName, foreignKey) {
	let sql = `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
ADD CONSTRAINT ${quoteIdentifier(fkName)}
FOREIGN KEY (${foreignKey.columns.map(quoteIdentifier).join(", ")})
REFERENCES ${qualifyTableName(schemaName, foreignKey.references.table)} (${foreignKey.references.columns.map(quoteIdentifier).join(", ")})`;
	if (foreignKey.onDelete !== void 0) {
		const action = REFERENTIAL_ACTION_SQL[foreignKey.onDelete];
		if (!action) throw new Error(`Unknown referential action for onDelete: ${String(foreignKey.onDelete)}`);
		sql += `\nON DELETE ${action}`;
	}
	if (foreignKey.onUpdate !== void 0) {
		const action = REFERENTIAL_ACTION_SQL[foreignKey.onUpdate];
		if (!action) throw new Error(`Unknown referential action for onUpdate: ${String(foreignKey.onUpdate)}`);
		sql += `\nON UPDATE ${action}`;
	}
	return sql;
}
2153
+
2154
+ //#endregion
2155
+ //#region src/core/migrations/planner-target-details.ts
2156
/**
 * Builds the target-details payload for an operation target: schema, object
 * type, and name, plus the owning table when one is provided.
 */
function buildTargetDetails(objectType, name, schema, table) {
	return {
		schema,
		objectType,
		name,
		...ifDefined("table", table)
	};
}
2164
+
2165
+ //#endregion
2166
+ //#region src/core/migrations/planner-recipes.ts
2167
/**
 * Builds the identity fields (id, label, summary, target) for an add-column
 * operation. The id is the deterministic `column.<table>.<column>` string.
 */
function buildAddColumnOperationIdentity(schema, tableName, columnName) {
	return {
		id: `column.${tableName}.${columnName}`,
		label: `Add column ${columnName} to ${tableName}`,
		summary: `Adds column ${columnName} to table ${tableName}`,
		target: {
			id: "postgres",
			details: buildTargetDetails("table", tableName, schema)
		}
	};
}
2178
/**
 * Builds an additive operation that adds a NOT NULL column to a table that
 * may already contain rows: the column is added with a temporary default so
 * existing rows are backfilled, then the temporary default is dropped.
 *
 * Prechecks assert the column is absent beforehand; postchecks assert it
 * exists, is NOT NULL, and carries no default once the temporary one is gone.
 */
function buildAddNotNullColumnWithTemporaryDefaultOperation(options) {
	const { schema, tableName, columnName, column, codecHooks, temporaryDefault } = options;
	const qualified = qualifyTableName(schema, tableName);
	return {
		...buildAddColumnOperationIdentity(schema, tableName, columnName),
		operationClass: "additive",
		precheck: [{
			description: `ensure column "${columnName}" is missing`,
			sql: columnExistsCheck({
				schema,
				table: tableName,
				column: columnName,
				exists: false
			})
		}],
		execute: [{
			description: `add column "${columnName}"`,
			sql: buildAddColumnSql(qualified, columnName, column, codecHooks, temporaryDefault)
		}, {
			description: `drop temporary default from column "${columnName}"`,
			sql: `ALTER TABLE ${qualified} ALTER COLUMN ${quoteIdentifier(columnName)} DROP DEFAULT`
		}],
		postcheck: [
			{
				description: `verify column "${columnName}" exists`,
				sql: columnExistsCheck({
					schema,
					table: tableName,
					column: columnName
				})
			},
			{
				description: `verify column "${columnName}" is NOT NULL`,
				sql: columnNullabilityCheck({
					schema,
					table: tableName,
					column: columnName,
					nullable: false
				})
			},
			{
				description: `verify column "${columnName}" has no default after temporary default removal`,
				sql: columnHasNoDefaultCheck({
					schema,
					table: tableName,
					column: columnName
				})
			}
		]
	};
}
2229
+
2230
+ //#endregion
2231
+ //#region src/core/migrations/planner-reconciliation.ts
2232
/**
 * Builds the reconciliation plan for a set of schema drift issues. Additive
 * issues are skipped here; the remaining issues become operations when the
 * mode and policy permit, otherwise they surface as conflicts. Operations
 * are deduplicated by id and conflicts are sorted for stable output.
 */
function buildReconciliationPlan(options) {
	const { mode, policy, contract, schemaName, codecHooks, issues } = options;
	const operations = [];
	const conflicts = [];
	const seenOperationIds = new Set();
	const recordConflict = (issue) => {
		const conflict = convertIssueToConflict(issue);
		if (conflict) conflicts.push(conflict);
	};
	for (const issue of sortSchemaIssues(issues)) {
		if (isAdditiveIssue(issue)) continue;
		const operation = buildReconciliationOperationFromIssue({
			issue,
			contract,
			schemaName,
			mode,
			codecHooks
		});
		if (!operation) {
			recordConflict(issue);
			continue;
		}
		// A second issue mapping to an already-planned operation is dropped
		// entirely (neither re-planned nor surfaced as a conflict).
		if (seenOperationIds.has(operation.id)) continue;
		seenOperationIds.add(operation.id);
		if (policy.allowedOperationClasses.includes(operation.operationClass)) operations.push(operation);
		else recordConflict(issue);
	}
	return {
		operations,
		conflicts: conflicts.sort(conflictComparator)
	};
}
2265
/**
 * Returns true for issues that are purely additive (resolved by creating a
 * missing object) and therefore excluded from reconciliation planning here.
 * Mismatch issues count as additive only when the live side is absent.
 */
function isAdditiveIssue(issue) {
	const alwaysAdditive = [
		"type_missing",
		"type_values_mismatch",
		"missing_table",
		"missing_column",
		"dependency_missing"
	];
	if (alwaysAdditive.includes(issue.kind)) return true;
	if (issue.kind === "primary_key_mismatch") return issue.actual === void 0;
	const additiveWhenAbsent = [
		"unique_constraint_mismatch",
		"index_mismatch",
		"foreign_key_mismatch"
	];
	if (additiveWhenAbsent.includes(issue.kind)) return issue.indexOrConstraint === void 0;
	return false;
}
2279
// Maps a single non-additive schema issue to a concrete reconciliation
// operation, or returns null when the planning mode does not permit the
// required class of change (destructive / widening) or required fields are
// absent from the issue.
function buildReconciliationOperationFromIssue(options) {
	const { issue, contract, schemaName, mode, codecHooks } = options;
	const { kind, table, column, indexOrConstraint } = issue;
	switch (kind) {
		case "extra_table":
			return mode.allowDestructive && table ? buildDropTableOperation(schemaName, table) : null;
		case "extra_column":
			return mode.allowDestructive && table && column ? buildDropColumnOperation(schemaName, table, column) : null;
		case "extra_index":
			return mode.allowDestructive && table && indexOrConstraint ? buildDropIndexOperation(schemaName, table, indexOrConstraint) : null;
		case "extra_foreign_key":
		case "extra_unique_constraint": {
			if (!mode.allowDestructive || !table || !indexOrConstraint) return null;
			const constraintKind = kind === "extra_foreign_key" ? "foreignKey" : "unique";
			return buildDropConstraintOperation(schemaName, table, indexOrConstraint, constraintKind);
		}
		case "extra_primary_key": {
			if (!mode.allowDestructive || !table) return null;
			// Fall back to Postgres' conventional PK name when introspection
			// did not report one.
			return buildDropConstraintOperation(schemaName, table, indexOrConstraint ?? `${table}_pkey`, "primaryKey");
		}
		case "nullability_mismatch": {
			if (!table || !column) return null;
			// expected === "true" means the contract wants the column nullable,
			// so dropping NOT NULL is a widening change; the reverse is destructive.
			if (issue.expected === "true") return mode.allowWidening ? buildDropNotNullOperation(schemaName, table, column) : null;
			return mode.allowDestructive ? buildSetNotNullOperation(schemaName, table, column) : null;
		}
		case "type_mismatch": {
			if (!mode.allowDestructive || !table || !column) return null;
			const contractColumn = getContractColumn(contract, table, column);
			return contractColumn ? buildAlterColumnTypeOperation(schemaName, table, column, contractColumn, codecHooks) : null;
		}
		case "default_missing": {
			if (!table || !column) return null;
			const contractColumn = getContractColumn(contract, table, column);
			if (!contractColumn) return null;
			invariant(contractColumn.default !== undefined, `default_missing issue for "${table}"."${column}" but contract column has no default`);
			return buildDefaultOperation(schemaName, table, column, contractColumn, contractColumn.default, "additive", "Set");
		}
		case "default_mismatch": {
			if (!table || !column || !mode.allowWidening) return null;
			const contractColumn = getContractColumn(contract, table, column);
			if (!contractColumn) return null;
			invariant(contractColumn.default !== undefined, `default_mismatch issue for "${table}"."${column}" but contract column has no default`);
			return buildDefaultOperation(schemaName, table, column, contractColumn, contractColumn.default, "widening", "Change");
		}
		case "extra_default":
			return table && column && mode.allowDestructive ? buildDropDefaultOperation(schemaName, table, column) : null;
		default:
			return null;
	}
}
2334
// Looks up a column definition in the contract's storage model; returns null
// when either the table or the column is not declared.
function getContractColumn(contract, tableName, columnName) {
	const tableDefinition = contract.storage.tables[tableName];
	if (tableDefinition === undefined) return null;
	const columnDefinition = tableDefinition.columns[columnName];
	return columnDefinition === undefined ? null : columnDefinition;
}
2339
// Builds a destructive operation that drops a table present in the database
// but absent from the contract, with regclass-based pre/post checks.
function buildDropTableOperation(schemaName, tableName) {
	const regclass = toRegclassLiteral(schemaName, tableName);
	return {
		id: `dropTable.${tableName}`,
		label: `Drop table ${tableName}`,
		summary: `Drops extra table ${tableName}`,
		operationClass: "destructive",
		target: {
			id: "postgres",
			details: buildTargetDetails("table", tableName, schemaName)
		},
		precheck: [{
			description: `ensure table "${tableName}" exists`,
			sql: `SELECT to_regclass(${regclass}) IS NOT NULL`
		}],
		execute: [{
			description: `drop table "${tableName}"`,
			sql: `DROP TABLE ${qualifyTableName(schemaName, tableName)}`
		}],
		postcheck: [{
			description: `verify table "${tableName}" is removed`,
			sql: `SELECT to_regclass(${regclass}) IS NULL`
		}]
	};
}
2363
// Builds a destructive operation that drops a column present in the database
// but absent from the contract.
function buildDropColumnOperation(schemaName, tableName, columnName) {
	const columnLocation = {
		schema: schemaName,
		table: tableName,
		column: columnName
	};
	return {
		id: `dropColumn.${tableName}.${columnName}`,
		label: `Drop column ${columnName} from ${tableName}`,
		summary: `Drops extra column ${columnName} from table ${tableName}`,
		operationClass: "destructive",
		target: {
			id: "postgres",
			details: buildTargetDetails("column", columnName, schemaName, tableName)
		},
		precheck: [{
			description: `ensure column "${columnName}" exists`,
			sql: columnExistsCheck(columnLocation)
		}],
		execute: [{
			description: `drop column "${columnName}"`,
			sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)} DROP COLUMN ${quoteIdentifier(columnName)}`
		}],
		postcheck: [{
			description: `verify column "${columnName}" is removed`,
			sql: columnExistsCheck({ ...columnLocation, exists: false })
		}]
	};
}
2396
// Builds a destructive operation that drops an index not declared by the
// contract. Indexes are schema-scoped objects, so the qualified index name
// (not the table name) is what DROP INDEX and to_regclass receive.
function buildDropIndexOperation(schemaName, tableName, indexName) {
	const regclass = toRegclassLiteral(schemaName, indexName);
	return {
		id: `dropIndex.${tableName}.${indexName}`,
		label: `Drop index ${indexName} on ${tableName}`,
		summary: `Drops extra index ${indexName} on table ${tableName}`,
		operationClass: "destructive",
		target: {
			id: "postgres",
			details: buildTargetDetails("index", indexName, schemaName, tableName)
		},
		precheck: [{
			description: `ensure index "${indexName}" exists`,
			sql: `SELECT to_regclass(${regclass}) IS NOT NULL`
		}],
		execute: [{
			description: `drop index "${indexName}"`,
			sql: `DROP INDEX ${qualifyTableName(schemaName, indexName)}`
		}],
		postcheck: [{
			description: `verify index "${indexName}" is removed`,
			sql: `SELECT to_regclass(${regclass}) IS NULL`
		}]
	};
}
2420
// Builds a destructive operation that drops an extra table constraint
// (unique / foreign key / primary key, selected via constraintKind).
function buildDropConstraintOperation(schemaName, tableName, constraintName, constraintKind) {
	const constraintLocation = {
		constraintName,
		schema: schemaName,
		table: tableName
	};
	return {
		id: `dropConstraint.${tableName}.${constraintName}`,
		label: `Drop constraint ${constraintName} on ${tableName}`,
		summary: `Drops extra constraint ${constraintName} on table ${tableName}`,
		operationClass: "destructive",
		target: {
			id: "postgres",
			details: buildTargetDetails(constraintKind, constraintName, schemaName, tableName)
		},
		precheck: [{
			description: `ensure constraint "${constraintName}" exists`,
			sql: constraintExistsCheck(constraintLocation)
		}],
		execute: [{
			description: `drop constraint "${constraintName}"`,
			sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
DROP CONSTRAINT ${quoteIdentifier(constraintName)}`
		}],
		postcheck: [{
			description: `verify constraint "${constraintName}" is removed`,
			sql: constraintExistsCheck({ ...constraintLocation, exists: false })
		}]
	};
}
2454
// Builds a widening operation that removes a NOT NULL constraint so the
// column matches a nullable contract definition.
function buildDropNotNullOperation(schemaName, tableName, columnName) {
	const columnLocation = {
		schema: schemaName,
		table: tableName,
		column: columnName
	};
	return {
		id: `alterNullability.${tableName}.${columnName}`,
		label: `Relax nullability for ${columnName} on ${tableName}`,
		summary: `Drops NOT NULL constraint for ${columnName} on table ${tableName}`,
		operationClass: "widening",
		target: {
			id: "postgres",
			details: buildTargetDetails("column", columnName, schemaName, tableName)
		},
		precheck: [{
			description: `ensure column "${columnName}" exists`,
			sql: columnExistsCheck(columnLocation)
		}],
		execute: [{
			description: `drop NOT NULL from "${columnName}"`,
			sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
ALTER COLUMN ${quoteIdentifier(columnName)} DROP NOT NULL`
		}],
		postcheck: [{
			description: `verify "${columnName}" is nullable`,
			sql: columnNullabilityCheck({ ...columnLocation, nullable: true })
		}]
	};
}
2488
// Builds a destructive operation that enforces NOT NULL on an existing
// column. A data precheck guards against failing mid-migration: the column
// must contain no NULLs before the constraint is applied.
function buildSetNotNullOperation(schemaName, tableName, columnName) {
	const qualified = qualifyTableName(schemaName, tableName);
	const columnLocation = {
		schema: schemaName,
		table: tableName,
		column: columnName
	};
	return {
		id: `alterNullability.${tableName}.${columnName}`,
		label: `Enforce NOT NULL for ${columnName} on ${tableName}`,
		summary: `Sets NOT NULL on ${columnName} for table ${tableName}`,
		operationClass: "destructive",
		target: {
			id: "postgres",
			details: buildTargetDetails("column", columnName, schemaName, tableName)
		},
		precheck: [{
			description: `ensure column "${columnName}" exists`,
			sql: columnExistsCheck(columnLocation)
		}, {
			description: `ensure "${columnName}" has no NULL values`,
			sql: `SELECT NOT EXISTS (
SELECT 1 FROM ${qualified}
WHERE ${quoteIdentifier(columnName)} IS NULL
LIMIT 1
)`
		}],
		execute: [{
			description: `set NOT NULL on "${columnName}"`,
			sql: `ALTER TABLE ${qualified}
ALTER COLUMN ${quoteIdentifier(columnName)} SET NOT NULL`
		}],
		postcheck: [{
			description: `verify "${columnName}" is NOT NULL`,
			sql: columnNullabilityCheck({ ...columnLocation, nullable: false })
		}]
	};
}
2530
// Builds a destructive ALTER COLUMN ... TYPE operation, converting existing
// data with a USING cast to the contract's expected type. Carries an explicit
// TABLE_REWRITE warning in meta for surfacing to operators.
function buildAlterColumnTypeOperation(schemaName, tableName, columnName, column, codecHooks) {
	const qualified = qualifyTableName(schemaName, tableName);
	const expectedType = buildColumnTypeSql(column, codecHooks);
	const columnLocation = {
		schema: schemaName,
		table: tableName,
		column: columnName
	};
	return {
		id: `alterType.${tableName}.${columnName}`,
		label: `Alter type for ${columnName} on ${tableName}`,
		summary: `Changes type of ${columnName} to ${expectedType}`,
		operationClass: "destructive",
		target: {
			id: "postgres",
			details: buildTargetDetails("column", columnName, schemaName, tableName)
		},
		meta: {
			warning: "TABLE_REWRITE",
			detail: "ALTER COLUMN TYPE requires a full table rewrite and acquires an ACCESS EXCLUSIVE lock. On large tables, this can cause significant downtime."
		},
		precheck: [{
			description: `ensure column "${columnName}" exists`,
			sql: columnExistsCheck(columnLocation)
		}],
		execute: [{
			description: `alter type of "${columnName}"`,
			sql: `ALTER TABLE ${qualified}
ALTER COLUMN ${quoteIdentifier(columnName)}
TYPE ${expectedType}
USING ${quoteIdentifier(columnName)}::${expectedType}`
		}],
		postcheck: [{
			description: `verify column "${columnName}" has type ${expectedType}`,
			sql: columnTypeCheck({
				...columnLocation,
				// Postgres reports formatted type names; compare against the
				// codec's format-style rendering rather than the DDL rendering.
				expectedType: buildExpectedFormatType(column, codecHooks)
			})
		}]
	};
}
2572
// Builds an operation that sets (verb "Set", additive) or changes (verb
// "Change", widening) a column default. Returns null when the default cannot
// be rendered to SQL. Note: the operation id is `setDefault.*` for both verbs.
function buildDefaultOperation(schemaName, tableName, columnName, column, columnDefault, operationClass, verb) {
	const qualified = qualifyTableName(schemaName, tableName);
	const defaultClause = buildColumnDefaultSql(columnDefault, column);
	if (!defaultClause) return null;
	const columnLocation = {
		schema: schemaName,
		table: tableName,
		column: columnName
	};
	return {
		id: `setDefault.${tableName}.${columnName}`,
		label: `${verb} default for ${columnName} on ${tableName}`,
		summary: `${verb}s default on column ${columnName} of table ${tableName}`,
		operationClass,
		target: {
			id: "postgres",
			details: buildTargetDetails("column", columnName, schemaName, tableName)
		},
		precheck: [{
			description: `ensure column "${columnName}" exists`,
			sql: columnExistsCheck(columnLocation)
		}],
		execute: [{
			description: `${verb.toLowerCase()} default on "${columnName}"`,
			sql: `ALTER TABLE ${qualified}\nALTER COLUMN ${quoteIdentifier(columnName)} SET ${defaultClause}`
		}],
		postcheck: [{
			description: `verify column "${columnName}" has a default`,
			sql: columnDefaultExistsCheck({ ...columnLocation, exists: true })
		}]
	};
}
2609
// Builds a destructive operation removing a column default that the contract
// does not declare.
function buildDropDefaultOperation(schemaName, tableName, columnName) {
	const qualified = qualifyTableName(schemaName, tableName);
	const columnLocation = {
		schema: schemaName,
		table: tableName,
		column: columnName
	};
	return {
		id: `dropDefault.${tableName}.${columnName}`,
		label: `Drop default for ${columnName} on ${tableName}`,
		summary: `Drops default on column ${columnName} of table ${tableName}`,
		operationClass: "destructive",
		target: {
			id: "postgres",
			details: buildTargetDetails("column", columnName, schemaName, tableName)
		},
		precheck: [{
			description: `ensure column "${columnName}" exists`,
			sql: columnExistsCheck(columnLocation)
		}],
		execute: [{
			description: `drop default on "${columnName}"`,
			sql: `ALTER TABLE ${qualified}\nALTER COLUMN ${quoteIdentifier(columnName)} DROP DEFAULT`
		}],
		postcheck: [{
			description: `verify column "${columnName}" has no default`,
			sql: columnDefaultExistsCheck({ ...columnLocation, exists: false })
		}]
	};
}
2643
// Translates a schema issue that cannot be (or was not allowed to be)
// reconciled into a planner conflict; unknown kinds yield null.
function convertIssueToConflict(issue) {
	const { kind } = issue;
	if (kind === "type_mismatch") return buildConflict("typeMismatch", issue);
	if (kind === "nullability_mismatch") return buildConflict("nullabilityConflict", issue);
	if (kind === "foreign_key_mismatch") return buildConflict("foreignKeyConflict", issue);
	if (kind === "primary_key_mismatch" || kind === "unique_constraint_mismatch" || kind === "index_mismatch") {
		return buildConflict("indexIncompatible", issue);
	}
	// Defaults and extra objects require non-additive changes the policy
	// did not allow.
	const nonAdditiveKinds = [
		"default_missing",
		"default_mismatch",
		"extra_default",
		"extra_table",
		"extra_column",
		"extra_primary_key",
		"extra_foreign_key",
		"extra_unique_constraint",
		"extra_index"
	];
	return nonAdditiveKinds.includes(kind) ? buildConflict("missingButNonAdditive", issue) : null;
}
2663
/**
 * Assembles a planner conflict record from a schema issue.
 *
 * The expected/actual metadata is attached whenever either value is present.
 * Fix: the original gated meta on truthiness (`issue.expected || issue.actual`),
 * which silently dropped falsy-but-defined values such as an empty-string
 * expected/actual default; we now test for `undefined` explicitly, matching
 * the `ifDefined` semantics used to build the frozen meta object.
 *
 * @param kind  conflict kind identifier (e.g. "typeMismatch")
 * @param issue schema issue carrying message/location/expected/actual fields
 * @returns a conflict object with optional `location` and `meta`
 */
function buildConflict(kind, issue) {
	const location = buildConflictLocation(issue);
	const hasComparison = issue.expected !== undefined || issue.actual !== undefined;
	const meta = hasComparison ? Object.freeze({
		...ifDefined("expected", issue.expected),
		...ifDefined("actual", issue.actual)
	}) : undefined;
	return {
		kind,
		summary: issue.message,
		...ifDefined("location", location),
		...ifDefined("meta", meta)
	};
}
2676
/**
 * Returns a sorted copy of the issues (kind, then table, column, constraint)
 * so downstream operation/conflict output is stable.
 *
 * Fix: the original compared `kind` with `localeCompare`, whose ordering
 * depends on the host's locale/ICU data, while the other keys used
 * code-unit comparison — so identical inputs could produce differently
 * ordered plans on different machines. All keys now use the same
 * deterministic `compareStrings` (undefined sorts first).
 */
function sortSchemaIssues(issues) {
	return [...issues].sort((a, b) =>
		compareStrings(a.kind, b.kind) ||
		compareStrings(a.table, b.table) ||
		compareStrings(a.column, b.column) ||
		compareStrings(a.indexOrConstraint, b.indexOrConstraint));
}
2687
// Collects whichever of table/column/constraint the issue carries into a
// location object; returns undefined when none are present so callers can
// omit the field entirely.
function buildConflictLocation(issue) {
	const location = {
		...ifDefined("table", issue.table),
		...ifDefined("column", issue.column),
		...ifDefined("constraint", issue.indexOrConstraint)
	};
	const isEmpty = Object.keys(location).length === 0;
	return isEmpty ? undefined : location;
}
2695
// Total ordering for conflicts: kind, then location (table, column,
// constraint), then summary as the final tiebreaker.
function conflictComparator(a, b) {
	if (a.kind !== b.kind) return a.kind < b.kind ? -1 : 1;
	const locA = a.location ?? {};
	const locB = b.location ?? {};
	return compareStrings(locA.table, locB.table) ||
		compareStrings(locA.column, locB.column) ||
		compareStrings(locA.constraint, locB.constraint) ||
		compareStrings(a.summary, b.summary);
}
2707
// Code-unit string comparison where undefined sorts before any defined
// value; equal inputs (including both undefined) compare as 0.
function compareStrings(a, b) {
	if (a === b) return 0;
	if (a === undefined || (b !== undefined && a < b)) return -1;
	return 1;
}
2713
+
2714
+ //#endregion
2715
+ //#region src/core/migrations/planner.ts
2716
const DEFAULT_PLANNER_CONFIG = { defaultSchema: "public" };
/**
 * Factory for the Postgres migration planner. Caller-supplied config keys
 * override the defaults above.
 */
function createPostgresMigrationPlanner(config = {}) {
	const resolvedConfig = {
		...DEFAULT_PLANNER_CONFIG,
		...config
	};
	return new PostgresMigrationPlanner(resolvedConfig);
}
2723
+ var PostgresMigrationPlanner = class {
2724
+ constructor(config) {
2725
+ this.config = config;
2726
+ }
2727
+ plan(options) {
2728
+ const schemaName = options.schemaName ?? this.config.defaultSchema;
2729
+ const policyResult = this.ensureAdditivePolicy(options.policy);
2730
+ if (policyResult) return policyResult;
2731
+ const planningMode = this.resolvePlanningMode(options.policy);
2732
+ const schemaIssues = this.collectSchemaIssues(options, planningMode.includeExtraObjects);
2733
+ const codecHooks = extractCodecControlHooks(options.frameworkComponents);
2734
+ const operations = [];
2735
+ const reconciliationPlan = buildReconciliationPlan({
2736
+ contract: options.contract,
2737
+ issues: schemaIssues,
2738
+ schemaName,
2739
+ mode: planningMode,
2740
+ policy: options.policy,
2741
+ codecHooks
2742
+ });
2743
+ if (reconciliationPlan.conflicts.length > 0) return plannerFailure(reconciliationPlan.conflicts);
2744
+ const storageTypePlan = this.buildStorageTypeOperations(options, schemaName, codecHooks);
2745
+ if (storageTypePlan.conflicts.length > 0) return plannerFailure(storageTypePlan.conflicts);
2746
+ const sortedTables = sortedEntries(options.contract.storage.tables);
2747
+ const schemaLookups = buildSchemaLookupMap(options.schema);
2748
+ operations.push(...this.buildDatabaseDependencyOperations(options), ...storageTypePlan.operations, ...reconciliationPlan.operations, ...this.buildTableOperations(sortedTables, options.schema, schemaName, codecHooks), ...this.buildColumnOperations(sortedTables, options.schema, schemaLookups, schemaName, codecHooks), ...this.buildPrimaryKeyOperations(sortedTables, options.schema, schemaName), ...this.buildUniqueOperations(sortedTables, schemaLookups, schemaName), ...this.buildIndexOperations(sortedTables, schemaLookups, schemaName), ...this.buildFkBackingIndexOperations(sortedTables, schemaLookups, schemaName), ...this.buildForeignKeyOperations(sortedTables, schemaLookups, schemaName));
2749
+ return plannerSuccess(createMigrationPlan({
2750
+ targetId: "postgres",
2751
+ origin: null,
2752
+ destination: {
2753
+ storageHash: options.contract.storageHash,
2754
+ ...ifDefined("profileHash", options.contract.profileHash)
2755
+ },
2756
+ operations
2757
+ }));
2758
+ }
2759
+ ensureAdditivePolicy(policy) {
2760
+ if (!policy.allowedOperationClasses.includes("additive")) return plannerFailure([{
2761
+ kind: "unsupportedOperation",
2762
+ summary: "Migration planner requires additive operations be allowed",
2763
+ why: "The planner requires the \"additive\" operation class to be allowed in the policy."
2764
+ }]);
2765
+ return null;
2766
+ }
2767
+ /**
2768
+ * Builds migration operations from component-owned database dependencies.
2769
+ * These operations install database-side persistence structures declared by components.
2770
+ */
2771
+ buildDatabaseDependencyOperations(options) {
2772
+ const dependencies = this.collectDependencies(options);
2773
+ const operations = [];
2774
+ const seenDependencyIds = /* @__PURE__ */ new Set();
2775
+ const seenOperationIds = /* @__PURE__ */ new Set();
2776
+ const installedIds = new Set(options.schema.dependencies.map((d) => d.id));
2777
+ for (const dependency of dependencies) {
2778
+ if (seenDependencyIds.has(dependency.id)) continue;
2779
+ seenDependencyIds.add(dependency.id);
2780
+ if (installedIds.has(dependency.id)) continue;
2781
+ for (const installOp of dependency.install) {
2782
+ if (seenOperationIds.has(installOp.id)) continue;
2783
+ seenOperationIds.add(installOp.id);
2784
+ operations.push(installOp);
2785
+ }
2786
+ }
2787
+ return operations;
2788
+ }
2789
+ buildStorageTypeOperations(options, schemaName, codecHooks) {
2790
+ const operations = [];
2791
+ const conflicts = [];
2792
+ const storageTypes = options.contract.storage.types ?? {};
2793
+ for (const [typeName, typeInstance] of sortedEntries(storageTypes)) {
2794
+ const planResult = codecHooks.get(typeInstance.codecId)?.planTypeOperations?.({
2795
+ typeName,
2796
+ typeInstance,
2797
+ contract: options.contract,
2798
+ schema: options.schema,
2799
+ schemaName,
2800
+ policy: options.policy
2801
+ });
2802
+ if (!planResult) continue;
2803
+ for (const operation of planResult.operations) {
2804
+ if (!options.policy.allowedOperationClasses.includes(operation.operationClass)) {
2805
+ conflicts.push({
2806
+ kind: "missingButNonAdditive",
2807
+ summary: `Storage type "${typeName}" requires "${operation.operationClass}" operation "${operation.id}"`,
2808
+ location: { type: typeName }
2809
+ });
2810
+ continue;
2811
+ }
2812
+ operations.push({
2813
+ ...operation,
2814
+ target: {
2815
+ id: operation.target.id,
2816
+ details: this.buildTargetDetails("type", typeName, schemaName)
2817
+ }
2818
+ });
2819
+ }
2820
+ }
2821
+ return {
2822
+ operations,
2823
+ conflicts
2824
+ };
2825
+ }
2826
+ collectDependencies(options) {
2827
+ return sortDependencies(collectInitDependencies(options.frameworkComponents).filter(isPostgresPlannerDependency));
2828
+ }
2829
+ buildTableOperations(tables, schema, schemaName, codecHooks) {
2830
+ const operations = [];
2831
+ for (const [tableName, table] of tables) {
2832
+ if (schema.tables[tableName]) continue;
2833
+ const qualified = qualifyTableName(schemaName, tableName);
2834
+ operations.push({
2835
+ id: `table.${tableName}`,
2836
+ label: `Create table ${tableName}`,
2837
+ summary: `Creates table ${tableName} with required columns`,
2838
+ operationClass: "additive",
2839
+ target: {
2840
+ id: "postgres",
2841
+ details: this.buildTargetDetails("table", tableName, schemaName)
2842
+ },
2843
+ precheck: [{
2844
+ description: `ensure table "${tableName}" does not exist`,
2845
+ sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, tableName)}) IS NULL`
2846
+ }],
2847
+ execute: [{
2848
+ description: `create table "${tableName}"`,
2849
+ sql: buildCreateTableSql(qualified, table, codecHooks)
2850
+ }],
2851
+ postcheck: [{
2852
+ description: `verify table "${tableName}" exists`,
2853
+ sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, tableName)}) IS NOT NULL`
2854
+ }]
2855
+ });
2856
+ }
2857
+ return operations;
2858
+ }
2859
+ buildColumnOperations(tables, schema, schemaLookups, schemaName, codecHooks) {
2860
+ const operations = [];
2861
+ for (const [tableName, table] of tables) {
2862
+ const schemaTable = schema.tables[tableName];
2863
+ if (!schemaTable) continue;
2864
+ const schemaLookup = schemaLookups.get(tableName);
2865
+ for (const [columnName, column] of sortedEntries(table.columns)) {
2866
+ if (schemaTable.columns[columnName]) continue;
2867
+ operations.push(this.buildAddColumnOperation({
2868
+ schema: schemaName,
2869
+ tableName,
2870
+ table,
2871
+ schemaTable,
2872
+ schemaLookup,
2873
+ columnName,
2874
+ column,
2875
+ codecHooks
2876
+ }));
2877
+ }
2878
+ }
2879
+ return operations;
2880
+ }
2881
+ buildAddColumnOperation(options) {
2882
+ const { schema, tableName, table, schemaTable, schemaLookup, columnName, column, codecHooks } = options;
2883
+ const notNull = column.nullable === false;
2884
+ const hasDefault = column.default !== void 0;
2885
+ const needsTemporaryDefault = notNull && !hasDefault;
2886
+ const temporaryDefault = needsTemporaryDefault ? resolveIdentityValue(column, codecHooks) : null;
2887
+ const canUseSharedTemporaryDefault = needsTemporaryDefault && temporaryDefault !== null && canUseSharedTemporaryDefaultStrategy({
2888
+ table,
2889
+ schemaTable,
2890
+ schemaLookup,
2891
+ columnName
2892
+ });
2893
+ if (canUseSharedTemporaryDefault) return buildAddNotNullColumnWithTemporaryDefaultOperation({
2894
+ schema,
2895
+ tableName,
2896
+ columnName,
2897
+ column,
2898
+ codecHooks,
2899
+ temporaryDefault
2900
+ });
2901
+ const qualified = qualifyTableName(schema, tableName);
2902
+ const requiresEmptyTableCheck = needsTemporaryDefault && !canUseSharedTemporaryDefault;
2903
+ return {
2904
+ ...buildAddColumnOperationIdentity(schema, tableName, columnName),
2905
+ operationClass: "additive",
2906
+ precheck: [{
2907
+ description: `ensure column "${columnName}" is missing`,
2908
+ sql: columnExistsCheck({
2909
+ schema,
2910
+ table: tableName,
2911
+ column: columnName,
2912
+ exists: false
2913
+ })
2914
+ }, ...requiresEmptyTableCheck ? [{
2915
+ description: `ensure table "${tableName}" is empty before adding NOT NULL column without default`,
2916
+ sql: tableIsEmptyCheck(qualified)
2917
+ }] : []],
2918
+ execute: [{
2919
+ description: `add column "${columnName}"`,
2920
+ sql: buildAddColumnSql(qualified, columnName, column, codecHooks)
2921
+ }],
2922
+ postcheck: [{
2923
+ description: `verify column "${columnName}" exists`,
2924
+ sql: columnExistsCheck({
2925
+ schema,
2926
+ table: tableName,
2927
+ column: columnName
2928
+ })
2929
+ }, ...notNull ? [{
2930
+ description: `verify column "${columnName}" is NOT NULL`,
2931
+ sql: columnNullabilityCheck({
2932
+ schema,
2933
+ table: tableName,
2934
+ column: columnName,
2935
+ nullable: false
2936
+ })
2937
+ }] : []]
2938
+ };
2939
+ }
2940
+ buildPrimaryKeyOperations(tables, schema, schemaName) {
2941
+ const operations = [];
2942
+ for (const [tableName, table] of tables) {
2943
+ if (!table.primaryKey) continue;
2944
+ const schemaTable = schema.tables[tableName];
2945
+ if (!schemaTable || schemaTable.primaryKey) continue;
2946
+ const constraintName = table.primaryKey.name ?? `${tableName}_pkey`;
2947
+ operations.push({
2948
+ id: `primaryKey.${tableName}.${constraintName}`,
2949
+ label: `Add primary key ${constraintName} on ${tableName}`,
2950
+ summary: `Adds primary key ${constraintName} on ${tableName}`,
2951
+ operationClass: "additive",
2952
+ target: {
2953
+ id: "postgres",
2954
+ details: this.buildTargetDetails("table", tableName, schemaName)
2955
+ },
2956
+ precheck: [{
2957
+ description: `ensure primary key does not exist on "${tableName}"`,
2958
+ sql: tableHasPrimaryKeyCheck(schemaName, tableName, false)
2959
+ }],
2960
+ execute: [{
2961
+ description: `add primary key "${constraintName}"`,
2962
+ sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
2963
+ ADD CONSTRAINT ${quoteIdentifier(constraintName)}
2964
+ PRIMARY KEY (${table.primaryKey.columns.map(quoteIdentifier).join(", ")})`
2965
+ }],
2966
+ postcheck: [{
2967
+ description: `verify primary key "${constraintName}" exists`,
2968
+ sql: tableHasPrimaryKeyCheck(schemaName, tableName, true, constraintName)
2969
+ }]
2970
+ });
2971
+ }
2972
+ return operations;
2973
+ }
2974
+ buildUniqueOperations(tables, schemaLookups, schemaName) {
2975
+ const operations = [];
2976
+ for (const [tableName, table] of tables) {
2977
+ const lookup = schemaLookups.get(tableName);
2978
+ for (const unique of table.uniques) {
2979
+ if (lookup && hasUniqueConstraint(lookup, unique.columns)) continue;
2980
+ const constraintName = unique.name ?? `${tableName}_${unique.columns.join("_")}_key`;
2981
+ operations.push({
2982
+ id: `unique.${tableName}.${constraintName}`,
2983
+ label: `Add unique constraint ${constraintName} on ${tableName}`,
2984
+ summary: `Adds unique constraint ${constraintName} on ${tableName}`,
2985
+ operationClass: "additive",
2986
+ target: {
2987
+ id: "postgres",
2988
+ details: this.buildTargetDetails("unique", constraintName, schemaName, tableName)
2989
+ },
2990
+ precheck: [{
2991
+ description: `ensure unique constraint "${constraintName}" is missing`,
2992
+ sql: constraintExistsCheck({
2993
+ constraintName,
2994
+ schema: schemaName,
2995
+ table: tableName,
2996
+ exists: false
2997
+ })
2998
+ }],
2999
+ execute: [{
3000
+ description: `add unique constraint "${constraintName}"`,
3001
+ sql: `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
3002
+ ADD CONSTRAINT ${quoteIdentifier(constraintName)}
3003
+ UNIQUE (${unique.columns.map(quoteIdentifier).join(", ")})`
3004
+ }],
3005
+ postcheck: [{
3006
+ description: `verify unique constraint "${constraintName}" exists`,
3007
+ sql: constraintExistsCheck({
3008
+ constraintName,
3009
+ schema: schemaName,
3010
+ table: tableName
3011
+ })
3012
+ }]
3013
+ });
3014
+ }
3015
+ }
3016
+ return operations;
3017
+ }
3018
+ buildIndexOperations(tables, schemaLookups, schemaName) {
3019
+ const operations = [];
3020
+ for (const [tableName, table] of tables) {
3021
+ const lookup = schemaLookups.get(tableName);
3022
+ for (const index of table.indexes) {
3023
+ if (lookup && hasIndex(lookup, index.columns)) continue;
3024
+ const indexName = index.name ?? defaultIndexName(tableName, index.columns);
3025
+ operations.push({
3026
+ id: `index.${tableName}.${indexName}`,
3027
+ label: `Create index ${indexName} on ${tableName}`,
3028
+ summary: `Creates index ${indexName} on ${tableName}`,
3029
+ operationClass: "additive",
3030
+ target: {
3031
+ id: "postgres",
3032
+ details: this.buildTargetDetails("index", indexName, schemaName, tableName)
3033
+ },
3034
+ precheck: [{
3035
+ description: `ensure index "${indexName}" is missing`,
3036
+ sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NULL`
3037
+ }],
3038
+ execute: [{
3039
+ description: `create index "${indexName}"`,
3040
+ sql: `CREATE INDEX ${quoteIdentifier(indexName)} ON ${qualifyTableName(schemaName, tableName)} (${index.columns.map(quoteIdentifier).join(", ")})`
3041
+ }],
3042
+ postcheck: [{
3043
+ description: `verify index "${indexName}" exists`,
3044
+ sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NOT NULL`
3045
+ }]
3046
+ });
3047
+ }
3048
+ }
3049
+ return operations;
3050
+ }
3051
+ /**
3052
+ * Generates FK-backing index operations for FKs with `index: true`,
3053
+ * but only when no matching user-declared index exists in `contractTable.indexes`.
3054
+ */
3055
+ buildFkBackingIndexOperations(tables, schemaLookups, schemaName) {
3056
+ const operations = [];
3057
+ for (const [tableName, table] of tables) {
3058
+ const lookup = schemaLookups.get(tableName);
3059
+ const declaredIndexColumns = new Set(table.indexes.map((idx) => idx.columns.join(",")));
3060
+ for (const fk of table.foreignKeys) {
3061
+ if (fk.index === false) continue;
3062
+ if (declaredIndexColumns.has(fk.columns.join(","))) continue;
3063
+ if (lookup && hasIndex(lookup, fk.columns)) continue;
3064
+ const indexName = defaultIndexName(tableName, fk.columns);
3065
+ operations.push({
3066
+ id: `index.${tableName}.${indexName}`,
3067
+ label: `Create FK-backing index ${indexName} on ${tableName}`,
3068
+ summary: `Creates FK-backing index ${indexName} on ${tableName}`,
3069
+ operationClass: "additive",
3070
+ target: {
3071
+ id: "postgres",
3072
+ details: this.buildTargetDetails("index", indexName, schemaName, tableName)
3073
+ },
3074
+ precheck: [{
3075
+ description: `ensure index "${indexName}" is missing`,
3076
+ sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NULL`
3077
+ }],
3078
+ execute: [{
3079
+ description: `create FK-backing index "${indexName}"`,
3080
+ sql: `CREATE INDEX ${quoteIdentifier(indexName)} ON ${qualifyTableName(schemaName, tableName)} (${fk.columns.map(quoteIdentifier).join(", ")})`
3081
+ }],
3082
+ postcheck: [{
3083
+ description: `verify index "${indexName}" exists`,
3084
+ sql: `SELECT to_regclass(${toRegclassLiteral(schemaName, indexName)}) IS NOT NULL`
3085
+ }]
3086
+ });
3087
+ }
3088
+ }
3089
+ return operations;
3090
+ }
3091
+ buildForeignKeyOperations(tables, schemaLookups, schemaName) {
3092
+ const operations = [];
3093
+ for (const [tableName, table] of tables) {
3094
+ const lookup = schemaLookups.get(tableName);
3095
+ for (const foreignKey of table.foreignKeys) {
3096
+ if (foreignKey.constraint === false) continue;
3097
+ if (lookup && hasForeignKey(lookup, foreignKey)) continue;
3098
+ const fkName = foreignKey.name ?? `${tableName}_${foreignKey.columns.join("_")}_fkey`;
3099
+ operations.push({
3100
+ id: `foreignKey.${tableName}.${fkName}`,
3101
+ label: `Add foreign key ${fkName} on ${tableName}`,
3102
+ summary: `Adds foreign key ${fkName} referencing ${foreignKey.references.table}`,
3103
+ operationClass: "additive",
3104
+ target: {
3105
+ id: "postgres",
3106
+ details: this.buildTargetDetails("foreignKey", fkName, schemaName, tableName)
3107
+ },
3108
+ precheck: [{
3109
+ description: `ensure foreign key "${fkName}" is missing`,
3110
+ sql: constraintExistsCheck({
3111
+ constraintName: fkName,
3112
+ schema: schemaName,
3113
+ table: tableName,
3114
+ exists: false
3115
+ })
3116
+ }],
3117
+ execute: [{
3118
+ description: `add foreign key "${fkName}"`,
3119
+ sql: buildForeignKeySql(schemaName, tableName, fkName, foreignKey)
3120
+ }],
3121
+ postcheck: [{
3122
+ description: `verify foreign key "${fkName}" exists`,
3123
+ sql: constraintExistsCheck({
3124
+ constraintName: fkName,
3125
+ schema: schemaName,
3126
+ table: tableName
3127
+ })
3128
+ }]
3129
+ });
3130
+ }
3131
+ }
3132
+ return operations;
3133
+ }
3134
+ buildTargetDetails(objectType, name, schema, table) {
3135
+ return buildTargetDetails(objectType, name, schema, table);
3136
+ }
3137
+ resolvePlanningMode(policy) {
3138
+ const allowWidening = policy.allowedOperationClasses.includes("widening");
3139
+ const allowDestructive = policy.allowedOperationClasses.includes("destructive");
3140
+ return {
3141
+ includeExtraObjects: allowWidening || allowDestructive,
3142
+ allowWidening,
3143
+ allowDestructive
3144
+ };
3145
+ }
3146
+ collectSchemaIssues(options, strict) {
3147
+ return verifySqlSchema({
3148
+ contract: options.contract,
3149
+ schema: options.schema,
3150
+ strict,
3151
+ typeMetadataRegistry: /* @__PURE__ */ new Map(),
3152
+ frameworkComponents: options.frameworkComponents,
3153
+ normalizeDefault: parsePostgresDefault,
3154
+ normalizeNativeType: normalizeSchemaNativeType
3155
+ }).schema.issues;
3156
+ }
3157
+ };
3158
/**
 * Decides whether a new column can be backfilled with a shared temporary
 * default. This is only safe when every PK/unique/FK constraint involving
 * the column already exists in the live schema; otherwise duplicated
 * placeholder values would violate the constraint when it is created.
 */
function canUseSharedTemporaryDefaultStrategy(options) {
	const { table, schemaTable, schemaLookup, columnName } = options;
	// Column is part of the contract PK but the live table has no PK yet.
	if (table.primaryKey?.columns.includes(columnName) && !schemaTable.primaryKey) return false;
	const uniquesSatisfied = table.uniques
		.filter((unique) => unique.columns.includes(columnName))
		.every((unique) => schemaLookup != null && hasUniqueConstraint(schemaLookup, unique.columns));
	if (!uniquesSatisfied) return false;
	return table.foreignKeys
		.filter((fk) => fk.constraint !== false && fk.columns.includes(columnName))
		.every((fk) => schemaLookup != null && hasForeignKey(schemaLookup, fk));
}
/** Returns a new array of dependencies sorted by id; the input is not mutated. */
function sortDependencies(dependencies) {
	const copy = Array.from(dependencies);
	copy.sort((left, right) => left.id.localeCompare(right.id));
	return copy;
}
/** True when every install operation of the dependency targets Postgres (vacuously true when empty). */
function isPostgresPlannerDependency(dependency) {
	for (const operation of dependency.install) {
		if (operation.target.id !== "postgres") return false;
	}
	return true;
}
/** Returns the record's [key, value] entries sorted by key for deterministic output. */
function sortedEntries(record) {
	const entries = Object.entries(record);
	entries.sort(([leftKey], [rightKey]) => leftKey.localeCompare(rightKey));
	return entries;
}
/**
 * Builds a boolean SQL probe checking whether `schema.table` has (or, when
 * `exists` is false, lacks) a primary key, optionally restricted to a
 * specific constraint-backing index name.
 */
function tableHasPrimaryKeyCheck(schema, table, exists, constraintName) {
	const negation = exists ? "" : "NOT ";
	const constraintFilter = constraintName ? `AND c2.relname = '${escapeLiteral(constraintName)}'` : "";
	return `SELECT ${negation}EXISTS (
		SELECT 1
		FROM pg_index i
		JOIN pg_class c ON c.oid = i.indrelid
		JOIN pg_namespace n ON n.oid = c.relnamespace
		LEFT JOIN pg_class c2 ON c2.oid = i.indexrelid
		WHERE n.nspname = '${escapeLiteral(schema)}'
		AND c.relname = '${escapeLiteral(table)}'
		AND i.indisprimary
		${constraintFilter}
	)`;
}
/** Precomputes a per-table constraint/index lookup for every table in the introspected schema. */
function buildSchemaLookupMap(schema) {
	const map = new Map();
	for (const [tableName, table] of Object.entries(schema.tables)) {
		map.set(tableName, buildSchemaTableLookup(table));
	}
	return map;
}
/**
 * Builds fast-membership sets for a schema table: unique constraints, all
 * indexes, unique indexes, and foreign keys. Keys are comma-joined column
 * lists; FK keys additionally encode the referenced table and columns.
 */
function buildSchemaTableLookup(table) {
	const columnKey = (columns) => columns.join(",");
	const uniqueKeys = new Set(table.uniques.map((u) => columnKey(u.columns)));
	const indexKeys = new Set(table.indexes.map((i) => columnKey(i.columns)));
	const uniqueIndexKeys = new Set(
		table.indexes.filter((i) => i.unique).map((i) => columnKey(i.columns))
	);
	const fkKeys = new Set(
		table.foreignKeys.map((fk) => `${columnKey(fk.columns)}|${fk.referencedTable}|${columnKey(fk.referencedColumns)}`)
	);
	return { uniqueKeys, indexKeys, uniqueIndexKeys, fkKeys };
}
/** True when the column set is covered by a unique constraint or a unique index. */
function hasUniqueConstraint(lookup, columns) {
	const key = columns.join(",");
	if (lookup.uniqueKeys.has(key)) return true;
	return lookup.uniqueIndexKeys.has(key);
}
/** True when the column set has an index; a unique constraint counts since Postgres backs it with one. */
function hasIndex(lookup, columns) {
	const key = columns.join(",");
	if (lookup.indexKeys.has(key)) return true;
	return lookup.uniqueKeys.has(key);
}
/** True when the lookup contains an FK matching columns, referenced table, and referenced columns. */
function hasForeignKey(lookup, fk) {
	const key = `${fk.columns.join(",")}|${fk.references.table}|${fk.references.columns.join(",")}`;
	return lookup.fkKeys.has(key);
}
+
3220
+ //#endregion
3221
+ //#region src/core/migrations/statement-builders.ts
3222
/** Idempotent DDL ensuring the control schema used for contract bookkeeping exists. */
const ensurePrismaContractSchemaStatement = {
	sql: "create schema if not exists prisma_contract",
	params: []
};
/** Idempotent DDL for the single-row marker table recording the currently applied contract. */
const ensureMarkerTableStatement = {
	sql: `create table if not exists prisma_contract.marker (
	id smallint primary key default 1,
	core_hash text not null,
	profile_hash text not null,
	contract_json jsonb,
	canonical_version int,
	updated_at timestamptz not null default now(),
	app_tag text,
	meta jsonb not null default '{}'
)`,
	params: []
};
/** Idempotent DDL for the append-only ledger of applied migration runs. */
const ensureLedgerTableStatement = {
	sql: `create table if not exists prisma_contract.ledger (
	id bigserial primary key,
	created_at timestamptz not null default now(),
	origin_core_hash text,
	origin_profile_hash text,
	destination_core_hash text not null,
	destination_profile_hash text,
	contract_json_before jsonb,
	contract_json_after jsonb,
	operations jsonb not null
)`,
	params: []
};
/**
 * Builds the insert and update statements for the single-row marker table.
 * Both share one positional parameter list ($1..$7) so the caller can pick
 * whichever applies based on whether a marker row already exists.
 */
function buildWriteMarkerStatements(input) {
	const params = [
		1, // fixed marker row id
		input.storageHash,
		input.profileHash,
		jsonParam(input.contractJson),
		input.canonicalVersion ?? null,
		input.appTag ?? null,
		jsonParam(input.meta ?? {})
	];
	const insert = {
		sql: `insert into prisma_contract.marker (
	id,
	core_hash,
	profile_hash,
	contract_json,
	canonical_version,
	updated_at,
	app_tag,
	meta
) values (
	$1,
	$2,
	$3,
	$4::jsonb,
	$5,
	now(),
	$6,
	$7::jsonb
)`,
		params
	};
	const update = {
		sql: `update prisma_contract.marker set
	core_hash = $2,
	profile_hash = $3,
	contract_json = $4::jsonb,
	canonical_version = $5,
	updated_at = now(),
	app_tag = $6,
	meta = $7::jsonb
where id = $1`,
		params
	};
	return { insert, update };
}
/**
 * Builds the parameterized insert recording a migration run in the ledger:
 * origin/destination hashes, contract JSON before and after, and the
 * executed operations.
 */
function buildLedgerInsertStatement(input) {
	const params = [
		input.originStorageHash ?? null,
		input.originProfileHash ?? null,
		input.destinationStorageHash,
		input.destinationProfileHash ?? null,
		jsonParam(input.contractJsonBefore),
		jsonParam(input.contractJsonAfter),
		jsonParam(input.operations)
	];
	return {
		sql: `insert into prisma_contract.ledger (
	origin_core_hash,
	origin_profile_hash,
	destination_core_hash,
	destination_profile_hash,
	contract_json_before,
	contract_json_after,
	operations
) values (
	$1,
	$2,
	$3,
	$4,
	$5::jsonb,
	$6::jsonb,
	$7::jsonb
)`,
		params
	};
}
/** Serializes a value for a jsonb parameter (nullish becomes JSON null), BigInt-safe via the shared replacer. */
function jsonParam(value) {
	const normalized = value ?? null;
	return JSON.stringify(normalized, bigintJsonReplacer);
}
+
3334
+ //#endregion
3335
+ //#region src/core/migrations/runner.ts
3336
/** Runner configuration defaults applied beneath caller-supplied config. */
const DEFAULT_CONFIG = { defaultSchema: "public" };
/** Namespace for the per-schema advisory lock guarding marker/ledger writes. */
const LOCK_DOMAIN = "prisma_next.contract.marker";
/**
 * Deep clones and freezes a record object to prevent mutation.
 * Recursively clones nested objects and arrays to ensure complete isolation.
 *
 * Fix: array values were previously only shallow-copied and frozen, so
 * object elements inside arrays stayed shared with (and mutable via) the
 * source — contradicting the documented "complete isolation". Array
 * elements are now cloned and frozen recursively as well.
 */
function cloneAndFreezeRecord(value) {
	const cloned = {};
	for (const [key, val] of Object.entries(value)) cloned[key] = cloneAndFreezeValue(val);
	return Object.freeze(cloned);
}
/** Recursively clones and freezes a single value (helper for cloneAndFreezeRecord). */
function cloneAndFreezeValue(val) {
	if (val === null || val === void 0) return val;
	if (Array.isArray(val)) return Object.freeze(val.map(cloneAndFreezeValue));
	if (typeof val === "object") return cloneAndFreezeRecord(val);
	// Primitives (and functions) are returned as-is, matching prior behavior.
	return val;
}
/** Creates a Postgres migration runner, merging caller config over the defaults. */
function createPostgresMigrationRunner(family, config = {}) {
	const mergedConfig = { ...DEFAULT_CONFIG, ...config };
	return new PostgresMigrationRunner(family, mergedConfig);
}
/**
 * Executes a migration plan against Postgres inside a single transaction,
 * serialized by a per-schema advisory lock, then verifies the resulting
 * schema against the destination contract and records the outcome in the
 * prisma_contract marker and ledger tables. Any failure rolls everything
 * back, including bookkeeping writes.
 */
var PostgresMigrationRunner = class {
	constructor(family, config) {
		this.family = family;
		this.config = config;
	}
	/**
	 * Main entry point: validates plan vs destination contract and policy,
	 * applies operations, verifies the schema, upserts the marker, and
	 * appends a ledger entry. Returns a runner result; never commits on
	 * failure (the finally block rolls back when `committed` is false).
	 */
	async execute(options) {
		const schema = options.schemaName ?? this.config.defaultSchema;
		const driver = options.driver;
		const lockKey = `${LOCK_DOMAIN}:${schema}`;
		const destinationCheck = this.ensurePlanMatchesDestinationContract(options.plan.destination, options.destinationContract);
		if (!destinationCheck.ok) return destinationCheck;
		const policyCheck = this.enforcePolicyCompatibility(options.policy, options.plan.operations);
		if (!policyCheck.ok) return policyCheck;
		await this.beginTransaction(driver);
		let committed = false;
		try {
			await this.acquireLock(driver, lockKey);
			await this.ensureControlTables(driver);
			const existingMarker = await readMarker(driver);
			const markerCheck = this.ensureMarkerCompatibility(existingMarker, options.plan);
			if (!markerCheck.ok) return markerCheck;
			// Re-running a plan whose destination is already recorded is a
			// no-op — but only when the plan carries an origin to compare.
			const skipOperations = this.markerMatchesDestination(existingMarker, options.plan) && options.plan.origin != null;
			let applyValue;
			if (skipOperations) {
				applyValue = {
					operationsExecuted: 0,
					executedOperations: []
				};
			} else {
				const applyResult = await this.applyPlan(driver, options);
				if (!applyResult.ok) return applyResult;
				applyValue = applyResult.value;
			}
			// Introspect the post-apply schema and verify it satisfies the contract.
			const schemaIR = await this.family.introspect({
				driver,
				contractIR: options.destinationContract
			});
			const schemaVerifyResult = verifySqlSchema({
				contract: options.destinationContract,
				schema: schemaIR,
				strict: options.strictVerification ?? true,
				context: options.context ?? {},
				typeMetadataRegistry: this.family.typeMetadataRegistry,
				frameworkComponents: options.frameworkComponents,
				normalizeDefault: parsePostgresDefault,
				normalizeNativeType: normalizeSchemaNativeType
			});
			if (!schemaVerifyResult.ok) {
				return runnerFailure("SCHEMA_VERIFY_FAILED", schemaVerifyResult.summary, {
					why: "The resulting database schema does not satisfy the destination contract.",
					meta: { issues: schemaVerifyResult.schema.issues }
				});
			}
			await this.upsertMarker(driver, options, existingMarker);
			await this.recordLedgerEntry(driver, options, existingMarker, applyValue.executedOperations);
			await this.commitTransaction(driver);
			committed = true;
			return runnerSuccess({
				operationsPlanned: options.plan.operations.length,
				operationsExecuted: applyValue.operationsExecuted
			});
		} finally {
			// Early returns and thrown errors both land here uncommitted.
			if (!committed) await this.rollbackTransaction(driver);
		}
	}
	/**
	 * Applies each operation: skip when postchecks already hold (idempotency),
	 * else precheck -> execute -> postcheck. Check phases can be disabled via
	 * options.executionChecks. Completion callbacks fire even on failure.
	 */
	async applyPlan(driver, options) {
		const checks = options.executionChecks;
		const runPrechecks = checks?.prechecks !== false;
		const runPostchecks = checks?.postchecks !== false;
		const runIdempotency = checks?.idempotencyChecks !== false;
		let operationsExecuted = 0;
		const executedOperations = [];
		for (const operation of options.plan.operations) {
			options.callbacks?.onOperationStart?.(operation);
			try {
				if (runPostchecks && runIdempotency) {
					// Postchecks already satisfied: record a skip, do not execute.
					if (await this.expectationsAreSatisfied(driver, operation.postcheck)) {
						executedOperations.push(this.createPostcheckPreSatisfiedSkipRecord(operation));
						continue;
					}
				}
				if (runPrechecks) {
					const precheckResult = await this.runExpectationSteps(driver, operation.precheck, operation, "precheck");
					if (!precheckResult.ok) return precheckResult;
				}
				const executeResult = await this.runExecuteSteps(driver, operation.execute, operation);
				if (!executeResult.ok) return executeResult;
				if (runPostchecks) {
					const postcheckResult = await this.runExpectationSteps(driver, operation.postcheck, operation, "postcheck");
					if (!postcheckResult.ok) return postcheckResult;
				}
				executedOperations.push(operation);
				operationsExecuted += 1;
			} finally {
				options.callbacks?.onOperationComplete?.(operation);
			}
		}
		return ok({
			operationsExecuted,
			executedOperations
		});
	}
	/** Creates the prisma_contract schema, marker, and ledger tables if missing. */
	async ensureControlTables(driver) {
		await this.executeStatement(driver, ensurePrismaContractSchemaStatement);
		await this.executeStatement(driver, ensureMarkerTableStatement);
		await this.executeStatement(driver, ensureLedgerTableStatement);
	}
	/** Runs each expectation query; fails the operation when any does not evaluate truthy. */
	async runExpectationSteps(driver, steps, operation, phase) {
		for (const step of steps) {
			const result = await driver.query(step.sql);
			if (!this.stepResultIsTrue(result.rows)) {
				const code = phase === "precheck" ? "PRECHECK_FAILED" : "POSTCHECK_FAILED";
				return runnerFailure(code, `Operation ${operation.id} failed during ${phase}: ${step.description}`, { meta: {
					operationId: operation.id,
					phase,
					stepDescription: step.description
				} });
			}
		}
		return okVoid();
	}
	/** Runs each execute statement, converting SqlQueryError into a structured runner failure. */
	async runExecuteSteps(driver, steps, operation) {
		for (const step of steps) {
			try {
				await driver.query(step.sql);
			} catch (error) {
				// Non-SQL errors are unexpected and propagate to the caller.
				if (!SqlQueryError.is(error)) throw error;
				return runnerFailure("EXECUTION_FAILED", `Operation ${operation.id} failed during execution: ${step.description}`, {
					why: error.message,
					meta: {
						operationId: operation.id,
						stepDescription: step.description,
						sql: step.sql,
						sqlState: error.sqlState,
						constraint: error.constraint,
						table: error.table,
						column: error.column,
						detail: error.detail
					}
				});
			}
		}
		return okVoid();
	}
	/**
	 * Interprets the first column of the first row as a boolean, accepting
	 * booleans, numbers (non-zero), and Postgres text forms (t/f, true/false,
	 * 1/0); other non-empty strings count as true.
	 */
	stepResultIsTrue(rows) {
		if (!rows || rows.length === 0) return false;
		const firstRow = rows[0];
		const firstValue = firstRow ? Object.values(firstRow)[0] : void 0;
		switch (typeof firstValue) {
			case "boolean":
				return firstValue;
			case "number":
				return firstValue !== 0;
			case "string": {
				const lower = firstValue.toLowerCase();
				if (lower === "t" || lower === "true" || lower === "1") return true;
				if (lower === "f" || lower === "false" || lower === "0") return false;
				return firstValue.length > 0;
			}
			default:
				return Boolean(firstValue);
		}
	}
	/** True only when there is at least one step and every step evaluates truthy. */
	async expectationsAreSatisfied(driver, steps) {
		if (steps.length === 0) return false;
		for (const step of steps) {
			const result = await driver.query(step.sql);
			if (!this.stepResultIsTrue(result.rows)) return false;
		}
		return true;
	}
	/**
	 * Builds a frozen ledger record for an operation skipped because its
	 * postchecks were already satisfied: empty precheck/execute, original
	 * postchecks, and meta annotated with runner skip information.
	 */
	createPostcheckPreSatisfiedSkipRecord(operation) {
		const clonedMeta = operation.meta ? cloneAndFreezeRecord(operation.meta) : void 0;
		const runnerMeta = Object.freeze({
			skipped: true,
			reason: "postcheck_pre_satisfied"
		});
		const mergedMeta = Object.freeze({
			...clonedMeta ?? {},
			runner: runnerMeta
		});
		const frozenPostcheck = Object.freeze([...operation.postcheck]);
		return Object.freeze({
			id: operation.id,
			label: operation.label,
			...ifDefined("summary", operation.summary),
			operationClass: operation.operationClass,
			target: operation.target,
			precheck: Object.freeze([]),
			execute: Object.freeze([]),
			postcheck: frozenPostcheck,
			// mergedMeta is always a truthy object, so meta is always included.
			...ifDefined("meta", mergedMeta)
		});
	}
	/** True when the recorded marker already matches the plan destination hashes. */
	markerMatchesDestination(marker, plan) {
		if (!marker) return false;
		if (marker.storageHash !== plan.destination.storageHash) return false;
		// Profile hash only participates when the plan specifies one.
		if (plan.destination.profileHash && marker.profileHash !== plan.destination.profileHash) return false;
		return true;
	}
	/** Fails when any planned operation's class is not permitted by the policy. */
	enforcePolicyCompatibility(policy, operations) {
		const allowedClasses = new Set(policy.allowedOperationClasses);
		for (const operation of operations) {
			if (allowedClasses.has(operation.operationClass)) continue;
			return runnerFailure("POLICY_VIOLATION", `Operation ${operation.id} has class "${operation.operationClass}" which is not allowed by policy.`, {
				why: `Policy only allows: ${policy.allowedOperationClasses.join(", ")}.`,
				meta: {
					operationId: operation.id,
					operationClass: operation.operationClass,
					allowedClasses: policy.allowedOperationClasses
				}
			});
		}
		return okVoid();
	}
	/**
	 * When the plan declares an origin, the stored marker must exist and its
	 * storage (and, when given, profile) hashes must match that origin.
	 */
	ensureMarkerCompatibility(marker, plan) {
		const origin = plan.origin ?? null;
		if (!origin) return okVoid();
		if (!marker) return runnerFailure("MARKER_ORIGIN_MISMATCH", `Missing contract marker: expected origin storage hash ${origin.storageHash}.`, { meta: { expectedOriginStorageHash: origin.storageHash } });
		if (marker.storageHash !== origin.storageHash) return runnerFailure("MARKER_ORIGIN_MISMATCH", `Existing contract marker (${marker.storageHash}) does not match plan origin (${origin.storageHash}).`, { meta: {
			markerStorageHash: marker.storageHash,
			expectedOriginStorageHash: origin.storageHash
		} });
		if (origin.profileHash && marker.profileHash !== origin.profileHash) return runnerFailure("MARKER_ORIGIN_MISMATCH", `Existing contract marker profile hash (${marker.profileHash}) does not match plan origin profile hash (${origin.profileHash}).`, { meta: {
			markerProfileHash: marker.profileHash,
			expectedOriginProfileHash: origin.profileHash
		} });
		return okVoid();
	}
	/** The plan destination must match the contract's storage hash (and profile hash when both are set). */
	ensurePlanMatchesDestinationContract(destination, contract) {
		if (destination.storageHash !== contract.storageHash) return runnerFailure("DESTINATION_CONTRACT_MISMATCH", `Plan destination storage hash (${destination.storageHash}) does not match provided contract storage hash (${contract.storageHash}).`, { meta: {
			planStorageHash: destination.storageHash,
			contractStorageHash: contract.storageHash
		} });
		if (destination.profileHash && contract.profileHash && destination.profileHash !== contract.profileHash) return runnerFailure("DESTINATION_CONTRACT_MISMATCH", `Plan destination profile hash (${destination.profileHash}) does not match provided contract profile hash (${contract.profileHash}).`, { meta: {
			planProfileHash: destination.profileHash,
			contractProfileHash: contract.profileHash
		} });
		return okVoid();
	}
	/** Writes the destination contract into the marker row (insert when absent, update otherwise). */
	async upsertMarker(driver, options, existingMarker) {
		const writeStatements = buildWriteMarkerStatements({
			storageHash: options.plan.destination.storageHash,
			profileHash: options.plan.destination.profileHash ?? options.destinationContract.profileHash ?? options.plan.destination.storageHash,
			contractJson: options.destinationContract,
			canonicalVersion: null,
			meta: {}
		});
		const statement = existingMarker ? writeStatements.update : writeStatements.insert;
		await this.executeStatement(driver, statement);
	}
	/** Appends a ledger row describing this run (origin from the prior marker, if any). */
	async recordLedgerEntry(driver, options, existingMarker, executedOperations) {
		const ledgerStatement = buildLedgerInsertStatement({
			originStorageHash: existingMarker?.storageHash ?? null,
			originProfileHash: existingMarker?.profileHash ?? null,
			destinationStorageHash: options.plan.destination.storageHash,
			destinationProfileHash: options.plan.destination.profileHash ?? options.destinationContract.profileHash ?? options.plan.destination.storageHash,
			contractJsonBefore: existingMarker?.contractJson ?? null,
			contractJsonAfter: options.destinationContract,
			operations: executedOperations
		});
		await this.executeStatement(driver, ledgerStatement);
	}
	/** Takes a transaction-scoped advisory lock; released automatically at commit/rollback. */
	async acquireLock(driver, key) {
		await driver.query("select pg_advisory_xact_lock(hashtext($1))", [key]);
	}
	async beginTransaction(driver) {
		await driver.query("BEGIN");
	}
	async commitTransaction(driver) {
		await driver.query("COMMIT");
	}
	async rollbackTransaction(driver) {
		await driver.query("ROLLBACK");
	}
	/** Runs a statement, omitting the params argument when the statement has none. */
	async executeStatement(driver, statement) {
		if (statement.params.length > 0) {
			await driver.query(statement.sql, statement.params);
			return;
		}
		await driver.query(statement.sql);
	}
};
+
3624
+ //#endregion
3625
+ //#region src/exports/control.ts
3626
/**
 * Builds an expandNativeType callback backed by codec control hooks, or
 * returns undefined when no framework components are supplied. The callback
 * passes columns without typeParams through unchanged and throws when the
 * required codec association or hook is missing.
 */
function buildNativeTypeExpander(frameworkComponents) {
	if (!frameworkComponents) return;
	const codecHooks = extractCodecControlHooks(frameworkComponents);
	return (input) => {
		if (!input.typeParams) return input.nativeType;
		if (!input.codecId) {
			throw new Error(`Column declares typeParams for nativeType "${input.nativeType}" but has no codecId. Ensure the column is associated with a codec.`);
		}
		const hooks = codecHooks.get(input.codecId);
		if (!hooks?.expandNativeType) {
			throw new Error(`Column declares typeParams for nativeType "${input.nativeType}" but no expandNativeType hook is registered for codecId "${input.codecId}". Ensure the extension providing this codec is included in extensionPacks.`);
		}
		return hooks.expandNativeType(input);
	};
}
/**
 * Renders a column default for DDL: function defaults pass through as raw
 * SQL expressions; value defaults are rendered as typed literals.
 */
function postgresRenderDefault(def, column) {
	return def.kind === "function" ? def.expression : renderDefaultLiteral(def.value, column);
}
/**
 * The Postgres target descriptor: shared metadata plus factories for the
 * migration planner/runner and the contract-to-schema lowering. Planner and
 * runner factories are exposed both at the top level and under `migrations`
 * for the two consumer entry points.
 */
const postgresTargetDescriptor = {
	...postgresTargetDescriptorMeta,
	operationSignatures: () => [],
	migrations: {
		createPlanner(_family) {
			return createPostgresMigrationPlanner();
		},
		createRunner(family) {
			return createPostgresMigrationRunner(family);
		},
		contractToSchema(contract, frameworkComponents) {
			const components = frameworkComponents ?? [];
			return contractToSchemaIR(contract, {
				annotationNamespace: "pg",
				// Only attach the expander when framework components provide hooks.
				...ifDefined("expandNativeType", buildNativeTypeExpander(frameworkComponents)),
				renderDefault: postgresRenderDefault,
				frameworkComponents: components
			});
		}
	},
	create() {
		return {
			familyId: "sql",
			targetId: "postgres"
		};
	},
	createPlanner(_family) {
		return createPostgresMigrationPlanner();
	},
	createRunner(family) {
		return createPostgresMigrationRunner(family);
	}
};
+ var control_default = postgresTargetDescriptor;
3674
+
3675
+ //#endregion
3676
+ export { control_default as default, postgresRenderDefault };
3677
+ //# sourceMappingURL=control.mjs.map