@prisma-next/target-postgres 0.4.1 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178) hide show
  1. package/dist/codec-ids-CojIXVf9.mjs +29 -0
  2. package/dist/codec-ids-CojIXVf9.mjs.map +1 -0
  3. package/dist/codec-ids.d.mts +28 -0
  4. package/dist/codec-ids.d.mts.map +1 -0
  5. package/dist/codec-ids.mjs +3 -0
  6. package/dist/codec-types.d.mts +42 -0
  7. package/dist/codec-types.d.mts.map +1 -0
  8. package/dist/codec-types.mjs +3 -0
  9. package/dist/codecs-BoahtY_Q.mjs +385 -0
  10. package/dist/codecs-BoahtY_Q.mjs.map +1 -0
  11. package/dist/codecs-D-F2KJqt.d.mts +299 -0
  12. package/dist/codecs-D-F2KJqt.d.mts.map +1 -0
  13. package/dist/codecs.d.mts +2 -0
  14. package/dist/codecs.mjs +3 -0
  15. package/dist/control.d.mts +1 -1
  16. package/dist/control.mjs +9 -1982
  17. package/dist/control.mjs.map +1 -1
  18. package/dist/data-transform-CxFRBIUp.d.mts +32 -0
  19. package/dist/data-transform-CxFRBIUp.d.mts.map +1 -0
  20. package/dist/data-transform-VfEGzXWt.mjs +39 -0
  21. package/dist/data-transform-VfEGzXWt.mjs.map +1 -0
  22. package/dist/data-transform.d.mts +2 -0
  23. package/dist/data-transform.mjs +3 -0
  24. package/dist/default-normalizer-DNOpRoOF.mjs +131 -0
  25. package/dist/default-normalizer-DNOpRoOF.mjs.map +1 -0
  26. package/dist/default-normalizer.d.mts +19 -0
  27. package/dist/default-normalizer.d.mts.map +1 -0
  28. package/dist/default-normalizer.mjs +3 -0
  29. package/dist/{descriptor-meta-DkvCmY98.mjs → descriptor-meta-BVoVtyp-.mjs} +1 -1
  30. package/dist/{descriptor-meta-DkvCmY98.mjs.map → descriptor-meta-BVoVtyp-.mjs.map} +1 -1
  31. package/dist/errors-AFvEPZ1R.mjs +34 -0
  32. package/dist/errors-AFvEPZ1R.mjs.map +1 -0
  33. package/dist/errors.d.mts +27 -0
  34. package/dist/errors.d.mts.map +1 -0
  35. package/dist/errors.mjs +3 -0
  36. package/dist/issue-planner-CFjB0_oO.mjs +879 -0
  37. package/dist/issue-planner-CFjB0_oO.mjs.map +1 -0
  38. package/dist/issue-planner.d.mts +85 -0
  39. package/dist/issue-planner.d.mts.map +1 -0
  40. package/dist/issue-planner.mjs +3 -0
  41. package/dist/migration.d.mts +5 -79
  42. package/dist/migration.d.mts.map +1 -1
  43. package/dist/migration.mjs +6 -428
  44. package/dist/migration.mjs.map +1 -1
  45. package/dist/native-type-normalizer-CInai_oY.mjs +38 -0
  46. package/dist/native-type-normalizer-CInai_oY.mjs.map +1 -0
  47. package/dist/native-type-normalizer.d.mts +18 -0
  48. package/dist/native-type-normalizer.d.mts.map +1 -0
  49. package/dist/native-type-normalizer.mjs +3 -0
  50. package/dist/op-factory-call-BKlruaiC.mjs +605 -0
  51. package/dist/op-factory-call-BKlruaiC.mjs.map +1 -0
  52. package/dist/op-factory-call-C3bWXKSP.d.mts +304 -0
  53. package/dist/op-factory-call-C3bWXKSP.d.mts.map +1 -0
  54. package/dist/op-factory-call.d.mts +3 -0
  55. package/dist/op-factory-call.mjs +3 -0
  56. package/dist/pack.d.mts +1 -1
  57. package/dist/pack.mjs +1 -1
  58. package/dist/planner-CLUvVhUN.mjs +98 -0
  59. package/dist/planner-CLUvVhUN.mjs.map +1 -0
  60. package/dist/planner-ddl-builders-Dxvw1LHw.mjs +132 -0
  61. package/dist/planner-ddl-builders-Dxvw1LHw.mjs.map +1 -0
  62. package/dist/planner-ddl-builders.d.mts +22 -0
  63. package/dist/planner-ddl-builders.d.mts.map +1 -0
  64. package/dist/planner-ddl-builders.mjs +3 -0
  65. package/dist/planner-identity-values-Dju-o5GF.mjs +91 -0
  66. package/dist/planner-identity-values-Dju-o5GF.mjs.map +1 -0
  67. package/dist/planner-identity-values.d.mts +20 -0
  68. package/dist/planner-identity-values.d.mts.map +1 -0
  69. package/dist/planner-identity-values.mjs +3 -0
  70. package/dist/planner-produced-postgres-migration-CRRTno6Z.d.mts +20 -0
  71. package/dist/planner-produced-postgres-migration-CRRTno6Z.d.mts.map +1 -0
  72. package/dist/planner-produced-postgres-migration-DSSPq8QS.mjs +33 -0
  73. package/dist/planner-produced-postgres-migration-DSSPq8QS.mjs.map +1 -0
  74. package/dist/planner-produced-postgres-migration.d.mts +5 -0
  75. package/dist/planner-produced-postgres-migration.mjs +3 -0
  76. package/dist/planner-schema-lookup-B7lkypwn.mjs +29 -0
  77. package/dist/planner-schema-lookup-B7lkypwn.mjs.map +1 -0
  78. package/dist/planner-schema-lookup.d.mts +22 -0
  79. package/dist/planner-schema-lookup.d.mts.map +1 -0
  80. package/dist/planner-schema-lookup.mjs +3 -0
  81. package/dist/planner-sql-checks-7jkgm9TX.mjs +241 -0
  82. package/dist/planner-sql-checks-7jkgm9TX.mjs.map +1 -0
  83. package/dist/planner-sql-checks.d.mts +55 -0
  84. package/dist/planner-sql-checks.d.mts.map +1 -0
  85. package/dist/planner-sql-checks.mjs +3 -0
  86. package/dist/{planner-target-details-MXb3oeul.d.mts → planner-target-details-DH-azLu-.d.mts} +1 -1
  87. package/dist/{planner-target-details-MXb3oeul.d.mts.map → planner-target-details-DH-azLu-.d.mts.map} +1 -1
  88. package/dist/planner-target-details.d.mts +2 -0
  89. package/dist/planner-target-details.mjs +1 -0
  90. package/dist/planner.d.mts +68 -0
  91. package/dist/planner.d.mts.map +1 -0
  92. package/dist/planner.mjs +4 -0
  93. package/dist/postgres-migration-BjA3Zmts.d.mts +50 -0
  94. package/dist/postgres-migration-BjA3Zmts.d.mts.map +1 -0
  95. package/dist/postgres-migration-qtmtbONe.mjs +52 -0
  96. package/dist/postgres-migration-qtmtbONe.mjs.map +1 -0
  97. package/dist/render-ops-D6_DHdOK.mjs +8 -0
  98. package/dist/render-ops-D6_DHdOK.mjs.map +1 -0
  99. package/dist/render-ops.d.mts +11 -0
  100. package/dist/render-ops.d.mts.map +1 -0
  101. package/dist/render-ops.mjs +3 -0
  102. package/dist/render-typescript-1rF_SB4g.mjs +85 -0
  103. package/dist/render-typescript-1rF_SB4g.mjs.map +1 -0
  104. package/dist/render-typescript.d.mts +15 -0
  105. package/dist/render-typescript.d.mts.map +1 -0
  106. package/dist/render-typescript.mjs +3 -0
  107. package/dist/runtime.d.mts +15 -3
  108. package/dist/runtime.d.mts.map +1 -1
  109. package/dist/runtime.mjs +10 -1
  110. package/dist/runtime.mjs.map +1 -1
  111. package/dist/shared-Bxkt8pNO.d.mts +41 -0
  112. package/dist/shared-Bxkt8pNO.d.mts.map +1 -0
  113. package/dist/sql-utils-r-Lw535w.mjs +76 -0
  114. package/dist/sql-utils-r-Lw535w.mjs.map +1 -0
  115. package/dist/sql-utils.d.mts +59 -0
  116. package/dist/sql-utils.d.mts.map +1 -0
  117. package/dist/sql-utils.mjs +3 -0
  118. package/dist/statement-builders-BPnmt6wx.mjs +116 -0
  119. package/dist/statement-builders-BPnmt6wx.mjs.map +1 -0
  120. package/dist/statement-builders.d.mts +23 -0
  121. package/dist/statement-builders.d.mts.map +1 -0
  122. package/dist/statement-builders.mjs +3 -0
  123. package/dist/tables-BmdW_FWO.mjs +477 -0
  124. package/dist/tables-BmdW_FWO.mjs.map +1 -0
  125. package/dist/types-ClK03Ojd.d.mts +10 -0
  126. package/dist/types-ClK03Ojd.d.mts.map +1 -0
  127. package/dist/types.d.mts +2 -0
  128. package/dist/types.mjs +1 -0
  129. package/package.json +40 -20
  130. package/src/core/codec-ids.ts +30 -0
  131. package/src/core/codecs.ts +645 -0
  132. package/src/core/default-normalizer.ts +131 -0
  133. package/src/core/descriptor-meta.ts +1 -1
  134. package/src/core/errors.ts +33 -0
  135. package/src/core/json-schema-type-expression.ts +131 -0
  136. package/src/core/migrations/op-factory-call.ts +1 -5
  137. package/src/core/migrations/operations/columns.ts +1 -1
  138. package/src/core/migrations/operations/constraints.ts +1 -1
  139. package/src/core/migrations/operations/data-transform.ts +27 -21
  140. package/src/core/migrations/operations/dependencies.ts +1 -1
  141. package/src/core/migrations/operations/enums.ts +1 -1
  142. package/src/core/migrations/operations/indexes.ts +1 -1
  143. package/src/core/migrations/operations/shared.ts +1 -1
  144. package/src/core/migrations/operations/tables.ts +1 -1
  145. package/src/core/migrations/planner-ddl-builders.ts +1 -1
  146. package/src/core/migrations/planner-recipes.ts +1 -1
  147. package/src/core/migrations/planner-sql-checks.ts +1 -1
  148. package/src/core/migrations/planner.ts +2 -4
  149. package/src/core/migrations/postgres-migration.ts +54 -1
  150. package/src/core/migrations/render-typescript.ts +22 -12
  151. package/src/core/migrations/runner.ts +2 -4
  152. package/src/core/native-type-normalizer.ts +49 -0
  153. package/src/core/sql-utils.ts +104 -0
  154. package/src/exports/codec-ids.ts +1 -0
  155. package/src/exports/codec-types.ts +51 -0
  156. package/src/exports/codecs.ts +2 -0
  157. package/src/exports/data-transform.ts +1 -0
  158. package/src/exports/default-normalizer.ts +1 -0
  159. package/src/exports/errors.ts +1 -0
  160. package/src/exports/issue-planner.ts +1 -0
  161. package/src/exports/migration.ts +6 -0
  162. package/src/exports/native-type-normalizer.ts +1 -0
  163. package/src/exports/op-factory-call.ts +25 -0
  164. package/src/exports/planner-ddl-builders.ts +8 -0
  165. package/src/exports/planner-identity-values.ts +1 -0
  166. package/src/exports/planner-produced-postgres-migration.ts +1 -0
  167. package/src/exports/planner-schema-lookup.ts +6 -0
  168. package/src/exports/planner-sql-checks.ts +11 -0
  169. package/src/exports/planner-target-details.ts +1 -0
  170. package/src/exports/planner.ts +1 -0
  171. package/src/exports/render-ops.ts +1 -0
  172. package/src/exports/render-typescript.ts +1 -0
  173. package/src/exports/runtime.ts +19 -4
  174. package/src/exports/sql-utils.ts +7 -0
  175. package/src/exports/statement-builders.ts +7 -0
  176. package/src/exports/types.ts +1 -0
  177. package/dist/postgres-migration-BsHJHV9O.mjs +0 -2793
  178. package/dist/postgres-migration-BsHJHV9O.mjs.map +0 -1
@@ -1,2793 +0,0 @@
1
- import { ifDefined } from "@prisma-next/utils/defined";
2
- import { SQL_CHAR_CODEC_ID, SQL_FLOAT_CODEC_ID, SQL_INT_CODEC_ID, SQL_TEXT_CODEC_ID, SQL_TIMESTAMP_CODEC_ID, SQL_VARCHAR_CODEC_ID, codec, defineCodecs, sqlCodecDefinitions } from "@prisma-next/sql-relational-core/ast";
3
- import { type } from "arktype";
4
- import { arraysEqual } from "@prisma-next/family-sql/schema-verify";
5
- import { Migration } from "@prisma-next/family-sql/migration";
6
-
7
- //#region ../../6-adapters/postgres/dist/sql-utils-DkUJyZmA.mjs
8
- /**
9
- * Shared SQL utility functions for the Postgres adapter.
10
- *
11
- * These functions handle safe SQL identifier and literal escaping
12
- * with security validations to prevent injection and encoding issues.
13
- */
14
- /**
15
- * Error thrown when an invalid SQL identifier or literal is detected.
16
- * Boundary layers map this to structured envelopes.
17
- */
18
- var SqlEscapeError = class extends Error {
19
- constructor(message, value, kind) {
20
- super(message);
21
- this.value = value;
22
- this.kind = kind;
23
- this.name = "SqlEscapeError";
24
- }
25
- };
26
- /**
27
- * Maximum length for PostgreSQL identifiers (NAMEDATALEN - 1).
28
- */
29
- const MAX_IDENTIFIER_LENGTH$1 = 63;
30
- /**
31
- * Validates and quotes a PostgreSQL identifier (table, column, type, schema names).
32
- *
33
- * Security validations:
34
- * - Rejects null bytes which could cause truncation or unexpected behavior
35
- * - Rejects empty identifiers
36
- * - Warns on identifiers exceeding PostgreSQL's 63-character limit
37
- *
38
- * @throws {SqlEscapeError} If the identifier contains null bytes or is empty
39
- */
40
- function quoteIdentifier(identifier) {
41
- if (identifier.length === 0) throw new SqlEscapeError("Identifier cannot be empty", identifier, "identifier");
42
- if (identifier.includes("\0")) throw new SqlEscapeError("Identifier cannot contain null bytes", identifier.replace(/\0/g, "\\0"), "identifier");
43
- if (identifier.length > MAX_IDENTIFIER_LENGTH$1) console.warn(`Identifier "${identifier.slice(0, 20)}..." exceeds PostgreSQL's ${MAX_IDENTIFIER_LENGTH$1}-character limit and will be truncated`);
44
- return `"${identifier.replace(/"/g, "\"\"")}"`;
45
- }
46
- /**
47
- * Escapes a string literal for safe use in SQL statements.
48
- *
49
- * Security validations:
50
- * - Rejects null bytes which could cause truncation or unexpected behavior
51
- *
52
- * Note: This assumes PostgreSQL's `standard_conforming_strings` is ON (default since PG 9.1).
53
- * Backslashes are treated as literal characters, not escape sequences.
54
- *
55
- * @throws {SqlEscapeError} If the value contains null bytes
56
- */
57
- function escapeLiteral(value) {
58
- if (value.includes("\0")) throw new SqlEscapeError("Literal value cannot contain null bytes", value.replace(/\0/g, "\\0"), "literal");
59
- return value.replace(/'/g, "''");
60
- }
61
- /**
62
- * Builds a qualified name (schema.object) with proper quoting.
63
- */
64
- function qualifyName(schemaName, objectName) {
65
- return `${quoteIdentifier(schemaName)}.${quoteIdentifier(objectName)}`;
66
- }
67
- /**
68
- * Validates that an enum value doesn't exceed PostgreSQL's label length limit.
69
- *
70
- * PostgreSQL enum labels have a maximum length of NAMEDATALEN-1 (63 bytes by default).
71
- * Unlike identifiers, enum labels that exceed this limit cause an error rather than
72
- * silent truncation.
73
- *
74
- * @param value - The enum value to validate
75
- * @param enumTypeName - Name of the enum type (for error messages)
76
- * @throws {SqlEscapeError} If the value exceeds the maximum length
77
- */
78
- function validateEnumValueLength(value, enumTypeName) {
79
- if (value.length > MAX_IDENTIFIER_LENGTH$1) throw new SqlEscapeError(`Enum value "${value.slice(0, 20)}..." for type "${enumTypeName}" exceeds PostgreSQL's ${MAX_IDENTIFIER_LENGTH$1}-character label limit`, value, "literal");
80
- }
81
-
82
- //#endregion
83
- //#region ../../6-adapters/postgres/dist/codec-ids-5g4Gwrgm.mjs
84
- const PG_TEXT_CODEC_ID = "pg/text@1";
85
- const PG_ENUM_CODEC_ID = "pg/enum@1";
86
- const PG_CHAR_CODEC_ID = "pg/char@1";
87
- const PG_VARCHAR_CODEC_ID = "pg/varchar@1";
88
- const PG_INT_CODEC_ID = "pg/int@1";
89
- const PG_INT2_CODEC_ID = "pg/int2@1";
90
- const PG_INT4_CODEC_ID = "pg/int4@1";
91
- const PG_INT8_CODEC_ID = "pg/int8@1";
92
- const PG_FLOAT_CODEC_ID = "pg/float@1";
93
- const PG_FLOAT4_CODEC_ID = "pg/float4@1";
94
- const PG_FLOAT8_CODEC_ID = "pg/float8@1";
95
- const PG_NUMERIC_CODEC_ID = "pg/numeric@1";
96
- const PG_BOOL_CODEC_ID = "pg/bool@1";
97
- const PG_BIT_CODEC_ID = "pg/bit@1";
98
- const PG_VARBIT_CODEC_ID = "pg/varbit@1";
99
- const PG_TIMESTAMP_CODEC_ID = "pg/timestamp@1";
100
- const PG_TIMESTAMPTZ_CODEC_ID = "pg/timestamptz@1";
101
- const PG_TIME_CODEC_ID = "pg/time@1";
102
- const PG_TIMETZ_CODEC_ID = "pg/timetz@1";
103
- const PG_INTERVAL_CODEC_ID = "pg/interval@1";
104
- const PG_JSON_CODEC_ID = "pg/json@1";
105
- const PG_JSONB_CODEC_ID = "pg/jsonb@1";
106
-
107
- //#endregion
108
- //#region ../../6-adapters/postgres/dist/codecs-DiPlMi3-.mjs
109
- const MAX_DEPTH = 32;
110
- function isRecord(value) {
111
- return typeof value === "object" && value !== null;
112
- }
113
- function escapeStringLiteral(str) {
114
- return str.replace(/\\/g, "\\\\").replace(/'/g, "\\'").replace(/\n/g, "\\n").replace(/\r/g, "\\r");
115
- }
116
- function quotePropertyKey(key) {
117
- return /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(key) ? key : `'${escapeStringLiteral(key)}'`;
118
- }
119
- function renderLiteral(value) {
120
- if (typeof value === "string") return `'${escapeStringLiteral(value)}'`;
121
- if (typeof value === "number" || typeof value === "boolean") return String(value);
122
- if (value === null) return "null";
123
- return "unknown";
124
- }
125
- function renderUnion(items, depth) {
126
- return items.map((item) => render(item, depth)).join(" | ");
127
- }
128
- function renderObjectType(schema, depth) {
129
- const properties = isRecord(schema["properties"]) ? schema["properties"] : {};
130
- const required = Array.isArray(schema["required"]) ? new Set(schema["required"].filter((key) => typeof key === "string")) : /* @__PURE__ */ new Set();
131
- const keys = Object.keys(properties).sort((left, right) => left.localeCompare(right));
132
- if (keys.length === 0) {
133
- const additionalProperties = schema["additionalProperties"];
134
- if (additionalProperties === true || additionalProperties === void 0) return "Record<string, unknown>";
135
- return `Record<string, ${render(additionalProperties, depth)}>`;
136
- }
137
- return `{ ${keys.map((key) => {
138
- const valueSchema = properties[key];
139
- const optionalMarker = required.has(key) ? "" : "?";
140
- return `${quotePropertyKey(key)}${optionalMarker}: ${render(valueSchema, depth)}`;
141
- }).join("; ")} }`;
142
- }
143
- function renderArrayType(schema, depth) {
144
- if (Array.isArray(schema["items"])) return `readonly [${schema["items"].map((item) => render(item, depth)).join(", ")}]`;
145
- if (schema["items"] !== void 0) {
146
- const itemType = render(schema["items"], depth);
147
- return itemType.includes(" | ") || itemType.includes(" & ") ? `(${itemType})[]` : `${itemType}[]`;
148
- }
149
- return "unknown[]";
150
- }
151
- function render(schema, depth) {
152
- if (depth > MAX_DEPTH || !isRecord(schema)) return "JsonValue";
153
- const nextDepth = depth + 1;
154
- if ("const" in schema) return renderLiteral(schema["const"]);
155
- if (Array.isArray(schema["enum"])) return schema["enum"].map((value) => renderLiteral(value)).join(" | ");
156
- if (Array.isArray(schema["oneOf"])) return renderUnion(schema["oneOf"], nextDepth);
157
- if (Array.isArray(schema["anyOf"])) return renderUnion(schema["anyOf"], nextDepth);
158
- if (Array.isArray(schema["allOf"])) return schema["allOf"].map((item) => render(item, nextDepth)).join(" & ");
159
- if (Array.isArray(schema["type"])) return schema["type"].map((item) => render({
160
- ...schema,
161
- type: item
162
- }, nextDepth)).join(" | ");
163
- switch (schema["type"]) {
164
- case "string": return "string";
165
- case "number":
166
- case "integer": return "number";
167
- case "boolean": return "boolean";
168
- case "null": return "null";
169
- case "array": return renderArrayType(schema, nextDepth);
170
- case "object": return renderObjectType(schema, nextDepth);
171
- default: break;
172
- }
173
- return "JsonValue";
174
- }
175
- function renderTypeScriptTypeFromJsonSchema(schema) {
176
- return render(schema, 0);
177
- }
178
- const lengthParamsSchema = type({ length: "number.integer > 0" });
179
- const numericParamsSchema = type({
180
- precision: "number.integer > 0 & number.integer <= 1000",
181
- "scale?": "number.integer >= 0"
182
- });
183
- const precisionParamsSchema = type({ "precision?": "number.integer >= 0 & number.integer <= 6" });
184
- function renderLength(typeName, typeParams) {
185
- const length = typeParams["length"];
186
- if (length === void 0) return;
187
- if (typeof length !== "number" || !Number.isFinite(length) || !Number.isInteger(length)) throw new Error(`renderOutputType: expected integer "length" in typeParams for ${typeName}, got ${String(length)}`);
188
- return `${typeName}<${length}>`;
189
- }
190
- function renderPrecision(typeName, typeParams) {
191
- const precision = typeParams["precision"];
192
- if (precision === void 0) return typeName;
193
- if (typeof precision !== "number" || !Number.isFinite(precision) || !Number.isInteger(precision)) throw new Error(`renderOutputType: expected integer "precision" in typeParams for ${typeName}, got ${String(precision)}`);
194
- return `${typeName}<${precision}>`;
195
- }
196
- function renderJsonOutputType(typeParams) {
197
- const typeName = typeParams["type"];
198
- if (typeof typeName === "string" && typeName.trim().length > 0) return typeName.trim();
199
- const schema = typeParams["schemaJson"];
200
- if (schema && typeof schema === "object") return renderTypeScriptTypeFromJsonSchema(schema);
201
- throw new Error(`renderOutputType: JSON codec typeParams must contain "type" (string) or "schemaJson" (object), got keys: ${Object.keys(typeParams).join(", ")}`);
202
- }
203
- function aliasCodec(base, options) {
204
- return {
205
- id: options.typeId,
206
- targetTypes: options.targetTypes,
207
- ...ifDefined("meta", options.meta),
208
- ...ifDefined("paramsSchema", base.paramsSchema),
209
- ...ifDefined("init", base.init),
210
- ...ifDefined("encode", base.encode),
211
- ...ifDefined("traits", base.traits),
212
- ...ifDefined("renderOutputType", base.renderOutputType),
213
- decode: base.decode,
214
- encodeJson: base.encodeJson,
215
- decodeJson: base.decodeJson
216
- };
217
- }
218
- const sqlCharCodec = sqlCodecDefinitions.char.codec;
219
- const sqlVarcharCodec = sqlCodecDefinitions.varchar.codec;
220
- const sqlIntCodec = sqlCodecDefinitions.int.codec;
221
- const sqlFloatCodec = sqlCodecDefinitions.float.codec;
222
- const sqlTextCodec = sqlCodecDefinitions.text.codec;
223
- const sqlTimestampCodec = sqlCodecDefinitions.timestamp.codec;
224
- const pgTextCodec = codec({
225
- typeId: PG_TEXT_CODEC_ID,
226
- targetTypes: ["text"],
227
- traits: [
228
- "equality",
229
- "order",
230
- "textual"
231
- ],
232
- encode: (value) => value,
233
- decode: (wire) => wire,
234
- meta: { db: { sql: { postgres: { nativeType: "text" } } } }
235
- });
236
- const pgCharCodec = aliasCodec(sqlCharCodec, {
237
- typeId: PG_CHAR_CODEC_ID,
238
- targetTypes: ["character"],
239
- meta: { db: { sql: { postgres: { nativeType: "character" } } } }
240
- });
241
- const pgVarcharCodec = aliasCodec(sqlVarcharCodec, {
242
- typeId: PG_VARCHAR_CODEC_ID,
243
- targetTypes: ["character varying"],
244
- meta: { db: { sql: { postgres: { nativeType: "character varying" } } } }
245
- });
246
- const pgIntCodec = aliasCodec(sqlIntCodec, {
247
- typeId: PG_INT_CODEC_ID,
248
- targetTypes: ["int4"],
249
- meta: { db: { sql: { postgres: { nativeType: "integer" } } } }
250
- });
251
- const pgFloatCodec = aliasCodec(sqlFloatCodec, {
252
- typeId: PG_FLOAT_CODEC_ID,
253
- targetTypes: ["float8"],
254
- meta: { db: { sql: { postgres: { nativeType: "double precision" } } } }
255
- });
256
- const pgInt4Codec = codec({
257
- typeId: PG_INT4_CODEC_ID,
258
- targetTypes: ["int4"],
259
- traits: [
260
- "equality",
261
- "order",
262
- "numeric"
263
- ],
264
- encode: (value) => value,
265
- decode: (wire) => wire,
266
- meta: { db: { sql: { postgres: { nativeType: "integer" } } } }
267
- });
268
- const pgNumericCodec = codec({
269
- typeId: PG_NUMERIC_CODEC_ID,
270
- targetTypes: ["numeric", "decimal"],
271
- traits: [
272
- "equality",
273
- "order",
274
- "numeric"
275
- ],
276
- encode: (value) => value,
277
- decode: (wire) => {
278
- if (typeof wire === "number") return String(wire);
279
- return wire;
280
- },
281
- paramsSchema: numericParamsSchema,
282
- renderOutputType: (typeParams) => {
283
- const precision = typeParams["precision"];
284
- if (precision === void 0) return void 0;
285
- if (typeof precision !== "number" || !Number.isFinite(precision) || !Number.isInteger(precision)) throw new Error(`renderOutputType: expected integer "precision" in typeParams for Numeric, got ${String(precision)}`);
286
- const scale = typeParams["scale"];
287
- return typeof scale === "number" ? `Numeric<${precision}, ${scale}>` : `Numeric<${precision}>`;
288
- },
289
- meta: { db: { sql: { postgres: { nativeType: "numeric" } } } }
290
- });
291
- const pgInt2Codec = codec({
292
- typeId: PG_INT2_CODEC_ID,
293
- targetTypes: ["int2"],
294
- traits: [
295
- "equality",
296
- "order",
297
- "numeric"
298
- ],
299
- encode: (value) => value,
300
- decode: (wire) => wire,
301
- meta: { db: { sql: { postgres: { nativeType: "smallint" } } } }
302
- });
303
- const pgInt8Codec = codec({
304
- typeId: PG_INT8_CODEC_ID,
305
- targetTypes: ["int8"],
306
- traits: [
307
- "equality",
308
- "order",
309
- "numeric"
310
- ],
311
- encode: (value) => value,
312
- decode: (wire) => wire,
313
- meta: { db: { sql: { postgres: { nativeType: "bigint" } } } }
314
- });
315
- const pgFloat4Codec = codec({
316
- typeId: PG_FLOAT4_CODEC_ID,
317
- targetTypes: ["float4"],
318
- traits: [
319
- "equality",
320
- "order",
321
- "numeric"
322
- ],
323
- encode: (value) => value,
324
- decode: (wire) => wire,
325
- meta: { db: { sql: { postgres: { nativeType: "real" } } } }
326
- });
327
- const pgFloat8Codec = codec({
328
- typeId: PG_FLOAT8_CODEC_ID,
329
- targetTypes: ["float8"],
330
- traits: [
331
- "equality",
332
- "order",
333
- "numeric"
334
- ],
335
- encode: (value) => value,
336
- decode: (wire) => wire,
337
- meta: { db: { sql: { postgres: { nativeType: "double precision" } } } }
338
- });
339
- const pgTimestampCodec = codec({
340
- typeId: PG_TIMESTAMP_CODEC_ID,
341
- targetTypes: ["timestamp"],
342
- traits: ["equality", "order"],
343
- encode: (value) => {
344
- if (value instanceof Date) return value.toISOString();
345
- if (typeof value === "string") return value;
346
- return String(value);
347
- },
348
- decode: (wire) => {
349
- if (wire instanceof Date) return wire.toISOString();
350
- return wire;
351
- },
352
- encodeJson: (value) => value instanceof Date ? value.toISOString() : value,
353
- decodeJson: (json) => {
354
- if (typeof json !== "string") throw new Error(`Expected ISO date string for pg/timestamp@1, got ${typeof json}`);
355
- const date = new Date(json);
356
- if (Number.isNaN(date.getTime())) throw new Error(`Invalid ISO date string for pg/timestamp@1: ${json}`);
357
- return date;
358
- },
359
- paramsSchema: precisionParamsSchema,
360
- renderOutputType: (typeParams) => renderPrecision("Timestamp", typeParams),
361
- meta: { db: { sql: { postgres: { nativeType: "timestamp without time zone" } } } }
362
- });
363
- const pgTimestamptzCodec = codec({
364
- typeId: PG_TIMESTAMPTZ_CODEC_ID,
365
- targetTypes: ["timestamptz"],
366
- traits: ["equality", "order"],
367
- encode: (value) => {
368
- if (value instanceof Date) return value.toISOString();
369
- if (typeof value === "string") return value;
370
- return String(value);
371
- },
372
- decode: (wire) => {
373
- if (wire instanceof Date) return wire.toISOString();
374
- return wire;
375
- },
376
- encodeJson: (value) => value instanceof Date ? value.toISOString() : value,
377
- decodeJson: (json) => {
378
- if (typeof json !== "string") throw new Error(`Expected ISO date string for pg/timestamptz@1, got ${typeof json}`);
379
- const date = new Date(json);
380
- if (Number.isNaN(date.getTime())) throw new Error(`Invalid ISO date string for pg/timestamptz@1: ${json}`);
381
- return date;
382
- },
383
- paramsSchema: precisionParamsSchema,
384
- renderOutputType: (typeParams) => renderPrecision("Timestamptz", typeParams),
385
- meta: { db: { sql: { postgres: { nativeType: "timestamp with time zone" } } } }
386
- });
387
- const pgTimeCodec = codec({
388
- typeId: PG_TIME_CODEC_ID,
389
- targetTypes: ["time"],
390
- traits: ["equality", "order"],
391
- encode: (value) => value,
392
- decode: (wire) => wire,
393
- paramsSchema: precisionParamsSchema,
394
- renderOutputType: (typeParams) => renderPrecision("Time", typeParams),
395
- meta: { db: { sql: { postgres: { nativeType: "time" } } } }
396
- });
397
- const pgTimetzCodec = codec({
398
- typeId: PG_TIMETZ_CODEC_ID,
399
- targetTypes: ["timetz"],
400
- traits: ["equality", "order"],
401
- encode: (value) => value,
402
- decode: (wire) => wire,
403
- paramsSchema: precisionParamsSchema,
404
- renderOutputType: (typeParams) => renderPrecision("Timetz", typeParams),
405
- meta: { db: { sql: { postgres: { nativeType: "timetz" } } } }
406
- });
407
- const pgBoolCodec = codec({
408
- typeId: PG_BOOL_CODEC_ID,
409
- targetTypes: ["bool"],
410
- traits: ["equality", "boolean"],
411
- encode: (value) => value,
412
- decode: (wire) => wire,
413
- meta: { db: { sql: { postgres: { nativeType: "boolean" } } } }
414
- });
415
- const pgBitCodec = codec({
416
- typeId: PG_BIT_CODEC_ID,
417
- targetTypes: ["bit"],
418
- traits: ["equality", "order"],
419
- encode: (value) => value,
420
- decode: (wire) => wire,
421
- paramsSchema: lengthParamsSchema,
422
- renderOutputType: (typeParams) => renderLength("Bit", typeParams),
423
- meta: { db: { sql: { postgres: { nativeType: "bit" } } } }
424
- });
425
- const pgVarbitCodec = codec({
426
- typeId: PG_VARBIT_CODEC_ID,
427
- targetTypes: ["bit varying"],
428
- traits: ["equality", "order"],
429
- encode: (value) => value,
430
- decode: (wire) => wire,
431
- paramsSchema: lengthParamsSchema,
432
- renderOutputType: (typeParams) => renderLength("VarBit", typeParams),
433
- meta: { db: { sql: { postgres: { nativeType: "bit varying" } } } }
434
- });
435
- const pgEnumCodec = codec({
436
- typeId: PG_ENUM_CODEC_ID,
437
- targetTypes: ["enum"],
438
- traits: ["equality", "order"],
439
- encode: (value) => value,
440
- decode: (wire) => wire,
441
- renderOutputType: (typeParams) => {
442
- const values = typeParams["values"];
443
- if (!Array.isArray(values)) throw new Error(`renderOutputType: expected array "values" in typeParams for enum, got ${typeof values}`);
444
- return values.map((value) => `'${String(value).replace(/\\/g, "\\\\").replace(/'/g, "\\'")}'`).join(" | ");
445
- }
446
- });
447
- const pgIntervalCodec = codec({
448
- typeId: PG_INTERVAL_CODEC_ID,
449
- targetTypes: ["interval"],
450
- traits: ["equality", "order"],
451
- encode: (value) => value,
452
- decode: (wire) => {
453
- if (typeof wire === "string") return wire;
454
- return JSON.stringify(wire);
455
- },
456
- paramsSchema: precisionParamsSchema,
457
- renderOutputType: (typeParams) => renderPrecision("Interval", typeParams),
458
- meta: { db: { sql: { postgres: { nativeType: "interval" } } } }
459
- });
460
- const pgJsonCodec = codec({
461
- typeId: PG_JSON_CODEC_ID,
462
- targetTypes: ["json"],
463
- traits: [],
464
- encode: (value) => JSON.stringify(value),
465
- decode: (wire) => typeof wire === "string" ? JSON.parse(wire) : wire,
466
- renderOutputType: renderJsonOutputType,
467
- meta: { db: { sql: { postgres: { nativeType: "json" } } } }
468
- });
469
- const pgJsonbCodec = codec({
470
- typeId: PG_JSONB_CODEC_ID,
471
- targetTypes: ["jsonb"],
472
- traits: ["equality"],
473
- encode: (value) => JSON.stringify(value),
474
- decode: (wire) => typeof wire === "string" ? JSON.parse(wire) : wire,
475
- renderOutputType: renderJsonOutputType,
476
- meta: { db: { sql: { postgres: { nativeType: "jsonb" } } } }
477
- });
478
- const codecs = defineCodecs().add("char", sqlCharCodec).add("varchar", sqlVarcharCodec).add("int", sqlIntCodec).add("float", sqlFloatCodec).add("sql-text", sqlTextCodec).add("sql-timestamp", sqlTimestampCodec).add("text", pgTextCodec).add("character", pgCharCodec).add("character varying", pgVarcharCodec).add("integer", pgIntCodec).add("double precision", pgFloatCodec).add("int4", pgInt4Codec).add("int2", pgInt2Codec).add("int8", pgInt8Codec).add("float4", pgFloat4Codec).add("float8", pgFloat8Codec).add("numeric", pgNumericCodec).add("timestamp", pgTimestampCodec).add("timestamptz", pgTimestamptzCodec).add("time", pgTimeCodec).add("timetz", pgTimetzCodec).add("bool", pgBoolCodec).add("bit", pgBitCodec).add("bit varying", pgVarbitCodec).add("interval", pgIntervalCodec).add("enum", pgEnumCodec).add("json", pgJsonCodec).add("jsonb", pgJsonbCodec);
479
- const codecDefinitions = codecs.codecDefinitions;
480
- const dataTypes = codecs.dataTypes;
481
-
482
- //#endregion
483
- //#region ../../6-adapters/postgres/dist/descriptor-meta-BB9XPAFi.mjs
484
- const ENUM_INTROSPECT_QUERY = `
485
- SELECT
486
- n.nspname AS schema_name,
487
- t.typname AS type_name,
488
- array_agg(e.enumlabel ORDER BY e.enumsortorder) AS values
489
- FROM pg_type t
490
- JOIN pg_namespace n ON t.typnamespace = n.oid
491
- JOIN pg_enum e ON t.oid = e.enumtypid
492
- WHERE n.nspname = $1
493
- GROUP BY n.nspname, t.typname
494
- ORDER BY n.nspname, t.typname
495
- `;
496
- /**
497
- * Type guard for string arrays. Used for runtime validation of introspected data.
498
- */
499
- function isStringArray(value) {
500
- return Array.isArray(value) && value.every((entry) => typeof entry === "string");
501
- }
502
- /**
503
- * Parses a PostgreSQL array value into a JavaScript string array.
504
- *
505
- * PostgreSQL's `pg` library may return `array_agg` results either as:
506
- * - A JavaScript array (when type parsers are configured)
507
- * - A string in PostgreSQL array literal format: `{value1,value2,...}`
508
- *
509
- * Handles PostgreSQL's quoting rules for array elements:
510
- * - Elements containing commas, double quotes, backslashes, or whitespace are double-quoted
511
- * - Inside quoted elements, `\"` represents `"` and `\\` represents `\`
512
- *
513
- * @param value - The value to parse (array or PostgreSQL array string)
514
- * @returns A string array, or null if the value cannot be parsed
515
- */
516
- function parsePostgresArray(value) {
517
- if (isStringArray(value)) return value;
518
- if (typeof value === "string" && value.startsWith("{") && value.endsWith("}")) {
519
- const inner = value.slice(1, -1);
520
- if (inner === "") return [];
521
- return parseArrayElements(inner);
522
- }
523
- return null;
524
- }
525
- function parseArrayElements(input) {
526
- const result = [];
527
- let i = 0;
528
- while (i < input.length) {
529
- if (input[i] === ",") {
530
- i++;
531
- continue;
532
- }
533
- if (input[i] === "\"") {
534
- i++;
535
- let element = "";
536
- while (i < input.length && input[i] !== "\"") {
537
- if (input[i] === "\\" && i + 1 < input.length) {
538
- i++;
539
- element += input[i];
540
- } else element += input[i];
541
- i++;
542
- }
543
- i++;
544
- result.push(element);
545
- } else {
546
- const nextComma = input.indexOf(",", i);
547
- if (nextComma === -1) {
548
- result.push(input.slice(i).trim());
549
- i = input.length;
550
- } else {
551
- result.push(input.slice(i, nextComma).trim());
552
- i = nextComma;
553
- }
554
- }
555
- }
556
- return result;
557
- }
558
- /**
559
- * Extracts enum values from a StorageTypeInstance.
560
- * Returns null if values are missing or invalid.
561
- */
562
- function getEnumValues(typeInstance) {
563
- const values = typeInstance.typeParams?.["values"];
564
- return isStringArray(values) ? values : null;
565
- }
566
- /**
567
- * Reads existing enum values from the schema IR for a given native type.
568
- * Uses optional chaining to simplify navigation through the annotations structure.
569
- */
570
- function readExistingEnumValues(schema, nativeType) {
571
- const existing = ((schema.annotations?.["pg"])?.["storageTypes"])?.[nativeType];
572
- if (!existing || existing.codecId !== PG_ENUM_CODEC_ID) return null;
573
- return getEnumValues(existing);
574
- }
575
- /**
576
- * Determines what changes are needed to transform existing enum values to desired values.
577
- *
578
- * Returns one of:
579
- * - `unchanged`: No changes needed, values match exactly
580
- * - `add_values`: New values can be safely appended (PostgreSQL supports this)
581
- * - `rebuild`: Full enum rebuild required (value removal, reordering, or both)
582
- *
583
- * Note: PostgreSQL enums can only have values added (not removed or reordered) without
584
- * a full type rebuild involving temp type creation and column migration.
585
- *
586
- * @param existing - Current enum values in the database
587
- * @param desired - Target enum values from the contract
588
- * @returns The type of change required
589
- */
590
- function determineEnumDiff(existing, desired) {
591
- if (arraysEqual(existing, desired)) return { kind: "unchanged" };
592
- const existingSet = new Set(existing);
593
- const desiredSet = new Set(desired);
594
- const missingValues = desired.filter((value) => !existingSet.has(value));
595
- const removedValues = existing.filter((value) => !desiredSet.has(value));
596
- const orderMismatch = missingValues.length === 0 && removedValues.length === 0 && !arraysEqual(existing, desired);
597
- if (removedValues.length > 0 || orderMismatch) return {
598
- kind: "rebuild",
599
- removedValues
600
- };
601
- return {
602
- kind: "add_values",
603
- values: missingValues
604
- };
605
- }
606
- function enumTypeExistsCheck$1(schemaName, typeName, exists = true) {
607
- return `SELECT ${exists ? "EXISTS" : "NOT EXISTS"} (
608
- SELECT 1
609
- FROM pg_type t
610
- JOIN pg_namespace n ON t.typnamespace = n.oid
611
- WHERE n.nspname = '${escapeLiteral(schemaName)}'
612
- AND t.typname = '${escapeLiteral(typeName)}'
613
- )`;
614
- }
615
- function buildCreateEnumOperation(typeName, nativeType, schemaName, values) {
616
- for (const value of values) validateEnumValueLength(value, typeName);
617
- const literalValues = values.map((value) => `'${escapeLiteral(value)}'`).join(", ");
618
- const qualifiedType = qualifyName(schemaName, nativeType);
619
- return {
620
- id: `type.${typeName}`,
621
- label: `Create type ${typeName}`,
622
- summary: `Creates enum type ${typeName}`,
623
- operationClass: "additive",
624
- target: { id: "postgres" },
625
- precheck: [{
626
- description: `ensure type "${nativeType}" does not exist`,
627
- sql: enumTypeExistsCheck$1(schemaName, nativeType, false)
628
- }],
629
- execute: [{
630
- description: `create type "${nativeType}"`,
631
- sql: `CREATE TYPE ${qualifiedType} AS ENUM (${literalValues})`
632
- }],
633
- postcheck: [{
634
- description: `verify type "${nativeType}" exists`,
635
- sql: enumTypeExistsCheck$1(schemaName, nativeType)
636
- }]
637
- };
638
- }
639
- /**
640
- * Computes the optimal position for inserting a new enum value to maintain
641
- * the desired order relative to existing values.
642
- *
643
- * PostgreSQL's `ALTER TYPE ADD VALUE` supports BEFORE/AFTER positioning.
644
- * This function finds the best reference value by:
645
- * 1. Looking for the nearest preceding value that already exists
646
- * 2. Falling back to the nearest following value if no preceding exists
647
- * 3. Defaulting to end-of-list if no reference is found
648
- *
649
- * @param options.desired - The target ordered list of all enum values
650
- * @param options.desiredIndex - Index of the value being inserted in the desired list
651
- * @param options.current - Current list of enum values (being built up incrementally)
652
- * @returns SQL clause (e.g., " AFTER 'x'") and insert position for tracking
653
- */
654
- function computeInsertPosition(options) {
655
- const { desired, desiredIndex, current } = options;
656
- const currentSet = new Set(current);
657
- const previous = desired.slice(0, desiredIndex).reverse().find((candidate) => currentSet.has(candidate));
658
- const next = desired.slice(desiredIndex + 1).find((candidate) => currentSet.has(candidate));
659
- return {
660
- clause: previous ? ` AFTER '${escapeLiteral(previous)}'` : next ? ` BEFORE '${escapeLiteral(next)}'` : "",
661
- insertAt: previous ? current.indexOf(previous) + 1 : next ? current.indexOf(next) : current.length
662
- };
663
- }
664
- /**
665
- * Builds operations to add new enum values to an existing PostgreSQL enum type.
666
- *
667
- * Each new value is added with `ALTER TYPE ... ADD VALUE IF NOT EXISTS` for idempotency.
668
- * Values are inserted in the correct order using BEFORE/AFTER positioning to match
669
- * the desired final order.
670
- *
671
- * This is a safe, non-destructive operation - existing data is not affected.
672
- *
673
- * @param options.typeName - Contract-level type name (e.g., 'Role')
674
- * @param options.nativeType - PostgreSQL type name (e.g., 'role')
675
- * @param options.schemaName - PostgreSQL schema (e.g., 'public')
676
- * @param options.desired - Target ordered list of all enum values
677
- * @param options.existing - Current enum values in the database
678
- * @returns Array of migration operations to add each missing value
679
- */
680
- function buildAddValueOperations(options) {
681
- const { typeName, nativeType, schemaName } = options;
682
- const current = [...options.existing];
683
- const currentSet = new Set(current);
684
- const operations = [];
685
- for (let index = 0; index < options.desired.length; index += 1) {
686
- const value = options.desired[index];
687
- if (value === void 0) continue;
688
- if (currentSet.has(value)) continue;
689
- validateEnumValueLength(value, typeName);
690
- const { clause, insertAt } = computeInsertPosition({
691
- desired: options.desired,
692
- desiredIndex: index,
693
- current
694
- });
695
- operations.push({
696
- id: `type.${typeName}.value.${value}`,
697
- label: `Add value ${value} to ${typeName}`,
698
- summary: `Adds enum value ${value} to ${typeName}`,
699
- operationClass: "widening",
700
- target: { id: "postgres" },
701
- precheck: [],
702
- execute: [{
703
- description: `add value "${value}" if not exists`,
704
- sql: `ALTER TYPE ${qualifyName(schemaName, nativeType)} ADD VALUE IF NOT EXISTS '${escapeLiteral(value)}'${clause}`
705
- }],
706
- postcheck: []
707
- });
708
- current.splice(insertAt, 0, value);
709
- currentSet.add(value);
710
- }
711
- return operations;
712
- }
713
- /**
714
- * Collects columns using the enum type from the contract (desired state).
715
- * Used for type-safe reference tracking.
716
- */
717
- function collectEnumColumnsFromContract(contract, typeName, nativeType) {
718
- const columns = [];
719
- for (const [tableName, table] of Object.entries(contract.storage.tables)) for (const [columnName, column] of Object.entries(table.columns)) if (column.typeRef === typeName || column.nativeType === nativeType && column.codecId === PG_ENUM_CODEC_ID) columns.push({
720
- table: tableName,
721
- column: columnName
722
- });
723
- return columns;
724
- }
725
- /**
726
- * Collects columns using the enum type from the schema IR (live database state).
727
- * This ensures we find ALL dependent columns, including those added outside the contract
728
- * (e.g., manual DDL), which is critical for safe enum rebuild operations.
729
- */
730
- function collectEnumColumnsFromSchema(schema, nativeType) {
731
- const columns = [];
732
- for (const [tableName, table] of Object.entries(schema.tables)) for (const [columnName, column] of Object.entries(table.columns)) if (column.nativeType === nativeType) columns.push({
733
- table: tableName,
734
- column: columnName
735
- });
736
- return columns;
737
- }
738
- /**
739
- * Collects all columns using the enum type from both contract AND live database.
740
- * Merges and deduplicates to ensure we migrate ALL dependent columns during rebuild.
741
- *
742
- * This is critical for data integrity: if a column exists in the database using
743
- * this enum but is not in the contract (e.g., added via manual DDL), we must
744
- * still migrate it to avoid DROP TYPE failures.
745
- */
746
- function collectAllEnumColumns(contract, schema, typeName, nativeType) {
747
- const contractColumns = collectEnumColumnsFromContract(contract, typeName, nativeType);
748
- const schemaColumns = collectEnumColumnsFromSchema(schema, nativeType);
749
- const seen = /* @__PURE__ */ new Set();
750
- const result = [];
751
- for (const col of [...contractColumns, ...schemaColumns]) {
752
- const key = `${col.table}.${col.column}`;
753
- if (!seen.has(key)) {
754
- seen.add(key);
755
- result.push(col);
756
- }
757
- }
758
- return result.sort((a, b) => {
759
- const tableCompare = a.table.localeCompare(b.table);
760
- return tableCompare !== 0 ? tableCompare : a.column.localeCompare(b.column);
761
- });
762
- }
763
- /**
764
- * Builds a SQL check to verify a column's type matches an expected type.
765
- */
766
- function columnTypeCheck$1(options) {
767
- return `SELECT EXISTS (
768
- SELECT 1
769
- FROM information_schema.columns
770
- WHERE table_schema = '${escapeLiteral(options.schemaName)}'
771
- AND table_name = '${escapeLiteral(options.tableName)}'
772
- AND column_name = '${escapeLiteral(options.columnName)}'
773
- AND udt_name = '${escapeLiteral(options.expectedType)}'
774
- )`;
775
- }
776
- /** PostgreSQL maximum identifier length (NAMEDATALEN - 1) */
777
- const MAX_IDENTIFIER_LENGTH = 63;
778
- /** Suffix added to enum type names during rebuild operations */
779
- const REBUILD_SUFFIX = "__pn_rebuild";
780
- /**
781
- * Builds an SQL check to verify no rows contain any of the removed enum values.
782
- * This prevents data loss during enum rebuild operations.
783
- *
784
- * @param schemaName - PostgreSQL schema name
785
- * @param tableName - Table containing the enum column
786
- * @param columnName - Column using the enum type
787
- * @param removedValues - Array of enum values being removed
788
- * @returns SQL query that returns true if no rows contain removed values
789
- */
790
- function noRemovedValuesExistCheck(schemaName, tableName, columnName, removedValues) {
791
- if (removedValues.length === 0) return "SELECT true";
792
- const valuesList = removedValues.map((v) => `'${escapeLiteral(v)}'`).join(", ");
793
- return `SELECT NOT EXISTS (
794
- SELECT 1 FROM ${qualifyName(schemaName, tableName)}
795
- WHERE ${quoteIdentifier(columnName)}::text IN (${valuesList})
796
- LIMIT 1
797
- )`;
798
- }
799
- /**
800
- * Builds a migration operation to recreate a PostgreSQL enum type with updated values.
801
- *
802
- * This is required when:
803
- * - Enum values are removed (PostgreSQL doesn't support direct removal)
804
- * - Enum values are reordered (PostgreSQL doesn't support reordering)
805
- *
806
- * The operation:
807
- * 1. Creates a new enum type with the desired values (temp name)
808
- * 2. Migrates all columns to use the new type via text cast
809
- * 3. Drops the original type
810
- * 4. Renames the temp type to the original name
811
- *
812
- * IMPORTANT: If values are being removed and data exists using those values,
813
- * the operation will fail at the precheck stage with a clear error message.
814
- * This prevents silent data loss.
815
- *
816
- * @param options.typeName - Contract-level type name
817
- * @param options.nativeType - PostgreSQL type name
818
- * @param options.schemaName - PostgreSQL schema
819
- * @param options.values - Desired final enum values
820
- * @param options.removedValues - Values being removed (for data loss checks)
821
- * @param options.contract - Full contract for column discovery
822
- * @param options.schema - Current schema IR for column discovery
823
- * @returns Migration operation for full enum rebuild
824
- */
825
- function buildRecreateEnumOperation(options) {
826
- const tempTypeName = `${options.nativeType}${REBUILD_SUFFIX}`;
827
- if (tempTypeName.length > MAX_IDENTIFIER_LENGTH) {
828
- const maxBaseLength = MAX_IDENTIFIER_LENGTH - 12;
829
- throw new Error(`Enum type name "${options.nativeType}" is too long for rebuild operation. Maximum length is ${maxBaseLength} characters (type name + "${REBUILD_SUFFIX}" suffix must fit within PostgreSQL's ${MAX_IDENTIFIER_LENGTH}-character identifier limit).`);
830
- }
831
- const qualifiedOriginal = qualifyName(options.schemaName, options.nativeType);
832
- const qualifiedTemp = qualifyName(options.schemaName, tempTypeName);
833
- const literalValues = options.values.map((value) => `'${escapeLiteral(value)}'`).join(", ");
834
- const columnRefs = collectAllEnumColumns(options.contract, options.schema, options.typeName, options.nativeType);
835
- const alterColumns = columnRefs.map((ref) => ({
836
- description: `alter ${ref.table}.${ref.column} to ${tempTypeName}`,
837
- sql: `ALTER TABLE ${qualifyName(options.schemaName, ref.table)}
838
- ALTER COLUMN ${quoteIdentifier(ref.column)}
839
- TYPE ${qualifiedTemp}
840
- USING ${quoteIdentifier(ref.column)}::text::${qualifiedTemp}`
841
- }));
842
- const postchecks = [
843
- {
844
- description: `verify type "${options.nativeType}" exists`,
845
- sql: enumTypeExistsCheck$1(options.schemaName, options.nativeType)
846
- },
847
- {
848
- description: `verify temp type "${tempTypeName}" was removed`,
849
- sql: enumTypeExistsCheck$1(options.schemaName, tempTypeName, false)
850
- },
851
- ...columnRefs.map((ref) => ({
852
- description: `verify ${ref.table}.${ref.column} uses type "${options.nativeType}"`,
853
- sql: columnTypeCheck$1({
854
- schemaName: options.schemaName,
855
- tableName: ref.table,
856
- columnName: ref.column,
857
- expectedType: options.nativeType
858
- })
859
- }))
860
- ];
861
- return {
862
- id: `type.${options.typeName}.rebuild`,
863
- label: `Rebuild type ${options.typeName}`,
864
- summary: `Recreates enum type ${options.typeName} with updated values`,
865
- operationClass: "destructive",
866
- target: { id: "postgres" },
867
- precheck: [{
868
- description: `ensure type "${options.nativeType}" exists`,
869
- sql: enumTypeExistsCheck$1(options.schemaName, options.nativeType)
870
- }, ...options.removedValues.length > 0 ? columnRefs.map((ref) => ({
871
- description: `ensure no rows in ${ref.table}.${ref.column} contain removed values (${options.removedValues.join(", ")})`,
872
- sql: noRemovedValuesExistCheck(options.schemaName, ref.table, ref.column, options.removedValues)
873
- })) : []],
874
- execute: [
875
- {
876
- description: `drop orphaned temp type "${tempTypeName}" if exists`,
877
- sql: `DROP TYPE IF EXISTS ${qualifiedTemp}`
878
- },
879
- {
880
- description: `create temp type "${tempTypeName}"`,
881
- sql: `CREATE TYPE ${qualifiedTemp} AS ENUM (${literalValues})`
882
- },
883
- ...alterColumns,
884
- {
885
- description: `drop type "${options.nativeType}"`,
886
- sql: `DROP TYPE ${qualifiedOriginal}`
887
- },
888
- {
889
- description: `rename type "${tempTypeName}" to "${options.nativeType}"`,
890
- sql: `ALTER TYPE ${qualifiedTemp} RENAME TO ${quoteIdentifier(options.nativeType)}`
891
- }
892
- ],
893
- postcheck: postchecks
894
- };
895
- }
896
- /**
897
- * Postgres enum hooks for planning, verifying, and introspecting `storage.types`.
898
- */
899
- const pgEnumControlHooks = {
900
- planTypeOperations: ({ typeName, typeInstance, contract, schema, schemaName }) => {
901
- const desired = getEnumValues(typeInstance);
902
- if (!desired || desired.length === 0) return { operations: [] };
903
- const schemaNamespace = schemaName ?? "public";
904
- const existing = readExistingEnumValues(schema, typeInstance.nativeType);
905
- if (!existing) return { operations: [buildCreateEnumOperation(typeName, typeInstance.nativeType, schemaNamespace, desired)] };
906
- const diff = determineEnumDiff(existing, desired);
907
- if (diff.kind === "unchanged") return { operations: [] };
908
- if (diff.kind === "rebuild") return { operations: [buildRecreateEnumOperation({
909
- typeName,
910
- nativeType: typeInstance.nativeType,
911
- schemaName: schemaNamespace,
912
- values: desired,
913
- removedValues: diff.removedValues,
914
- contract,
915
- schema
916
- })] };
917
- return { operations: buildAddValueOperations({
918
- typeName,
919
- nativeType: typeInstance.nativeType,
920
- schemaName: schemaNamespace,
921
- desired,
922
- existing
923
- }) };
924
- },
925
- verifyType: ({ typeName, typeInstance, schema }) => {
926
- const desired = getEnumValues(typeInstance);
927
- if (!desired) return [];
928
- const existing = readExistingEnumValues(schema, typeInstance.nativeType);
929
- if (!existing) return [{
930
- kind: "type_missing",
931
- typeName,
932
- message: `Type "${typeName}" is missing from database`
933
- }];
934
- const diff = determineEnumDiff(existing, desired);
935
- if (diff.kind === "unchanged") return [];
936
- const existingSet = new Set(existing);
937
- const desiredSet = new Set(desired);
938
- const addedValues = desired.filter((v) => !existingSet.has(v));
939
- const removedValues = existing.filter((v) => !desiredSet.has(v));
940
- return [{
941
- kind: "enum_values_changed",
942
- typeName,
943
- addedValues,
944
- removedValues,
945
- message: diff.kind === "add_values" ? `Enum type "${typeName}" needs new values: ${addedValues.join(", ")}` : `Enum type "${typeName}" values changed (requires rebuild): +[${addedValues.join(", ")}] -[${removedValues.join(", ")}]`
946
- }];
947
- },
948
- introspectTypes: async ({ driver, schemaName }) => {
949
- const namespace = schemaName ?? "public";
950
- const result = await driver.query(ENUM_INTROSPECT_QUERY, [namespace]);
951
- const types = {};
952
- for (const row of result.rows) {
953
- const values = parsePostgresArray(row.values);
954
- if (!values) throw new Error(`Failed to parse enum values for type "${row.type_name}": unexpected format: ${JSON.stringify(row.values)}`);
955
- types[row.type_name] = {
956
- codecId: PG_ENUM_CODEC_ID,
957
- nativeType: row.type_name,
958
- typeParams: { values }
959
- };
960
- }
961
- return types;
962
- }
963
- };
964
- /** Creates a type import spec for codec types */
965
- const codecTypeImport = (named) => ({
966
- package: "@prisma-next/adapter-postgres/codec-types",
967
- named,
968
- alias: named
969
- });
970
- function isPositiveInteger(value) {
971
- return typeof value === "number" && Number.isFinite(value) && Number.isInteger(value) && value > 0;
972
- }
973
- function isNonNegativeInteger(value) {
974
- return typeof value === "number" && Number.isFinite(value) && Number.isInteger(value) && value >= 0;
975
- }
976
- function expandLength({ nativeType, typeParams }) {
977
- if (!typeParams || !("length" in typeParams)) return nativeType;
978
- const length = typeParams["length"];
979
- if (!isPositiveInteger(length)) throw new Error(`Invalid "length" type parameter for "${nativeType}": expected a positive integer, got ${JSON.stringify(length)}`);
980
- return `${nativeType}(${length})`;
981
- }
982
- function expandPrecision({ nativeType, typeParams }) {
983
- if (!typeParams || !("precision" in typeParams)) return nativeType;
984
- const precision = typeParams["precision"];
985
- if (!isPositiveInteger(precision)) throw new Error(`Invalid "precision" type parameter for "${nativeType}": expected a positive integer, got ${JSON.stringify(precision)}`);
986
- return `${nativeType}(${precision})`;
987
- }
988
- function expandNumeric({ nativeType, typeParams }) {
989
- const hasPrecision = typeParams && "precision" in typeParams;
990
- const hasScale = typeParams && "scale" in typeParams;
991
- if (!hasPrecision && !hasScale) return nativeType;
992
- if (!hasPrecision && hasScale) throw new Error(`Invalid type parameters for "${nativeType}": "scale" requires "precision" to be specified`);
993
- if (hasPrecision) {
994
- const precision = typeParams["precision"];
995
- if (!isPositiveInteger(precision)) throw new Error(`Invalid "precision" type parameter for "${nativeType}": expected a positive integer, got ${JSON.stringify(precision)}`);
996
- if (hasScale) {
997
- const scale = typeParams["scale"];
998
- if (!isNonNegativeInteger(scale)) throw new Error(`Invalid "scale" type parameter for "${nativeType}": expected a non-negative integer, got ${JSON.stringify(scale)}`);
999
- return `${nativeType}(${precision},${scale})`;
1000
- }
1001
- return `${nativeType}(${precision})`;
1002
- }
1003
- return nativeType;
1004
- }
1005
- const lengthHooks = { expandNativeType: expandLength };
1006
- const precisionHooks = { expandNativeType: expandPrecision };
1007
- const numericHooks = { expandNativeType: expandNumeric };
1008
- const identityHooks = { expandNativeType: ({ nativeType }) => nativeType };
1009
- const postgresAdapterDescriptorMeta = {
1010
- kind: "adapter",
1011
- familyId: "sql",
1012
- targetId: "postgres",
1013
- id: "postgres",
1014
- version: "0.0.1",
1015
- capabilities: {
1016
- postgres: {
1017
- orderBy: true,
1018
- limit: true,
1019
- lateral: true,
1020
- jsonAgg: true,
1021
- returning: true
1022
- },
1023
- sql: {
1024
- enums: true,
1025
- returning: true,
1026
- defaultInInsert: true
1027
- }
1028
- },
1029
- types: {
1030
- codecTypes: {
1031
- codecInstances: Object.values(codecDefinitions).map((def) => def.codec),
1032
- import: {
1033
- package: "@prisma-next/adapter-postgres/codec-types",
1034
- named: "CodecTypes",
1035
- alias: "PgTypes"
1036
- },
1037
- typeImports: [
1038
- {
1039
- package: "@prisma-next/adapter-postgres/codec-types",
1040
- named: "JsonValue",
1041
- alias: "JsonValue"
1042
- },
1043
- codecTypeImport("Char"),
1044
- codecTypeImport("Varchar"),
1045
- codecTypeImport("Numeric"),
1046
- codecTypeImport("Bit"),
1047
- codecTypeImport("VarBit"),
1048
- codecTypeImport("Timestamp"),
1049
- codecTypeImport("Timestamptz"),
1050
- codecTypeImport("Time"),
1051
- codecTypeImport("Timetz"),
1052
- codecTypeImport("Interval")
1053
- ],
1054
- controlPlaneHooks: {
1055
- [SQL_CHAR_CODEC_ID]: lengthHooks,
1056
- [SQL_VARCHAR_CODEC_ID]: lengthHooks,
1057
- [SQL_TIMESTAMP_CODEC_ID]: precisionHooks,
1058
- [PG_CHAR_CODEC_ID]: lengthHooks,
1059
- [PG_VARCHAR_CODEC_ID]: lengthHooks,
1060
- [PG_NUMERIC_CODEC_ID]: numericHooks,
1061
- [PG_BIT_CODEC_ID]: lengthHooks,
1062
- [PG_VARBIT_CODEC_ID]: lengthHooks,
1063
- [PG_TIMESTAMP_CODEC_ID]: precisionHooks,
1064
- [PG_TIMESTAMPTZ_CODEC_ID]: precisionHooks,
1065
- [PG_TIME_CODEC_ID]: precisionHooks,
1066
- [PG_TIMETZ_CODEC_ID]: precisionHooks,
1067
- [PG_INTERVAL_CODEC_ID]: precisionHooks,
1068
- [PG_ENUM_CODEC_ID]: pgEnumControlHooks,
1069
- [PG_JSON_CODEC_ID]: identityHooks,
1070
- [PG_JSONB_CODEC_ID]: identityHooks
1071
- }
1072
- },
1073
- storage: [
1074
- {
1075
- typeId: PG_TEXT_CODEC_ID,
1076
- familyId: "sql",
1077
- targetId: "postgres",
1078
- nativeType: "text"
1079
- },
1080
- {
1081
- typeId: SQL_TEXT_CODEC_ID,
1082
- familyId: "sql",
1083
- targetId: "postgres",
1084
- nativeType: "text"
1085
- },
1086
- {
1087
- typeId: SQL_CHAR_CODEC_ID,
1088
- familyId: "sql",
1089
- targetId: "postgres",
1090
- nativeType: "character"
1091
- },
1092
- {
1093
- typeId: SQL_VARCHAR_CODEC_ID,
1094
- familyId: "sql",
1095
- targetId: "postgres",
1096
- nativeType: "character varying"
1097
- },
1098
- {
1099
- typeId: SQL_INT_CODEC_ID,
1100
- familyId: "sql",
1101
- targetId: "postgres",
1102
- nativeType: "int4"
1103
- },
1104
- {
1105
- typeId: SQL_FLOAT_CODEC_ID,
1106
- familyId: "sql",
1107
- targetId: "postgres",
1108
- nativeType: "float8"
1109
- },
1110
- {
1111
- typeId: SQL_TIMESTAMP_CODEC_ID,
1112
- familyId: "sql",
1113
- targetId: "postgres",
1114
- nativeType: "timestamp"
1115
- },
1116
- {
1117
- typeId: PG_CHAR_CODEC_ID,
1118
- familyId: "sql",
1119
- targetId: "postgres",
1120
- nativeType: "character"
1121
- },
1122
- {
1123
- typeId: PG_VARCHAR_CODEC_ID,
1124
- familyId: "sql",
1125
- targetId: "postgres",
1126
- nativeType: "character varying"
1127
- },
1128
- {
1129
- typeId: PG_INT_CODEC_ID,
1130
- familyId: "sql",
1131
- targetId: "postgres",
1132
- nativeType: "int4"
1133
- },
1134
- {
1135
- typeId: PG_FLOAT_CODEC_ID,
1136
- familyId: "sql",
1137
- targetId: "postgres",
1138
- nativeType: "float8"
1139
- },
1140
- {
1141
- typeId: PG_INT4_CODEC_ID,
1142
- familyId: "sql",
1143
- targetId: "postgres",
1144
- nativeType: "int4"
1145
- },
1146
- {
1147
- typeId: PG_INT2_CODEC_ID,
1148
- familyId: "sql",
1149
- targetId: "postgres",
1150
- nativeType: "int2"
1151
- },
1152
- {
1153
- typeId: PG_INT8_CODEC_ID,
1154
- familyId: "sql",
1155
- targetId: "postgres",
1156
- nativeType: "int8"
1157
- },
1158
- {
1159
- typeId: PG_FLOAT4_CODEC_ID,
1160
- familyId: "sql",
1161
- targetId: "postgres",
1162
- nativeType: "float4"
1163
- },
1164
- {
1165
- typeId: PG_FLOAT8_CODEC_ID,
1166
- familyId: "sql",
1167
- targetId: "postgres",
1168
- nativeType: "float8"
1169
- },
1170
- {
1171
- typeId: PG_NUMERIC_CODEC_ID,
1172
- familyId: "sql",
1173
- targetId: "postgres",
1174
- nativeType: "numeric"
1175
- },
1176
- {
1177
- typeId: PG_TIMESTAMP_CODEC_ID,
1178
- familyId: "sql",
1179
- targetId: "postgres",
1180
- nativeType: "timestamp"
1181
- },
1182
- {
1183
- typeId: PG_TIMESTAMPTZ_CODEC_ID,
1184
- familyId: "sql",
1185
- targetId: "postgres",
1186
- nativeType: "timestamptz"
1187
- },
1188
- {
1189
- typeId: PG_TIME_CODEC_ID,
1190
- familyId: "sql",
1191
- targetId: "postgres",
1192
- nativeType: "time"
1193
- },
1194
- {
1195
- typeId: PG_TIMETZ_CODEC_ID,
1196
- familyId: "sql",
1197
- targetId: "postgres",
1198
- nativeType: "timetz"
1199
- },
1200
- {
1201
- typeId: PG_BOOL_CODEC_ID,
1202
- familyId: "sql",
1203
- targetId: "postgres",
1204
- nativeType: "bool"
1205
- },
1206
- {
1207
- typeId: PG_BIT_CODEC_ID,
1208
- familyId: "sql",
1209
- targetId: "postgres",
1210
- nativeType: "bit"
1211
- },
1212
- {
1213
- typeId: PG_VARBIT_CODEC_ID,
1214
- familyId: "sql",
1215
- targetId: "postgres",
1216
- nativeType: "bit varying"
1217
- },
1218
- {
1219
- typeId: PG_INTERVAL_CODEC_ID,
1220
- familyId: "sql",
1221
- targetId: "postgres",
1222
- nativeType: "interval"
1223
- },
1224
- {
1225
- typeId: PG_JSON_CODEC_ID,
1226
- familyId: "sql",
1227
- targetId: "postgres",
1228
- nativeType: "json"
1229
- },
1230
- {
1231
- typeId: PG_JSONB_CODEC_ID,
1232
- familyId: "sql",
1233
- targetId: "postgres",
1234
- nativeType: "jsonb"
1235
- }
1236
- ],
1237
- queryOperationTypes: { import: {
1238
- package: "@prisma-next/adapter-postgres/operation-types",
1239
- named: "QueryOperationTypes",
1240
- alias: "PgAdapterQueryOps"
1241
- } }
1242
- }
1243
- };
1244
-
1245
- //#endregion
1246
- //#region ../../../1-framework/2-authoring/ids/dist/index.mjs
1247
- const builtinGeneratorIds = [
1248
- "ulid",
1249
- "nanoid",
1250
- "uuidv7",
1251
- "uuidv4",
1252
- "cuid2",
1253
- "ksuid"
1254
- ];
1255
- function resolveNanoidColumnDescriptor(params) {
1256
- const rawSize = params?.["size"];
1257
- if (rawSize === void 0) return {
1258
- type: {
1259
- codecId: "sql/char@1",
1260
- nativeType: "character"
1261
- },
1262
- typeParams: { length: 21 }
1263
- };
1264
- if (typeof rawSize !== "number" || !Number.isInteger(rawSize) || rawSize < 2 || rawSize > 255) throw new Error("nanoid size must be an integer between 2 and 255");
1265
- return {
1266
- type: {
1267
- codecId: "sql/char@1",
1268
- nativeType: "character"
1269
- },
1270
- typeParams: { length: rawSize }
1271
- };
1272
- }
1273
- const builtinGeneratorMetadataById = {
1274
- ulid: {
1275
- applicableCodecIds: ["pg/text@1", "sql/char@1"],
1276
- generatedColumnDescriptor: {
1277
- type: {
1278
- codecId: "sql/char@1",
1279
- nativeType: "character"
1280
- },
1281
- typeParams: { length: 26 }
1282
- }
1283
- },
1284
- nanoid: {
1285
- applicableCodecIds: ["pg/text@1", "sql/char@1"],
1286
- generatedColumnDescriptor: {
1287
- type: {
1288
- codecId: "sql/char@1",
1289
- nativeType: "character"
1290
- },
1291
- typeParams: { length: 21 }
1292
- },
1293
- resolveGeneratedColumnDescriptor: resolveNanoidColumnDescriptor
1294
- },
1295
- uuidv7: {
1296
- applicableCodecIds: ["pg/text@1", "sql/char@1"],
1297
- generatedColumnDescriptor: {
1298
- type: {
1299
- codecId: "sql/char@1",
1300
- nativeType: "character"
1301
- },
1302
- typeParams: { length: 36 }
1303
- }
1304
- },
1305
- uuidv4: {
1306
- applicableCodecIds: ["pg/text@1", "sql/char@1"],
1307
- generatedColumnDescriptor: {
1308
- type: {
1309
- codecId: "sql/char@1",
1310
- nativeType: "character"
1311
- },
1312
- typeParams: { length: 36 }
1313
- }
1314
- },
1315
- cuid2: {
1316
- applicableCodecIds: ["pg/text@1", "sql/char@1"],
1317
- generatedColumnDescriptor: {
1318
- type: {
1319
- codecId: "sql/char@1",
1320
- nativeType: "character"
1321
- },
1322
- typeParams: { length: 24 }
1323
- }
1324
- },
1325
- ksuid: {
1326
- applicableCodecIds: ["pg/text@1", "sql/char@1"],
1327
- generatedColumnDescriptor: {
1328
- type: {
1329
- codecId: "sql/char@1",
1330
- nativeType: "character"
1331
- },
1332
- typeParams: { length: 27 }
1333
- }
1334
- }
1335
- };
1336
- const builtinGeneratorRegistryMetadata = builtinGeneratorIds.map((id) => ({
1337
- id,
1338
- applicableCodecIds: builtinGeneratorMetadataById[id].applicableCodecIds
1339
- }));
1340
- function resolveBuiltinGeneratedColumnDescriptor(input) {
1341
- const metadata = builtinGeneratorMetadataById[input.id];
1342
- if (metadata.resolveGeneratedColumnDescriptor) return metadata.resolveGeneratedColumnDescriptor(input.params);
1343
- return metadata.generatedColumnDescriptor;
1344
- }
1345
-
1346
- //#endregion
1347
- //#region ../../6-adapters/postgres/dist/control.mjs
1348
- /**
1349
- * Pre-compiled regex patterns for performance.
1350
- * These are compiled once at module load time rather than on each function call.
1351
- */
1352
- const NEXTVAL_PATTERN = /^nextval\s*\(/i;
1353
- const NOW_FUNCTION_PATTERN = /^(now\s*\(\s*\)|CURRENT_TIMESTAMP)$/i;
1354
- const CLOCK_TIMESTAMP_PATTERN = /^clock_timestamp\s*\(\s*\)$/i;
1355
- const TIMESTAMP_CAST_SUFFIX = /::timestamp(?:tz|\s+(?:with|without)\s+time\s+zone)?$/i;
1356
- const TEXT_CAST_SUFFIX = /::text$/i;
1357
- const NOW_LITERAL_PATTERN = /^'now'$/i;
1358
- const UUID_PATTERN = /^gen_random_uuid\s*\(\s*\)$/i;
1359
- const UUID_OSSP_PATTERN = /^uuid_generate_v4\s*\(\s*\)$/i;
1360
- const NULL_PATTERN = /^NULL(?:::.+)?$/i;
1361
- const TRUE_PATTERN = /^true$/i;
1362
- const FALSE_PATTERN = /^false$/i;
1363
- const NUMERIC_PATTERN = /^-?\d+(\.\d+)?$/;
1364
- const STRING_LITERAL_PATTERN = /^'((?:[^']|'')*)'(?:::(?:"[^"]+"|[\w\s]+)(?:\(\d+\))?)?$/;
1365
- /**
1366
- * Returns the canonical expression for a timestamp default function, or undefined
1367
- * if the expression is not a recognized timestamp default.
1368
- *
1369
- * Keeps now()/CURRENT_TIMESTAMP and clock_timestamp() distinct:
1370
- * - now(), CURRENT_TIMESTAMP, ('now'::text)::timestamp... → 'now()'
1371
- * - clock_timestamp(), clock_timestamp()::timestamptz → 'clock_timestamp()'
1372
- *
1373
- * These are semantically different in Postgres: now() returns the transaction
1374
- * start time (constant within a transaction), while clock_timestamp() returns
1375
- * the actual wall-clock time (can differ across rows in a single INSERT).
1376
- */
1377
- function canonicalizeTimestampDefault(expr) {
1378
- if (NOW_FUNCTION_PATTERN.test(expr)) return "now()";
1379
- if (CLOCK_TIMESTAMP_PATTERN.test(expr)) return "clock_timestamp()";
1380
- if (!TIMESTAMP_CAST_SUFFIX.test(expr)) return void 0;
1381
- let inner = expr.replace(TIMESTAMP_CAST_SUFFIX, "").trim();
1382
- if (inner.startsWith("(") && inner.endsWith(")")) inner = inner.slice(1, -1).trim();
1383
- if (NOW_FUNCTION_PATTERN.test(inner)) return "now()";
1384
- if (CLOCK_TIMESTAMP_PATTERN.test(inner)) return "clock_timestamp()";
1385
- inner = inner.replace(TEXT_CAST_SUFFIX, "").trim();
1386
- if (NOW_LITERAL_PATTERN.test(inner)) return "now()";
1387
- }
1388
- /**
1389
- * Parses a raw Postgres column default expression into a normalized ColumnDefault.
1390
- * This enables semantic comparison between contract defaults and introspected schema defaults.
1391
- *
1392
- * Used by the migration diff layer to normalize raw database defaults during comparison,
1393
- * keeping the introspection layer focused on faithful data capture.
1394
- *
1395
- * @param rawDefault - Raw default expression from information_schema.columns.column_default
1396
- * @param nativeType - Native column type, used for type-aware parsing (bigint tagging, JSON detection)
1397
- * @returns Normalized ColumnDefault or undefined if the expression cannot be parsed
1398
- */
1399
- function parsePostgresDefault(rawDefault, nativeType) {
1400
- const trimmed = rawDefault.trim();
1401
- const normalizedType = nativeType?.toLowerCase();
1402
- const isBigInt = normalizedType === "bigint" || normalizedType === "int8";
1403
- if (NEXTVAL_PATTERN.test(trimmed)) return {
1404
- kind: "function",
1405
- expression: "autoincrement()"
1406
- };
1407
- const canonicalTimestamp = canonicalizeTimestampDefault(trimmed);
1408
- if (canonicalTimestamp) return {
1409
- kind: "function",
1410
- expression: canonicalTimestamp
1411
- };
1412
- if (UUID_PATTERN.test(trimmed)) return {
1413
- kind: "function",
1414
- expression: "gen_random_uuid()"
1415
- };
1416
- if (UUID_OSSP_PATTERN.test(trimmed)) return {
1417
- kind: "function",
1418
- expression: "gen_random_uuid()"
1419
- };
1420
- if (NULL_PATTERN.test(trimmed)) return {
1421
- kind: "literal",
1422
- value: null
1423
- };
1424
- if (TRUE_PATTERN.test(trimmed)) return {
1425
- kind: "literal",
1426
- value: true
1427
- };
1428
- if (FALSE_PATTERN.test(trimmed)) return {
1429
- kind: "literal",
1430
- value: false
1431
- };
1432
- if (NUMERIC_PATTERN.test(trimmed)) {
1433
- const num = Number(trimmed);
1434
- if (!Number.isFinite(num)) return void 0;
1435
- if (isBigInt && !Number.isSafeInteger(num)) return {
1436
- kind: "literal",
1437
- value: trimmed
1438
- };
1439
- return {
1440
- kind: "literal",
1441
- value: num
1442
- };
1443
- }
1444
- const stringMatch = trimmed.match(STRING_LITERAL_PATTERN);
1445
- if (stringMatch?.[1] !== void 0) {
1446
- const unescaped = stringMatch[1].replace(/''/g, "'");
1447
- if (normalizedType === "json" || normalizedType === "jsonb") try {
1448
- return {
1449
- kind: "literal",
1450
- value: JSON.parse(unescaped)
1451
- };
1452
- } catch {}
1453
- if (isBigInt && NUMERIC_PATTERN.test(unescaped)) {
1454
- const num = Number(unescaped);
1455
- if (Number.isSafeInteger(num)) return {
1456
- kind: "literal",
1457
- value: num
1458
- };
1459
- return {
1460
- kind: "literal",
1461
- value: unescaped
1462
- };
1463
- }
1464
- return {
1465
- kind: "literal",
1466
- value: unescaped
1467
- };
1468
- }
1469
- return {
1470
- kind: "function",
1471
- expression: trimmed
1472
- };
1473
- }
1474
- /**
1475
- * Postgres control plane adapter for control-plane operations like introspection.
1476
- * Provides target-specific implementations for control-plane domain actions.
1477
- */
1478
- var PostgresControlAdapter = class {
1479
- familyId = "sql";
1480
- targetId = "postgres";
1481
- /**
1482
- * Target-specific normalizer for raw Postgres default expressions.
1483
- * Used by schema verification to normalize raw defaults before comparison.
1484
- */
1485
- normalizeDefault = parsePostgresDefault;
1486
- /**
1487
- * Target-specific normalizer for Postgres schema native type names.
1488
- * Used by schema verification to normalize introspected type names
1489
- * before comparison with contract native types.
1490
- */
1491
- normalizeNativeType = normalizeSchemaNativeType;
1492
- /**
1493
- * Introspects a Postgres database schema and returns a raw SqlSchemaIR.
1494
- *
1495
- * This is a pure schema discovery operation that queries the Postgres catalog
1496
- * and returns the schema structure without type mapping or contract enrichment.
1497
- * Type mapping and enrichment are handled separately by enrichment helpers.
1498
- *
1499
- * Uses batched queries to minimize database round trips (7 queries instead of 5T+3).
1500
- *
1501
- * @param driver - ControlDriverInstance<'sql', 'postgres'> instance for executing queries
1502
- * @param contract - Optional contract for contract-guided introspection (filtering, optimization)
1503
- * @param schema - Schema name to introspect (defaults to 'public')
1504
- * @returns Promise resolving to SqlSchemaIR representing the live database schema
1505
- */
1506
- async introspect(driver, _contract, schema = "public") {
1507
- const [tablesResult, columnsResult, pkResult, fkResult, uniqueResult, indexResult, extensionsResult] = await Promise.all([
1508
- driver.query(`SELECT table_name
1509
- FROM information_schema.tables
1510
- WHERE table_schema = $1
1511
- AND table_type = 'BASE TABLE'
1512
- ORDER BY table_name`, [schema]),
1513
- driver.query(`SELECT
1514
- c.table_name,
1515
- column_name,
1516
- data_type,
1517
- udt_name,
1518
- is_nullable,
1519
- character_maximum_length,
1520
- numeric_precision,
1521
- numeric_scale,
1522
- column_default,
1523
- format_type(a.atttypid, a.atttypmod) AS formatted_type
1524
- FROM information_schema.columns c
1525
- JOIN pg_catalog.pg_class cl
1526
- ON cl.relname = c.table_name
1527
- JOIN pg_catalog.pg_namespace ns
1528
- ON ns.nspname = c.table_schema
1529
- AND ns.oid = cl.relnamespace
1530
- JOIN pg_catalog.pg_attribute a
1531
- ON a.attrelid = cl.oid
1532
- AND a.attname = c.column_name
1533
- AND a.attnum > 0
1534
- AND NOT a.attisdropped
1535
- WHERE c.table_schema = $1
1536
- ORDER BY c.table_name, c.ordinal_position`, [schema]),
1537
- driver.query(`SELECT
1538
- tc.table_name,
1539
- tc.constraint_name,
1540
- kcu.column_name,
1541
- kcu.ordinal_position
1542
- FROM information_schema.table_constraints tc
1543
- JOIN information_schema.key_column_usage kcu
1544
- ON tc.constraint_name = kcu.constraint_name
1545
- AND tc.table_schema = kcu.table_schema
1546
- AND tc.table_name = kcu.table_name
1547
- WHERE tc.table_schema = $1
1548
- AND tc.constraint_type = 'PRIMARY KEY'
1549
- ORDER BY tc.table_name, kcu.ordinal_position`, [schema]),
1550
- driver.query(`SELECT
1551
- tc.table_name,
1552
- tc.constraint_name,
1553
- kcu.column_name,
1554
- kcu.ordinal_position,
1555
- ref_ns.nspname AS referenced_table_schema,
1556
- ref_cl.relname AS referenced_table_name,
1557
- ref_att.attname AS referenced_column_name,
1558
- rc.delete_rule,
1559
- rc.update_rule
1560
- FROM information_schema.table_constraints tc
1561
- JOIN information_schema.key_column_usage kcu
1562
- ON tc.constraint_name = kcu.constraint_name
1563
- AND tc.table_schema = kcu.table_schema
1564
- AND tc.table_name = kcu.table_name
1565
- JOIN pg_catalog.pg_constraint pgc
1566
- ON pgc.conname = tc.constraint_name
1567
- AND pgc.connamespace = (
1568
- SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = tc.table_schema
1569
- )
1570
- JOIN pg_catalog.pg_class ref_cl
1571
- ON ref_cl.oid = pgc.confrelid
1572
- JOIN pg_catalog.pg_namespace ref_ns
1573
- ON ref_ns.oid = ref_cl.relnamespace
1574
- JOIN pg_catalog.pg_attribute ref_att
1575
- ON ref_att.attrelid = pgc.confrelid
1576
- AND ref_att.attnum = pgc.confkey[kcu.ordinal_position]
1577
- JOIN information_schema.referential_constraints rc
1578
- ON rc.constraint_name = tc.constraint_name
1579
- AND rc.constraint_schema = tc.table_schema
1580
- WHERE tc.table_schema = $1
1581
- AND tc.constraint_type = 'FOREIGN KEY'
1582
- ORDER BY tc.table_name, tc.constraint_name, kcu.ordinal_position`, [schema]),
1583
- driver.query(`SELECT
1584
- tc.table_name,
1585
- tc.constraint_name,
1586
- kcu.column_name,
1587
- kcu.ordinal_position
1588
- FROM information_schema.table_constraints tc
1589
- JOIN information_schema.key_column_usage kcu
1590
- ON tc.constraint_name = kcu.constraint_name
1591
- AND tc.table_schema = kcu.table_schema
1592
- AND tc.table_name = kcu.table_name
1593
- WHERE tc.table_schema = $1
1594
- AND tc.constraint_type = 'UNIQUE'
1595
- ORDER BY tc.table_name, tc.constraint_name, kcu.ordinal_position`, [schema]),
1596
- driver.query(`SELECT
1597
- i.tablename,
1598
- i.indexname,
1599
- ix.indisunique,
1600
- a.attname,
1601
- a.attnum
1602
- FROM pg_indexes i
1603
- JOIN pg_class ic ON ic.relname = i.indexname
1604
- JOIN pg_namespace ins ON ins.oid = ic.relnamespace AND ins.nspname = $1
1605
- JOIN pg_index ix ON ix.indexrelid = ic.oid
1606
- JOIN pg_class t ON t.oid = ix.indrelid
1607
- JOIN pg_namespace tn ON tn.oid = t.relnamespace AND tn.nspname = $1
1608
- LEFT JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey) AND a.attnum > 0
1609
- WHERE i.schemaname = $1
1610
- AND NOT EXISTS (
1611
- SELECT 1
1612
- FROM information_schema.table_constraints tc
1613
- WHERE tc.table_schema = $1
1614
- AND tc.table_name = i.tablename
1615
- AND tc.constraint_name = i.indexname
1616
- )
1617
- ORDER BY i.tablename, i.indexname, a.attnum`, [schema]),
1618
- driver.query(`SELECT extname
1619
- FROM pg_extension
1620
- ORDER BY extname`, [])
1621
- ]);
1622
- const columnsByTable = groupBy(columnsResult.rows, "table_name");
1623
- const pksByTable = groupBy(pkResult.rows, "table_name");
1624
- const fksByTable = groupBy(fkResult.rows, "table_name");
1625
- const uniquesByTable = groupBy(uniqueResult.rows, "table_name");
1626
- const indexesByTable = groupBy(indexResult.rows, "tablename");
1627
- const pkConstraintsByTable = /* @__PURE__ */ new Map();
1628
- for (const row of pkResult.rows) {
1629
- let constraints = pkConstraintsByTable.get(row.table_name);
1630
- if (!constraints) {
1631
- constraints = /* @__PURE__ */ new Set();
1632
- pkConstraintsByTable.set(row.table_name, constraints);
1633
- }
1634
- constraints.add(row.constraint_name);
1635
- }
1636
- const tables = {};
1637
- for (const tableRow of tablesResult.rows) {
1638
- const tableName = tableRow.table_name;
1639
- const columns = {};
1640
- for (const colRow of columnsByTable.get(tableName) ?? []) {
1641
- let nativeType = colRow.udt_name;
1642
- const formattedType = colRow.formatted_type ? normalizeFormattedType(colRow.formatted_type, colRow.data_type, colRow.udt_name) : null;
1643
- if (formattedType) nativeType = formattedType;
1644
- else if (colRow.data_type === "character varying" || colRow.data_type === "character") if (colRow.character_maximum_length) nativeType = `${colRow.data_type}(${colRow.character_maximum_length})`;
1645
- else nativeType = colRow.data_type;
1646
- else if (colRow.data_type === "numeric" || colRow.data_type === "decimal") if (colRow.numeric_precision && colRow.numeric_scale !== null) nativeType = `${colRow.data_type}(${colRow.numeric_precision},${colRow.numeric_scale})`;
1647
- else if (colRow.numeric_precision) nativeType = `${colRow.data_type}(${colRow.numeric_precision})`;
1648
- else nativeType = colRow.data_type;
1649
- else nativeType = colRow.udt_name || colRow.data_type;
1650
- columns[colRow.column_name] = {
1651
- name: colRow.column_name,
1652
- nativeType,
1653
- nullable: colRow.is_nullable === "YES",
1654
- ...ifDefined("default", colRow.column_default ?? void 0)
1655
- };
1656
- }
1657
- const pkRows = [...pksByTable.get(tableName) ?? []];
1658
- const primaryKeyColumns = pkRows.sort((a, b) => a.ordinal_position - b.ordinal_position).map((row) => row.column_name);
1659
- const primaryKey = primaryKeyColumns.length > 0 ? {
1660
- columns: primaryKeyColumns,
1661
- ...pkRows[0]?.constraint_name ? { name: pkRows[0].constraint_name } : {}
1662
- } : void 0;
1663
- const foreignKeysMap = /* @__PURE__ */ new Map();
1664
- for (const fkRow of fksByTable.get(tableName) ?? []) {
1665
- const existing = foreignKeysMap.get(fkRow.constraint_name);
1666
- if (existing) {
1667
- existing.columns.push(fkRow.column_name);
1668
- existing.referencedColumns.push(fkRow.referenced_column_name);
1669
- } else foreignKeysMap.set(fkRow.constraint_name, {
1670
- columns: [fkRow.column_name],
1671
- referencedTable: fkRow.referenced_table_name,
1672
- referencedColumns: [fkRow.referenced_column_name],
1673
- name: fkRow.constraint_name,
1674
- deleteRule: fkRow.delete_rule,
1675
- updateRule: fkRow.update_rule
1676
- });
1677
- }
1678
- const foreignKeys = Array.from(foreignKeysMap.values()).map((fk) => ({
1679
- columns: Object.freeze([...fk.columns]),
1680
- referencedTable: fk.referencedTable,
1681
- referencedColumns: Object.freeze([...fk.referencedColumns]),
1682
- name: fk.name,
1683
- ...ifDefined("onDelete", mapReferentialAction(fk.deleteRule)),
1684
- ...ifDefined("onUpdate", mapReferentialAction(fk.updateRule))
1685
- }));
1686
- const pkConstraints = pkConstraintsByTable.get(tableName) ?? /* @__PURE__ */ new Set();
1687
- const uniquesMap = /* @__PURE__ */ new Map();
1688
- for (const uniqueRow of uniquesByTable.get(tableName) ?? []) {
1689
- if (pkConstraints.has(uniqueRow.constraint_name)) continue;
1690
- const existing = uniquesMap.get(uniqueRow.constraint_name);
1691
- if (existing) existing.columns.push(uniqueRow.column_name);
1692
- else uniquesMap.set(uniqueRow.constraint_name, {
1693
- columns: [uniqueRow.column_name],
1694
- name: uniqueRow.constraint_name
1695
- });
1696
- }
1697
- const uniques = Array.from(uniquesMap.values()).map((uq) => ({
1698
- columns: Object.freeze([...uq.columns]),
1699
- name: uq.name
1700
- }));
1701
- const indexesMap = /* @__PURE__ */ new Map();
1702
- for (const idxRow of indexesByTable.get(tableName) ?? []) {
1703
- if (!idxRow.attname) continue;
1704
- const existing = indexesMap.get(idxRow.indexname);
1705
- if (existing) existing.columns.push(idxRow.attname);
1706
- else indexesMap.set(idxRow.indexname, {
1707
- columns: [idxRow.attname],
1708
- name: idxRow.indexname,
1709
- unique: idxRow.indisunique
1710
- });
1711
- }
1712
- const indexes = Array.from(indexesMap.values()).map((idx) => ({
1713
- columns: Object.freeze([...idx.columns]),
1714
- name: idx.name,
1715
- unique: idx.unique
1716
- }));
1717
- tables[tableName] = {
1718
- name: tableName,
1719
- columns,
1720
- ...ifDefined("primaryKey", primaryKey),
1721
- foreignKeys,
1722
- uniques,
1723
- indexes
1724
- };
1725
- }
1726
- const dependencies = extensionsResult.rows.map((row) => ({ id: `postgres.extension.${row.extname}` }));
1727
- const storageTypes = await pgEnumControlHooks.introspectTypes?.({
1728
- driver,
1729
- schemaName: schema
1730
- }) ?? {};
1731
- return {
1732
- tables,
1733
- dependencies,
1734
- annotations: { pg: {
1735
- schema,
1736
- version: await this.getPostgresVersion(driver),
1737
- ...ifDefined("storageTypes", Object.keys(storageTypes).length > 0 ? storageTypes : void 0)
1738
- } }
1739
- };
1740
- }
1741
- /**
1742
- * Gets the Postgres version from the database.
1743
- */
1744
- async getPostgresVersion(driver) {
1745
- return ((await driver.query("SELECT version() AS version", [])).rows[0]?.version ?? "").match(/PostgreSQL (\d+\.\d+)/)?.[1] ?? "unknown";
1746
- }
1747
- };
1748
- /**
1749
- * Pre-computed lookup map for simple prefix-based type normalization.
1750
- * Maps short Postgres type names to their canonical SQL names.
1751
- * Using a Map for O(1) lookup instead of multiple startsWith checks.
1752
- */
1753
- const TYPE_PREFIX_MAP = new Map([
1754
- ["varchar", "character varying"],
1755
- ["bpchar", "character"],
1756
- ["varbit", "bit varying"]
1757
- ]);
1758
- /**
1759
- * Normalizes a Postgres schema native type to its canonical form for comparison.
1760
- *
1761
- * Uses a pre-computed lookup map for simple prefix replacements (O(1))
1762
- * and handles complex temporal type normalization separately.
1763
- */
1764
- function normalizeSchemaNativeType(nativeType) {
1765
- const trimmed = nativeType.trim();
1766
- for (const [prefix, replacement] of TYPE_PREFIX_MAP) if (trimmed.startsWith(prefix)) return replacement + trimmed.slice(prefix.length);
1767
- if (trimmed.includes(" with time zone")) {
1768
- if (trimmed.startsWith("timestamp")) return `timestamptz${trimmed.slice(9).replace(" with time zone", "")}`;
1769
- if (trimmed.startsWith("time")) return `timetz${trimmed.slice(4).replace(" with time zone", "")}`;
1770
- }
1771
- if (trimmed.includes(" without time zone")) return trimmed.replace(" without time zone", "");
1772
- return trimmed;
1773
- }
1774
- function normalizeFormattedType(formattedType, dataType, udtName) {
1775
- if (formattedType === "integer") return "int4";
1776
- if (formattedType === "smallint") return "int2";
1777
- if (formattedType === "bigint") return "int8";
1778
- if (formattedType === "real") return "float4";
1779
- if (formattedType === "double precision") return "float8";
1780
- if (formattedType === "boolean") return "bool";
1781
- if (formattedType.startsWith("varchar")) return formattedType.replace("varchar", "character varying");
1782
- if (formattedType.startsWith("bpchar")) return formattedType.replace("bpchar", "character");
1783
- if (formattedType.startsWith("varbit")) return formattedType.replace("varbit", "bit varying");
1784
- if (dataType === "timestamp with time zone" || udtName === "timestamptz") return formattedType.replace("timestamp", "timestamptz").replace(" with time zone", "").trim();
1785
- if (dataType === "timestamp without time zone" || udtName === "timestamp") return formattedType.replace(" without time zone", "").trim();
1786
- if (dataType === "time with time zone" || udtName === "timetz") return formattedType.replace("time", "timetz").replace(" with time zone", "").trim();
1787
- if (dataType === "time without time zone" || udtName === "time") return formattedType.replace(" without time zone", "").trim();
1788
- if (formattedType.startsWith("\"") && formattedType.endsWith("\"")) return formattedType.slice(1, -1);
1789
- return formattedType;
1790
- }
1791
- const PG_REFERENTIAL_ACTION_MAP = {
1792
- "NO ACTION": "noAction",
1793
- RESTRICT: "restrict",
1794
- CASCADE: "cascade",
1795
- "SET NULL": "setNull",
1796
- "SET DEFAULT": "setDefault"
1797
- };
1798
- /**
1799
- * Maps a Postgres referential action rule to the canonical SqlReferentialAction.
1800
- * Returns undefined for 'NO ACTION' (the database default) to keep the IR sparse.
1801
- * Throws for unrecognized rules to prevent silent data loss.
1802
- */
1803
- function mapReferentialAction(rule) {
1804
- const mapped = PG_REFERENTIAL_ACTION_MAP[rule];
1805
- if (mapped === void 0) throw new Error(`Unknown PostgreSQL referential action rule: "${rule}". Expected one of: NO ACTION, RESTRICT, CASCADE, SET NULL, SET DEFAULT.`);
1806
- if (mapped === "noAction") return void 0;
1807
- return mapped;
1808
- }
1809
- /**
1810
- * Groups an array of objects by a specified key.
1811
- * Returns a Map for O(1) lookup by group key.
1812
- */
1813
- function groupBy(items, key) {
1814
- const map = /* @__PURE__ */ new Map();
1815
- for (const item of items) {
1816
- const groupKey = item[key];
1817
- let group = map.get(groupKey);
1818
- if (!group) {
1819
- group = [];
1820
- map.set(groupKey, group);
1821
- }
1822
- group.push(item);
1823
- }
1824
- return map;
1825
- }
1826
- function invalidArgumentDiagnostic(input) {
1827
- return {
1828
- ok: false,
1829
- diagnostic: {
1830
- code: "PSL_INVALID_DEFAULT_FUNCTION_ARGUMENT",
1831
- message: input.message,
1832
- sourceId: input.context.sourceId,
1833
- span: input.span
1834
- }
1835
- };
1836
- }
1837
- function executionGenerator(id, params) {
1838
- return {
1839
- ok: true,
1840
- value: {
1841
- kind: "execution",
1842
- generated: {
1843
- kind: "generator",
1844
- id,
1845
- ...params ? { params } : {}
1846
- }
1847
- }
1848
- };
1849
- }
1850
- function expectNoArgs(input) {
1851
- if (input.call.args.length === 0) return;
1852
- return invalidArgumentDiagnostic({
1853
- context: input.context,
1854
- span: input.call.span,
1855
- message: `Default function "${input.call.name}" does not accept arguments. Use ${input.usage}.`
1856
- });
1857
- }
1858
- function parseIntegerArgument(raw) {
1859
- const trimmed = raw.trim();
1860
- if (!/^-?\d+$/.test(trimmed)) return;
1861
- const value = Number(trimmed);
1862
- if (!Number.isInteger(value)) return;
1863
- return value;
1864
- }
1865
- function parseStringLiteral(raw) {
1866
- const match = raw.trim().match(/^(['"])(.*)\1$/s);
1867
- if (!match) return;
1868
- return match[2] ?? "";
1869
- }
1870
- function lowerAutoincrement(input) {
1871
- const maybeNoArgs = expectNoArgs({
1872
- call: input.call,
1873
- context: input.context,
1874
- usage: "`autoincrement()`"
1875
- });
1876
- if (maybeNoArgs) return maybeNoArgs;
1877
- return {
1878
- ok: true,
1879
- value: {
1880
- kind: "storage",
1881
- defaultValue: {
1882
- kind: "function",
1883
- expression: "autoincrement()"
1884
- }
1885
- }
1886
- };
1887
- }
1888
- function lowerNow(input) {
1889
- const maybeNoArgs = expectNoArgs({
1890
- call: input.call,
1891
- context: input.context,
1892
- usage: "`now()`"
1893
- });
1894
- if (maybeNoArgs) return maybeNoArgs;
1895
- return {
1896
- ok: true,
1897
- value: {
1898
- kind: "storage",
1899
- defaultValue: {
1900
- kind: "function",
1901
- expression: "now()"
1902
- }
1903
- }
1904
- };
1905
- }
1906
- function lowerUuid(input) {
1907
- if (input.call.args.length === 0) return executionGenerator("uuidv4");
1908
- if (input.call.args.length !== 1) return invalidArgumentDiagnostic({
1909
- context: input.context,
1910
- span: input.call.span,
1911
- message: "Default function \"uuid\" accepts at most one version argument: `uuid()`, `uuid(4)`, or `uuid(7)`."
1912
- });
1913
- const version = parseIntegerArgument(input.call.args[0]?.raw ?? "");
1914
- if (version === 4) return executionGenerator("uuidv4");
1915
- if (version === 7) return executionGenerator("uuidv7");
1916
- return invalidArgumentDiagnostic({
1917
- context: input.context,
1918
- span: input.call.args[0]?.span ?? input.call.span,
1919
- message: "Default function \"uuid\" supports only `uuid()`, `uuid(4)`, or `uuid(7)` in SQL PSL provider v1."
1920
- });
1921
- }
1922
- function lowerCuid(input) {
1923
- if (input.call.args.length === 0) return {
1924
- ok: false,
1925
- diagnostic: {
1926
- code: "PSL_UNKNOWN_DEFAULT_FUNCTION",
1927
- message: "Default function \"cuid()\" is not supported in SQL PSL provider v1. Use `cuid(2)` instead.",
1928
- sourceId: input.context.sourceId,
1929
- span: input.call.span
1930
- }
1931
- };
1932
- if (input.call.args.length !== 1) return invalidArgumentDiagnostic({
1933
- context: input.context,
1934
- span: input.call.span,
1935
- message: "Default function \"cuid\" accepts exactly one version argument: `cuid(2)`."
1936
- });
1937
- if (parseIntegerArgument(input.call.args[0]?.raw ?? "") === 2) return executionGenerator("cuid2");
1938
- return invalidArgumentDiagnostic({
1939
- context: input.context,
1940
- span: input.call.args[0]?.span ?? input.call.span,
1941
- message: "Default function \"cuid\" supports only `cuid(2)` in SQL PSL provider v1."
1942
- });
1943
- }
1944
- function lowerUlid(input) {
1945
- const maybeNoArgs = expectNoArgs({
1946
- call: input.call,
1947
- context: input.context,
1948
- usage: "`ulid()`"
1949
- });
1950
- if (maybeNoArgs) return maybeNoArgs;
1951
- return executionGenerator("ulid");
1952
- }
1953
- function lowerNanoid(input) {
1954
- if (input.call.args.length === 0) return executionGenerator("nanoid");
1955
- if (input.call.args.length !== 1) return invalidArgumentDiagnostic({
1956
- context: input.context,
1957
- span: input.call.span,
1958
- message: "Default function \"nanoid\" accepts at most one size argument: `nanoid()` or `nanoid(<2-255>)`."
1959
- });
1960
- const size = parseIntegerArgument(input.call.args[0]?.raw ?? "");
1961
- if (size !== void 0 && size >= 2 && size <= 255) return executionGenerator("nanoid", { size });
1962
- return invalidArgumentDiagnostic({
1963
- context: input.context,
1964
- span: input.call.args[0]?.span ?? input.call.span,
1965
- message: "Default function \"nanoid\" size argument must be an integer between 2 and 255."
1966
- });
1967
- }
1968
- function lowerDbgenerated(input) {
1969
- if (input.call.args.length !== 1) return invalidArgumentDiagnostic({
1970
- context: input.context,
1971
- span: input.call.span,
1972
- message: "Default function \"dbgenerated\" requires exactly one string argument: `dbgenerated(\"...\")`."
1973
- });
1974
- const rawExpression = parseStringLiteral(input.call.args[0]?.raw ?? "");
1975
- if (rawExpression === void 0) return invalidArgumentDiagnostic({
1976
- context: input.context,
1977
- span: input.call.args[0]?.span ?? input.call.span,
1978
- message: "Default function \"dbgenerated\" argument must be a string literal."
1979
- });
1980
- if (rawExpression.trim().length === 0) return invalidArgumentDiagnostic({
1981
- context: input.context,
1982
- span: input.call.args[0]?.span ?? input.call.span,
1983
- message: "Default function \"dbgenerated\" argument cannot be empty."
1984
- });
1985
- return {
1986
- ok: true,
1987
- value: {
1988
- kind: "storage",
1989
- defaultValue: {
1990
- kind: "function",
1991
- expression: rawExpression
1992
- }
1993
- }
1994
- };
1995
- }
1996
- const postgresDefaultFunctionRegistryEntries = [
1997
- ["autoincrement", {
1998
- lower: lowerAutoincrement,
1999
- usageSignatures: ["autoincrement()"]
2000
- }],
2001
- ["now", {
2002
- lower: lowerNow,
2003
- usageSignatures: ["now()"]
2004
- }],
2005
- ["uuid", {
2006
- lower: lowerUuid,
2007
- usageSignatures: [
2008
- "uuid()",
2009
- "uuid(4)",
2010
- "uuid(7)"
2011
- ]
2012
- }],
2013
- ["cuid", {
2014
- lower: lowerCuid,
2015
- usageSignatures: ["cuid(2)"]
2016
- }],
2017
- ["ulid", {
2018
- lower: lowerUlid,
2019
- usageSignatures: ["ulid()"]
2020
- }],
2021
- ["nanoid", {
2022
- lower: lowerNanoid,
2023
- usageSignatures: ["nanoid()", "nanoid(<2-255>)"]
2024
- }],
2025
- ["dbgenerated", {
2026
- lower: lowerDbgenerated,
2027
- usageSignatures: ["dbgenerated(\"...\")"]
2028
- }]
2029
- ];
2030
- const postgresScalarTypeDescriptors = new Map([
2031
- ["String", "pg/text@1"],
2032
- ["Boolean", "pg/bool@1"],
2033
- ["Int", "pg/int4@1"],
2034
- ["BigInt", "pg/int8@1"],
2035
- ["Float", "pg/float8@1"],
2036
- ["Decimal", "pg/numeric@1"],
2037
- ["DateTime", "pg/timestamptz@1"],
2038
- ["Json", "pg/jsonb@1"],
2039
- ["Bytes", "pg/bytea@1"]
2040
- ]);
2041
- function createPostgresDefaultFunctionRegistry() {
2042
- return new Map(postgresDefaultFunctionRegistryEntries);
2043
- }
2044
- function createPostgresMutationDefaultGeneratorDescriptors() {
2045
- return builtinGeneratorRegistryMetadata.map(({ id, applicableCodecIds }) => ({
2046
- id,
2047
- applicableCodecIds,
2048
- resolveGeneratedColumnDescriptor: ({ generated }) => {
2049
- if (generated.kind !== "generator" || generated.id !== id) return;
2050
- const descriptor = resolveBuiltinGeneratedColumnDescriptor({
2051
- id,
2052
- ...generated.params ? { params: generated.params } : {}
2053
- });
2054
- return {
2055
- codecId: descriptor.type.codecId,
2056
- nativeType: descriptor.type.nativeType,
2057
- ...descriptor.type.typeRef ? { typeRef: descriptor.type.typeRef } : {},
2058
- ...descriptor.typeParams ? { typeParams: descriptor.typeParams } : {}
2059
- };
2060
- }
2061
- }));
2062
- }
2063
- function createPostgresScalarTypeDescriptors() {
2064
- return new Map(postgresScalarTypeDescriptors);
2065
- }
2066
- var control_default = {
2067
- ...postgresAdapterDescriptorMeta,
2068
- scalarTypeDescriptors: createPostgresScalarTypeDescriptors(),
2069
- controlMutationDefaults: {
2070
- defaultFunctionRegistry: createPostgresDefaultFunctionRegistry(),
2071
- generatorDescriptors: createPostgresMutationDefaultGeneratorDescriptors()
2072
- },
2073
- create() {
2074
- return new PostgresControlAdapter();
2075
- }
2076
- };
2077
-
2078
- //#endregion
2079
- //#region src/core/migrations/planner-type-resolution.ts
2080
- function resolveColumnTypeMetadata(column, storageTypes) {
2081
- if (!column.typeRef) return column;
2082
- const referencedType = storageTypes[column.typeRef];
2083
- if (!referencedType) return column;
2084
- return {
2085
- codecId: referencedType.codecId,
2086
- nativeType: referencedType.nativeType,
2087
- typeParams: referencedType.typeParams
2088
- };
2089
- }
2090
-
2091
- //#endregion
2092
- //#region src/core/migrations/planner-sql-checks.ts
2093
- function qualifyTableName(schema, table) {
2094
- return `${quoteIdentifier(schema)}.${quoteIdentifier(table)}`;
2095
- }
2096
- function toRegclassLiteral(schema, name) {
2097
- return `'${escapeLiteral(`${quoteIdentifier(schema)}.${quoteIdentifier(name)}`)}'`;
2098
- }
2099
- /**
2100
- * When `table` is omitted the check matches by name + schema across all tables.
2101
- * Pass `table` to scope the check to a single table (prevents false matches on
2102
- * identically-named constraints in different tables).
2103
- */
2104
- function constraintExistsCheck({ constraintName, schema, table, exists = true }) {
2105
- const existsClause = exists ? "EXISTS" : "NOT EXISTS";
2106
- const tableFilter = table ? `AND c.conrelid = to_regclass(${toRegclassLiteral(schema, table)})` : "";
2107
- return `SELECT ${existsClause} (
2108
- SELECT 1 FROM pg_constraint c
2109
- JOIN pg_namespace n ON c.connamespace = n.oid
2110
- WHERE c.conname = '${escapeLiteral(constraintName)}'
2111
- AND n.nspname = '${escapeLiteral(schema)}'
2112
- ${tableFilter}
2113
- )`;
2114
- }
2115
// SQL probe: EXISTS/NOT EXISTS of a row in information_schema.columns for
// (schema, table, column). `exists = false` flips the probe to assert absence.
function columnExistsCheck({ schema, table, column, exists = true }) {
return `SELECT ${exists ? "" : "NOT "}EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(schema)}'
AND table_name = '${escapeLiteral(table)}'
AND column_name = '${escapeLiteral(column)}'
)`;
}
2124
// SQL probe: true when information_schema reports the column's is_nullable
// flag matching `nullable` ('YES' when nullable, 'NO' otherwise).
function columnNullabilityCheck({ schema, table, column, nullable }) {
const expected = nullable ? "YES" : "NO";
return `SELECT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(schema)}'
AND table_name = '${escapeLiteral(table)}'
AND column_name = '${escapeLiteral(column)}'
AND is_nullable = '${expected}'
)`;
}
2135
// SQL probe that evaluates to true when the (already-qualified, already-quoted)
// table contains zero rows.
function tableIsEmptyCheck(qualifiedTableName) {
  return "SELECT NOT EXISTS (SELECT 1 FROM " + qualifiedTableName + " LIMIT 1)";
}
2138
// SQL probe: true when the column has no default, i.e. no row with a
// non-NULL column_default. NOTE(review): also true when the column itself is
// absent — unlike columnDefaultExistsCheck({ exists: false }).
function columnHasNoDefaultCheck(opts) {
return `SELECT NOT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(opts.schema)}'
AND table_name = '${escapeLiteral(opts.table)}'
AND column_name = '${escapeLiteral(opts.column)}'
AND column_default IS NOT NULL
)`;
}
2148
// Maps Postgres internal type names to the display form that format_type()
// produces, e.g. "int4" -> "integer". Types not listed here display as-is.
const FORMAT_TYPE_DISPLAY = new Map(Object.entries({
  int2: "smallint",
  int4: "integer",
  int8: "bigint",
  float4: "real",
  float8: "double precision",
  bool: "boolean",
  timestamp: "timestamp without time zone",
  timestamptz: "timestamp with time zone",
  time: "time without time zone",
  timetz: "time with time zone"
}));
2160
// Identifiers Postgres accepts unquoted: lower-case letter or underscore,
// then lower-case letters, digits, underscores, or dollar signs.
const UNQUOTED_POSTGRES_IDENTIFIER_PATTERN = /^[a-z_][a-z0-9_$]*$/;
// Reserved words that must be quoted even when they match the pattern above.
const POSTGRES_RESERVED_IDENTIFIER_WORDS = new Set(
  ("all analyse analyze and any array as asc asymmetric authorization " +
    "between binary both case cast check collate column constraint create " +
    "current_catalog current_date current_role current_time current_timestamp current_user " +
    "default deferrable desc distinct do else end except false fetch for foreign freeze from full " +
    "grant group having ilike in initially inner intersect into " +
    "is isnull join lateral leading left like limit localtime localtimestamp " +
    "natural not notnull null offset on only or order outer " +
    "overlaps placing primary references right " +
    "select session_user similar some symmetric table then to trailing true " +
    "union unique user using variadic verbose when where window with").split(" ")
);
2258
// Emits a user-defined type name, quoting only when necessary: names that
// match the unquoted-identifier pattern and are not reserved words pass
// through verbatim; everything else goes through quoteIdentifier.
function formatUserDefinedTypeName(identifier) {
  const isPlain = UNQUOTED_POSTGRES_IDENTIFIER_PATTERN.test(identifier);
  const isReserved = POSTGRES_RESERVED_IDENTIFIER_WORDS.has(identifier);
  return isPlain && !isReserved ? identifier : quoteIdentifier(identifier);
}
2262
/**
 * Computes the format_type(...) display string expected for a column after
 * resolving `typeRef` indirection. Precedence:
 *  1. codec hook `expandNativeType` when the resolved metadata carries both
 *     typeParams and a codecId and the codec registers that hook;
 *  2. user-defined type name rendering when the column has a `typeRef`;
 *  3. FORMAT_TYPE_DISPLAY lookup for built-ins, falling back to the raw
 *     native type name.
 */
function buildExpectedFormatType(column, codecHooks, storageTypes = {}) {
  const meta = resolveColumnTypeMetadata(column, storageTypes);
  if (meta.typeParams && meta.codecId) {
    const hooks = codecHooks.get(meta.codecId);
    if (hooks?.expandNativeType) {
      return hooks.expandNativeType({
        nativeType: meta.nativeType,
        codecId: meta.codecId,
        typeParams: meta.typeParams
      });
    }
  }
  if (column.typeRef) return formatUserDefinedTypeName(meta.nativeType);
  return FORMAT_TYPE_DISPLAY.get(meta.nativeType) ?? meta.nativeType;
}
2275
// SQL probe: true when the live (not dropped) column's
// format_type(atttypid, atttypmod) rendering equals `expectedType` exactly.
function columnTypeCheck({ schema, table, column, expectedType }) {
return `SELECT EXISTS (
SELECT 1
FROM pg_attribute a
JOIN pg_class c ON c.oid = a.attrelid
JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE n.nspname = '${escapeLiteral(schema)}'
AND c.relname = '${escapeLiteral(table)}'
AND a.attname = '${escapeLiteral(column)}'
AND format_type(a.atttypid, a.atttypmod) = '${escapeLiteral(expectedType)}'
AND NOT a.attisdropped
)`;
}
2288
// SQL probe: true when the column row exists and its column_default is
// non-NULL (exists=true) or NULL (exists=false). NOTE(review): with
// exists=false the column row itself must still be present, unlike
// columnHasNoDefaultCheck which is also satisfied by a missing column.
function columnDefaultExistsCheck({ schema, table, column, exists = true }) {
const nullCheck = exists ? "IS NOT NULL" : "IS NULL";
return `SELECT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(schema)}'
AND table_name = '${escapeLiteral(table)}'
AND column_name = '${escapeLiteral(column)}'
AND column_default ${nullCheck}
)`;
}
2299
-
2300
- //#endregion
2301
- //#region src/core/migrations/operations/shared.ts
2302
// Pairs a human-readable description with the SQL statement implementing it.
function step(description, sql) {
  const planStep = { description, sql };
  return planStep;
}
2308
// Builds the plan-target descriptor for a Postgres object. `table` is
// attached to the details only when defined (via ifDefined).
function targetDetails(objectType, name, schema, table) {
  const details = {
    schema,
    objectType,
    name,
    ...ifDefined("table", table)
  };
  return { id: "postgres", details };
}
2319
// Renders one column clause of a CREATE TABLE statement: quoted name, type,
// optional default clause, optional NOT NULL.
function renderColumnDefinition(column) {
  const pieces = [quoteIdentifier(column.name), column.typeSql];
  if (column.defaultSql) pieces.push(column.defaultSql);
  if (!column.nullable) pieces.push("NOT NULL");
  return pieces.filter(Boolean).join(" ");
}
2327
-
2328
- //#endregion
2329
- //#region src/core/migrations/operations/columns.ts
2330
// Additive operation: ADD COLUMN with optional default and NOT NULL.
// Precheck asserts the column is absent; postcheck asserts it now exists.
function addColumn(schemaName, tableName, column) {
  const clauses = [
    `ALTER TABLE ${qualifyTableName(schemaName, tableName)}`,
    `ADD COLUMN ${quoteIdentifier(column.name)} ${column.typeSql}`,
    column.defaultSql,
    column.nullable ? "" : "NOT NULL"
  ];
  const addSql = clauses.filter(Boolean).join(" ");
  const where = { schema: schemaName, table: tableName, column: column.name };
  return {
    id: `column.${tableName}.${column.name}`,
    label: `Add column "${column.name}" to "${tableName}"`,
    operationClass: "additive",
    target: targetDetails("column", column.name, schemaName, tableName),
    precheck: [step(`ensure column "${column.name}" is missing`, columnExistsCheck({ ...where, exists: false }))],
    execute: [step(`add column "${column.name}"`, addSql)],
    postcheck: [step(`verify column "${column.name}" exists`, columnExistsCheck(where))]
  };
}
2356
// Destructive operation: DROP COLUMN. Precheck asserts presence; postcheck
// asserts absence.
function dropColumn(schemaName, tableName, columnName) {
  const table = qualifyTableName(schemaName, tableName);
  const where = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `dropColumn.${tableName}.${columnName}`,
    label: `Drop column "${columnName}" from "${tableName}"`,
    operationClass: "destructive",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck(where))],
    execute: [step(`drop column "${columnName}"`, `ALTER TABLE ${table} DROP COLUMN ${quoteIdentifier(columnName)}`)],
    postcheck: [step(`verify column "${columnName}" does not exist`, columnExistsCheck({ ...where, exists: false }))]
  };
}
2377
/**
 * `qualifiedTargetType` is the new column type as it appears in the
 * `ALTER COLUMN TYPE` clause (schema-qualified for user-defined types, raw
 * native name for built-ins). `formatTypeExpected` is the unqualified
 * `format_type` form checked by the postcheck. `rawTargetTypeForLabel` is
 * the string shown in the human-readable label (typically the explicit
 * `toType`, otherwise the column's native type).
 */
function alterColumnType(schemaName, tableName, columnName, options) {
  const table = qualifyTableName(schemaName, tableName);
  // Default USING clause: a plain cast of the existing value to the new type.
  const usingClause = options.using ? ` USING ${options.using}` : ` USING ${quoteIdentifier(columnName)}::${options.qualifiedTargetType}`;
  const where = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `alterType.${tableName}.${columnName}`,
    label: `Alter type of "${tableName}"."${columnName}" to ${options.rawTargetTypeForLabel}`,
    operationClass: "destructive",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck(where))],
    execute: [step(`alter type of "${columnName}"`, `ALTER TABLE ${table} ALTER COLUMN ${quoteIdentifier(columnName)} TYPE ${options.qualifiedTargetType}${usingClause}`)],
    postcheck: [step(`verify column "${columnName}" has type "${options.formatTypeExpected}"`, columnTypeCheck({ ...where, expectedType: options.formatTypeExpected }))],
    meta: { warning: "TABLE_REWRITE" }
  };
}
2408
// Destructive operation: SET NOT NULL. A second precheck guards against
// existing NULL values that would make the ALTER fail.
function setNotNull(schemaName, tableName, columnName) {
  const table = qualifyTableName(schemaName, tableName);
  const where = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `alterNullability.setNotNull.${tableName}.${columnName}`,
    label: `Set NOT NULL on "${tableName}"."${columnName}"`,
    operationClass: "destructive",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [
      step(`ensure column "${columnName}" exists`, columnExistsCheck(where)),
      step(`ensure no NULL values in "${columnName}"`, `SELECT NOT EXISTS (SELECT 1 FROM ${table} WHERE ${quoteIdentifier(columnName)} IS NULL)`)
    ],
    execute: [step(`set NOT NULL on "${columnName}"`, `ALTER TABLE ${table} ALTER COLUMN ${quoteIdentifier(columnName)} SET NOT NULL`)],
    postcheck: [step(`verify column "${columnName}" is NOT NULL`, columnNullabilityCheck({ ...where, nullable: false }))]
  };
}
2429
// Widening operation: DROP NOT NULL (making a column nullable never loses
// data). Postcheck confirms the column now reports as nullable.
function dropNotNull(schemaName, tableName, columnName) {
  const table = qualifyTableName(schemaName, tableName);
  const where = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `alterNullability.dropNotNull.${tableName}.${columnName}`,
    label: `Drop NOT NULL on "${tableName}"."${columnName}"`,
    operationClass: "widening",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck(where))],
    execute: [step(`drop NOT NULL on "${columnName}"`, `ALTER TABLE ${table} ALTER COLUMN ${quoteIdentifier(columnName)} DROP NOT NULL`)],
    postcheck: [step(`verify column "${columnName}" is nullable`, columnNullabilityCheck({ ...where, nullable: true }))]
  };
}
2450
/**
 * `defaultSql` is the full `DEFAULT …` clause as produced by
 * `buildColumnDefaultSql` — e.g. `"DEFAULT 42"`,
 * `"DEFAULT (CURRENT_TIMESTAMP)"`, or `"DEFAULT nextval('seq'::regclass)"`.
 *
 * `operationClass` defaults to `'additive'` (setting a default on a column
 * that has none). The reconciliation planner passes `'widening'` when the
 * column already carries a different default, so policy enforcement treats
 * that as a widening rather than an additive change.
 */
function setDefault(schemaName, tableName, columnName, defaultSql, operationClass = "additive") {
  const table = qualifyTableName(schemaName, tableName);
  const where = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `setDefault.${tableName}.${columnName}`,
    label: `Set default on "${tableName}"."${columnName}"`,
    operationClass,
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck(where))],
    execute: [step(`set default on "${columnName}"`, `ALTER TABLE ${table} ALTER COLUMN ${quoteIdentifier(columnName)} SET ${defaultSql}`)],
    postcheck: [step(`verify column "${columnName}" has a default`, columnDefaultExistsCheck({ ...where, exists: true }))]
  };
}
2481
// Destructive operation: DROP DEFAULT. Postcheck confirms the column no
// longer carries a default expression.
function dropDefault(schemaName, tableName, columnName) {
  const table = qualifyTableName(schemaName, tableName);
  const where = { schema: schemaName, table: tableName, column: columnName };
  return {
    id: `dropDefault.${tableName}.${columnName}`,
    label: `Drop default on "${tableName}"."${columnName}"`,
    operationClass: "destructive",
    target: targetDetails("column", columnName, schemaName, tableName),
    precheck: [step(`ensure column "${columnName}" exists`, columnExistsCheck(where))],
    execute: [step(`drop default on "${columnName}"`, `ALTER TABLE ${table} ALTER COLUMN ${quoteIdentifier(columnName)} DROP DEFAULT`)],
    postcheck: [step(`verify column "${columnName}" has no default`, columnDefaultExistsCheck({ ...where, exists: false }))]
  };
}
2502
-
2503
- //#endregion
2504
- //#region src/core/migrations/operations/constraints.ts
2505
// SQL keyword for each referential-action identifier used in FK descriptors.
const REFERENTIAL_ACTION_SQL = Object.fromEntries([
  ["noAction", "NO ACTION"],
  ["restrict", "RESTRICT"],
  ["cascade", "CASCADE"],
  ["setNull", "SET NULL"],
  ["setDefault", "SET DEFAULT"]
]);
2512
// Renders the full ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY statement.
// ON DELETE / ON UPDATE clauses are appended only when the descriptor sets
// them; unknown action identifiers fail fast with an Error.
function renderForeignKeySql(schemaName, tableName, fk) {
let sql = `ALTER TABLE ${qualifyTableName(schemaName, tableName)}
ADD CONSTRAINT ${quoteIdentifier(fk.name)}
FOREIGN KEY (${fk.columns.map(quoteIdentifier).join(", ")})
REFERENCES ${qualifyTableName(schemaName, fk.references.table)} (${fk.references.columns.map(quoteIdentifier).join(", ")})`;
if (fk.onDelete !== void 0) {
const action = REFERENTIAL_ACTION_SQL[fk.onDelete];
if (!action) throw new Error(`Unknown referential action for onDelete: ${String(fk.onDelete)}`);
sql += `\nON DELETE ${action}`;
}
if (fk.onUpdate !== void 0) {
const action = REFERENTIAL_ACTION_SQL[fk.onUpdate];
if (!action) throw new Error(`Unknown referential action for onUpdate: ${String(fk.onUpdate)}`);
sql += `\nON UPDATE ${action}`;
}
return sql;
}
2529
// Additive operation: ADD CONSTRAINT ... PRIMARY KEY over `columns`.
function addPrimaryKey(schemaName, tableName, constraintName, columns) {
  const table = qualifyTableName(schemaName, tableName);
  const columnList = columns.map(quoteIdentifier).join(", ");
  const scope = { constraintName, schema: schemaName, table: tableName };
  return {
    id: `primaryKey.${tableName}.${constraintName}`,
    label: `Add primary key on "${tableName}"`,
    operationClass: "additive",
    target: targetDetails("primaryKey", constraintName, schemaName, tableName),
    precheck: [step(`ensure primary key "${constraintName}" does not exist`, constraintExistsCheck({ ...scope, exists: false }))],
    execute: [step(`add primary key "${constraintName}"`, `ALTER TABLE ${table} ADD CONSTRAINT ${quoteIdentifier(constraintName)} PRIMARY KEY (${columnList})`)],
    postcheck: [step(`verify primary key "${constraintName}" exists`, constraintExistsCheck(scope))]
  };
}
2551
// Additive operation: ADD CONSTRAINT ... UNIQUE over `columns`.
function addUnique(schemaName, tableName, constraintName, columns) {
  const table = qualifyTableName(schemaName, tableName);
  const columnList = columns.map(quoteIdentifier).join(", ");
  const scope = { constraintName, schema: schemaName, table: tableName };
  return {
    id: `unique.${tableName}.${constraintName}`,
    label: `Add unique constraint on "${tableName}" (${columns.join(", ")})`,
    operationClass: "additive",
    target: targetDetails("unique", constraintName, schemaName, tableName),
    precheck: [step(`ensure constraint "${constraintName}" does not exist`, constraintExistsCheck({ ...scope, exists: false }))],
    execute: [step(`add unique constraint "${constraintName}"`, `ALTER TABLE ${table} ADD CONSTRAINT ${quoteIdentifier(constraintName)} UNIQUE (${columnList})`)],
    postcheck: [step(`verify constraint "${constraintName}" exists`, constraintExistsCheck(scope))]
  };
}
2573
// Additive operation: adds the foreign key described by `fk`; the DDL itself
// is produced by renderForeignKeySql.
function addForeignKey(schemaName, tableName, fk) {
  const scope = { constraintName: fk.name, schema: schemaName, table: tableName };
  return {
    id: `foreignKey.${tableName}.${fk.name}`,
    label: `Add foreign key "${fk.name}" on "${tableName}"`,
    operationClass: "additive",
    target: targetDetails("foreignKey", fk.name, schemaName, tableName),
    precheck: [step(`ensure FK "${fk.name}" does not exist`, constraintExistsCheck({ ...scope, exists: false }))],
    execute: [step(`add FK "${fk.name}"`, renderForeignKeySql(schemaName, tableName, fk))],
    postcheck: [step(`verify FK "${fk.name}" exists`, constraintExistsCheck(scope))]
  };
}
2593
/**
 * `kind` feeds the operation's `target.details.objectType`. The descriptor
 * flow carries no kind information on its drop-constraint descriptor, hence
 * the `'unique'` default; the reconciliation planner passes the precise kind
 * (`'foreignKey'`, `'primaryKey'`, or `'unique'`) derived from the
 * `SchemaIssue` that produced the drop.
 */
function dropConstraint(schemaName, tableName, constraintName, kind = "unique") {
  const table = qualifyTableName(schemaName, tableName);
  const scope = { constraintName, schema: schemaName, table: tableName };
  return {
    id: `dropConstraint.${tableName}.${constraintName}`,
    label: `Drop constraint "${constraintName}" on "${tableName}"`,
    operationClass: "destructive",
    target: targetDetails(kind, constraintName, schemaName, tableName),
    precheck: [step(`ensure constraint "${constraintName}" exists`, constraintExistsCheck(scope))],
    execute: [step(`drop constraint "${constraintName}"`, `ALTER TABLE ${table} DROP CONSTRAINT ${quoteIdentifier(constraintName)}`)],
    postcheck: [step(`verify constraint "${constraintName}" does not exist`, constraintExistsCheck({ ...scope, exists: false }))]
  };
}
2621
-
2622
- //#endregion
2623
- //#region src/core/migrations/operations/dependencies.ts
2624
// Idempotent CREATE EXTENSION; pre/postcheck lists are empty because the
// IF NOT EXISTS form is already safe to repeat.
function createExtension(extensionName) {
  const ddl = `CREATE EXTENSION IF NOT EXISTS ${quoteIdentifier(extensionName)}`;
  return {
    id: `extension.${extensionName}`,
    label: `Create extension "${extensionName}"`,
    operationClass: "additive",
    target: { id: "postgres" },
    precheck: [],
    execute: [step(`Create extension "${extensionName}"`, ddl)],
    postcheck: []
  };
}
2635
// Idempotent CREATE SCHEMA; pre/postcheck lists are empty because the
// IF NOT EXISTS form is already safe to repeat.
function createSchema(schemaName) {
  const ddl = `CREATE SCHEMA IF NOT EXISTS ${quoteIdentifier(schemaName)}`;
  return {
    id: `schema.${schemaName}`,
    label: `Create schema "${schemaName}"`,
    operationClass: "additive",
    target: { id: "postgres" },
    precheck: [],
    execute: [step(`Create schema "${schemaName}"`, ddl)],
    postcheck: []
  };
}
2646
-
2647
- //#endregion
2648
- //#region src/core/migrations/operations/enums.ts
2649
// SQL probe for a named type in pg_type within the given schema;
// `exists` flips between EXISTS and NOT EXISTS.
function enumTypeExistsCheck(schemaName, nativeType, exists = true) {
return `SELECT ${exists ? "EXISTS" : "NOT EXISTS"} (
SELECT 1
FROM pg_type t
JOIN pg_namespace n ON t.typnamespace = n.oid
WHERE n.nspname = '${escapeLiteral(schemaName)}'
AND t.typname = '${escapeLiteral(nativeType)}'
)`;
}
2658
// Additive operation: CREATE TYPE ... AS ENUM with escaped value literals.
function createEnumType(schemaName, typeName, values) {
  const qualifiedType = qualifyName(schemaName, typeName);
  const literalValues = values.map((value) => `'${escapeLiteral(value)}'`).join(", ");
  return {
    id: `type.${typeName}`,
    label: `Create enum type "${typeName}"`,
    operationClass: "additive",
    target: targetDetails("type", typeName, schemaName),
    precheck: [step(`ensure type "${typeName}" does not exist`, enumTypeExistsCheck(schemaName, typeName, false))],
    execute: [step(`create enum type "${typeName}"`, `CREATE TYPE ${qualifiedType} AS ENUM (${literalValues})`)],
    postcheck: [step(`verify type "${typeName}" exists`, enumTypeExistsCheck(schemaName, typeName))]
  };
}
2671
/**
 * `typeName` is the contract-facing type name (used for id/label).
 * `nativeType` is the Postgres type name to mutate (may differ for external
 * types). Each value becomes its own ALTER TYPE ... ADD VALUE step.
 */
function addEnumValues(schemaName, typeName, nativeType, values) {
  const qualifiedType = qualifyName(schemaName, nativeType);
  const addValueSteps = values.map((value) =>
    step(`add value '${value}' to enum "${nativeType}"`, `ALTER TYPE ${qualifiedType} ADD VALUE '${escapeLiteral(value)}'`)
  );
  return {
    id: `type.${typeName}.addValues`,
    label: `Add values to enum type "${typeName}": ${values.join(", ")}`,
    operationClass: "additive",
    target: targetDetails("type", typeName, schemaName),
    precheck: [step(`ensure type "${nativeType}" exists`, enumTypeExistsCheck(schemaName, nativeType))],
    execute: addValueSteps,
    postcheck: [step(`verify type "${nativeType}" exists`, enumTypeExistsCheck(schemaName, nativeType))]
  };
}
2687
// Destructive operation: DROP TYPE. Precheck asserts presence; postcheck
// asserts the type is gone.
function dropEnumType(schemaName, typeName) {
  const qualifiedType = qualifyName(schemaName, typeName);
  return {
    id: `type.${typeName}.drop`,
    label: `Drop enum type "${typeName}"`,
    operationClass: "destructive",
    target: targetDetails("type", typeName, schemaName),
    precheck: [step(`ensure type "${typeName}" exists`, enumTypeExistsCheck(schemaName, typeName))],
    execute: [step(`drop enum type "${typeName}"`, `DROP TYPE ${qualifiedType}`)],
    postcheck: [step(`verify type "${typeName}" removed`, enumTypeExistsCheck(schemaName, typeName, false))]
  };
}
2699
// Rename is classified destructive: the old name disappears. Prechecks
// require the source type to exist and the target name to be free.
function renameType(schemaName, fromName, toName) {
  const qualifiedFrom = qualifyName(schemaName, fromName);
  return {
    id: `type.${fromName}.rename`,
    label: `Rename type "${fromName}" to "${toName}"`,
    operationClass: "destructive",
    target: targetDetails("type", fromName, schemaName),
    precheck: [
      step(`ensure type "${fromName}" exists`, enumTypeExistsCheck(schemaName, fromName)),
      step(`ensure type "${toName}" does not already exist`, enumTypeExistsCheck(schemaName, toName, false))
    ],
    execute: [step(`rename type "${fromName}" to "${toName}"`, `ALTER TYPE ${qualifiedFrom} RENAME TO ${quoteIdentifier(toName)}`)],
    postcheck: [step(`verify type "${toName}" exists`, enumTypeExistsCheck(schemaName, toName))]
  };
}
2711
-
2712
- //#endregion
2713
- //#region src/core/migrations/operations/indexes.ts
2714
// Additive operation: CREATE INDEX; existence is probed via to_regclass,
// which returns NULL for a missing relation.
function createIndex(schemaName, tableName, indexName, columns) {
  const table = qualifyTableName(schemaName, tableName);
  const columnList = columns.map(quoteIdentifier).join(", ");
  const regclass = toRegclassLiteral(schemaName, indexName);
  return {
    id: `index.${tableName}.${indexName}`,
    label: `Create index "${indexName}" on "${tableName}"`,
    operationClass: "additive",
    target: targetDetails("index", indexName, schemaName, tableName),
    precheck: [step(`ensure index "${indexName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)],
    execute: [step(`create index "${indexName}"`, `CREATE INDEX ${quoteIdentifier(indexName)} ON ${table} (${columnList})`)],
    postcheck: [step(`verify index "${indexName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)]
  };
}
2727
// Destructive operation: DROP INDEX. The index name is schema-qualified via
// qualifyTableName (generic "schema"."name" quoting, despite the name).
function dropIndex(schemaName, tableName, indexName) {
  const regclass = toRegclassLiteral(schemaName, indexName);
  return {
    id: `dropIndex.${tableName}.${indexName}`,
    label: `Drop index "${indexName}"`,
    operationClass: "destructive",
    target: targetDetails("index", indexName, schemaName, tableName),
    precheck: [step(`ensure index "${indexName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)],
    execute: [step(`drop index "${indexName}"`, `DROP INDEX ${qualifyTableName(schemaName, indexName)}`)],
    postcheck: [step(`verify index "${indexName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)]
  };
}
2738
-
2739
- //#endregion
2740
- //#region src/core/migrations/operations/tables.ts
2741
// Additive operation: CREATE TABLE from rendered column definitions plus an
// optional inline PRIMARY KEY constraint. Existence is probed via
// to_regclass, which returns NULL for a missing relation.
function createTable(schemaName, tableName, columns, primaryKey) {
const qualified = qualifyTableName(schemaName, tableName);
const columnDefs = columns.map(renderColumnDefinition);
const constraintDefs = [];
if (primaryKey) constraintDefs.push(`PRIMARY KEY (${primaryKey.columns.map(quoteIdentifier).join(", ")})`);
const createSql = `CREATE TABLE ${qualified} (\n ${[...columnDefs, ...constraintDefs].join(",\n ")}\n)`;
return {
id: `table.${tableName}`,
label: `Create table "${tableName}"`,
summary: `Creates table "${tableName}"`,
operationClass: "additive",
target: targetDetails("table", tableName, schemaName),
precheck: [step(`ensure table "${tableName}" does not exist`, `SELECT to_regclass(${toRegclassLiteral(schemaName, tableName)}) IS NULL`)],
execute: [step(`create table "${tableName}"`, createSql)],
postcheck: [step(`verify table "${tableName}" exists`, `SELECT to_regclass(${toRegclassLiteral(schemaName, tableName)}) IS NOT NULL`)]
};
}
2758
// Destructive operation: DROP TABLE, with to_regclass presence/absence
// probes before and after.
function dropTable(schemaName, tableName) {
  const qualified = qualifyTableName(schemaName, tableName);
  const regclass = toRegclassLiteral(schemaName, tableName);
  return {
    id: `dropTable.${tableName}`,
    label: `Drop table "${tableName}"`,
    operationClass: "destructive",
    target: targetDetails("table", tableName, schemaName),
    precheck: [step(`ensure table "${tableName}" exists`, `SELECT to_regclass(${regclass}) IS NOT NULL`)],
    execute: [step(`drop table "${tableName}"`, `DROP TABLE ${qualified}`)],
    postcheck: [step(`verify table "${tableName}" does not exist`, `SELECT to_regclass(${regclass}) IS NULL`)]
  };
}
2770
-
2771
- //#endregion
2772
- //#region src/core/migrations/postgres-migration.ts
2773
/**
 * Target-owned base class for Postgres migrations.
 *
 * Fixes the `SqlMigration` generic to `PostgresPlanTargetDetails` and the
 * abstract `targetId` to the Postgres target-id string literal, so both
 * user-authored migrations and renderer-generated scaffolds (the output of
 * `renderCallsToTypeScript`) can extend `PostgresMigration` directly without
 * redeclaring target-local identity.
 *
 * Mirrors `MongoMigration` in `@prisma-next/family-mongo`: the renderer
 * emits `extends Migration` against a target-specific re-export of this
 * class from `@prisma-next/target-postgres/migration`, keeping the
 * authoring surface target-scoped rather than family-scoped.
 */
var PostgresMigration = class extends Migration {
// Target-local identity; matches the `id: "postgres"` used by targetDetails.
targetId = "postgres";
};
2790
-
2791
- //#endregion
2792
- export { normalizeSchemaNativeType as A, buildExpectedFormatType as C, qualifyTableName as D, columnNullabilityCheck as E, escapeLiteral as F, quoteIdentifier as I, codecDefinitions as M, PG_JSONB_CODEC_ID as N, tableIsEmptyCheck as O, PG_JSON_CODEC_ID as P, setNotNull as S, columnHasNoDefaultCheck as T, alterColumnType as _, dropIndex as a, dropNotNull as b, dropEnumType as c, createSchema as d, addForeignKey as f, addColumn as g, dropConstraint as h, createIndex as i, parsePostgresDefault as j, resolveColumnTypeMetadata as k, renameType as l, addUnique as m, createTable as n, addEnumValues as o, addPrimaryKey as p, dropTable as r, createEnumType as s, PostgresMigration as t, createExtension as u, dropColumn as v, columnExistsCheck as w, setDefault as x, dropDefault as y };
2793
- //# sourceMappingURL=postgres-migration-BsHJHV9O.mjs.map