@prisma-next/adapter-postgres 0.3.0-dev.33 → 0.3.0-dev.36

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/README.md +64 -2
  2. package/dist/adapter-DB1CK2jM.mjs +265 -0
  3. package/dist/adapter-DB1CK2jM.mjs.map +1 -0
  4. package/dist/adapter.d.mts +23 -0
  5. package/dist/adapter.d.mts.map +1 -0
  6. package/dist/adapter.mjs +3 -0
  7. package/dist/codec-ids-Bsm9c7ns.mjs +29 -0
  8. package/dist/codec-ids-Bsm9c7ns.mjs.map +1 -0
  9. package/dist/codec-types.d.mts +141 -0
  10. package/dist/codec-types.d.mts.map +1 -0
  11. package/dist/codec-types.mjs +3 -0
  12. package/dist/codecs-DcC1nPzh.mjs +206 -0
  13. package/dist/codecs-DcC1nPzh.mjs.map +1 -0
  14. package/dist/column-types.d.mts +110 -0
  15. package/dist/column-types.d.mts.map +1 -0
  16. package/dist/column-types.mjs +180 -0
  17. package/dist/column-types.mjs.map +1 -0
  18. package/dist/control.d.mts +111 -0
  19. package/dist/control.d.mts.map +1 -0
  20. package/dist/control.mjs +405 -0
  21. package/dist/control.mjs.map +1 -0
  22. package/dist/descriptor-meta-D7pxo-wo.mjs +996 -0
  23. package/dist/descriptor-meta-D7pxo-wo.mjs.map +1 -0
  24. package/dist/runtime.d.mts +19 -0
  25. package/dist/runtime.d.mts.map +1 -0
  26. package/dist/runtime.mjs +85 -0
  27. package/dist/runtime.mjs.map +1 -0
  28. package/dist/types-BY395pUv.d.mts +19 -0
  29. package/dist/types-BY395pUv.d.mts.map +1 -0
  30. package/dist/types.d.mts +2 -0
  31. package/dist/types.mjs +1 -0
  32. package/package.json +32 -41
  33. package/src/core/adapter.ts +90 -17
  34. package/src/core/codec-ids.ts +28 -0
  35. package/src/core/codecs.ts +316 -19
  36. package/src/core/control-adapter.ts +341 -180
  37. package/src/core/default-normalizer.ts +77 -0
  38. package/src/core/descriptor-meta.ts +221 -9
  39. package/src/core/enum-control-hooks.ts +735 -0
  40. package/src/core/json-schema-type-expression.ts +131 -0
  41. package/src/core/json-schema-validator.ts +53 -0
  42. package/src/core/parameterized-types.ts +118 -0
  43. package/src/core/sql-utils.ts +111 -0
  44. package/src/core/standard-schema.ts +71 -0
  45. package/src/exports/codec-types.ts +73 -1
  46. package/src/exports/column-types.ts +233 -9
  47. package/src/exports/control.ts +16 -9
  48. package/src/exports/runtime.ts +61 -18
  49. package/dist/chunk-HD5YISNQ.js +0 -47
  50. package/dist/chunk-HD5YISNQ.js.map +0 -1
  51. package/dist/chunk-J3XSOAM2.js +0 -162
  52. package/dist/chunk-J3XSOAM2.js.map +0 -1
  53. package/dist/chunk-Y6L4BBLR.js +0 -309
  54. package/dist/chunk-Y6L4BBLR.js.map +0 -1
  55. package/dist/core/adapter.d.ts +0 -19
  56. package/dist/core/adapter.d.ts.map +0 -1
  57. package/dist/core/codecs.d.ts +0 -110
  58. package/dist/core/codecs.d.ts.map +0 -1
  59. package/dist/core/control-adapter.d.ts +0 -33
  60. package/dist/core/control-adapter.d.ts.map +0 -1
  61. package/dist/core/descriptor-meta.d.ts +0 -72
  62. package/dist/core/descriptor-meta.d.ts.map +0 -1
  63. package/dist/core/types.d.ts +0 -16
  64. package/dist/core/types.d.ts.map +0 -1
  65. package/dist/exports/adapter.d.ts +0 -2
  66. package/dist/exports/adapter.d.ts.map +0 -1
  67. package/dist/exports/adapter.js +0 -8
  68. package/dist/exports/adapter.js.map +0 -1
  69. package/dist/exports/codec-types.d.ts +0 -11
  70. package/dist/exports/codec-types.d.ts.map +0 -1
  71. package/dist/exports/codec-types.js +0 -7
  72. package/dist/exports/codec-types.js.map +0 -1
  73. package/dist/exports/column-types.d.ts +0 -17
  74. package/dist/exports/column-types.d.ts.map +0 -1
  75. package/dist/exports/column-types.js +0 -49
  76. package/dist/exports/column-types.js.map +0 -1
  77. package/dist/exports/control.d.ts +0 -8
  78. package/dist/exports/control.d.ts.map +0 -1
  79. package/dist/exports/control.js +0 -279
  80. package/dist/exports/control.js.map +0 -1
  81. package/dist/exports/runtime.d.ts +0 -15
  82. package/dist/exports/runtime.d.ts.map +0 -1
  83. package/dist/exports/runtime.js +0 -20
  84. package/dist/exports/runtime.js.map +0 -1
  85. package/dist/exports/types.d.ts +0 -2
  86. package/dist/exports/types.d.ts.map +0 -1
  87. package/dist/exports/types.js +0 -1
  88. package/dist/exports/types.js.map +0 -1
@@ -0,0 +1,996 @@
1
+ import { C as SQL_CHAR_CODEC_ID, E as SQL_VARCHAR_CODEC_ID, S as PG_VARCHAR_CODEC_ID, T as SQL_INT_CODEC_ID, _ as PG_TIMESTAMPTZ_CODEC_ID, a as PG_FLOAT4_CODEC_ID, b as PG_TIME_CODEC_ID, c as PG_INT2_CODEC_ID, d as PG_INTERVAL_CODEC_ID, f as PG_INT_CODEC_ID, g as PG_TEXT_CODEC_ID, h as PG_NUMERIC_CODEC_ID, i as PG_ENUM_CODEC_ID, l as PG_INT4_CODEC_ID, m as PG_JSON_CODEC_ID, n as PG_BOOL_CODEC_ID, o as PG_FLOAT8_CODEC_ID, p as PG_JSONB_CODEC_ID, r as PG_CHAR_CODEC_ID, s as PG_FLOAT_CODEC_ID, t as PG_BIT_CODEC_ID, u as PG_INT8_CODEC_ID, v as PG_TIMESTAMP_CODEC_ID, w as SQL_FLOAT_CODEC_ID, x as PG_VARBIT_CODEC_ID, y as PG_TIMETZ_CODEC_ID } from "./codec-ids-Bsm9c7ns.mjs";
2
+ import { arraysEqual } from "@prisma-next/family-sql/schema-verify";
3
+
4
+ //#region src/core/sql-utils.ts
5
+ /**
6
+ * Shared SQL utility functions for the Postgres adapter.
7
+ *
8
+ * These functions handle safe SQL identifier and literal escaping
9
+ * with security validations to prevent injection and encoding issues.
10
+ */
11
+ /**
12
+ * Error thrown when an invalid SQL identifier or literal is detected.
13
+ * Boundary layers map this to structured envelopes.
14
+ */
15
/**
 * Error raised when a SQL identifier or literal fails safety validation
 * (empty identifier, embedded null byte, or over-long enum label).
 * Boundary layers map this to structured envelopes.
 */
class SqlEscapeError extends Error {
  /**
   * @param {string} message - Human-readable description of the violation.
   * @param {string} value - The offending input (callers pre-escape null bytes for display).
   * @param {"identifier" | "literal"} kind - Which escaping path rejected the value.
   */
  constructor(message, value, kind) {
    super(message);
    this.value = value;
    this.kind = kind;
    this.name = "SqlEscapeError";
  }
}
23
/**
 * Maximum length for PostgreSQL identifiers (NAMEDATALEN - 1).
 */
const MAX_IDENTIFIER_LENGTH$1 = 63;
/**
 * Validates and quotes a PostgreSQL identifier (table, column, type, schema names).
 *
 * Security validations:
 * - Rejects empty identifiers
 * - Rejects null bytes which could cause truncation or unexpected behavior
 * - Warns on identifiers exceeding PostgreSQL's 63-character limit
 *
 * @param identifier - Raw identifier to quote
 * @returns The identifier wrapped in double quotes, with embedded quotes doubled
 * @throws {SqlEscapeError} If the identifier contains null bytes or is empty
 */
function quoteIdentifier(identifier) {
  if (identifier.length === 0) {
    throw new SqlEscapeError("Identifier cannot be empty", identifier, "identifier");
  }
  if (identifier.includes("\0")) {
    // Escape null bytes in the reported value so the error message stays printable.
    throw new SqlEscapeError("Identifier cannot contain null bytes", identifier.replace(/\0/g, "\\0"), "identifier");
  }
  if (identifier.length > MAX_IDENTIFIER_LENGTH$1) {
    // The server, not this function, performs the truncation — warn only.
    console.warn(`Identifier "${identifier.slice(0, 20)}..." exceeds PostgreSQL's ${MAX_IDENTIFIER_LENGTH$1}-character limit and will be truncated`);
  }
  const doubled = identifier.replace(/"/g, '""');
  return `"${doubled}"`;
}
43
/**
 * Escapes a string literal for safe use in SQL statements.
 *
 * Security validations:
 * - Rejects null bytes which could cause truncation or unexpected behavior
 *
 * Note: This assumes PostgreSQL's `standard_conforming_strings` is ON (default since PG 9.1).
 * Backslashes are treated as literal characters, not escape sequences.
 *
 * @param value - Raw string to embed between single quotes
 * @returns The value with every single quote doubled
 * @throws {SqlEscapeError} If the value contains null bytes
 */
function escapeLiteral(value) {
  if (value.includes("\0")) {
    throw new SqlEscapeError("Literal value cannot contain null bytes", value.replace(/\0/g, "\\0"), "literal");
  }
  return value.replace(/'/g, "''");
}
58
/**
 * Builds a qualified name (schema.object) with both parts individually quoted.
 *
 * @param schemaName - Schema portion of the name
 * @param objectName - Object (table/type) portion of the name
 * @returns The dot-joined qualified name, e.g. `"public"."role"`
 * @throws {SqlEscapeError} If either part is empty or contains null bytes
 */
function qualifyName(schemaName, objectName) {
  const parts = [quoteIdentifier(schemaName), quoteIdentifier(objectName)];
  return parts.join(".");
}
64
/**
 * Validates that an enum value doesn't exceed PostgreSQL's label length limit.
 *
 * PostgreSQL enum labels have a maximum length of NAMEDATALEN-1 (63 bytes by default).
 * Unlike identifiers, enum labels that exceed this limit cause an error rather than
 * silent truncation, so this is enforced up front.
 *
 * @param value - The enum value to validate
 * @param enumTypeName - Name of the enum type (for error messages)
 * @throws {SqlEscapeError} If the value exceeds the maximum length
 */
function validateEnumValueLength(value, enumTypeName) {
  if (value.length <= MAX_IDENTIFIER_LENGTH$1) return;
  throw new SqlEscapeError(`Enum value "${value.slice(0, 20)}..." for type "${enumTypeName}" exceeds PostgreSQL's ${MAX_IDENTIFIER_LENGTH$1}-character label limit`, value, "literal");
}
78
+
79
+ //#endregion
80
+ //#region src/core/enum-control-hooks.ts
81
+ const ENUM_INTROSPECT_QUERY = `
82
+ SELECT
83
+ n.nspname AS schema_name,
84
+ t.typname AS type_name,
85
+ array_agg(e.enumlabel ORDER BY e.enumsortorder) AS values
86
+ FROM pg_type t
87
+ JOIN pg_namespace n ON t.typnamespace = n.oid
88
+ JOIN pg_enum e ON t.oid = e.enumtypid
89
+ WHERE n.nspname = $1
90
+ GROUP BY n.nspname, t.typname
91
+ ORDER BY n.nspname, t.typname
92
+ `;
93
/**
 * Type guard for string arrays. Used for runtime validation of introspected data.
 * Note: an empty array passes (vacuous truth of `every`).
 */
function isStringArray(value) {
  if (!Array.isArray(value)) return false;
  return value.every((entry) => typeof entry === "string");
}
/**
 * Parses a PostgreSQL array value into a JavaScript string array.
 *
 * PostgreSQL's `pg` library may return `array_agg` results either as:
 * - A JavaScript array (when type parsers are configured)
 * - A string in PostgreSQL array literal format: `{value1,value2,...}`
 *
 * Handles PostgreSQL's quoting rules for array elements:
 * - Elements containing commas, double quotes, backslashes, or whitespace are double-quoted
 * - Inside quoted elements, `\"` represents `"` and `\\` represents `\`
 *
 * @param value - The value to parse (array or PostgreSQL array string)
 * @returns A string array, or null if the value cannot be parsed
 */
function parsePostgresArray(value) {
  if (isStringArray(value)) return value;
  if (typeof value !== "string") return null;
  if (!(value.startsWith("{") && value.endsWith("}"))) return null;
  const body = value.slice(1, -1);
  return body === "" ? [] : parseArrayElements(body);
}
/**
 * Splits the inner text of a PostgreSQL array literal into its elements,
 * honoring double-quoted elements and backslash escapes inside them.
 */
function parseArrayElements(input) {
  const elements = [];
  let cursor = 0;
  while (cursor < input.length) {
    const ch = input[cursor];
    if (ch === ",") {
      // Separator between elements.
      cursor += 1;
      continue;
    }
    if (ch === '"') {
      // Quoted element: scan to the closing quote, unescaping \" and \\.
      cursor += 1;
      let piece = "";
      while (cursor < input.length && input[cursor] !== '"') {
        if (input[cursor] === "\\" && cursor + 1 < input.length) cursor += 1;
        piece += input[cursor];
        cursor += 1;
      }
      cursor += 1; // skip the closing quote
      elements.push(piece);
      continue;
    }
    // Unquoted element: runs until the next comma (or end of input).
    const comma = input.indexOf(",", cursor);
    const end = comma === -1 ? input.length : comma;
    elements.push(input.slice(cursor, end).trim());
    cursor = end;
  }
  return elements;
}
155
/**
 * Extracts enum values from a StorageTypeInstance.
 *
 * @param typeInstance - Storage type instance whose `typeParams` may carry `values`
 * @returns The string array of enum values, or null if missing or invalid
 */
function getEnumValues(typeInstance) {
  const candidate = typeInstance.typeParams?.["values"];
  if (isStringArray(candidate)) return candidate;
  return null;
}
163
/**
 * Reads existing enum values from the schema IR for a given native type.
 *
 * Navigates `annotations.pg.storageTypes[nativeType]` with optional chaining and
 * only accepts entries recorded with the pg enum codec.
 *
 * @returns The current enum values, or null when absent or not an enum
 */
function readExistingEnumValues(schema, nativeType) {
  const storageTypes = schema.annotations?.["pg"]?.["storageTypes"];
  const existing = storageTypes?.[nativeType];
  if (!existing || existing.codecId !== PG_ENUM_CODEC_ID) return null;
  return getEnumValues(existing);
}
172
/**
 * Determines what changes are needed to transform existing enum values to desired values.
 *
 * Returns one of:
 * - `unchanged`: No changes needed, values match exactly
 * - `add_values`: New values can be safely appended (PostgreSQL supports this)
 * - `rebuild`: Full enum rebuild required (value removal, reordering, or both)
 *
 * Note: PostgreSQL enums can only have values added (not removed or reordered) without
 * a full type rebuild involving temp type creation and column migration.
 *
 * @param existing - Current enum values in the database
 * @param desired - Target enum values from the contract
 * @returns The type of change required
 */
function determineEnumDiff(existing, desired) {
  if (arraysEqual(existing, desired)) return { kind: "unchanged" };
  const existingSet = new Set(existing);
  const desiredSet = new Set(desired);
  const missingValues = desired.filter((value) => !existingSet.has(value));
  const removedValues = existing.filter((value) => !desiredSet.has(value));
  // The guard above already proved the arrays differ, so when nothing is
  // missing and nothing was removed, the difference can only be ordering
  // (or duplicate drift) — no need to re-run arraysEqual here.
  const orderMismatch = missingValues.length === 0 && removedValues.length === 0;
  if (removedValues.length > 0 || orderMismatch) {
    return {
      kind: "rebuild",
      removedValues
    };
  }
  return {
    kind: "add_values",
    values: missingValues
  };
}
203
/**
 * Renders a SQL probe reporting whether an enum type exists in pg_type.
 *
 * @param schemaName - PostgreSQL schema the type lives in (escaped as a literal)
 * @param typeName - Native type name to look up (escaped as a literal)
 * @param exists - When true emits SELECT EXISTS, when false SELECT NOT EXISTS
 * @returns SQL text yielding a single boolean row
 */
function enumTypeExistsCheck(schemaName, typeName, exists = true) {
	return `SELECT ${exists ? "EXISTS" : "NOT EXISTS"} (
SELECT 1
FROM pg_type t
JOIN pg_namespace n ON t.typnamespace = n.oid
WHERE n.nspname = '${escapeLiteral(schemaName)}'
AND t.typname = '${escapeLiteral(typeName)}'
)`;
}
212
/**
 * Builds the migration operation that creates a brand-new enum type.
 *
 * Pre-checks that the type is absent, creates it with all desired values in
 * order, then post-checks that it exists.
 *
 * @param typeName - Contract-level type name (used in ids/labels)
 * @param nativeType - PostgreSQL type name to create
 * @param schemaName - PostgreSQL schema to create the type in
 * @param values - Ordered enum labels for the new type
 * @returns A single additive migration operation
 * @throws {SqlEscapeError} If any value exceeds the enum label length limit
 */
function buildCreateEnumOperation(typeName, nativeType, schemaName, values) {
  for (const value of values) {
    validateEnumValueLength(value, typeName);
  }
  const literalValues = values.map((value) => `'${escapeLiteral(value)}'`).join(", ");
  const qualifiedType = qualifyName(schemaName, nativeType);
  return {
    id: `type.${typeName}`,
    label: `Create type ${typeName}`,
    summary: `Creates enum type ${typeName}`,
    operationClass: "additive",
    target: { id: "postgres" },
    precheck: [{
      description: `ensure type "${nativeType}" does not exist`,
      sql: enumTypeExistsCheck(schemaName, nativeType, false)
    }],
    execute: [{
      description: `create type "${nativeType}"`,
      sql: `CREATE TYPE ${qualifiedType} AS ENUM (${literalValues})`
    }],
    postcheck: [{
      description: `verify type "${nativeType}" exists`,
      sql: enumTypeExistsCheck(schemaName, nativeType)
    }]
  };
}
236
/**
 * Computes the optimal position for inserting a new enum value so the final
 * order matches the desired list.
 *
 * PostgreSQL's `ALTER TYPE ADD VALUE` supports BEFORE/AFTER positioning.
 * The reference value is chosen by:
 * 1. The nearest preceding desired value that already exists
 * 2. Otherwise, the nearest following desired value that already exists
 * 3. Otherwise, no clause (PostgreSQL appends at the end)
 *
 * @param options.desired - The target ordered list of all enum values
 * @param options.desiredIndex - Index of the value being inserted in the desired list
 * @param options.current - Current list of enum values (being built up incrementally)
 * @returns SQL clause (e.g., " AFTER 'x'") and insert position for tracking
 */
function computeInsertPosition(options) {
  const { desired, desiredIndex, current } = options;
  const present = new Set(current);
  // Walk backwards from the insertion point to find an existing anchor.
  let previous;
  for (let i = desiredIndex - 1; i >= 0; i -= 1) {
    if (present.has(desired[i])) {
      previous = desired[i];
      break;
    }
  }
  if (previous) {
    return {
      clause: ` AFTER '${escapeLiteral(previous)}'`,
      insertAt: current.indexOf(previous) + 1
    };
  }
  const next = desired.slice(desiredIndex + 1).find((candidate) => present.has(candidate));
  if (next) {
    return {
      clause: ` BEFORE '${escapeLiteral(next)}'`,
      insertAt: current.indexOf(next)
    };
  }
  // No reference value at all: append at the end.
  return { clause: "", insertAt: current.length };
}
261
/**
 * Builds operations to add new enum values to an existing PostgreSQL enum type.
 *
 * Each new value is added with `ALTER TYPE ... ADD VALUE IF NOT EXISTS` for idempotency.
 * Values are inserted in the correct order using BEFORE/AFTER positioning to match
 * the desired final order.
 *
 * This is a safe, non-destructive operation - existing data is not affected.
 *
 * @param options.typeName - Contract-level type name (e.g., 'Role')
 * @param options.nativeType - PostgreSQL type name (e.g., 'role')
 * @param options.schemaName - PostgreSQL schema (e.g., 'public')
 * @param options.desired - Target ordered list of all enum values
 * @param options.existing - Current enum values in the database
 * @returns Array of migration operations to add each missing value
 */
function buildAddValueOperations(options) {
  const { typeName, nativeType, schemaName, desired } = options;
  // Track the evolving label list so each insert position accounts for
  // values added by earlier operations in this same batch.
  const current = [...options.existing];
  const currentSet = new Set(current);
  const operations = [];
  for (const [index, value] of desired.entries()) {
    if (value === void 0) continue;
    if (currentSet.has(value)) continue;
    validateEnumValueLength(value, typeName);
    const { clause, insertAt } = computeInsertPosition({
      desired,
      desiredIndex: index,
      current
    });
    operations.push({
      id: `type.${typeName}.value.${value}`,
      label: `Add value ${value} to ${typeName}`,
      summary: `Adds enum value ${value} to ${typeName}`,
      operationClass: "widening",
      target: { id: "postgres" },
      precheck: [],
      execute: [{
        description: `add value "${value}" if not exists`,
        sql: `ALTER TYPE ${qualifyName(schemaName, nativeType)} ADD VALUE IF NOT EXISTS '${escapeLiteral(value)}'${clause}`
      }],
      postcheck: []
    });
    current.splice(insertAt, 0, value);
    currentSet.add(value);
  }
  return operations;
}
310
/**
 * Collects columns using the enum type from the contract (desired state).
 * A column matches either by direct type reference or by native type plus
 * the pg enum codec. Used for type-safe reference tracking.
 */
function collectEnumColumnsFromContract(contract, typeName, nativeType) {
  const columns = [];
  for (const [tableName, table] of Object.entries(contract.storage.tables)) {
    for (const [columnName, column] of Object.entries(table.columns)) {
      if (column.typeRef === typeName || (column.nativeType === nativeType && column.codecId === PG_ENUM_CODEC_ID)) {
        columns.push({ table: tableName, column: columnName });
      }
    }
  }
  return columns;
}
322
/**
 * Collects columns using the enum type from the schema IR (live database state).
 * This ensures we find ALL dependent columns, including those added outside the
 * contract (e.g., manual DDL), which is critical for safe enum rebuild operations.
 */
function collectEnumColumnsFromSchema(schema, nativeType) {
  const matches = [];
  for (const [tableName, table] of Object.entries(schema.tables)) {
    for (const [columnName, column] of Object.entries(table.columns)) {
      if (column.nativeType !== nativeType) continue;
      matches.push({ table: tableName, column: columnName });
    }
  }
  return matches;
}
335
/**
 * Collects all columns using the enum type from both contract AND live database.
 * Merges, deduplicates (contract entries win on key collision), and sorts by
 * table then column for deterministic output.
 *
 * This is critical for data integrity: if a column exists in the database using
 * this enum but is not in the contract (e.g., added via manual DDL), we must
 * still migrate it to avoid DROP TYPE failures.
 */
function collectAllEnumColumns(contract, schema, typeName, nativeType) {
  const byKey = new Map();
  const candidates = [
    ...collectEnumColumnsFromContract(contract, typeName, nativeType),
    ...collectEnumColumnsFromSchema(schema, nativeType)
  ];
  for (const ref of candidates) {
    const key = `${ref.table}.${ref.column}`;
    if (!byKey.has(key)) byKey.set(key, ref);
  }
  return [...byKey.values()].sort(
    (a, b) => a.table.localeCompare(b.table) || a.column.localeCompare(b.column)
  );
}
360
/**
 * Builds a SQL check to verify a column's type matches an expected type.
 * Queries information_schema.columns and compares udt_name to expectedType.
 *
 * @param options.schemaName - Schema containing the table
 * @param options.tableName - Table containing the column
 * @param options.columnName - Column whose type is checked
 * @param options.expectedType - Expected native (udt) type name
 * @returns SQL text yielding true when the column has the expected type
 */
function columnTypeCheck(options) {
	return `SELECT EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = '${escapeLiteral(options.schemaName)}'
AND table_name = '${escapeLiteral(options.tableName)}'
AND column_name = '${escapeLiteral(options.columnName)}'
AND udt_name = '${escapeLiteral(options.expectedType)}'
)`;
}
373
/** PostgreSQL maximum identifier length (NAMEDATALEN - 1) */
const MAX_IDENTIFIER_LENGTH = 63;
/** Suffix added to enum type names during rebuild operations */
const REBUILD_SUFFIX = "__pn_rebuild";
/**
 * Builds an SQL check to verify no rows contain any of the removed enum values.
 * This prevents data loss during enum rebuild operations.
 *
 * @param schemaName - PostgreSQL schema name
 * @param tableName - Table containing the enum column
 * @param columnName - Column using the enum type
 * @param removedValues - Array of enum values being removed
 * @returns SQL query that returns true if no rows contain removed values
 */
function noRemovedValuesExistCheck(schemaName, tableName, columnName, removedValues) {
	// Nothing being removed: the probe is trivially satisfied.
	if (removedValues.length === 0) return "SELECT true";
	const valuesList = removedValues.map((v) => `'${escapeLiteral(v)}'`).join(", ");
	// The ::text cast compares label text, independent of the enum type itself.
	return `SELECT NOT EXISTS (
SELECT 1 FROM ${qualifyName(schemaName, tableName)}
WHERE ${quoteIdentifier(columnName)}::text IN (${valuesList})
LIMIT 1
)`;
}
396
/**
 * Builds a migration operation to recreate a PostgreSQL enum type with updated values.
 *
 * This is required when:
 * - Enum values are removed (PostgreSQL doesn't support direct removal)
 * - Enum values are reordered (PostgreSQL doesn't support reordering)
 *
 * The operation:
 * 1. Creates a new enum type with the desired values (temp name)
 * 2. Migrates all columns to use the new type via text cast
 * 3. Drops the original type
 * 4. Renames the temp type to the original name
 *
 * IMPORTANT: If values are being removed and data exists using those values,
 * the operation will fail at the precheck stage with a clear error message.
 * This prevents silent data loss.
 *
 * @param options.typeName - Contract-level type name
 * @param options.nativeType - PostgreSQL type name
 * @param options.schemaName - PostgreSQL schema
 * @param options.values - Desired final enum values
 * @param options.removedValues - Values being removed (for data loss checks)
 * @param options.contract - Full contract for column discovery
 * @param options.schema - Current schema IR for column discovery
 * @returns Migration operation for full enum rebuild
 * @throws {Error} If the temp name (nativeType + suffix) would exceed the identifier limit
 */
function buildRecreateEnumOperation(options) {
	const tempTypeName = `${options.nativeType}${REBUILD_SUFFIX}`;
	if (tempTypeName.length > MAX_IDENTIFIER_LENGTH) {
		// 12 = REBUILD_SUFFIX ("__pn_rebuild") length.
		const maxBaseLength = MAX_IDENTIFIER_LENGTH - 12;
		throw new Error(`Enum type name "${options.nativeType}" is too long for rebuild operation. Maximum length is ${maxBaseLength} characters (type name + "${REBUILD_SUFFIX}" suffix must fit within PostgreSQL's ${MAX_IDENTIFIER_LENGTH}-character identifier limit).`);
	}
	const qualifiedOriginal = qualifyName(options.schemaName, options.nativeType);
	const qualifiedTemp = qualifyName(options.schemaName, tempTypeName);
	const literalValues = options.values.map((value) => `'${escapeLiteral(value)}'`).join(", ");
	// Columns are gathered from contract AND live schema so manual-DDL columns are migrated too.
	const columnRefs = collectAllEnumColumns(options.contract, options.schema, options.typeName, options.nativeType);
	const alterColumns = columnRefs.map((ref) => ({
		description: `alter ${ref.table}.${ref.column} to ${tempTypeName}`,
		sql: `ALTER TABLE ${qualifyName(options.schemaName, ref.table)}
ALTER COLUMN ${quoteIdentifier(ref.column)}
TYPE ${qualifiedTemp}
USING ${quoteIdentifier(ref.column)}::text::${qualifiedTemp}`
	}));
	// Postchecks: renamed type exists, temp name is gone, every column uses the final type.
	const postchecks = [
		{
			description: `verify type "${options.nativeType}" exists`,
			sql: enumTypeExistsCheck(options.schemaName, options.nativeType)
		},
		{
			description: `verify temp type "${tempTypeName}" was removed`,
			sql: enumTypeExistsCheck(options.schemaName, tempTypeName, false)
		},
		...columnRefs.map((ref) => ({
			description: `verify ${ref.table}.${ref.column} uses type "${options.nativeType}"`,
			sql: columnTypeCheck({
				schemaName: options.schemaName,
				tableName: ref.table,
				columnName: ref.column,
				expectedType: options.nativeType
			})
		}))
	];
	return {
		id: `type.${options.typeName}.rebuild`,
		label: `Rebuild type ${options.typeName}`,
		summary: `Recreates enum type ${options.typeName} with updated values`,
		operationClass: "destructive",
		target: { id: "postgres" },
		precheck: [{
			description: `ensure type "${options.nativeType}" exists`,
			sql: enumTypeExistsCheck(options.schemaName, options.nativeType)
		}, ...options.removedValues.length > 0 ? columnRefs.map((ref) => ({
			description: `ensure no rows in ${ref.table}.${ref.column} contain removed values (${options.removedValues.join(", ")})`,
			sql: noRemovedValuesExistCheck(options.schemaName, ref.table, ref.column, options.removedValues)
		})) : []],
		execute: [
			{
				description: `drop orphaned temp type "${tempTypeName}" if exists`,
				sql: `DROP TYPE IF EXISTS ${qualifiedTemp}`
			},
			{
				description: `create temp type "${tempTypeName}"`,
				sql: `CREATE TYPE ${qualifiedTemp} AS ENUM (${literalValues})`
			},
			...alterColumns,
			{
				description: `drop type "${options.nativeType}"`,
				sql: `DROP TYPE ${qualifiedOriginal}`
			},
			{
				description: `rename type "${tempTypeName}" to "${options.nativeType}"`,
				sql: `ALTER TYPE ${qualifiedTemp} RENAME TO ${quoteIdentifier(options.nativeType)}`
			}
		],
		postcheck: postchecks
	};
}
493
/**
 * Postgres enum hooks for planning, verifying, and introspecting `storage.types`.
 */
const pgEnumControlHooks = {
  /**
   * Plans the migration operations needed to reconcile a contract enum type
   * with the current schema: create, append values, or full rebuild.
   */
  planTypeOperations: ({ typeName, typeInstance, contract, schema, schemaName }) => {
    const desired = getEnumValues(typeInstance);
    if (!desired || desired.length === 0) return { operations: [] };
    const schemaNamespace = schemaName ?? "public";
    const existing = readExistingEnumValues(schema, typeInstance.nativeType);
    if (!existing) {
      const createOp = buildCreateEnumOperation(typeName, typeInstance.nativeType, schemaNamespace, desired);
      return { operations: [createOp] };
    }
    const diff = determineEnumDiff(existing, desired);
    switch (diff.kind) {
      case "unchanged":
        return { operations: [] };
      case "rebuild":
        return {
          operations: [buildRecreateEnumOperation({
            typeName,
            nativeType: typeInstance.nativeType,
            schemaName: schemaNamespace,
            values: desired,
            removedValues: diff.removedValues,
            contract,
            schema
          })]
        };
      default:
        // add_values: append-only change, safe and non-destructive.
        return {
          operations: buildAddValueOperations({
            typeName,
            nativeType: typeInstance.nativeType,
            schemaName: schemaNamespace,
            desired,
            existing
          })
        };
    }
  },
  /**
   * Verifies the live enum type against the contract, reporting missing types
   * or value mismatches as structured findings.
   */
  verifyType: ({ typeName, typeInstance, schema }) => {
    const desired = getEnumValues(typeInstance);
    if (!desired) return [];
    const existing = readExistingEnumValues(schema, typeInstance.nativeType);
    if (!existing) {
      return [{
        kind: "type_missing",
        table: "",
        typeName,
        message: `Type "${typeName}" is missing from database`
      }];
    }
    if (arraysEqual(existing, desired)) return [];
    return [{
      kind: "type_values_mismatch",
      table: "",
      typeName,
      expected: desired.join(", "),
      actual: existing.join(", "),
      message: `Type "${typeName}" values do not match contract`
    }];
  },
  /**
   * Introspects all enum types in the given schema (default "public") into
   * StorageTypeInstance records keyed by native type name.
   */
  introspectTypes: async ({ driver, schemaName }) => {
    const namespace = schemaName ?? "public";
    const result = await driver.query(ENUM_INTROSPECT_QUERY, [namespace]);
    const types = {};
    for (const row of result.rows) {
      // array_agg may arrive as a JS array or a PG array literal string.
      const values = parsePostgresArray(row.values);
      if (!values) {
        throw new Error(`Failed to parse enum values for type "${row.type_name}": unexpected format: ${JSON.stringify(row.values)}`);
      }
      types[row.type_name] = {
        codecId: PG_ENUM_CODEC_ID,
        nativeType: row.type_name,
        typeParams: { values }
      };
    }
    return types;
  }
};
558
+
559
+ //#endregion
560
+ //#region src/core/json-schema-type-expression.ts
561
+ const MAX_DEPTH = 32;
562
+ function isRecord(value) {
563
+ return typeof value === "object" && value !== null;
564
+ }
565
+ function escapeStringLiteral(str) {
566
+ return str.replace(/\\/g, "\\\\").replace(/'/g, "\\'").replace(/\n/g, "\\n").replace(/\r/g, "\\r");
567
+ }
568
+ function quotePropertyKey(key) {
569
+ return /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(key) ? key : `'${escapeStringLiteral(key)}'`;
570
+ }
571
+ function renderLiteral(value) {
572
+ if (typeof value === "string") return `'${escapeStringLiteral(value)}'`;
573
+ if (typeof value === "number" || typeof value === "boolean") return String(value);
574
+ if (value === null) return "null";
575
+ return "unknown";
576
+ }
577
+ function renderUnion(items, depth) {
578
+ return items.map((item) => render(item, depth)).join(" | ");
579
+ }
580
+ function renderObjectType(schema, depth) {
581
+ const properties = isRecord(schema["properties"]) ? schema["properties"] : {};
582
+ const required = Array.isArray(schema["required"]) ? new Set(schema["required"].filter((key) => typeof key === "string")) : /* @__PURE__ */ new Set();
583
+ const keys = Object.keys(properties).sort((left, right) => left.localeCompare(right));
584
+ if (keys.length === 0) {
585
+ const additionalProperties = schema["additionalProperties"];
586
+ if (additionalProperties === true || additionalProperties === void 0) return "Record<string, unknown>";
587
+ return `Record<string, ${render(additionalProperties, depth)}>`;
588
+ }
589
+ return `{ ${keys.map((key) => {
590
+ const valueSchema = properties[key];
591
+ const optionalMarker = required.has(key) ? "" : "?";
592
+ return `${quotePropertyKey(key)}${optionalMarker}: ${render(valueSchema, depth)}`;
593
+ }).join("; ")} }`;
594
+ }
595
+ function renderArrayType(schema, depth) {
596
+ if (Array.isArray(schema["items"])) return `readonly [${schema["items"].map((item) => render(item, depth)).join(", ")}]`;
597
+ if (schema["items"] !== void 0) {
598
+ const itemType = render(schema["items"], depth);
599
+ return itemType.includes(" | ") || itemType.includes(" & ") ? `(${itemType})[]` : `${itemType}[]`;
600
+ }
601
+ return "unknown[]";
602
+ }
603
/**
 * Recursively renders a JSON Schema node as a TypeScript type expression.
 * Keyword precedence (order matters): const, enum, oneOf, anyOf, allOf,
 * multi-type arrays, then the scalar/array/object `type` keyword.
 * Depth is capped at MAX_DEPTH; anything unrenderable becomes "JsonValue".
 */
function render(schema, depth) {
	if (depth > MAX_DEPTH || !isRecord(schema)) return "JsonValue";
	const childDepth = depth + 1;
	if ("const" in schema) return renderLiteral(schema["const"]);
	const enumValues = schema["enum"];
	if (Array.isArray(enumValues)) return enumValues.map((value) => renderLiteral(value)).join(" | ");
	if (Array.isArray(schema["oneOf"])) return renderUnion(schema["oneOf"], childDepth);
	if (Array.isArray(schema["anyOf"])) return renderUnion(schema["anyOf"], childDepth);
	if (Array.isArray(schema["allOf"])) {
		return schema["allOf"].map((member) => render(member, childDepth)).join(" & ");
	}
	if (Array.isArray(schema["type"])) {
		// `type: ["string", "null"]` — render each variant against the same schema.
		return schema["type"].map((variant) => render({ ...schema, type: variant }, childDepth)).join(" | ");
	}
	const typeName = schema["type"];
	if (typeName === "string") return "string";
	if (typeName === "number" || typeName === "integer") return "number";
	if (typeName === "boolean") return "boolean";
	if (typeName === "null") return "null";
	if (typeName === "array") return renderArrayType(schema, childDepth);
	if (typeName === "object") return renderObjectType(schema, childDepth);
	return "JsonValue";
}
627
/** Entry point: renders a JSON Schema as a TypeScript type, starting at depth 0. */
function renderTypeScriptTypeFromJsonSchema(schema) {
	const rootDepth = 0;
	return render(schema, rootDepth);
}
630
+
631
+ //#endregion
632
+ //#region src/core/parameterized-types.ts
633
+ /**
634
+ * Shared utility for expanding parameterized Postgres types to their full SQL representation.
635
+ *
636
+ * This module provides a single source of truth for type expansion logic, used by:
637
+ * - Schema verification (verify-sql-schema.ts) via the expandNativeType codec control hook
638
+ * - Migration planner (planner.ts) via direct import
639
+ *
640
+ * @module
641
+ */
642
/**
 * Codec IDs whose native SQL type takes a single `length` parameter,
 * e.g. `character varying(255)` or `bit(8)`.
 * Consulted by expandParameterizedNativeType below.
 */
const LENGTH_CODEC_IDS = new Set([
	SQL_CHAR_CODEC_ID,
	SQL_VARCHAR_CODEC_ID,
	PG_CHAR_CODEC_ID,
	PG_VARCHAR_CODEC_ID,
	PG_BIT_CODEC_ID,
	PG_VARBIT_CODEC_ID
]);
/**
 * Codec IDs for temporal types whose native SQL type takes a `precision`
 * parameter, e.g. `timestamptz(3)` or `interval(6)`.
 * Consulted by expandParameterizedNativeType below.
 */
const TEMPORAL_PRECISION_CODEC_IDS = new Set([
	PG_TIMESTAMP_CODEC_ID,
	PG_TIMESTAMPTZ_CODEC_ID,
	PG_TIME_CODEC_ID,
	PG_TIMETZ_CODEC_ID,
	PG_INTERVAL_CODEC_ID
]);
659
/**
 * Validates that a value is a valid type parameter number.
 * Type parameters must be non-negative integers.
 *
 * @param {unknown} value - Candidate type parameter (e.g. length, precision, scale).
 * @returns {boolean} True when `value` is a non-negative integer number.
 */
function isValidTypeParamNumber(value) {
	// Number.isInteger already rejects NaN and ±Infinity, so the original
	// additional Number.isFinite check was redundant.
	return typeof value === "number" && Number.isInteger(value) && value >= 0;
}
666
/**
 * Expands a parameterized native type to its full SQL representation.
 *
 * Examples:
 * - { nativeType: 'character varying', typeParams: { length: 255 } } -> 'character varying(255)'
 * - { nativeType: 'numeric', typeParams: { precision: 10, scale: 2 } } -> 'numeric(10,2)'
 * - { nativeType: 'timestamp without time zone', typeParams: { precision: 3 } } -> 'timestamp without time zone(3)'
 *
 * Falls back to the bare nativeType when typeParams or codecId is absent,
 * when the codec is not a known parameterized type, or when a parameter
 * value fails validation.
 */
function expandParameterizedNativeType(input) {
	const { nativeType, codecId, typeParams } = input;
	if (!codecId || !typeParams) return nativeType;
	// Single-parameter length types: char / varchar / bit / varbit.
	if (LENGTH_CODEC_IDS.has(codecId)) {
		const length = typeParams["length"];
		return isValidTypeParamNumber(length) ? `${nativeType}(${length})` : nativeType;
	}
	// numeric(precision[,scale]) — scale is emitted only alongside a valid precision.
	if (codecId === PG_NUMERIC_CODEC_ID) {
		const precision = typeParams["precision"];
		if (!isValidTypeParamNumber(precision)) return nativeType;
		const scale = typeParams["scale"];
		return isValidTypeParamNumber(scale) ? `${nativeType}(${precision},${scale})` : `${nativeType}(${precision})`;
	}
	// Temporal types carrying a fractional-seconds precision.
	if (TEMPORAL_PRECISION_CODEC_IDS.has(codecId)) {
		const precision = typeParams["precision"];
		return isValidTypeParamNumber(precision) ? `${nativeType}(${precision})` : nativeType;
	}
	return nativeType;
}
704
+
705
+ //#endregion
706
+ //#region src/core/descriptor-meta.ts
707
/** Builds a named type-import spec pointing at the adapter's codec-types entry point. */
const codecTypeImport = (named) => {
	return {
		package: "@prisma-next/adapter-postgres/codec-types",
		named,
		alias: named
	};
};
713
/**
 * Builds a renderer for temporal types: emits `TypeName<precision>` when a
 * numeric precision parameter is present, otherwise the bare type name.
 */
const precisionRenderer = (typeName) => ({
	kind: "function",
	render(params) {
		const precision = params["precision"];
		if (typeof precision === "number") return `${typeName}<${precision}>`;
		return typeName;
	}
});
721
/**
 * Control-plane hooks shared by every parameterized type below: only the
 * expandNativeType hook is needed, delegating to expandParameterizedNativeType.
 */
const parameterizedTypeHooks = { expandNativeType: expandParameterizedNativeType };
723
/**
 * Validates that a type expression string is safe to embed in generated
 * .d.ts files. Rejects expressions containing patterns that could inject
 * executable code (dynamic import/require/eval calls, declare/export keywords).
 */
function isSafeTypeExpression(expr) {
	const forbiddenPatterns = [
		/import\s*\(/,
		/require\s*\(/,
		/declare\s/,
		/export\s/,
		/eval\s*\(/
	];
	return !forbiddenPatterns.some((pattern) => pattern.test(expr));
}
730
/**
 * Resolves the TypeScript type expression for a json/jsonb column.
 * Prefers an explicit `type` string parameter; otherwise derives the type
 * from a `schemaJson` JSON Schema; otherwise returns "JsonValue".
 * Any unsafe expression is replaced with "JsonValue".
 */
function renderJsonTypeExpression(params) {
	const explicitType = params["type"];
	if (typeof explicitType === "string") {
		const trimmed = explicitType.trim();
		if (trimmed.length > 0) {
			return isSafeTypeExpression(trimmed) ? trimmed : "JsonValue";
		}
	}
	const schema = params["schemaJson"];
	if (schema && typeof schema === "object") {
		const derived = renderTypeScriptTypeFromJsonSchema(schema);
		return isSafeTypeExpression(derived) ? derived : "JsonValue";
	}
	return "JsonValue";
}
745
/**
 * Static descriptor metadata for the Postgres adapter.
 *
 * Describes the adapter's identity, its capability flags, how each codec's
 * TypeScript type is rendered into generated .d.ts files, the control-plane
 * hooks per codec, and the mapping from codec IDs to native Postgres types.
 * Exported at the bottom of this bundle (as `t`).
 */
const postgresAdapterDescriptorMeta = {
	kind: "adapter",
	familyId: "sql",
	targetId: "postgres",
	id: "postgres",
	version: "0.0.1",
	// Query-shape capabilities advertised to consumers of this descriptor.
	capabilities: {
		postgres: {
			orderBy: true,
			limit: true,
			lateral: true,
			jsonAgg: true,
			returning: true
		},
		sql: { enums: true }
	},
	types: {
		codecTypes: {
			// Default import for the codec type namespace in generated files.
			import: {
				package: "@prisma-next/adapter-postgres/codec-types",
				named: "CodecTypes",
				alias: "PgTypes"
			},
			// Per-codec TypeScript renderers: either a "{{param}}" template
			// string or a { kind: "function", render } object.
			parameterized: {
				[SQL_CHAR_CODEC_ID]: "Char<{{length}}>",
				[SQL_VARCHAR_CODEC_ID]: "Varchar<{{length}}>",
				[PG_CHAR_CODEC_ID]: "Char<{{length}}>",
				[PG_VARCHAR_CODEC_ID]: "Varchar<{{length}}>",
				[PG_NUMERIC_CODEC_ID]: {
					kind: "function",
					// numeric requires precision; scale is optional
					// (renders Numeric<p, s> or Numeric<p>).
					render: (params) => {
						const precision = params["precision"];
						if (typeof precision !== "number") throw new Error("pg/numeric@1 renderer expects precision");
						const scale = params["scale"];
						return typeof scale === "number" ? `Numeric<${precision}, ${scale}>` : `Numeric<${precision}>`;
					}
				},
				[PG_BIT_CODEC_ID]: "Bit<{{length}}>",
				[PG_VARBIT_CODEC_ID]: "VarBit<{{length}}>",
				[PG_TIMESTAMP_CODEC_ID]: precisionRenderer("Timestamp"),
				[PG_TIMESTAMPTZ_CODEC_ID]: precisionRenderer("Timestamptz"),
				[PG_TIME_CODEC_ID]: precisionRenderer("Time"),
				[PG_TIMETZ_CODEC_ID]: precisionRenderer("Timetz"),
				[PG_INTERVAL_CODEC_ID]: precisionRenderer("Interval"),
				[PG_ENUM_CODEC_ID]: {
					kind: "function",
					// Renders the enum values as a union of TS string literal types.
					// NOTE(review): single quotes are escaped but backslashes are
					// not — assumes enum values never contain `\`; confirm that
					// upstream validation enforces this.
					render: (params) => {
						const values = params["values"];
						if (!Array.isArray(values)) throw new Error("pg/enum@1 renderer expects values array");
						return values.map((value) => `'${String(value).replace(/'/g, "\\'")}'`).join(" | ");
					}
				},
				// json/jsonb share one renderer: explicit `type` param, else
				// a type derived from `schemaJson`, else JsonValue.
				[PG_JSON_CODEC_ID]: {
					kind: "function",
					render: renderJsonTypeExpression
				},
				[PG_JSONB_CODEC_ID]: {
					kind: "function",
					render: renderJsonTypeExpression
				}
			},
			// Named type imports emitted into generated .d.ts files.
			typeImports: [
				{
					package: "@prisma-next/adapter-postgres/codec-types",
					named: "JsonValue",
					alias: "JsonValue"
				},
				codecTypeImport("Char"),
				codecTypeImport("Varchar"),
				codecTypeImport("Numeric"),
				codecTypeImport("Bit"),
				codecTypeImport("VarBit"),
				codecTypeImport("Timestamp"),
				codecTypeImport("Timestamptz"),
				codecTypeImport("Time"),
				codecTypeImport("Timetz"),
				codecTypeImport("Interval")
			],
			// Control-plane hooks: parameterized types all share the
			// expandNativeType hook; pg/enum has its own hook set.
			controlPlaneHooks: {
				[SQL_CHAR_CODEC_ID]: parameterizedTypeHooks,
				[SQL_VARCHAR_CODEC_ID]: parameterizedTypeHooks,
				[PG_CHAR_CODEC_ID]: parameterizedTypeHooks,
				[PG_VARCHAR_CODEC_ID]: parameterizedTypeHooks,
				[PG_NUMERIC_CODEC_ID]: parameterizedTypeHooks,
				[PG_BIT_CODEC_ID]: parameterizedTypeHooks,
				[PG_VARBIT_CODEC_ID]: parameterizedTypeHooks,
				[PG_TIMESTAMP_CODEC_ID]: parameterizedTypeHooks,
				[PG_TIMESTAMPTZ_CODEC_ID]: parameterizedTypeHooks,
				[PG_TIME_CODEC_ID]: parameterizedTypeHooks,
				[PG_TIMETZ_CODEC_ID]: parameterizedTypeHooks,
				[PG_INTERVAL_CODEC_ID]: parameterizedTypeHooks,
				[PG_ENUM_CODEC_ID]: pgEnumControlHooks
			}
		},
		// Codec-ID -> native Postgres type-name mapping (base names only;
		// parameters are added via expandParameterizedNativeType).
		storage: [
			{ typeId: PG_TEXT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "text" },
			{ typeId: SQL_CHAR_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "character" },
			{ typeId: SQL_VARCHAR_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "character varying" },
			{ typeId: SQL_INT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int4" },
			{ typeId: SQL_FLOAT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "float8" },
			{ typeId: PG_CHAR_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "character" },
			{ typeId: PG_VARCHAR_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "character varying" },
			{ typeId: PG_INT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int4" },
			{ typeId: PG_FLOAT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "float8" },
			{ typeId: PG_INT4_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int4" },
			{ typeId: PG_INT2_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int2" },
			{ typeId: PG_INT8_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "int8" },
			{ typeId: PG_FLOAT4_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "float4" },
			{ typeId: PG_FLOAT8_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "float8" },
			{ typeId: PG_NUMERIC_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "numeric" },
			{ typeId: PG_TIMESTAMP_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "timestamp" },
			{ typeId: PG_TIMESTAMPTZ_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "timestamptz" },
			{ typeId: PG_TIME_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "time" },
			{ typeId: PG_TIMETZ_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "timetz" },
			{ typeId: PG_BOOL_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "bool" },
			{ typeId: PG_BIT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "bit" },
			{ typeId: PG_VARBIT_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "bit varying" },
			{ typeId: PG_INTERVAL_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "interval" },
			{ typeId: PG_JSON_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "json" },
			{ typeId: PG_JSONB_CODEC_ID, familyId: "sql", targetId: "postgres", nativeType: "jsonb" }
		]
	}
};
993
+
994
+ //#endregion
995
// Bundler-minified re-exports: the single-letter aliases are assigned by the
// build and re-exported under their real names by the package entry points.
export { escapeLiteral as a, SqlEscapeError as i, expandParameterizedNativeType as n, qualifyName as o, pgEnumControlHooks as r, quoteIdentifier as s, postgresAdapterDescriptorMeta as t };
//# sourceMappingURL=descriptor-meta-D7pxo-wo.mjs.map