@prisma-next/sql-runtime 0.5.0-dev.4 → 0.5.0-dev.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45) hide show
  1. package/README.md +29 -21
  2. package/dist/exports-CrHMfIKo.mjs +1564 -0
  3. package/dist/exports-CrHMfIKo.mjs.map +1 -0
  4. package/dist/{index-yb51L_1h.d.mts → index-_dXSGeho.d.mts} +78 -25
  5. package/dist/index-_dXSGeho.d.mts.map +1 -0
  6. package/dist/index.d.mts +2 -2
  7. package/dist/index.mjs +2 -2
  8. package/dist/test/utils.d.mts +6 -5
  9. package/dist/test/utils.d.mts.map +1 -1
  10. package/dist/test/utils.mjs +11 -5
  11. package/dist/test/utils.mjs.map +1 -1
  12. package/package.json +10 -12
  13. package/src/codecs/decoding.ts +294 -173
  14. package/src/codecs/encoding.ts +162 -37
  15. package/src/codecs/validation.ts +22 -3
  16. package/src/exports/index.ts +11 -7
  17. package/src/fingerprint.ts +22 -0
  18. package/src/guardrails/raw.ts +165 -0
  19. package/src/lower-sql-plan.ts +3 -3
  20. package/src/marker.ts +75 -0
  21. package/src/middleware/before-compile-chain.ts +1 -0
  22. package/src/middleware/budgets.ts +26 -96
  23. package/src/middleware/lints.ts +3 -3
  24. package/src/middleware/sql-middleware.ts +6 -5
  25. package/src/runtime-spi.ts +44 -0
  26. package/src/sql-context.ts +332 -78
  27. package/src/sql-family-adapter.ts +3 -2
  28. package/src/sql-marker.ts +62 -47
  29. package/src/sql-runtime.ts +332 -113
  30. package/dist/exports-BQZSVXXt.mjs +0 -981
  31. package/dist/exports-BQZSVXXt.mjs.map +0 -1
  32. package/dist/index-yb51L_1h.d.mts.map +0 -1
  33. package/test/async-iterable-result.test.ts +0 -141
  34. package/test/before-compile-chain.test.ts +0 -223
  35. package/test/budgets.test.ts +0 -431
  36. package/test/context.types.test-d.ts +0 -68
  37. package/test/execution-stack.test.ts +0 -161
  38. package/test/json-schema-validation.test.ts +0 -571
  39. package/test/lints.test.ts +0 -160
  40. package/test/mutation-default-generators.test.ts +0 -254
  41. package/test/parameterized-types.test.ts +0 -529
  42. package/test/sql-context.test.ts +0 -384
  43. package/test/sql-family-adapter.test.ts +0 -103
  44. package/test/sql-runtime.test.ts +0 -792
  45. package/test/utils.ts +0 -297
@@ -0,0 +1,1564 @@
1
+ import { AsyncIterableResult, RuntimeCore, checkAborted, checkMiddlewareCompatibility, isRuntimeError, raceAgainstAbort, runWithMiddleware, runtimeError } from "@prisma-next/framework-components/runtime";
2
+ import { type } from "arktype";
3
+ import { collectOrderedParamRefs, createCodecRegistry, isQueryAst } from "@prisma-next/sql-relational-core/ast";
4
+ import { ifDefined } from "@prisma-next/utils/defined";
5
+ import { synthesizeNonParameterizedDescriptor } from "@prisma-next/framework-components/codec";
6
+ import { checkContractComponentRequirements } from "@prisma-next/framework-components/components";
7
+ import { createExecutionStack } from "@prisma-next/framework-components/execution";
8
+ import { createSqlOperationRegistry } from "@prisma-next/sql-operations";
9
+ import { createHash } from "node:crypto";
10
+
11
+ //#region src/codecs/validation.ts
12
/**
 * Collects the distinct codecIds referenced by every column across all
 * tables in the contract's storage section.
 */
function extractCodecIds(contract) {
  const ids = new Set();
  for (const table of Object.values(contract.storage.tables)) {
    for (const column of Object.values(table.columns)) {
      ids.add(column.codecId);
    }
  }
  return ids;
}
20
/**
 * Maps each column (keyed as "table.column") to its codecId across all
 * tables in the contract's storage section.
 */
function extractCodecIdsFromColumns(contract) {
  const byColumn = new Map();
  for (const [tableName, table] of Object.entries(contract.storage.tables)) {
    for (const [columnName, column] of Object.entries(table.columns)) {
      byColumn.set(`${tableName}.${columnName}`, column.codecId);
    }
  }
  return byColumn;
}
29
/**
 * Wraps a descriptor-style registry in the minimal `{ has }` lookup
 * interface: an id is present when `descriptorFor` returns a value.
 */
function adaptDescriptorRegistry(registry) {
  return {
    has(id) {
      return registry.descriptorFor(id) !== void 0;
    }
  };
}
32
/**
 * Duck-type check: a registry that exposes `descriptorFor` (own or
 * inherited) is treated as a descriptor registry.
 */
function isDescriptorRegistry(registry) {
  const exposesDescriptorFor = "descriptorFor" in registry;
  return exposesDescriptorFor;
}
35
/**
 * Verifies every column's codecId resolves in the given registry
 * (either a `{ has }` lookup or a descriptor registry, adapted on the
 * fly). Throws RUNTIME.CODEC_MISSING listing all unresolved columns.
 */
function validateContractCodecMappings(registry, contract) {
  const lookup = isDescriptorRegistry(registry) ? adaptDescriptorRegistry(registry) : registry;
  const invalidCodecs = [];
  for (const [key, codecId] of extractCodecIdsFromColumns(contract).entries()) {
    if (lookup.has(codecId)) continue;
    // Keys are "table.column"; missing pieces degrade to empty strings.
    const [table = "", column = ""] = key.split(".");
    invalidCodecs.push({ table, column, codecId });
  }
  if (invalidCodecs.length === 0) return;
  throw runtimeError("RUNTIME.CODEC_MISSING", `Missing codec implementations for column codecIds: ${invalidCodecs.map((c) => `${c.table}.${c.column} (${c.codecId})`).join(", ")}`, {
    contractTarget: contract.target,
    invalidCodecs
  });
}
57
/**
 * Completeness check entry point: delegates to
 * validateContractCodecMappings, which throws RUNTIME.CODEC_MISSING
 * when any contract column's codecId is unresolved in the registry.
 */
function validateCodecRegistryCompleteness(registry, contract) {
  validateContractCodecMappings(registry, contract);
}
60
+
61
+ //#endregion
62
+ //#region src/lower-sql-plan.ts
63
/**
 * Lowers a SQL query plan to an executable Plan by calling the adapter's
 * lower method.
 *
 * @param adapter - Adapter with a `lower(ast, { contract, params })` method
 * @param contract - Contract passed through as lowering context
 * @param queryPlan - SQL query plan from a lane (AST, params, meta; no SQL yet)
 * @returns Frozen executable Plan carrying the lowered SQL string; the
 *   adapter's params win when provided, otherwise the plan's params are kept
 */
function lowerSqlPlan(adapter, contract, queryPlan) {
  const { ast, params, meta } = queryPlan;
  const lowered = adapter.lower(ast, { contract, params });
  return Object.freeze({
    sql: lowered.sql,
    params: lowered.params ?? params,
    ast,
    meta
  });
}
83
+
84
+ //#endregion
85
+ //#region src/marker.ts
86
// Permissive string-keyed record schema used to normalize `meta` payloads.
const MetaSchema = type({ "[string]": "unknown" });
/**
 * Normalizes a marker row's `meta` value into a plain record. Accepts
 * null/undefined, a JSON string, or an object; any parse or validation
 * failure yields an empty object instead of throwing.
 */
function parseMeta(meta) {
  if (meta == null) return {};
  let candidate = meta;
  if (typeof meta === "string") {
    try {
      candidate = JSON.parse(meta);
    } catch {
      return {};
    }
  }
  const validated = MetaSchema(candidate);
  return validated instanceof type.errors ? {} : validated;
}
100
// Arktype schema for a raw row read back from the contract marker table
// (snake_case DB column names; optional fields may be null).
const ContractMarkerRowSchema = type({
  core_hash: "string",
  profile_hash: "string",
  "contract_json?": "unknown | null",
  "canonical_version?": "number | null",
  "updated_at?": "Date | string",
  "app_tag?": "string | null",
  "meta?": "unknown | null",
  invariants: type("string").array()
});
/**
 * Validates a raw marker row and maps its snake_case columns to the
 * camelCase marker shape. Throws a plain Error (joined arktype messages)
 * when the row fails schema validation.
 */
function parseContractMarkerRow(row) {
  const result = ContractMarkerRowSchema(row);
  if (result instanceof type.errors) {
    const messages = result.map((p) => p.message).join("; ");
    throw new Error(`Invalid contract marker row: ${messages}`);
  }
  // Accept either a Date or a date string; an absent/falsy updated_at
  // falls back to the current time.
  const updatedAt = result.updated_at ? result.updated_at instanceof Date ? result.updated_at : new Date(result.updated_at) : /* @__PURE__ */ new Date();
  return {
    storageHash: result.core_hash,
    profileHash: result.profile_hash,
    contractJson: result.contract_json ?? null,
    canonicalVersion: result.canonical_version ?? null,
    updatedAt,
    appTag: result.app_tag ?? null,
    // meta is normalized to a plain record ({} on parse/validation failure).
    meta: parseMeta(result.meta),
    invariants: result.invariants
  };
}
128
+
129
+ //#endregion
130
+ //#region src/middleware/budgets.ts
131
/**
 * True when a SELECT AST projects at least one aggregate expression and
 * has no GROUP BY — such a query yields exactly one row.
 */
function hasAggregateWithoutGroupBy(ast) {
  if (ast.groupBy !== void 0) return false;
  for (const item of ast.projection) {
    if (item.expr.kind === "aggregate") return true;
  }
  return false;
}
135
/**
 * Extracts the primary relation name from a SELECT AST's FROM clause:
 * the table name for a table source, the alias for a derived table, and
 * undefined for any other source kind (e.g. joins).
 */
function primaryTableFromAst(ast) {
  const source = ast.from;
  if (source.kind === "table-source") return source.name;
  if (source.kind === "derived-table-source") return source.alias;
  return void 0;
}
142
/**
 * Estimates the row count a SELECT AST may produce:
 * - 1 for a whole-table aggregate (no GROUP BY),
 * - null when no primary table can be identified (e.g. joins),
 * - otherwise the per-table estimate (falling back to defaultTableRows),
 *   capped by LIMIT when present.
 */
function estimateRowsFromAst(ast, tableRows, defaultTableRows, hasAggregateWithoutGroup) {
  if (hasAggregateWithoutGroup) return 1;
  // Resolve the primary relation name directly from the FROM clause.
  const from = ast.from;
  const table = from.kind === "table-source" ? from.name : from.kind === "derived-table-source" ? from.alias : void 0;
  if (!table) return null;
  const tableEstimate = tableRows[table] ?? defaultTableRows;
  return ast.limit !== void 0 ? Math.min(ast.limit, tableEstimate) : tableEstimate;
}
150
/**
 * Reports a budget violation: rethrows the error when blocking is
 * requested, otherwise logs its code/message/details as a warning.
 */
function emitBudgetViolation(error, shouldBlock, ctx) {
  if (shouldBlock) throw error;
  const { code, message, details } = error;
  ctx.log.warn({ code, message, details });
}
158
/**
 * Creates the "budgets" SQL middleware. Enforces three checks:
 * - beforeExecute: static row estimation for SELECT ASTs,
 * - onRow: observed row count while streaming results,
 * - afterExecute: reported latency against maxLatencyMs.
 *
 * Defaults: maxRows 10000, defaultTableRows 10000, maxLatencyMs 1000,
 * rowCount severity "error".
 */
function budgets(options) {
  const maxRows = options?.maxRows ?? 1e4;
  const defaultTableRows = options?.defaultTableRows ?? 1e4;
  const tableRows = options?.tableRows ?? {};
  const maxLatencyMs = options?.maxLatencyMs ?? 1e3;
  const rowSeverity = options?.severities?.rowCount ?? "error";
  // Per-plan row counters; WeakMap so completed plans can be GC'd.
  const observedRowsByPlan = /* @__PURE__ */ new WeakMap();
  return Object.freeze({
    name: "budgets",
    familyId: "sql",
    async beforeExecute(plan, ctx) {
      observedRowsByPlan.set(plan, { count: 0 });
      // Static estimation only applies to SELECT ASTs; other plans are
      // covered by the runtime onRow/afterExecute checks.
      if (isQueryAst(plan.ast) && plan.ast.kind === "select") return evaluateSelectAst(plan.ast, ctx);
    },
    async onRow(_row, plan, _ctx) {
      const state = observedRowsByPlan.get(plan);
      if (!state) return;
      state.count += 1;
      // Observed overruns always throw — not gated by severity/mode.
      if (state.count > maxRows) throw runtimeError("BUDGET.ROWS_EXCEEDED", "Observed row count exceeds budget", {
        source: "observed",
        observedRows: state.count,
        maxRows
      });
    },
    async afterExecute(_plan, result, ctx) {
      const latencyMs = result.latencyMs;
      if (latencyMs > maxLatencyMs) {
        // Latency overruns only block in strict mode; otherwise warn.
        const shouldBlock = ctx.mode === "strict";
        emitBudgetViolation(runtimeError("BUDGET.TIME_EXCEEDED", "Query latency exceeds budget", {
          latencyMs,
          maxLatencyMs
        }), shouldBlock, ctx);
      }
    }
  });
  // Hoisted helper (function declaration, so reachable although it follows
  // the return). Estimates rows for a SELECT AST and reports violations.
  function evaluateSelectAst(ast, ctx) {
    const hasAggNoGroup = hasAggregateWithoutGroupBy(ast);
    const estimated = estimateRowsFromAst(ast, tableRows, defaultTableRows, hasAggNoGroup);
    // A SELECT with no LIMIT and no whole-table aggregate is unbounded.
    const isUnbounded = ast.limit === void 0 && !hasAggNoGroup;
    const shouldBlock = rowSeverity === "error" || ctx.mode === "strict";
    if (isUnbounded) {
      if (estimated !== null && estimated >= maxRows) {
        emitBudgetViolation(runtimeError("BUDGET.ROWS_EXCEEDED", "Unbounded SELECT query exceeds budget", {
          source: "ast",
          estimatedRows: estimated,
          maxRows
        }), shouldBlock, ctx);
        return;
      }
      // Unbounded with no estimate, or estimate below maxRows: still
      // reported as a violation, just without the estimatedRows detail.
      emitBudgetViolation(runtimeError("BUDGET.ROWS_EXCEEDED", "Unbounded SELECT query exceeds budget", {
        source: "ast",
        maxRows
      }), shouldBlock, ctx);
      return;
    }
    // Bounded query: only a strict overrun of the estimate is a violation.
    if (estimated !== null && estimated > maxRows) emitBudgetViolation(runtimeError("BUDGET.ROWS_EXCEEDED", "Estimated row count exceeds budget", {
      source: "ast",
      estimatedRows: estimated,
      maxRows
    }), shouldBlock, ctx);
  }
}
220
+
221
+ //#endregion
222
+ //#region src/guardrails/raw.ts
223
// Heuristic patterns used to classify and lint raw SQL text when no AST
// is available (see evaluateRawGuardrails).
const SELECT_STAR_REGEX = /select\s+\*/i;
const LIMIT_REGEX = /\blimit\b/i;
const MUTATION_PREFIX_REGEX = /^(insert|update|delete|create|alter|drop|truncate)\b/i;
// Intent annotation values treated as read-only (compared lowercased).
const READ_ONLY_INTENTS = new Set([
  "read",
  "report",
  "readonly"
]);
231
/**
 * Heuristic guardrail pass for raw SQL plans (no AST available).
 * Classifies the statement, lints SELECTs for `*` projection and a
 * missing LIMIT, emits a row-budget finding for unbounded SELECTs, and
 * flags mutations carrying a read-only intent annotation. Returns
 * { lints, budgets, statement }.
 */
function evaluateRawGuardrails(plan, config) {
  const lintFindings = [];
  const budgetFindings = [];
  const flatSql = normalizeWhitespace(plan.sql);
  const statement = classifyStatement(flatSql);
  if (statement === "select") {
    if (SELECT_STAR_REGEX.test(flatSql)) {
      lintFindings.push(createLint("LINT.SELECT_STAR", "error", "Raw SQL plan selects all columns via *", { sql: snippet(plan.sql) }));
    }
    if (!LIMIT_REGEX.test(flatSql)) {
      // Budget severity is configurable; the lint itself is always a warn.
      const budgetSeverity = config?.budgets?.unboundedSelectSeverity ?? "error";
      const budgetDetails = { sql: snippet(plan.sql) };
      if (config?.budgets?.estimatedRows !== void 0) budgetDetails.estimatedRows = config.budgets.estimatedRows;
      lintFindings.push(createLint("LINT.NO_LIMIT", "warn", "Raw SQL plan omits LIMIT clause", { sql: snippet(plan.sql) }));
      budgetFindings.push(createBudget("BUDGET.ROWS_EXCEEDED", budgetSeverity, "Raw SQL plan is unbounded and may exceed row budget", budgetDetails));
    }
  }
  if (isMutationStatement(statement) && isReadOnlyIntent(plan.meta)) {
    lintFindings.push(createLint("LINT.READ_ONLY_MUTATION", "error", "Raw SQL plan mutates data despite read-only intent", {
      sql: snippet(plan.sql),
      intent: plan.meta.annotations?.["intent"]
    }));
  }
  return {
    lints: lintFindings,
    budgets: budgetFindings,
    statement
  };
}
257
/**
 * Heuristically classifies a raw SQL statement as "select", "mutation",
 * or "other" by inspecting its leading keyword.
 */
function classifyStatement(sql) {
  const trimmed = sql.trim();
  const lowered = trimmed.toLowerCase();
  // A leading CTE counts as a read when any SELECT appears in the body.
  // NOTE(review): heuristic — `WITH … AS (SELECT …) DELETE …` would also
  // classify as "select"; confirm that is acceptable for guardrails.
  if (lowered.startsWith("with") && lowered.includes("select")) return "select";
  if (lowered.startsWith("select")) return "select";
  return /^(insert|update|delete|create|alter|drop|truncate)\b/i.test(trimmed) ? "mutation" : "other";
}
267
/** True when the classified statement kind is a mutation. */
function isMutationStatement(statement) {
  if (statement === "mutation") return true;
  return false;
}
270
/**
 * True when plan meta carries a string `intent` annotation that matches
 * one of the read-only intents ("read", "report", "readonly"),
 * case-insensitively.
 */
function isReadOnlyIntent(meta) {
  const annotations = meta.annotations;
  if (typeof annotations?.intent !== "string") return false;
  const normalizedIntent = annotations.intent.toLowerCase();
  return normalizedIntent === "read" || normalizedIntent === "report" || normalizedIntent === "readonly";
}
275
/** Collapses all whitespace runs to single spaces and trims the ends. */
function normalizeWhitespace(value) {
  const collapsed = value.replace(/\s+/g, " ");
  return collapsed.trim();
}
278
/**
 * Produces a short, whitespace-normalized excerpt (max 200 chars) of a
 * SQL string for inclusion in lint/budget details.
 */
function snippet(sql) {
  const collapsed = sql.replace(/\s+/g, " ").trim();
  return collapsed.slice(0, 200);
}
281
/**
 * Builds a lint finding record; the `details` key is present only when
 * a truthy details object is supplied.
 */
function createLint(code, severity, message, details) {
  const finding = { code, severity, message };
  if (details) finding.details = details;
  return finding;
}
289
/**
 * Builds a budget finding record; the `details` key is present only when
 * a truthy details object is supplied.
 */
function createBudget(code, severity, message, details) {
  const finding = { code, severity, message };
  if (details) finding.details = details;
  return finding;
}
297
+
298
+ //#endregion
299
+ //#region src/middleware/lints.ts
300
/**
 * Returns the relation name for a FROM source (table name or derived
 * alias); throws for any other source kind.
 */
function getFromSourceTableDetail(source) {
  if (source.kind === "table-source") return source.name;
  if (source.kind === "derived-table-source") return source.alias;
  throw new Error(`Unsupported source kind: ${source.kind}`);
}
307
/**
 * Structural lint pass over a SQL QueryAst. Returns an array of findings
 * ({ code, severity, message, details? }).
 *
 * Rules: DELETE/UPDATE without WHERE -> error; SELECT without LIMIT or
 * with a selectAll intent -> warn; INSERT produces no findings. Throws
 * for unsupported AST kinds (and, via getFromSourceTableDetail, for
 * unsupported FROM source kinds).
 */
function evaluateAstLints(ast) {
  const findings = [];
  switch (ast.kind) {
    case "delete":
      if (ast.where === void 0) findings.push({
        code: "LINT.DELETE_WITHOUT_WHERE",
        severity: "error",
        message: "DELETE without WHERE clause blocks execution to prevent accidental full-table deletion",
        details: { table: ast.table.name }
      });
      break;
    case "update":
      if (ast.where === void 0) findings.push({
        code: "LINT.UPDATE_WITHOUT_WHERE",
        severity: "error",
        message: "UPDATE without WHERE clause blocks execution to prevent accidental full-table update",
        details: { table: ast.table.name }
      });
      break;
    case "select":
      if (ast.limit === void 0) {
        const table = getFromSourceTableDetail(ast.from);
        findings.push({
          code: "LINT.NO_LIMIT",
          severity: "warn",
          message: "Unbounded SELECT may return large result sets",
          // details only present when a table name could be resolved
          ...ifDefined("details", table !== void 0 ? { table } : void 0)
        });
      }
      if (ast.selectAllIntent !== void 0) {
        const table = ast.selectAllIntent.table;
        findings.push({
          code: "LINT.SELECT_STAR",
          severity: "warn",
          message: "Query selects all columns via selectAll intent",
          ...ifDefined("details", table !== void 0 ? { table } : void 0)
        });
      }
      break;
    case "insert": break;
    default: throw new Error(`Unsupported AST kind: ${ast.kind}`);
  }
  return findings;
}
351
/**
 * Looks up the user-configured severity override for a lint code.
 * Returns undefined when no severities are configured, the code is
 * unknown, or the matching severity is unset.
 */
function getConfiguredSeverity(code, options) {
  const severities = options?.severities;
  if (!severities) return void 0;
  const overrides = new Map([
    ["LINT.SELECT_STAR", severities.selectStar],
    ["LINT.NO_LIMIT", severities.noLimit],
    ["LINT.DELETE_WITHOUT_WHERE", severities.deleteWithoutWhere],
    ["LINT.UPDATE_WITHOUT_WHERE", severities.updateWithoutWhere],
    ["LINT.READ_ONLY_MUTATION", severities.readOnlyMutation],
    ["LINT.UNINDEXED_PREDICATE", severities.unindexedPredicate]
  ]);
  return overrides.get(code);
}
364
/**
 * AST-first lint middleware for SQL plans. When `plan.ast` is a SQL QueryAst, inspects
 * the AST structurally. When `plan.ast` is missing, falls back to raw heuristic
 * guardrails or skips linting depending on `fallbackWhenAstMissing`.
 *
 * Rules (AST-based):
 * - DELETE without WHERE: blocks execution (configurable severity, default error)
 * - UPDATE without WHERE: blocks execution (configurable severity, default error)
 * - Unbounded SELECT: warn/error (severity from noLimit)
 * - SELECT * intent: warn/error (severity from selectStar)
 *
 * Fallback: When ast is missing, `fallbackWhenAstMissing: 'raw'` uses heuristic
 * SQL parsing; `'skip'` skips all lints. Default is `'raw'`.
 */
function lints(options) {
  const fallback = options?.fallbackWhenAstMissing ?? "raw";
  return Object.freeze({
    name: "lints",
    familyId: "sql",
    async beforeExecute(plan, ctx) {
      // Shared reporting path for AST findings and raw-guardrail findings
      // (previously duplicated verbatim in both branches): configured
      // severity wins over the finding's own; "error" throws and blocks,
      // "warn" logs, anything else is ignored.
      const report = (lint) => {
        const effectiveSeverity = getConfiguredSeverity(lint.code, options) ?? lint.severity;
        if (effectiveSeverity === "error") throw runtimeError(lint.code, lint.message, lint.details);
        if (effectiveSeverity === "warn") ctx.log.warn({
          code: lint.code,
          message: lint.message,
          details: lint.details
        });
      };
      if (isQueryAst(plan.ast)) {
        for (const lint of evaluateAstLints(plan.ast)) report(lint);
        return;
      }
      if (fallback === "skip") return;
      // No AST: heuristic raw-SQL guardrails (budget findings are ignored
      // here; only the lint findings are reported).
      for (const lint of evaluateRawGuardrails(plan).lints) report(lint);
    }
  });
}
411
+
412
+ //#endregion
413
+ //#region src/sql-context.ts
414
/**
 * Thin wrapper forwarding the SQL stack options (target, adapter,
 * driver, extensionPacks) to the generic createExecutionStack factory.
 */
function createSqlExecutionStack(options) {
  const { target, adapter, driver, extensionPacks } = options;
  return createExecutionStack({ target, adapter, driver, extensionPacks });
}
422
/**
 * Verifies the contract can run on the assembled execution stack:
 * family must be "sql", the target id must match the stack's target
 * descriptor, and every required extension pack must be provided.
 * Throws a runtimeError with a distinct code per mismatch kind.
 */
function assertExecutionStackContractRequirements(contract, stack) {
  // Component ids available at runtime: target, adapter, and each pack.
  const providedComponentIds = new Set([
    stack.target.id,
    stack.adapter.id,
    ...stack.extensionPacks.map((pack) => pack.id)
  ]);
  const result = checkContractComponentRequirements({
    contract,
    expectedTargetFamily: "sql",
    expectedTargetId: stack.target.targetId,
    providedComponentIds
  });
  if (result.familyMismatch) throw runtimeError("RUNTIME.CONTRACT_FAMILY_MISMATCH", `Contract target family '${result.familyMismatch.actual}' does not match runtime family '${result.familyMismatch.expected}'.`, {
    actual: result.familyMismatch.actual,
    expected: result.familyMismatch.expected
  });
  if (result.targetMismatch) throw runtimeError("RUNTIME.CONTRACT_TARGET_MISMATCH", `Contract target '${result.targetMismatch.actual}' does not match runtime target descriptor '${result.targetMismatch.expected}'.`, {
    actual: result.targetMismatch.actual,
    expected: result.targetMismatch.expected
  });
  if (result.missingExtensionPackIds.length > 0) {
    const packIds = result.missingExtensionPackIds;
    throw runtimeError("RUNTIME.MISSING_EXTENSION_PACK", `Contract requires extension pack(s) ${packIds.map((id) => `'${id}'`).join(", ")}, but runtime descriptors do not provide matching component(s).`, { packIds });
  }
}
447
/**
 * Validates typeParams against a codec descriptor's Standard Schema
 * paramsSchema and returns the validated (possibly transformed) value.
 * `context` carries either { typeName } or { tableName, columnName },
 * used for error reporting. Throws RUNTIME.TYPE_PARAMS_INVALID when the
 * validator is asynchronous or when validation fails.
 */
function validateTypeParams(typeParams, codecDescriptor, context) {
  const result = codecDescriptor.paramsSchema["~standard"].validate(typeParams);
  // Standard Schema validators may be async; the runtime requires sync.
  if (result instanceof Promise) throw runtimeError("RUNTIME.TYPE_PARAMS_INVALID", `paramsSchema for codec '${codecDescriptor.codecId}' returned a Promise; runtime validation requires a synchronous Standard Schema validator.`, {
    ...context,
    codecId: codecDescriptor.codecId,
    typeParams
  });
  if (result.issues) {
    const messages = result.issues.map((issue) => issue.message).join("; ");
    throw runtimeError("RUNTIME.TYPE_PARAMS_INVALID", `Invalid typeParams for ${context.typeName ? `type '${context.typeName}'` : `column '${context.tableName}.${context.columnName}'`} (codecId: ${codecDescriptor.codecId}): ${messages}`, {
      ...context,
      codecId: codecDescriptor.codecId,
      typeParams
    });
  }
  return result.value;
}
464
/**
 * Gathers parameterized codec descriptors from all contributors into a
 * Map keyed by codecId; duplicate ids across contributors throw
 * RUNTIME.DUPLICATE_PARAMETERIZED_CODEC.
 */
function collectParameterizedCodecDescriptors(contributors) {
  const byCodecId = new Map();
  for (const contributor of contributors) {
    for (const descriptor of contributor.parameterizedCodecs()) {
      if (byCodecId.has(descriptor.codecId)) {
        throw runtimeError("RUNTIME.DUPLICATE_PARAMETERIZED_CODEC", `Duplicate parameterized codec descriptor for codecId '${descriptor.codecId}'.`, { codecId: descriptor.codecId });
      }
      byCodecId.set(descriptor.codecId, descriptor);
    }
  }
  return byCodecId;
}
472
/**
 * Build the unified descriptor map. Combines parameterized descriptors
 * (which already ship as `CodecDescriptor`s) with synthesized descriptors
 * for non-parameterized codecs registered through the legacy `codecs:`
 * slot. Codec ids that ship a parameterized descriptor take precedence —
 * even when the legacy registry registers a representative codec under
 * the same id, the parameterized descriptor is the authoritative source.
 *
 * Codec-registry-unification spec § Decision: every codec resolves
 * through one descriptor map; reads are non-branching.
 */
function buildCodecDescriptorRegistry(codecRegistry, parameterizedDescriptors) {
  const byId = /* @__PURE__ */ new Map();
  const byTargetType = /* @__PURE__ */ new Map();
  // Index a descriptor under its id and under each of its target types.
  function registerInIndices(descriptor) {
    byId.set(descriptor.codecId, descriptor);
    for (const targetType of descriptor.targetTypes) {
      const list = byTargetType.get(targetType);
      if (list) list.push(descriptor);
      else byTargetType.set(targetType, [descriptor]);
    }
  }
  // Parameterized descriptors register first so they win the byId check
  // below over legacy registrations under the same id.
  for (const descriptor of parameterizedDescriptors.values()) registerInIndices(descriptor);
  for (const codec$1 of codecRegistry.values()) {
    if (byId.has(codec$1.id)) continue;
    registerInIndices(synthesizeNonParameterizedDescriptor(codec$1));
  }
  return {
    descriptorFor(codecId) {
      return byId.get(codecId);
    },
    *values() {
      yield* byId.values();
    },
    byTargetType(targetType) {
      // Frozen empty array for unknown target types.
      return byTargetType.get(targetType) ?? Object.freeze([]);
    }
  };
}
511
/**
 * Indexes where each named type is used: maps a typeRef name to the
 * list of { table, column } sites whose column references it. Columns
 * without a string typeRef are ignored.
 */
function collectTypeRefSites(storage) {
  const sites = new Map();
  for (const [tableName, table] of Object.entries(storage.tables)) {
    for (const [columnName, column] of Object.entries(table.columns)) {
      if (typeof column.typeRef !== "string") continue;
      const site = { table: tableName, column: columnName };
      const existing = sites.get(column.typeRef);
      if (existing) existing.push(site);
      else sites.set(column.typeRef, [site]);
    }
  }
  return sites;
}
525
/**
 * Materializes `storage.types` entries into type helpers. Entries whose
 * codecId has a parameterized descriptor are resolved via
 * descriptor.factory(validatedParams)(ctx); entries without a matching
 * descriptor are exposed unchanged. Returns {} when storage.types is
 * absent.
 */
function initializeTypeHelpers(storage, codecDescriptors) {
  const helpers = {};
  const storageTypes = storage.types;
  if (!storageTypes) return helpers;
  // typeName -> [{ table, column }] usage sites, fed into the codec ctx.
  const typeRefSites = collectTypeRefSites(storage);
  for (const [typeName, typeInstance] of Object.entries(storageTypes)) {
    const descriptor = codecDescriptors.get(typeInstance.codecId);
    if (!descriptor) {
      // No parameterized descriptor: pass the raw type instance through.
      helpers[typeName] = typeInstance;
      continue;
    }
    // Throws RUNTIME.TYPE_PARAMS_INVALID on bad params.
    const validatedParams = validateTypeParams(typeInstance.typeParams, descriptor, { typeName });
    const ctx = {
      name: typeName,
      usedAt: typeRefSites.get(typeName) ?? []
    };
    helpers[typeName] = descriptor.factory(validatedParams)(ctx);
  }
  return helpers;
}
545
/**
 * Validates inline typeParams on every contract column that has them,
 * using the matching parameterized codec descriptor when one exists.
 * Columns without typeParams or without a descriptor are skipped.
 */
function validateColumnTypeParams(storage, codecDescriptors) {
  for (const [tableName, table] of Object.entries(storage.tables)) {
    for (const [columnName, column] of Object.entries(table.columns)) {
      if (!column.typeParams) continue;
      const descriptor = codecDescriptors.get(column.codecId);
      if (!descriptor) continue;
      validateTypeParams(column.typeParams, descriptor, { tableName, columnName });
    }
  }
}
554
/**
 * True when the candidate is an object whose `traits` array contains
 * "json-validator" and whose `validate` member is a function.
 */
function hasJsonValidatorTrait(candidate) {
  if (candidate === null || typeof candidate !== "object") return false;
  const { traits, validate } = candidate;
  return Array.isArray(traits) && traits.includes("json-validator") && typeof validate === "function";
}
561
/**
 * Returns the candidate's `validate` function when it carries the
 * json-validator trait; undefined otherwise.
 */
function extractValidator(candidate) {
  if (hasJsonValidatorTrait(candidate)) return candidate.validate;
  return void 0;
}
564
/**
 * Duck-type check for a resolved codec: a non-null object exposing both
 * `id` and `decode` (own or inherited).
 */
function isResolvedCodec(candidate) {
  if (candidate === null || typeof candidate !== "object") return false;
  return "id" in candidate && "decode" in candidate;
}
567
/**
 * Walk the contract's `storage.tables[].columns[]` and resolve each
 * column to a `Codec` through the unified descriptor map. Per-instance
 * behavior:
 *
 * - **typeRef columns**: reuse the resolved codec materialized once by
 *   `initializeTypeHelpers` for the `storage.types` entry. Multiple
 *   columns sharing one typeRef share one codec instance.
 * - **inline-typeParams columns**: call `descriptor.factory(typeParams)
 *   (ctx)` once per column (per-column anonymous instance).
 * - **non-parameterized columns**: call `descriptor.factory()(ctx)`
 *   once. The synthesized descriptor's factory is constant — every call
 *   returns the same shared codec instance — so columns sharing a non-
 *   parameterized codec id share one resolved codec without explicit
 *   caching.
 *
 * Combines what `initializeTypeHelpers` (named-instance walk) and the
 * old `buildJsonSchemaValidatorRegistry` (per-column walk) used to do
 * separately: one walk over all columns, one resolved codec per column,
 * one trait-gated validator extraction per column. The result drives
 * both the dispatch registry (`ContractCodecRegistry.forColumn`) and the
 * validator registry.
 *
 * Codec-registry-unification spec § AC-4: every column resolves through
 * one descriptor map without branching on parameterization.
 */
function buildContractCodecRegistry(contract, codecDescriptors, legacyCodecRegistry, types, parameterizedDescriptors) {
  // "table.column" -> resolved codec, for column-aware dispatch.
  const byColumn = /* @__PURE__ */ new Map();
  // codecId -> representative resolved codec; doubles as the cache for
  // shared non-parameterized instances (see the !isParameterized branch).
  const byCodecId = /* @__PURE__ */ new Map();
  // Parameterized codec ids that resolved to more than one distinct
  // instance; forCodecId refuses to pick one of them.
  const ambiguousCodecIds = /* @__PURE__ */ new Set();
  // "table.column" -> json-validator trait validate function.
  const validators = /* @__PURE__ */ new Map();
  for (const [tableName, table] of Object.entries(contract.storage.tables)) for (const [columnName, column] of Object.entries(table.columns)) {
    const columnKey = `${tableName}.${columnName}`;
    const descriptor = codecDescriptors.descriptorFor(column.codecId);
    let resolvedCodec;
    if (descriptor) {
      const isParameterized = parameterizedDescriptors.has(column.codecId);
      if (column.typeRef) {
        // Named type: reuse the helper instance when it is a real codec.
        const helper = types[column.typeRef];
        if (isResolvedCodec(helper)) resolvedCodec = helper;
      } else if (column.typeParams && isParameterized) {
        // Inline params: validate and build a per-column anonymous instance.
        const parameterizedDescriptor = parameterizedDescriptors.get(column.codecId);
        if (parameterizedDescriptor) {
          const validatedParams = validateTypeParams(column.typeParams, parameterizedDescriptor, {
            tableName,
            columnName
          });
          const ctx = {
            name: `<anon:${tableName}.${columnName}>`,
            usedAt: [{
              table: tableName,
              column: columnName
            }]
          };
          resolvedCodec = parameterizedDescriptor.factory(validatedParams)(ctx);
        }
      } else if (!isParameterized) {
        // Non-parameterized: one shared instance per codecId, cached in
        // byCodecId (first column to reach the id materializes it).
        let cached = byCodecId.get(column.codecId);
        if (!cached) {
          const ctx = {
            name: `<shared:${column.codecId}>`,
            usedAt: [{
              table: tableName,
              column: columnName
            }]
          };
          const voidFactory = descriptor.factory;
          cached = voidFactory(void 0)(ctx);
          byCodecId.set(column.codecId, cached);
        }
        resolvedCodec = cached;
      }
    }
    if (resolvedCodec) {
      byColumn.set(columnKey, resolvedCodec);
      const validate = extractValidator(resolvedCodec);
      if (validate) validators.set(columnKey, validate);
      // Track one representative per codecId; two distinct parameterized
      // instances under one id make that id ambiguous for forCodecId.
      const existing = byCodecId.get(column.codecId);
      if (existing === void 0) byCodecId.set(column.codecId, resolvedCodec);
      else if (existing !== resolvedCodec && parameterizedDescriptors.has(column.codecId)) ambiguousCodecIds.add(column.codecId);
    }
  }
  return {
    registry: {
      forColumn(table, column) {
        return byColumn.get(`${table}.${column}`);
      },
      forCodecId(codecId) {
        if (ambiguousCodecIds.has(codecId)) throw runtimeError("RUNTIME.TYPE_PARAMS_INVALID", `Codec '${codecId}' resolves to multiple parameterized instances; column-aware dispatch is required.`, { codecId });
        // Fall back to the legacy registry for ids no column resolved.
        return byCodecId.get(codecId) ?? legacyCodecRegistry.get(codecId);
      }
    },
    // Validator registry is only exposed when at least one column has one.
    jsonValidators: validators.size > 0 ? {
      get: (key) => validators.get(key),
      size: validators.size
    } : void 0
  };
}
665
/**
 * Gathers mutation default generators from all contributors into a Map
 * keyed by generator id. Contributors may omit the hook. A generator id
 * claimed by two contributors throws
 * RUNTIME.DUPLICATE_MUTATION_DEFAULT_GENERATOR naming both owners.
 */
function collectMutationDefaultGenerators(contributors) {
  const generators = new Map();
  const owners = new Map();
  for (const contributor of contributors) {
    for (const generator of contributor.mutationDefaultGenerators?.() ?? []) {
      const existingOwner = owners.get(generator.id);
      if (existingOwner !== void 0) {
        throw runtimeError("RUNTIME.DUPLICATE_MUTATION_DEFAULT_GENERATOR", `Duplicate mutation default generator '${generator.id}'.`, {
          id: generator.id,
          existingOwner,
          incomingOwner: contributor.id
        });
      }
      generators.set(generator.id, generator);
      owners.set(generator.id, contributor.id);
    }
  }
  return generators;
}
683
/**
 * Computes the runtime value for a mutation default spec. Only the
 * "generator" kind is handled: looks up the generator by id and invokes
 * it with the spec's params, throwing
 * RUNTIME.MUTATION_DEFAULT_GENERATOR_MISSING when absent. Any other
 * kind yields undefined.
 */
function computeExecutionDefaultValue(spec, generatorRegistry) {
  if (spec.kind === "generator") {
    const generator = generatorRegistry.get(spec.id);
    if (!generator) throw runtimeError("RUNTIME.MUTATION_DEFAULT_GENERATOR_MISSING", `Contract references mutation default generator '${spec.id}' but no runtime component provides it.`, { id: spec.id });
    return generator.generate(spec.params);
  }
  return void 0;
}
692
/**
 * Computes the default column values to inject into a mutation against
 * `options.table`. For each contract mutation default matching the table
 * and the op ("create" uses onCreate, otherwise onUpdate), a value is
 * generated unless the caller already supplied the column or a default
 * for it was already applied. Returns [{ column, value }] in contract
 * order.
 */
function applyMutationDefaults(contract, generatorRegistry, options) {
  const defaults = contract.execution?.mutations.defaults ?? [];
  const applied = [];
  if (defaults.length === 0) return applied;
  const seenColumns = new Set();
  for (const mutationDefault of defaults) {
    if (mutationDefault.ref.table !== options.table) continue;
    const spec = options.op === "create" ? mutationDefault.onCreate : mutationDefault.onUpdate;
    if (!spec) continue;
    const columnName = mutationDefault.ref.column;
    // Caller-supplied values and earlier defaults win.
    if (Object.hasOwn(options.values, columnName)) continue;
    if (seenColumns.has(columnName)) continue;
    applied.push({
      column: columnName,
      value: computeExecutionDefaultValue(spec, generatorRegistry)
    });
    seenColumns.add(columnName);
  }
  return applied;
}
711
/**
 * Assembles the execution context for a contract + stack pairing: codec
 * registries (legacy + contract-walk-derived), query operations, type
 * helpers, optional JSON-Schema validators, and a bound
 * `applyMutationDefaults`. Contributor order is target, adapter, then
 * extension packs — later registrations go through each registry's own
 * conflict rules.
 */
function createExecutionContext(options) {
  const { contract, stack } = options;
  // Fail fast if the stack cannot satisfy the contract's requirements.
  assertExecutionStackContractRequirements(contract, stack);
  const codecRegistry = createCodecRegistry();
  const contributors = [
    stack.target,
    stack.adapter,
    ...stack.extensionPacks
  ];
  for (const contributor of contributors) for (const c of contributor.codecs().values()) codecRegistry.register(c);
  const queryOperationRegistry = createSqlOperationRegistry();
  // queryOperations is optional on a contributor — tolerate its absence.
  for (const contributor of contributors) for (const op of contributor.queryOperations?.() ?? []) queryOperationRegistry.register(op);
  const parameterizedCodecDescriptors = collectParameterizedCodecDescriptors(contributors);
  const codecDescriptors = buildCodecDescriptorRegistry(codecRegistry, parameterizedCodecDescriptors);
  const mutationDefaultGeneratorRegistry = collectMutationDefaultGenerators(contributors);
  // Column type params are only validated when parameterized codecs exist.
  if (parameterizedCodecDescriptors.size > 0) validateColumnTypeParams(contract.storage, parameterizedCodecDescriptors);
  const types = initializeTypeHelpers(contract.storage, parameterizedCodecDescriptors);
  const { registry: contractCodecs, jsonValidators: jsonSchemaValidators } = buildContractCodecRegistry(contract, codecDescriptors, codecRegistry, types, parameterizedCodecDescriptors);
  return {
    contract,
    codecs: codecRegistry,
    contractCodecs,
    codecDescriptors,
    queryOperations: queryOperationRegistry,
    types,
    // jsonSchemaValidators key is present only when validators were built.
    ...jsonSchemaValidators ? { jsonSchemaValidators } : {},
    applyMutationDefaults: (options$1) => applyMutationDefaults(contract, mutationDefaultGeneratorRegistry, options$1)
  };
}
740
+
741
+ //#endregion
742
+ //#region src/sql-marker.ts
743
// Idempotent bootstrap DDL: a dedicated schema plus a single-row marker
// table keyed by `id = 1`. Both use `if not exists` so re-running them is
// harmless.
const ensureSchemaStatement = {
  sql: "create schema if not exists prisma_contract",
  params: []
};
// Marker table layout: contract hashes, optional serialized contract,
// versioning/tagging metadata, and an invariants list. `updated_at`
// defaults to now() so inserts need not supply it.
const ensureTableStatement = {
  sql: `create table if not exists prisma_contract.marker (
	id smallint primary key default 1,
	core_hash text not null,
	profile_hash text not null,
	contract_json jsonb,
	canonical_version int,
	updated_at timestamptz not null default now(),
	app_tag text,
	meta jsonb not null default '{}',
	invariants text[] not null default '{}'
)`,
  params: []
};
761
/**
 * Builds the statement that reads the single marker row (`id = $1` with
 * param 1). Returns a fresh statement object per call so callers may own
 * and mutate the result.
 */
function readContractMarker() {
  return {
    sql: `select
	core_hash,
	profile_hash,
	contract_json,
	canonical_version,
	updated_at,
	app_tag,
	meta,
	invariants
from prisma_contract.marker
where id = $1`,
    params: [1]
  };
}
777
/**
 * The variable columns written alongside the fixed `id = $1` and
 * `updated_at = now()` in marker INSERT/UPDATE statements. Each entry
 * carries a column name, an optional cast type, and the parameter value;
 * placeholders (`$N`) are assigned positionally by the caller, so adding
 * or reordering a column cannot desync indices. The `invariants` column
 * is emitted only when the caller supplies it — see
 * `WriteMarkerInput.invariants`.
 */
function markerColumns(input) {
  const columns = [
    { name: "core_hash", param: input.storageHash },
    { name: "profile_hash", param: input.profileHash },
    { name: "contract_json", type: "jsonb", param: input.contractJson ?? null },
    { name: "canonical_version", param: input.canonicalVersion ?? null },
    { name: "app_tag", param: input.appTag ?? null },
    { name: "meta", type: "jsonb", param: JSON.stringify(input.meta ?? {}) }
  ];
  if (input.invariants !== undefined) {
    columns.push({ name: "invariants", type: "text[]", param: input.invariants });
  }
  return columns;
}
820
/**
 * Builds the INSERT and UPDATE statements that persist the contract
 * marker. Both share one params array: `$1` is the fixed row id (1) and
 * `$2..$N` are the variable columns from `markerColumns`, each rendered
 * with its optional cast. `updated_at` is always refreshed via `now()`.
 */
function writeContractMarker(input) {
  const placed = markerColumns(input).map((column, index) => {
    const placeholder = `$${index + 2}`;
    return {
      name: column.name,
      expr: column.type ? `${placeholder}::${column.type}` : placeholder,
      param: column.param
    };
  });
  const params = [1, ...placed.map((column) => column.param)];
  const insertColumns = ["id", ...placed.map((column) => column.name), "updated_at"].join(", ");
  const insertValues = ["$1", ...placed.map((column) => column.expr), "now()"].join(", ");
  const setClauses = [...placed.map((column) => `${column.name} = ${column.expr}`), "updated_at = now()"].join(", ");
  return {
    insert: {
      sql: `insert into prisma_contract.marker (${insertColumns}) values (${insertValues})`,
      params
    },
    update: {
      sql: `update prisma_contract.marker set ${setClauses} where id = $1`,
      params
    }
  };
}
849
+
850
+ //#endregion
851
+ //#region src/codecs/json-schema-validation.ts
852
/**
 * Validates a JSON value against its column's JSON Schema, if a validator
 * is registered under `"table.column"`.
 *
 * No-ops when no validator exists for the column; throws
 * `RUNTIME.JSON_SCHEMA_VALIDATION_FAILED` when validation fails.
 */
function validateJsonValue(registry, table, column, value, direction, codecId) {
  const validator = registry.get(`${table}.${column}`);
  if (!validator) return;
  const outcome = validator(value);
  if (!outcome.valid) {
    throw createJsonSchemaValidationError(table, column, direction, outcome.errors, codecId);
  }
}
866
/**
 * Builds (does not throw) the RUNTIME.JSON_SCHEMA_VALIDATION_FAILED error
 * envelope for a failed column validation. The errors array is copied so
 * the envelope owns its own list.
 */
function createJsonSchemaValidationError(table, column, direction, errors, codecId) {
  const summary = formatErrorSummary(errors);
  const message = `JSON schema validation failed for column '${table}.${column}' (${direction}): ${summary}`;
  return runtimeError("RUNTIME.JSON_SCHEMA_VALIDATION_FAILED", message, {
    table,
    column,
    codecId,
    direction,
    errors: [...errors]
  });
}
875
/**
 * Renders a `"path: message"` summary of JSON-Schema validation errors,
 * joined with "; ". Root-path ("/") errors omit the path prefix. Empty
 * input yields a fixed placeholder.
 *
 * The original special-cased `errors.length === 1` with logic identical
 * to the map/join path; the branch was redundant (join on a single
 * element is the element itself), so it is collapsed here.
 */
function formatErrorSummary(errors) {
  if (errors.length === 0) return "unknown validation error";
  return errors
    .map((err) => (err.path === "/" ? err.message : `${err.path}: ${err.message}`))
    .join("; ");
}
883
+
884
+ //#endregion
885
+ //#region src/codecs/decoding.ts
886
// Max characters of a raw wire value echoed into error envelopes.
const WIRE_PREVIEW_LIMIT = 100;
// Shared empty set reused by every decode context that has no include aliases.
const EMPTY_INCLUDE_ALIASES = /* @__PURE__ */ new Set();
888
/** True when the plan carries a compiled AST (vs. a raw SQL-only plan). */
function isAstBackedPlan(plan) {
  return plan.ast !== undefined;
}
891
/** The output column list of a statement: `projection` for selects, `returning` otherwise. */
function projectionListFromAst(ast) {
  return ast.kind === "select" ? ast.projection : ast.returning;
}
895
/**
 * Resolve the per-cell codec for a projection item.
 *
 * Resolution order (Phase B, codec-registry-unification spec § AC-4):
 * 1. `(table, column)` refs prefer `contractCodecs.forColumn(...)` — the
 *    per-instance codec materialized from the descriptor factory at
 *    context-construction time (carries per-instance state such as a
 *    compiled JSON-Schema validator).
 * 2. Non-column-ref projections (computed expressions, raw SQL aliases)
 *    that still carry a codec id (ADR 205 stamps every `ProjectionItem`
 *    with the producer's codec id) use `forCodecId(codecId)`.
 * 3. Codec ids the contract walk could not resolve fall back to the
 *    legacy `CodecRegistry`.
 *
 * Returns `undefined` when no codec applies.
 */
function resolveProjectionCodec(item, registry, contractCodecs) {
  const { expr, codecId } = item;
  if (expr.kind === "column-ref" && contractCodecs) {
    const columnCodec = contractCodecs.forColumn(expr.table, expr.column);
    if (columnCodec) return columnCodec;
  }
  if (!codecId) return undefined;
  return contractCodecs?.forCodecId(codecId) ?? registry.get(codecId);
}
923
/** A decode context with no alias order and no codecs — used for plans the
 * runtime cannot introspect (no AST, or an AST without a projection list).
 * Maps are fresh per call; the include-alias set is the shared empty one. */
function emptyDecodeContext() {
  return {
    aliases: void 0,
    codecs: /* @__PURE__ */ new Map(),
    columnRefs: /* @__PURE__ */ new Map(),
    includeAliases: EMPTY_INCLUDE_ALIASES
  };
}
/**
 * Builds the per-plan decode context: ordered projection aliases, the
 * codec per alias, the `(table, column)` ref per alias (for column-ref
 * projections), and the set of include aliases (subquery /
 * json-array-agg projections, which decode as JSON arrays instead of
 * through a codec).
 *
 * The original duplicated the empty-context literal for both bail-out
 * paths; it is now produced by `emptyDecodeContext` so the two paths
 * cannot drift.
 */
function buildDecodeContext(plan, registry, contractCodecs) {
  if (!isAstBackedPlan(plan)) return emptyDecodeContext();
  const projection = projectionListFromAst(plan.ast);
  if (!projection) return emptyDecodeContext();
  const aliases = [];
  const codecs = /* @__PURE__ */ new Map();
  const columnRefs = /* @__PURE__ */ new Map();
  const includeAliases = /* @__PURE__ */ new Set();
  for (const item of projection) {
    aliases.push(item.alias);
    const resolved = resolveProjectionCodec(item, registry, contractCodecs);
    if (resolved) codecs.set(item.alias, resolved);
    if (item.expr.kind === "column-ref") {
      columnRefs.set(item.alias, { table: item.expr.table, column: item.expr.column });
    } else if (item.expr.kind === "subquery" || item.expr.kind === "json-array-agg") {
      includeAliases.add(item.alias);
    }
  }
  return { aliases, codecs, columnRefs, includeAliases };
}
958
/**
 * Truncates a wire value for inclusion in error envelopes. Strings longer
 * than the limit get a trailing ellipsis; non-strings are stringified and
 * hard-truncated without one.
 */
function previewWireValue(wireValue) {
  if (typeof wireValue !== "string") return String(wireValue).slice(0, WIRE_PREVIEW_LIMIT);
  if (wireValue.length <= WIRE_PREVIEW_LIMIT) return wireValue;
  return `${wireValue.slice(0, WIRE_PREVIEW_LIMIT)}...`;
}
962
/** True when `error` is the runtime's JSON-Schema validation failure envelope. */
function isJsonSchemaValidationError(error) {
  if (!isRuntimeError(error)) return false;
  return error.code === "RUNTIME.JSON_SCHEMA_VALIDATION_FAILED";
}
965
/**
 * Wraps a per-cell decode failure in a RUNTIME.DECODE_FAILED envelope and
 * throws it. The envelope carries `{ table, column }` when a column ref is
 * known, `{ alias }` otherwise, plus the codec id and a truncated preview
 * of the wire value; the original error is attached on `cause`.
 */
function wrapDecodeFailure(error, alias, ref, cellCodec, wireValue) {
  const reason = error instanceof Error ? error.message : String(error);
  const target = ref ? `${ref.table}.${ref.column}` : alias;
  const details = {
    ...(ref ? { table: ref.table, column: ref.column } : { alias }),
    codec: cellCodec.id,
    wirePreview: previewWireValue(wireValue)
  };
  const wrapped = runtimeError("RUNTIME.DECODE_FAILED", `Failed to decode column ${target} with codec '${cellCodec.id}': ${reason}`, details);
  wrapped.cause = error;
  throw wrapped;
}
978
/**
 * Wraps a failed include-aggregate JSON parse in a RUNTIME.DECODE_FAILED
 * envelope and throws it, with the original error on `cause`.
 */
function wrapIncludeAggregateFailure(error, alias, wireValue) {
  const reason = error instanceof Error ? error.message : String(error);
  const wrapped = runtimeError("RUNTIME.DECODE_FAILED", `Failed to parse JSON array for include alias '${alias}': ${reason}`, {
    alias,
    wirePreview: previewWireValue(wireValue)
  });
  wrapped.cause = error;
  throw wrapped;
}
986
/**
 * Decodes an include projection cell into an array of rows. Null/undefined
 * wire values become `[]`; arrays pass through; strings (and anything
 * else, via stringification) are JSON-parsed. A non-array parse result or
 * a parse error is surfaced as RUNTIME.DECODE_FAILED.
 */
function decodeIncludeAggregate(alias, wireValue) {
  if (wireValue == null) return [];
  try {
    const parsed = Array.isArray(wireValue)
      ? wireValue
      : JSON.parse(typeof wireValue === "string" ? wireValue : String(wireValue));
    if (!Array.isArray(parsed)) {
      throw new Error(`Expected array for include alias '${alias}', got ${typeof parsed}`);
    }
    return parsed;
  } catch (error) {
    wrapIncludeAggregateFailure(error, alias, wireValue);
  }
}
999
/**
 * Decodes a single field. Single-armed: every cell takes the same path —
 * `codec.decode → await → JSON-Schema validate → return plain value` — so
 * sync- and async-authored codecs are indistinguishable to callers.
 *
 * The row-level `rowCtx` is repackaged into a per-cell codec call context:
 * when the alias resolves to a `(table, column)` ref, the cell context
 * carries `column = { table, name }`; otherwise any stale `column` from the
 * row context is stripped so the runtime never silently defaults it
 * (aggregate aliases and computed projections get `column: undefined`).
 *
 * Null wire values short-circuit to null; cells without a registered codec
 * pass the wire value through untouched. Decode and post-decode validation
 * failures are wrapped via `wrapDecodeFailure`, except JSON-Schema
 * validation envelopes, which pass through unchanged.
 */
async function decodeField(alias, wireValue, decodeCtx, jsonValidators, rowCtx) {
  if (wireValue === null) return null;
  const cellCodec = decodeCtx.codecs.get(alias);
  if (!cellCodec) return wireValue;
  const ref = decodeCtx.columnRefs.get(alias);
  let cellCtx;
  if (ref) {
    cellCtx = { ...rowCtx, column: { table: ref.table, name: ref.column } };
  } else {
    const { column: _omitted, ...withoutColumn } = rowCtx;
    cellCtx = withoutColumn;
  }
  let decoded;
  try {
    decoded = await cellCodec.decode(wireValue, cellCtx);
  } catch (error) {
    wrapDecodeFailure(error, alias, ref, cellCodec, wireValue);
  }
  if (jsonValidators && ref) {
    try {
      validateJsonValue(jsonValidators, ref.table, ref.column, decoded, "decode", cellCodec.id);
    } catch (error) {
      if (isJsonSchemaValidationError(error)) throw error;
      wrapDecodeFailure(error, alias, ref, cellCodec, wireValue);
    }
  }
  return decoded;
}
1045
/**
 * Decodes a row by dispatching all per-cell codec calls concurrently via
 * `Promise.all`. Each cell follows the single-armed `decodeField` path.
 * Failures are wrapped in `RUNTIME.DECODE_FAILED` with `{ table, column,
 * codec }` (or `{ alias, codec }` when no column ref is resolvable) and the
 * original error attached on `cause`.
 *
 * When `rowCtx.signal` is provided:
 *
 * - **Already-aborted at entry** short-circuits with `RUNTIME.ABORTED`
 *   (`{ phase: 'decode' }`) before any `codec.decode` call is made.
 * - **Mid-flight aborts** race the per-cell `Promise.all` against the
 *   signal so the runtime returns promptly even when codec bodies ignore
 *   it. In-flight bodies that ignore the signal complete in the
 *   background (cooperative cancellation).
 * - Existing `RUNTIME.DECODE_FAILED` envelopes from codec bodies pass
 *   through unchanged (no double wrap).
 */
async function decodeRow(row, plan, registry, jsonValidators, rowCtx, contractCodecs) {
  checkAborted(rowCtx, "decode");
  const signal = rowCtx.signal;
  const decodeCtx = buildDecodeContext(plan, registry, contractCodecs);
  // No AST-derived alias order → decode whatever keys the driver returned.
  const aliases = decodeCtx.aliases ?? Object.keys(row);
  if (decodeCtx.aliases !== void 0) {
    // AST-backed plans require every projected alias to be present on the row.
    for (const alias of decodeCtx.aliases) if (!Object.hasOwn(row, alias)) throw runtimeError("RUNTIME.DECODE_FAILED", `Row missing projection alias "${alias}"`, {
      alias,
      expectedAliases: decodeCtx.aliases,
      presentKeys: Object.keys(row)
    });
  }
  const tasks = [];
  const includeIndices = [];
  for (let i = 0; i < aliases.length; i++) {
    const alias = aliases[i];
    const wireValue = row[alias];
    if (decodeCtx.includeAliases.has(alias)) {
      // Include aliases are parsed synchronously after the race; a resolved
      // placeholder keeps task indices aligned with alias positions.
      includeIndices.push({
        index: i,
        alias,
        value: wireValue
      });
      tasks.push(Promise.resolve(void 0));
      continue;
    }
    tasks.push(decodeField(alias, wireValue, decodeCtx, jsonValidators, rowCtx));
  }
  const settled = await raceAgainstAbort(Promise.all(tasks), signal, "decode");
  // Backfill include cells over their placeholders, then rebuild the row
  // object in projection order.
  for (const entry of includeIndices) settled[entry.index] = decodeIncludeAggregate(entry.alias, entry.value);
  const decoded = {};
  for (let i = 0; i < aliases.length; i++) decoded[aliases[i]] = settled[i];
  return decoded;
}
1097
+
1098
+ //#endregion
1099
+ //#region src/codecs/encoding.ts
1100
// Shared placeholder metadata for params with no AST-derived codec id or
// name; frozen so every unannotated param can safely alias one object.
const NO_METADATA = Object.freeze({
  codecId: void 0,
  name: void 0
});
1104
/**
 * Resolve the codec for an outgoing param.
 *
 * Phase B (and AC-5-deferred carve-out): `ParamRef` carries a `codecId`
 * but no `(table, column)` ref, so encode-side dispatch is codec-id-keyed:
 * `contractCodecs.forCodecId(codecId)` first (contract-walk-derived shared
 * codec), then the legacy `CodecRegistry.get` for parameterized codec ids
 * the walk could not resolve through a column.
 *
 * For the parameterized codecs shipped at Phase B (pgvector, postgres
 * json/jsonb), encode is per-instance-stateless w.r.t. params — pgvector
 * formats `[v1,v2,...]` regardless of declared length, and json/jsonb
 * encode is `JSON.stringify` regardless of schema — so the codec-id-keyed
 * lookup yields a structurally equivalent encoder even when the resolved
 * per-instance codec carries extra state used only by `decode`. TML-2357
 * retires the fallback by threading `ParamRef.refs` through column-bound
 * construction sites.
 *
 * Returns `undefined` when the param carries no codec id.
 */
function resolveParamCodec(metadata, registry, contractCodecs) {
  const { codecId } = metadata;
  if (!codecId) return undefined;
  return contractCodecs?.forCodecId(codecId) ?? registry.get(codecId);
}
1132
/** Human-readable label for a param: its declared name, else `param[i]`. */
function paramLabel(metadata, paramIndex) {
  if (metadata.name != null) return metadata.name;
  return `param[${paramIndex}]`;
}
1135
/**
 * Wraps a per-param encode failure in a RUNTIME.ENCODE_FAILED envelope and
 * throws it, carrying the param label, codec id, and index, with the
 * original error on `cause`.
 */
function wrapEncodeFailure(error, metadata, paramIndex, codecId) {
  const label = paramLabel(metadata, paramIndex);
  const reason = error instanceof Error ? error.message : String(error);
  const wrapped = runtimeError("RUNTIME.ENCODE_FAILED", `Failed to encode parameter ${label} with codec '${codecId}': ${reason}`, {
    label,
    codec: codecId,
    paramIndex
  });
  wrapped.cause = error;
  throw wrapped;
}
1145
+ async function encodeParamValue(value, metadata, paramIndex, registry, ctx, contractCodecs) {
1146
+ if (value === null || value === void 0) return null;
1147
+ const codec$1 = resolveParamCodec(metadata, registry, contractCodecs);
1148
+ if (!codec$1) return value;
1149
+ try {
1150
+ return await codec$1.encode(value, ctx);
1151
+ } catch (error) {
1152
+ wrapEncodeFailure(error, metadata, paramIndex, codec$1.id);
1153
+ }
1154
+ }
1155
/**
 * Encodes all parameters concurrently via `Promise.all`. Per parameter,
 * sync- and async-authored codecs share the same path: `codec.encode →
 * await → return`. Param-level failures are wrapped in
 * `RUNTIME.ENCODE_FAILED`. Param metadata (codec id, name) comes from the
 * plan AST's ordered param refs when present; params without refs use the
 * shared `NO_METADATA` placeholder. Returns a frozen array.
 *
 * When `ctx.signal` is provided:
 *
 * - **Already-aborted at entry** short-circuits with `RUNTIME.ABORTED`
 *   (`{ phase: 'encode' }`) before any `codec.encode` call is made.
 * - **Mid-flight abort** races the per-param `Promise.all` against the
 *   signal; in-flight codec bodies that ignore it run to completion in
 *   the background (cooperative cancellation, see ADR 204).
 * - Existing `RUNTIME.ENCODE_FAILED` envelopes surfacing from codec
 *   bodies pass through unchanged (no double wrap).
 */
async function encodeParams(plan, registry, ctx, contractCodecs) {
  checkAborted(ctx, "encode");
  const signal = ctx.signal;
  const paramCount = plan.params.length;
  if (paramCount === 0) return plan.params;
  const metadata = new Array(paramCount).fill(NO_METADATA);
  if (plan.ast) {
    const refs = collectOrderedParamRefs(plan.ast);
    const limit = Math.min(paramCount, refs.length);
    for (let i = 0; i < limit; i++) {
      const ref = refs[i];
      if (ref) metadata[i] = { codecId: ref.codecId, name: ref.name };
    }
  }
  const tasks = plan.params.map((value, i) =>
    encodeParamValue(value, metadata[i] ?? NO_METADATA, i, registry, ctx, contractCodecs)
  );
  const settled = await raceAgainstAbort(Promise.all(tasks), signal, "encode");
  return Object.freeze(settled);
}
1195
+
1196
+ //#endregion
1197
+ //#region src/fingerprint.ts
1198
// Single-quoted SQL string literals (with '' escapes), bare numeric
// literals, and whitespace runs — the pieces normalized away before hashing.
const STRING_LITERAL_REGEX = /'(?:''|[^'])*'/g;
const NUMERIC_LITERAL_REGEX = /\b\d+(?:\.\d+)?\b/g;
const WHITESPACE_REGEX = /\s+/g;
/**
 * Computes a literal-stripped, normalized fingerprint of a SQL statement.
 *
 * String and numeric literals are replaced with `?`, whitespace is
 * collapsed, and the result is trimmed and lowercased before hashing, so
 * two structurally equivalent statements (differing only in literal
 * values) produce the same fingerprint. Used by SQL telemetry to group
 * queries. Returns a `sha256:<hex>` string.
 */
function computeSqlFingerprint(sql) {
  const canonical = sql
    .replace(STRING_LITERAL_REGEX, "?")
    .replace(NUMERIC_LITERAL_REGEX, "?")
    .replace(WHITESPACE_REGEX, " ")
    .trim()
    .toLowerCase();
  const digest = createHash("sha256").update(canonical).digest("hex");
  return `sha256:${digest}`;
}
1213
+
1214
+ //#endregion
1215
+ //#region src/middleware/before-compile-chain.ts
1216
/**
 * Threads a plan draft through each middleware's optional `beforeCompile`
 * hook in registration order. A hook may return undefined (no change) or a
 * new draft; only drafts whose `ast` reference actually differs replace the
 * current one, and each accepted rewrite is debug-logged with the
 * middleware name and lane.
 */
async function runBeforeCompileChain(middleware, initial, ctx) {
  let draft = initial;
  for (const mw of middleware) {
    const hook = mw.beforeCompile;
    if (!hook) continue;
    const rewritten = await hook.call(mw, draft, ctx);
    if (rewritten === undefined || rewritten.ast === draft.ast) continue;
    ctx.log.debug?.({
      event: "middleware.rewrite",
      middleware: mw.name,
      lane: draft.meta.lane
    });
    draft = rewritten;
  }
  return draft;
}
1232
+
1233
+ //#endregion
1234
+ //#region src/sql-family-adapter.ts
1235
/**
 * SQL-family glue: holds the runtime contract and the adapter profile
 * (exposed as `markerReader` for marker statement parsing/reading), and
 * validates that an execution plan was compiled for this runtime.
 */
var SqlFamilyAdapter = class {
  contract;
  markerReader;
  constructor(contract, adapterProfile) {
    this.contract = contract;
    this.markerReader = adapterProfile;
  }
  /**
   * Rejects plans compiled for a different target (PLAN.TARGET_MISMATCH)
   * or against a different storage hash (PLAN.HASH_MISMATCH).
   */
  validatePlan(plan, contract) {
    const planTarget = plan.meta.target;
    if (planTarget !== contract.target) {
      throw runtimeError("PLAN.TARGET_MISMATCH", "Plan target does not match runtime target", {
        planTarget,
        runtimeTarget: contract.target
      });
    }
    const planStorageHash = plan.meta.storageHash;
    if (planStorageHash !== contract.storage.storageHash) {
      throw runtimeError("PLAN.HASH_MISMATCH", "Plan storage hash does not match runtime contract", {
        planStorageHash,
        runtimeStorageHash: contract.storage.storageHash
      });
    }
  }
};
1253
+
1254
+ //#endregion
1255
+ //#region src/sql-runtime.ts
1256
/** True when the plan is already lowered (carries a `sql` property). */
function isExecutionPlan(plan) {
  return Reflect.has(plan, "sql");
}
1259
/**
 * SQL-family runtime. Lowers query plans, encodes params, executes against
 * the driver (or a connection/transaction queryable), decodes rows, and
 * enforces the marker-verification policy. Extends the shared RuntimeCore
 * middleware host.
 */
var SqlRuntimeImpl = class extends RuntimeCore {
  contract;             // runtime contract (target, storage hashes, profile)
  adapter;              // lowering adapter; `adapter.profile` feeds the marker reader
  driver;               // default queryable and connection factory
  familyAdapter;        // plan validation + marker statement reader/parser
  codecRegistry;        // legacy id-keyed codec registry
  contractCodecs;       // contract-walk-derived codec registry
  codecDescriptors;     // descriptor registry used for completeness validation
  jsonSchemaValidators; // optional per-column JSON-Schema validators
  sqlCtx;               // context shared with middleware and RuntimeCore
  verify;               // marker policy: { mode, requireMarker }
  codecRegistryValidated; // completeness check done (eager on startup mode, lazy otherwise)
  verified;             // marker check satisfied for the current policy
  startupVerified;      // marker verified via the startup path
  _telemetry;           // last execution's frozen telemetry record, or null
  constructor(options) {
    const { context, adapter, driver, verify, middleware, mode, log } = options;
    // Reject middleware not compatible with the sql family/target up front.
    if (middleware) for (const mw of middleware) checkMiddlewareCompatibility(mw, "sql", context.contract.target);
    const sqlCtx = {
      contract: context.contract,
      mode: mode ?? "strict",
      now: () => Date.now(),
      // Default logger is a no-op (note: no `debug` key — debug logging is
      // optional-chained at call sites).
      log: log ?? {
        info: () => {},
        warn: () => {},
        error: () => {}
      }
    };
    super({
      middleware: middleware ?? [],
      ctx: sqlCtx
    });
    this.contract = context.contract;
    this.adapter = adapter;
    this.driver = driver;
    this.familyAdapter = new SqlFamilyAdapter(context.contract, adapter.profile);
    this.codecRegistry = context.codecs;
    this.contractCodecs = context.contractCodecs;
    this.codecDescriptors = context.codecDescriptors;
    this.jsonSchemaValidators = context.jsonSchemaValidators;
    this.sqlCtx = sqlCtx;
    this.verify = verify;
    this.codecRegistryValidated = false;
    // "startup"/"onFirstUse" start unverified; "always" starts true but
    // verifyMarker resets it to false on every call in that mode.
    this.verified = verify.mode === "startup" ? false : verify.mode === "always";
    this.startupVerified = false;
    this._telemetry = null;
    if (verify.mode === "startup") {
      // Startup mode validates codec completeness eagerly at construction.
      validateCodecRegistryCompleteness(this.codecDescriptors, context.contract);
      this.codecRegistryValidated = true;
    }
  }
  /**
   * Lower a `SqlQueryPlan` (AST + meta) into a `SqlExecutionPlan` with
   * encoded parameters ready for the driver. This is the single point at
   * which params transition from app-layer values to driver wire-format.
   *
   * `ctx: SqlCodecCallContext` is forwarded to `encodeParams` so per-query
   * cancellation reaches every codec body during parameter encoding. The
   * framework abstract typed this as `CodecCallContext`; the SQL family
   * narrows it to the SQL-specific extension. SQL params do not populate
   * `ctx.column` — encode-side column metadata is the middleware's domain.
   */
  async lower(plan, ctx) {
    const lowered = lowerSqlPlan(this.adapter, this.contract, plan);
    return Object.freeze({
      ...lowered,
      params: await encodeParams(lowered, this.codecRegistry, ctx, this.contractCodecs)
    });
  }
  /**
   * Default driver invocation. Production execution paths override the
   * queryable target (e.g. transaction or connection) by going through
   * `executeAgainstQueryable`; this implementation supports any caller of
   * `super.execute(plan)` and the abstract-base contract.
   */
  runDriver(exec) {
    return this.driver.execute({
      sql: exec.sql,
      params: exec.params
    });
  }
  /**
   * SQL pre-compile hook. Runs the registered middleware `beforeCompile`
   * chain over the plan's draft (AST + meta). Returns the original plan
   * unchanged when no middleware rewrote the AST; otherwise returns a new
   * plan carrying the rewritten AST and meta. The AST is the authoritative
   * source of execution metadata, so a rewrite needs no sidecar
   * reconciliation here — the lowering adapter and the encoder both walk
   * the rewritten AST directly.
   */
  async runBeforeCompile(plan) {
    const rewrittenDraft = await runBeforeCompileChain(this.middleware, {
      ast: plan.ast,
      meta: plan.meta
    }, this.sqlCtx);
    return rewrittenDraft.ast === plan.ast ? plan : {
      ...plan,
      ast: rewrittenDraft.ast,
      meta: rewrittenDraft.meta
    };
  }
  /** Executes against the default driver queryable. */
  execute(plan, options) {
    return this.executeAgainstQueryable(plan, this.driver, options);
  }
  /**
   * Core execution path shared by driver, connection, and transaction
   * targets. Returns an AsyncIterableResult whose generator: encodes params
   * (or lowers an AST plan first), validates the plan against the contract,
   * runs marker verification per the configured mode, streams rows through
   * the middleware chain, decodes each row, and records telemetry on exit.
   * An `options.signal` is threaded into encode, decode, and the stream
   * loop via the shared codec context.
   */
  executeAgainstQueryable(plan, queryable, options) {
    this.ensureCodecRegistryValidated();
    const self = this;
    const signal = options?.signal;
    const codecCtx = signal === void 0 ? {} : { signal };
    const generator = async function* () {
      checkAborted(codecCtx, "stream");
      // Already-lowered plans only need param encoding; AST plans run the
      // beforeCompile chain and full lowering.
      const exec = isExecutionPlan(plan) ? Object.freeze({
        ...plan,
        params: await encodeParams(plan, self.codecRegistry, codecCtx, self.contractCodecs)
      }) : await self.lower(await self.runBeforeCompile(plan), codecCtx);
      self.familyAdapter.validatePlan(exec, self.contract);
      self._telemetry = null;
      if (!self.startupVerified && self.verify.mode === "startup") await self.verifyMarker();
      if (!self.verified && self.verify.mode === "onFirstUse") await self.verifyMarker();
      const startedAt = Date.now();
      let outcome = null;
      try {
        if (self.verify.mode === "always") await self.verifyMarker();
        const iterator = runWithMiddleware(exec, self.middleware, self.ctx, () => queryable.execute({
          sql: exec.sql,
          params: exec.params
        }))[Symbol.asyncIterator]();
        try {
          while (true) {
            checkAborted(codecCtx, "stream");
            const next = await iterator.next();
            if (next.done) break;
            yield await decodeRow(next.value, exec, self.codecRegistry, self.jsonSchemaValidators, codecCtx, self.contractCodecs);
          }
        } finally {
          // Always close the source iterator, even on early exit/throw.
          await iterator.return?.();
        }
        outcome = "success";
      } catch (error) {
        outcome = "runtime-error";
        throw error;
      } finally {
        // outcome stays null if we failed before the try block's work began.
        if (outcome !== null) self.recordTelemetry(exec, outcome, Date.now() - startedAt);
      }
    };
    return new AsyncIterableResult(generator());
  }
  /**
   * Acquires a dedicated driver connection and wraps it with
   * transaction/release/destroy/execute entry points that route execution
   * back through this runtime.
   */
  async connection() {
    const driverConn = await this.driver.acquireConnection();
    const self = this;
    return {
      async transaction() {
        const driverTx = await driverConn.beginTransaction();
        return self.wrapTransaction(driverTx);
      },
      async release() {
        await driverConn.release();
      },
      async destroy(reason) {
        await driverConn.destroy(reason);
      },
      execute(plan, options) {
        return self.executeAgainstQueryable(plan, driverConn, options);
      }
    };
  }
  /** Wraps a driver transaction so its execute() goes through this runtime. */
  wrapTransaction(driverTx) {
    const self = this;
    return {
      async commit() {
        await driverTx.commit();
      },
      async rollback() {
        await driverTx.rollback();
      },
      execute(plan, options) {
        return self.executeAgainstQueryable(plan, driverTx, options);
      }
    };
  }
  /** Last execution's telemetry record, or null if none recorded yet. */
  telemetry() {
    return this._telemetry;
  }
  /** Closes the underlying driver. */
  async close() {
    await this.driver.close();
  }
  /** Lazily runs the codec completeness check (once) for non-startup modes. */
  ensureCodecRegistryValidated() {
    if (!this.codecRegistryValidated) {
      validateCodecRegistryCompleteness(this.codecDescriptors, this.contract);
      this.codecRegistryValidated = true;
    }
  }
  /**
   * Reads the contract marker row via the adapter profile and checks its
   * storage hash (and profile hash, when the contract declares one) against
   * this runtime's contract. A missing marker is tolerated unless
   * `verify.requireMarker` is set. Throws CONTRACT.MARKER_MISSING /
   * CONTRACT.MARKER_MISMATCH on failure; on success flips `verified` and
   * `startupVerified`.
   */
  async verifyMarker() {
    // "always" mode re-verifies on every call.
    if (this.verify.mode === "always") this.verified = false;
    if (this.verified) return;
    const readStatement = this.familyAdapter.markerReader.readMarkerStatement();
    const result = await this.driver.query(readStatement.sql, readStatement.params);
    if (result.rows.length === 0) {
      if (this.verify.requireMarker) throw runtimeError("CONTRACT.MARKER_MISSING", "Contract marker not found in database");
      this.verified = true;
      return;
    }
    const marker = this.familyAdapter.markerReader.parseMarkerRow(result.rows[0]);
    const contract = this.contract;
    if (marker.storageHash !== contract.storage.storageHash) throw runtimeError("CONTRACT.MARKER_MISMATCH", "Database storage hash does not match contract", {
      expected: contract.storage.storageHash,
      actual: marker.storageHash
    });
    // Profile hash is only enforced when the contract declares one.
    const expectedProfile = contract.profileHash ?? null;
    if (expectedProfile !== null && marker.profileHash !== expectedProfile) throw runtimeError("CONTRACT.MARKER_MISMATCH", "Database profile hash does not match contract", {
      expectedProfile,
      actualProfile: marker.profileHash
    });
    this.verified = true;
    this.startupVerified = true;
  }
  /** Stores a frozen telemetry record for the just-finished execution. */
  recordTelemetry(plan, outcome, durationMs) {
    const contract = this.contract;
    this._telemetry = Object.freeze({
      lane: plan.meta.lane,
      target: contract.target,
      fingerprint: computeSqlFingerprint(plan.sql),
      outcome,
      ...durationMs !== void 0 ? { durationMs } : {}
    });
  }
};
1486
/**
 * Builds (does not throw) the error raised when a query result is consumed
 * after its transaction has ended.
 */
function transactionClosedError() {
  return runtimeError("RUNTIME.TRANSACTION_CLOSED", "Cannot read from a query result after the transaction has ended. Await the result or call .toArray() inside the transaction callback.", {});
}
1489
/**
 * Run `fn` inside a driver transaction: commit on success, roll back on
 * failure, and fence any query results that escape the callback so they
 * cannot be read after the transaction has ended.
 *
 * Cleanup contract (from the code below): if rollback or post-commit
 * rollback fails, the connection is destroyed instead of released; the
 * original error is preserved on the wrapped error's `cause`.
 */
async function withTransaction(runtime, fn) {
  const connection = await runtime.connection();
  const transaction = await connection.transaction();
  // Flipped to true as soon as the callback settles; all tx-scoped reads
  // check it so late iteration fails fast instead of yielding stale rows.
  let invalidated = false;
  const txContext = {
    get invalidated() {
      return invalidated;
    },
    execute(plan, options) {
      if (invalidated) throw transactionClosedError();
      const inner = transaction.execute(plan, options);
      // Wrap the driver's stream so every row re-checks the invalidated
      // flag before being yielded.
      const guarded = async function* () {
        for await (const row of inner) {
          if (invalidated) throw transactionClosedError();
          yield row;
        }
      };
      return new AsyncIterableResult(guarded());
    }
  };
  // Destroy (rather than release) the connection when its state is unknown
  // after a failed rollback/commit; destroy errors are deliberately
  // swallowed so the original failure is the one surfaced.
  let connectionDisposed = false;
  const destroyConnection = async (reason) => {
    if (connectionDisposed) return;
    connectionDisposed = true;
    await connection.destroy(reason).catch(() => void 0);
  };
  try {
    let result;
    try {
      result = await fn(txContext);
    } catch (error) {
      // Callback failed: roll back. If rollback itself fails, the
      // connection is destroyed and a ROLLBACK_FAILED error is thrown
      // with the callback's error attached as `cause`.
      try {
        await transaction.rollback();
      } catch (rollbackError) {
        await destroyConnection(rollbackError);
        const wrapped = runtimeError("RUNTIME.TRANSACTION_ROLLBACK_FAILED", "Transaction rollback failed after callback error", { rollbackError });
        wrapped.cause = error;
        throw wrapped;
      }
      throw error;
    } finally {
      // Invalidate before commit so escaped results cannot observe rows
      // in the window between callback completion and commit/rollback.
      invalidated = true;
    }
    try {
      await transaction.commit();
    } catch (commitError) {
      // Best-effort rollback after a failed commit; if that also fails,
      // destroy the connection instead of returning it to the caller.
      try {
        await transaction.rollback();
      } catch {
        await destroyConnection(commitError);
      }
      const wrapped = runtimeError("RUNTIME.TRANSACTION_COMMIT_FAILED", "Transaction commit failed", { commitError });
      wrapped.cause = commitError;
      throw wrapped;
    }
    return result;
  } finally {
    // Release the connection unless it was already destroyed above.
    if (!connectionDisposed) await connection.release();
  }
}
1549
/**
 * Assemble a SqlRuntimeImpl from resolved stack options. The optional
 * fields (middleware, mode, log) are forwarded only when defined — via
 * the `ifDefined` spread helper — so the runtime's own defaults apply
 * when a caller omits them.
 */
function createRuntime(options) {
  const { stackInstance, context, driver, verify, middleware, mode, log } = options;
  const runtimeOptions = {
    context,
    adapter: stackInstance.adapter,
    driver,
    verify,
    ...ifDefined("middleware", middleware),
    ...ifDefined("mode", mode),
    ...ifDefined("log", log)
  };
  return new SqlRuntimeImpl(runtimeOptions);
}
1561
+
1562
+ //#endregion
1563
+ export { readContractMarker as a, createSqlExecutionStack as c, parseContractMarkerRow as d, lowerSqlPlan as f, validateContractCodecMappings as h, ensureTableStatement as i, lints as l, validateCodecRegistryCompleteness as m, withTransaction as n, writeContractMarker as o, extractCodecIds as p, ensureSchemaStatement as r, createExecutionContext as s, createRuntime as t, budgets as u };
1564
+ //# sourceMappingURL=exports-CrHMfIKo.mjs.map