@prisma-next/sql-runtime 0.5.0-dev.4 → 0.5.0-dev.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +29 -21
  2. package/dist/exports-CrHMfIKo.mjs +1564 -0
  3. package/dist/exports-CrHMfIKo.mjs.map +1 -0
  4. package/dist/{index-yb51L_1h.d.mts → index-_dXSGeho.d.mts} +78 -25
  5. package/dist/index-_dXSGeho.d.mts.map +1 -0
  6. package/dist/index.d.mts +2 -2
  7. package/dist/index.mjs +2 -2
  8. package/dist/test/utils.d.mts +6 -5
  9. package/dist/test/utils.d.mts.map +1 -1
  10. package/dist/test/utils.mjs +11 -5
  11. package/dist/test/utils.mjs.map +1 -1
  12. package/package.json +10 -12
  13. package/src/codecs/decoding.ts +294 -173
  14. package/src/codecs/encoding.ts +162 -37
  15. package/src/codecs/validation.ts +22 -3
  16. package/src/exports/index.ts +11 -7
  17. package/src/fingerprint.ts +22 -0
  18. package/src/guardrails/raw.ts +165 -0
  19. package/src/lower-sql-plan.ts +3 -3
  20. package/src/marker.ts +75 -0
  21. package/src/middleware/before-compile-chain.ts +1 -0
  22. package/src/middleware/budgets.ts +26 -96
  23. package/src/middleware/lints.ts +3 -3
  24. package/src/middleware/sql-middleware.ts +6 -5
  25. package/src/runtime-spi.ts +44 -0
  26. package/src/sql-context.ts +332 -78
  27. package/src/sql-family-adapter.ts +3 -2
  28. package/src/sql-marker.ts +62 -47
  29. package/src/sql-runtime.ts +332 -113
  30. package/dist/exports-BQZSVXXt.mjs +0 -981
  31. package/dist/exports-BQZSVXXt.mjs.map +0 -1
  32. package/dist/index-yb51L_1h.d.mts.map +0 -1
  33. package/test/async-iterable-result.test.ts +0 -141
  34. package/test/before-compile-chain.test.ts +0 -223
  35. package/test/budgets.test.ts +0 -431
  36. package/test/context.types.test-d.ts +0 -68
  37. package/test/execution-stack.test.ts +0 -161
  38. package/test/json-schema-validation.test.ts +0 -571
  39. package/test/lints.test.ts +0 -160
  40. package/test/mutation-default-generators.test.ts +0 -254
  41. package/test/parameterized-types.test.ts +0 -529
  42. package/test/sql-context.test.ts +0 -384
  43. package/test/sql-family-adapter.test.ts +0 -103
  44. package/test/sql-runtime.test.ts +0 -792
  45. package/test/utils.ts +0 -297
@@ -1,4 +1,4 @@
1
- import { c as createSqlExecutionStack, i as ensureTableStatement, o as writeContractMarker, r as ensureSchemaStatement, s as createExecutionContext } from "../exports-BQZSVXXt.mjs";
1
+ import { c as createSqlExecutionStack, d as parseContractMarkerRow, i as ensureTableStatement, o as writeContractMarker, r as ensureSchemaStatement, s as createExecutionContext } from "../exports-CrHMfIKo.mjs";
2
2
  import { codec, createCodecRegistry } from "@prisma-next/sql-relational-core/ast";
3
3
  import { instantiateExecutionStack } from "@prisma-next/framework-components/execution";
4
4
  import { coreHash, profileHash } from "@prisma-next/contract/types";
@@ -163,8 +163,13 @@ function createStubAdapter() {
163
163
  codecRegistry.register(codec({
164
164
  typeId: "pg/timestamptz@1",
165
165
  targetTypes: ["timestamptz"],
166
- encode: (value) => value instanceof Date ? value.toISOString() : value,
167
- decode: (wire) => wire instanceof Date ? wire : new Date(wire)
166
+ encode: (value) => value,
167
+ decode: (wire) => wire,
168
+ encodeJson: (value) => value.toISOString(),
169
+ decodeJson: (json) => {
170
+ if (typeof json !== "string") throw new Error("expected ISO date string");
171
+ return new Date(json);
172
+ }
168
173
  }));
169
174
  return {
170
175
  profile: {
@@ -176,10 +181,11 @@ function createStubAdapter() {
176
181
  },
177
182
  readMarkerStatement() {
178
183
  return {
179
- sql: "select core_hash, profile_hash, contract_json, canonical_version, updated_at, app_tag, meta from prisma_contract.marker where id = $1",
184
+ sql: "select core_hash, profile_hash, contract_json, canonical_version, updated_at, app_tag, meta, invariants from prisma_contract.marker where id = $1",
180
185
  params: [1]
181
186
  };
182
- }
187
+ },
188
+ parseMarkerRow: parseContractMarkerRow
183
189
  },
184
190
  lower(ast, ctx) {
185
191
  const sqlText = JSON.stringify(ast);
@@ -1 +1 @@
1
- {"version":3,"file":"utils.mjs","names":["collectAsync"],"sources":["../../test/utils.ts"],"sourcesContent":["import type { Contract, ExecutionPlan, ResultType } from '@prisma-next/contract/types';\nimport { coreHash, profileHash } from '@prisma-next/contract/types';\nimport {\n instantiateExecutionStack,\n type RuntimeDriverDescriptor,\n} from '@prisma-next/framework-components/execution';\nimport { builtinGeneratorIds } from '@prisma-next/ids';\nimport { generateId } from '@prisma-next/ids/runtime';\nimport type { SqlStorage } from '@prisma-next/sql-contract/types';\nimport type { Adapter, LoweredStatement, SelectAst } from '@prisma-next/sql-relational-core/ast';\nimport { codec, createCodecRegistry } from '@prisma-next/sql-relational-core/ast';\nimport type { SqlQueryPlan } from '@prisma-next/sql-relational-core/plan';\nimport { collectAsync, drainAsyncIterable } from '@prisma-next/test-utils';\nimport type { Client } from 'pg';\nimport type { SqlStatement } from '../src/exports';\nimport {\n createExecutionContext,\n type createRuntime,\n createSqlExecutionStack,\n ensureSchemaStatement,\n ensureTableStatement,\n writeContractMarker,\n} from '../src/exports';\nimport type {\n ExecutionContext,\n SqlRuntimeAdapterDescriptor,\n SqlRuntimeAdapterInstance,\n SqlRuntimeDriverInstance,\n SqlRuntimeExtensionDescriptor,\n SqlRuntimeTargetDescriptor,\n} from '../src/sql-context';\n\nfunction createTestMutationDefaultGenerators() {\n return builtinGeneratorIds.map((id) => ({\n id,\n generate: (params?: Record<string, unknown>) => generateId(params ? 
{ id, params } : { id }),\n }));\n}\n\n/**\n * Executes a plan and collects all results into an array.\n * This helper DRYs up the common pattern of executing plans in tests.\n * The return type is inferred from the plan's type parameter.\n */\nexport async function executePlanAndCollect<\n P extends ExecutionPlan<ResultType<P>> | SqlQueryPlan<ResultType<P>>,\n>(runtime: ReturnType<typeof createRuntime>, plan: P): Promise<ResultType<P>[]> {\n type Row = ResultType<P>;\n return collectAsync<Row>(runtime.execute<Row>(plan));\n}\n\n/**\n * Drains a plan execution, consuming all results without collecting them.\n * Useful for testing side effects without memory overhead.\n */\nexport async function drainPlanExecution(\n runtime: ReturnType<typeof createRuntime>,\n plan: ExecutionPlan | SqlQueryPlan<unknown>,\n): Promise<void> {\n return drainAsyncIterable(runtime.execute(plan));\n}\n\n/**\n * Executes a SQL statement on a database client.\n */\nexport async function executeStatement(client: Client, statement: SqlStatement): Promise<void> {\n if (statement.params.length > 0) {\n await client.query(statement.sql, [...statement.params]);\n return;\n }\n\n await client.query(statement.sql);\n}\n\n/**\n * Sets up database schema and data, then writes the contract marker.\n * This helper DRYs up the common pattern of database setup in tests.\n */\nexport async function setupTestDatabase(\n client: Client,\n contract: Contract<SqlStorage>,\n setupFn: (client: Client) => Promise<void>,\n): Promise<void> {\n await client.query('drop schema if exists prisma_contract cascade');\n await client.query('create schema if not exists public');\n\n await setupFn(client);\n\n await executeStatement(client, ensureSchemaStatement);\n await executeStatement(client, ensureTableStatement);\n const write = writeContractMarker({\n storageHash: contract.storage.storageHash,\n profileHash: contract.profileHash,\n contractJson: contract,\n canonicalVersion: 1,\n });\n await executeStatement(client, 
write.insert);\n}\n\n/**\n * Writes a contract marker to the database.\n * This helper DRYs up the common pattern of writing contract markers in tests.\n */\nexport async function writeTestContractMarker(\n client: Client,\n contract: Contract<SqlStorage>,\n): Promise<void> {\n const write = writeContractMarker({\n storageHash: contract.storage.storageHash,\n profileHash: contract.profileHash,\n contractJson: contract,\n canonicalVersion: 1,\n });\n await executeStatement(client, write.insert);\n}\n\n/**\n * Creates a test adapter descriptor from a raw adapter.\n * Wraps the adapter in an SqlRuntimeAdapterDescriptor with static contributions\n * derived from the adapter's codec registry.\n */\nexport function createTestAdapterDescriptor(\n adapter: Adapter<SelectAst, Contract<SqlStorage>, LoweredStatement>,\n): SqlRuntimeAdapterDescriptor<'postgres'> {\n const codecRegistry = adapter.profile.codecs();\n return {\n kind: 'adapter' as const,\n id: 'test-adapter',\n version: '0.0.1',\n familyId: 'sql' as const,\n targetId: 'postgres' as const,\n codecs: () => codecRegistry,\n parameterizedCodecs: () => [],\n mutationDefaultGenerators: createTestMutationDefaultGenerators,\n create(_stack): SqlRuntimeAdapterInstance<'postgres'> {\n return Object.assign({ familyId: 'sql' as const, targetId: 'postgres' as const }, adapter);\n },\n };\n}\n\n/**\n * Creates a test target descriptor with empty static contributions.\n */\nexport function createTestTargetDescriptor(): SqlRuntimeTargetDescriptor<'postgres'> {\n return {\n kind: 'target' as const,\n id: 'postgres',\n version: '0.0.1',\n familyId: 'sql' as const,\n targetId: 'postgres' as const,\n codecs: () => createCodecRegistry(),\n parameterizedCodecs: () => [],\n create() {\n return { familyId: 'sql' as const, targetId: 'postgres' as const };\n },\n };\n}\n\n/**\n * Creates an ExecutionContext for testing.\n * This helper DRYs up the common pattern of context creation in tests.\n *\n * Accepts a raw adapter and optional 
extension descriptors, wrapping the\n * adapter in a descriptor internally for descriptor-first context creation.\n */\nexport function createTestContext<TContract extends Contract<SqlStorage>>(\n contract: TContract,\n adapter: Adapter<SelectAst, Contract<SqlStorage>, LoweredStatement>,\n options?: {\n extensionPacks?: ReadonlyArray<SqlRuntimeExtensionDescriptor<'postgres'>>;\n },\n): ExecutionContext<TContract> {\n return createExecutionContext({\n contract,\n stack: {\n target: createTestTargetDescriptor(),\n adapter: createTestAdapterDescriptor(adapter),\n extensionPacks: options?.extensionPacks ?? [],\n },\n });\n}\n\nexport function createTestStackInstance(options?: {\n extensionPacks?: ReadonlyArray<SqlRuntimeExtensionDescriptor<'postgres'>>;\n driver?: RuntimeDriverDescriptor<\n 'sql',\n 'postgres',\n unknown,\n SqlRuntimeDriverInstance<'postgres'>\n >;\n}) {\n const stack = createSqlExecutionStack({\n target: createTestTargetDescriptor(),\n adapter: createTestAdapterDescriptor(createStubAdapter()),\n driver: options?.driver,\n extensionPacks: options?.extensionPacks ?? 
[],\n });\n\n return instantiateExecutionStack(stack);\n}\n\n/**\n * Creates a stub adapter for testing.\n * This helper DRYs up the common pattern of adapter creation in tests.\n *\n * The stub adapter includes simple codecs for common test types (pg/int4@1, pg/text@1, pg/timestamptz@1)\n * to enable type inference in tests without requiring the postgres adapter package.\n */\nexport function createStubAdapter(): Adapter<SelectAst, Contract<SqlStorage>, LoweredStatement> {\n const codecRegistry = createCodecRegistry();\n\n // Register stub codecs for common test types\n // These match the codec IDs used in test contracts (pg/int4@1, pg/text@1, pg/timestamptz@1)\n // but don't require importing from the postgres adapter package\n codecRegistry.register(\n codec({\n typeId: 'pg/int4@1',\n targetTypes: ['int4'],\n encode: (value: number) => value,\n decode: (wire: number) => wire,\n }),\n );\n\n codecRegistry.register(\n codec({\n typeId: 'pg/text@1',\n targetTypes: ['text'],\n encode: (value: string) => value,\n decode: (wire: string) => wire,\n }),\n );\n\n codecRegistry.register(\n codec({\n typeId: 'pg/timestamptz@1',\n targetTypes: ['timestamptz'],\n encode: (value: string | Date) => (value instanceof Date ? value.toISOString() : value),\n decode: (wire: string | Date) => (wire instanceof Date ? wire : new Date(wire)),\n }),\n );\n\n return {\n profile: {\n id: 'stub-profile',\n target: 'postgres',\n capabilities: {},\n codecs() {\n return codecRegistry;\n },\n readMarkerStatement() {\n return {\n sql: 'select core_hash, profile_hash, contract_json, canonical_version, updated_at, app_tag, meta from prisma_contract.marker where id = $1',\n params: [1],\n };\n },\n },\n lower(ast: SelectAst, ctx: { contract: Contract<SqlStorage>; params?: readonly unknown[] }) {\n const sqlText = JSON.stringify(ast);\n return Object.freeze({ sql: sqlText, params: ctx.params ? 
[...ctx.params] : [] });\n },\n };\n}\n\nexport function createTestContract(\n contract: Partial<Omit<Contract<SqlStorage>, 'profileHash' | 'storage'>> & {\n storageHash?: string;\n profileHash?: string;\n storage?: Omit<SqlStorage, 'storageHash'>;\n },\n): Contract<SqlStorage> {\n const { execution, ...rest } = contract;\n const storageHashValue = coreHash(rest['storageHash'] ?? 'sha256:testcore');\n\n return {\n target: rest['target'] ?? 'postgres',\n targetFamily: rest['targetFamily'] ?? 'sql',\n storage: rest['storage']\n ? { ...rest['storage'], storageHash: storageHashValue }\n : { storageHash: storageHashValue, tables: {} },\n models: rest['models'] ?? {},\n roots: rest['roots'] ?? {},\n capabilities: rest['capabilities'] ?? {},\n extensionPacks: rest['extensionPacks'] ?? {},\n meta: rest['meta'] ?? {},\n ...(execution ? { execution } : {}),\n profileHash: profileHash(rest['profileHash'] ?? 'sha256:testprofile'),\n };\n}\n\n// Re-export generic utilities from test-utils\nexport {\n collectAsync,\n createDevDatabase,\n type DevDatabase,\n teardownTestDatabase,\n withClient,\n} from 
'@prisma-next/test-utils';\n"],"mappings":";;;;;;;;;AAgCA,SAAS,sCAAsC;AAC7C,QAAO,oBAAoB,KAAK,QAAQ;EACtC;EACA,WAAW,WAAqC,WAAW,SAAS;GAAE;GAAI;GAAQ,GAAG,EAAE,IAAI,CAAC;EAC7F,EAAE;;;;;;;AAQL,eAAsB,sBAEpB,SAA2C,MAAmC;AAE9E,QAAOA,eAAkB,QAAQ,QAAa,KAAK,CAAC;;;;;;AAOtD,eAAsB,mBACpB,SACA,MACe;AACf,QAAO,mBAAmB,QAAQ,QAAQ,KAAK,CAAC;;;;;AAMlD,eAAsB,iBAAiB,QAAgB,WAAwC;AAC7F,KAAI,UAAU,OAAO,SAAS,GAAG;AAC/B,QAAM,OAAO,MAAM,UAAU,KAAK,CAAC,GAAG,UAAU,OAAO,CAAC;AACxD;;AAGF,OAAM,OAAO,MAAM,UAAU,IAAI;;;;;;AAOnC,eAAsB,kBACpB,QACA,UACA,SACe;AACf,OAAM,OAAO,MAAM,gDAAgD;AACnE,OAAM,OAAO,MAAM,qCAAqC;AAExD,OAAM,QAAQ,OAAO;AAErB,OAAM,iBAAiB,QAAQ,sBAAsB;AACrD,OAAM,iBAAiB,QAAQ,qBAAqB;AAOpD,OAAM,iBAAiB,QANT,oBAAoB;EAChC,aAAa,SAAS,QAAQ;EAC9B,aAAa,SAAS;EACtB,cAAc;EACd,kBAAkB;EACnB,CAAC,CACmC,OAAO;;;;;;AAO9C,eAAsB,wBACpB,QACA,UACe;AAOf,OAAM,iBAAiB,QANT,oBAAoB;EAChC,aAAa,SAAS,QAAQ;EAC9B,aAAa,SAAS;EACtB,cAAc;EACd,kBAAkB;EACnB,CAAC,CACmC,OAAO;;;;;;;AAQ9C,SAAgB,4BACd,SACyC;CACzC,MAAM,gBAAgB,QAAQ,QAAQ,QAAQ;AAC9C,QAAO;EACL,MAAM;EACN,IAAI;EACJ,SAAS;EACT,UAAU;EACV,UAAU;EACV,cAAc;EACd,2BAA2B,EAAE;EAC7B,2BAA2B;EAC3B,OAAO,QAA+C;AACpD,UAAO,OAAO,OAAO;IAAE,UAAU;IAAgB,UAAU;IAAqB,EAAE,QAAQ;;EAE7F;;;;;AAMH,SAAgB,6BAAqE;AACnF,QAAO;EACL,MAAM;EACN,IAAI;EACJ,SAAS;EACT,UAAU;EACV,UAAU;EACV,cAAc,qBAAqB;EACnC,2BAA2B,EAAE;EAC7B,SAAS;AACP,UAAO;IAAE,UAAU;IAAgB,UAAU;IAAqB;;EAErE;;;;;;;;;AAUH,SAAgB,kBACd,UACA,SACA,SAG6B;AAC7B,QAAO,uBAAuB;EAC5B;EACA,OAAO;GACL,QAAQ,4BAA4B;GACpC,SAAS,4BAA4B,QAAQ;GAC7C,gBAAgB,SAAS,kBAAkB,EAAE;GAC9C;EACF,CAAC;;AAGJ,SAAgB,wBAAwB,SAQrC;AAQD,QAAO,0BAPO,wBAAwB;EACpC,QAAQ,4BAA4B;EACpC,SAAS,4BAA4B,mBAAmB,CAAC;EACzD,QAAQ,SAAS;EACjB,gBAAgB,SAAS,kBAAkB,EAAE;EAC9C,CAAC,CAEqC;;;;;;;;;AAUzC,SAAgB,oBAAgF;CAC9F,MAAM,gBAAgB,qBAAqB;AAK3C,eAAc,SACZ,MAAM;EACJ,QAAQ;EACR,aAAa,CAAC,OAAO;EACrB,SAAS,UAAkB;EAC3B,SAAS,SAAiB;EAC3B,CAAC,CACH;AAED,eAAc,SACZ,MAAM;EACJ,QAAQ;EACR,aAAa,CAAC,OAAO;EACrB,SAAS,UAAkB;EAC3B,SAAS,SAAiB;EAC3B,CAAC,CACH;AAED,eAAc,SACZ,MAAM;EACJ,QAAQ;EACR,aAAa,CAAC,cAAc;EAC5B,SAAS,UAA0B,iBAAiB,OAAO,MAAM,aAAa,GAAG;EA
CjF,SAAS,SAAyB,gBAAgB,OAAO,OAAO,IAAI,KAAK,KAAK;EAC/E,CAAC,CACH;AAED,QAAO;EACL,SAAS;GACP,IAAI;GACJ,QAAQ;GACR,cAAc,EAAE;GAChB,SAAS;AACP,WAAO;;GAET,sBAAsB;AACpB,WAAO;KACL,KAAK;KACL,QAAQ,CAAC,EAAE;KACZ;;GAEJ;EACD,MAAM,KAAgB,KAAsE;GAC1F,MAAM,UAAU,KAAK,UAAU,IAAI;AACnC,UAAO,OAAO,OAAO;IAAE,KAAK;IAAS,QAAQ,IAAI,SAAS,CAAC,GAAG,IAAI,OAAO,GAAG,EAAE;IAAE,CAAC;;EAEpF;;AAGH,SAAgB,mBACd,UAKsB;CACtB,MAAM,EAAE,WAAW,GAAG,SAAS;CAC/B,MAAM,mBAAmB,SAAS,KAAK,kBAAkB,kBAAkB;AAE3E,QAAO;EACL,QAAQ,KAAK,aAAa;EAC1B,cAAc,KAAK,mBAAmB;EACtC,SAAS,KAAK,aACV;GAAE,GAAG,KAAK;GAAY,aAAa;GAAkB,GACrD;GAAE,aAAa;GAAkB,QAAQ,EAAE;GAAE;EACjD,QAAQ,KAAK,aAAa,EAAE;EAC5B,OAAO,KAAK,YAAY,EAAE;EAC1B,cAAc,KAAK,mBAAmB,EAAE;EACxC,gBAAgB,KAAK,qBAAqB,EAAE;EAC5C,MAAM,KAAK,WAAW,EAAE;EACxB,GAAI,YAAY,EAAE,WAAW,GAAG,EAAE;EAClC,aAAa,YAAY,KAAK,kBAAkB,qBAAqB;EACtE"}
1
+ {"version":3,"file":"utils.mjs","names":["collectAsync"],"sources":["../../test/utils.ts"],"sourcesContent":["import type { Contract } from '@prisma-next/contract/types';\nimport { coreHash, profileHash } from '@prisma-next/contract/types';\nimport {\n instantiateExecutionStack,\n type RuntimeDriverDescriptor,\n} from '@prisma-next/framework-components/execution';\nimport type { ResultType } from '@prisma-next/framework-components/runtime';\nimport { builtinGeneratorIds } from '@prisma-next/ids';\nimport { generateId } from '@prisma-next/ids/runtime';\nimport type { SqlStorage } from '@prisma-next/sql-contract/types';\nimport type { Adapter, LoweredStatement, SelectAst } from '@prisma-next/sql-relational-core/ast';\nimport { codec, createCodecRegistry } from '@prisma-next/sql-relational-core/ast';\nimport type { SqlExecutionPlan, SqlQueryPlan } from '@prisma-next/sql-relational-core/plan';\nimport { collectAsync, drainAsyncIterable } from '@prisma-next/test-utils';\nimport type { Client } from 'pg';\nimport type { SqlStatement } from '../src/exports';\nimport {\n createExecutionContext,\n type createRuntime,\n createSqlExecutionStack,\n ensureSchemaStatement,\n ensureTableStatement,\n parseContractMarkerRow,\n writeContractMarker,\n} from '../src/exports';\nimport type {\n ExecutionContext,\n SqlRuntimeAdapterDescriptor,\n SqlRuntimeAdapterInstance,\n SqlRuntimeDriverInstance,\n SqlRuntimeExtensionDescriptor,\n SqlRuntimeTargetDescriptor,\n} from '../src/sql-context';\n\nfunction createTestMutationDefaultGenerators() {\n return builtinGeneratorIds.map((id) => ({\n id,\n generate: (params?: Record<string, unknown>) => generateId(params ? 
{ id, params } : { id }),\n }));\n}\n\n/**\n * Executes a plan and collects all results into an array.\n * This helper DRYs up the common pattern of executing plans in tests.\n * The return type is inferred from the plan's type parameter.\n */\nexport async function executePlanAndCollect<\n P extends SqlExecutionPlan<ResultType<P>> | SqlQueryPlan<ResultType<P>>,\n>(runtime: ReturnType<typeof createRuntime>, plan: P): Promise<ResultType<P>[]> {\n type Row = ResultType<P>;\n return collectAsync<Row>(runtime.execute<Row>(plan));\n}\n\n/**\n * Drains a plan execution, consuming all results without collecting them.\n * Useful for testing side effects without memory overhead.\n */\nexport async function drainPlanExecution(\n runtime: ReturnType<typeof createRuntime>,\n plan: SqlExecutionPlan | SqlQueryPlan<unknown>,\n): Promise<void> {\n return drainAsyncIterable(runtime.execute(plan));\n}\n\n/**\n * Executes a SQL statement on a database client.\n */\nexport async function executeStatement(client: Client, statement: SqlStatement): Promise<void> {\n if (statement.params.length > 0) {\n await client.query(statement.sql, [...statement.params]);\n return;\n }\n\n await client.query(statement.sql);\n}\n\n/**\n * Sets up database schema and data, then writes the contract marker.\n * This helper DRYs up the common pattern of database setup in tests.\n */\nexport async function setupTestDatabase(\n client: Client,\n contract: Contract<SqlStorage>,\n setupFn: (client: Client) => Promise<void>,\n): Promise<void> {\n await client.query('drop schema if exists prisma_contract cascade');\n await client.query('create schema if not exists public');\n\n await setupFn(client);\n\n await executeStatement(client, ensureSchemaStatement);\n await executeStatement(client, ensureTableStatement);\n const write = writeContractMarker({\n storageHash: contract.storage.storageHash,\n profileHash: contract.profileHash,\n contractJson: contract,\n canonicalVersion: 1,\n });\n await 
executeStatement(client, write.insert);\n}\n\n/**\n * Writes a contract marker to the database.\n * This helper DRYs up the common pattern of writing contract markers in tests.\n */\nexport async function writeTestContractMarker(\n client: Client,\n contract: Contract<SqlStorage>,\n): Promise<void> {\n const write = writeContractMarker({\n storageHash: contract.storage.storageHash,\n profileHash: contract.profileHash,\n contractJson: contract,\n canonicalVersion: 1,\n });\n await executeStatement(client, write.insert);\n}\n\n/**\n * Creates a test adapter descriptor from a raw adapter.\n * Wraps the adapter in an SqlRuntimeAdapterDescriptor with static contributions\n * derived from the adapter's codec registry.\n */\nexport function createTestAdapterDescriptor(\n adapter: Adapter<SelectAst, Contract<SqlStorage>, LoweredStatement>,\n): SqlRuntimeAdapterDescriptor<'postgres'> {\n const codecRegistry = adapter.profile.codecs();\n return {\n kind: 'adapter' as const,\n id: 'test-adapter',\n version: '0.0.1',\n familyId: 'sql' as const,\n targetId: 'postgres' as const,\n codecs: () => codecRegistry,\n parameterizedCodecs: () => [],\n mutationDefaultGenerators: createTestMutationDefaultGenerators,\n create(_stack): SqlRuntimeAdapterInstance<'postgres'> {\n return Object.assign({ familyId: 'sql' as const, targetId: 'postgres' as const }, adapter);\n },\n };\n}\n\n/**\n * Creates a test target descriptor with empty static contributions.\n */\nexport function createTestTargetDescriptor(): SqlRuntimeTargetDescriptor<'postgres'> {\n return {\n kind: 'target' as const,\n id: 'postgres',\n version: '0.0.1',\n familyId: 'sql' as const,\n targetId: 'postgres' as const,\n codecs: () => createCodecRegistry(),\n parameterizedCodecs: () => [],\n create() {\n return { familyId: 'sql' as const, targetId: 'postgres' as const };\n },\n };\n}\n\n/**\n * Creates an ExecutionContext for testing.\n * This helper DRYs up the common pattern of context creation in tests.\n *\n * Accepts a raw 
adapter and optional extension descriptors, wrapping the\n * adapter in a descriptor internally for descriptor-first context creation.\n */\nexport function createTestContext<TContract extends Contract<SqlStorage>>(\n contract: TContract,\n adapter: Adapter<SelectAst, Contract<SqlStorage>, LoweredStatement>,\n options?: {\n extensionPacks?: ReadonlyArray<SqlRuntimeExtensionDescriptor<'postgres'>>;\n },\n): ExecutionContext<TContract> {\n return createExecutionContext({\n contract,\n stack: {\n target: createTestTargetDescriptor(),\n adapter: createTestAdapterDescriptor(adapter),\n extensionPacks: options?.extensionPacks ?? [],\n },\n });\n}\n\nexport function createTestStackInstance(options?: {\n extensionPacks?: ReadonlyArray<SqlRuntimeExtensionDescriptor<'postgres'>>;\n driver?: RuntimeDriverDescriptor<\n 'sql',\n 'postgres',\n unknown,\n SqlRuntimeDriverInstance<'postgres'>\n >;\n}) {\n const stack = createSqlExecutionStack({\n target: createTestTargetDescriptor(),\n adapter: createTestAdapterDescriptor(createStubAdapter()),\n driver: options?.driver,\n extensionPacks: options?.extensionPacks ?? 
[],\n });\n\n return instantiateExecutionStack(stack);\n}\n\n/**\n * Creates a stub adapter for testing.\n * This helper DRYs up the common pattern of adapter creation in tests.\n *\n * The stub adapter includes simple codecs for common test types (pg/int4@1, pg/text@1, pg/timestamptz@1)\n * to enable type inference in tests without requiring the postgres adapter package.\n */\nexport function createStubAdapter(): Adapter<SelectAst, Contract<SqlStorage>, LoweredStatement> {\n const codecRegistry = createCodecRegistry();\n\n // Register stub codecs for common test types\n // These match the codec IDs used in test contracts (pg/int4@1, pg/text@1, pg/timestamptz@1)\n // but don't require importing from the postgres adapter package\n codecRegistry.register(\n codec({\n typeId: 'pg/int4@1',\n targetTypes: ['int4'],\n encode: (value: number) => value,\n decode: (wire: number) => wire,\n }),\n );\n\n codecRegistry.register(\n codec({\n typeId: 'pg/text@1',\n targetTypes: ['text'],\n encode: (value: string) => value,\n decode: (wire: string) => wire,\n }),\n );\n\n codecRegistry.register(\n codec({\n typeId: 'pg/timestamptz@1',\n targetTypes: ['timestamptz'],\n encode: (value: Date) => value,\n decode: (wire: Date) => wire,\n // Date is not assignable to JsonValue, so the JSON round-trip pair\n // must be supplied explicitly.\n encodeJson: (value: Date) => value.toISOString(),\n decodeJson: (json) => {\n if (typeof json !== 'string') throw new Error('expected ISO date string');\n return new Date(json);\n },\n }),\n );\n\n return {\n profile: {\n id: 'stub-profile',\n target: 'postgres',\n capabilities: {},\n codecs() {\n return codecRegistry;\n },\n readMarkerStatement() {\n return {\n sql: 'select core_hash, profile_hash, contract_json, canonical_version, updated_at, app_tag, meta, invariants from prisma_contract.marker where id = $1',\n params: [1],\n };\n },\n parseMarkerRow: parseContractMarkerRow,\n },\n lower(ast: SelectAst, ctx: { contract: Contract<SqlStorage>; 
params?: readonly unknown[] }) {\n const sqlText = JSON.stringify(ast);\n return Object.freeze({ sql: sqlText, params: ctx.params ? [...ctx.params] : [] });\n },\n };\n}\n\nexport function createTestContract(\n contract: Partial<Omit<Contract<SqlStorage>, 'profileHash' | 'storage'>> & {\n storageHash?: string;\n profileHash?: string;\n storage?: Omit<SqlStorage, 'storageHash'>;\n },\n): Contract<SqlStorage> {\n const { execution, ...rest } = contract;\n const storageHashValue = coreHash(rest['storageHash'] ?? 'sha256:testcore');\n\n return {\n target: rest['target'] ?? 'postgres',\n targetFamily: rest['targetFamily'] ?? 'sql',\n storage: rest['storage']\n ? { ...rest['storage'], storageHash: storageHashValue }\n : { storageHash: storageHashValue, tables: {} },\n models: rest['models'] ?? {},\n roots: rest['roots'] ?? {},\n capabilities: rest['capabilities'] ?? {},\n extensionPacks: rest['extensionPacks'] ?? {},\n meta: rest['meta'] ?? {},\n ...(execution ? { execution } : {}),\n profileHash: profileHash(rest['profileHash'] ?? 
'sha256:testprofile'),\n };\n}\n\n// Re-export generic utilities from test-utils\nexport {\n collectAsync,\n createDevDatabase,\n type DevDatabase,\n teardownTestDatabase,\n withClient,\n} from '@prisma-next/test-utils';\n"],"mappings":";;;;;;;;;AAkCA,SAAS,sCAAsC;AAC7C,QAAO,oBAAoB,KAAK,QAAQ;EACtC;EACA,WAAW,WAAqC,WAAW,SAAS;GAAE;GAAI;GAAQ,GAAG,EAAE,IAAI,CAAC;EAC7F,EAAE;;;;;;;AAQL,eAAsB,sBAEpB,SAA2C,MAAmC;AAE9E,QAAOA,eAAkB,QAAQ,QAAa,KAAK,CAAC;;;;;;AAOtD,eAAsB,mBACpB,SACA,MACe;AACf,QAAO,mBAAmB,QAAQ,QAAQ,KAAK,CAAC;;;;;AAMlD,eAAsB,iBAAiB,QAAgB,WAAwC;AAC7F,KAAI,UAAU,OAAO,SAAS,GAAG;AAC/B,QAAM,OAAO,MAAM,UAAU,KAAK,CAAC,GAAG,UAAU,OAAO,CAAC;AACxD;;AAGF,OAAM,OAAO,MAAM,UAAU,IAAI;;;;;;AAOnC,eAAsB,kBACpB,QACA,UACA,SACe;AACf,OAAM,OAAO,MAAM,gDAAgD;AACnE,OAAM,OAAO,MAAM,qCAAqC;AAExD,OAAM,QAAQ,OAAO;AAErB,OAAM,iBAAiB,QAAQ,sBAAsB;AACrD,OAAM,iBAAiB,QAAQ,qBAAqB;AAOpD,OAAM,iBAAiB,QANT,oBAAoB;EAChC,aAAa,SAAS,QAAQ;EAC9B,aAAa,SAAS;EACtB,cAAc;EACd,kBAAkB;EACnB,CAAC,CACmC,OAAO;;;;;;AAO9C,eAAsB,wBACpB,QACA,UACe;AAOf,OAAM,iBAAiB,QANT,oBAAoB;EAChC,aAAa,SAAS,QAAQ;EAC9B,aAAa,SAAS;EACtB,cAAc;EACd,kBAAkB;EACnB,CAAC,CACmC,OAAO;;;;;;;AAQ9C,SAAgB,4BACd,SACyC;CACzC,MAAM,gBAAgB,QAAQ,QAAQ,QAAQ;AAC9C,QAAO;EACL,MAAM;EACN,IAAI;EACJ,SAAS;EACT,UAAU;EACV,UAAU;EACV,cAAc;EACd,2BAA2B,EAAE;EAC7B,2BAA2B;EAC3B,OAAO,QAA+C;AACpD,UAAO,OAAO,OAAO;IAAE,UAAU;IAAgB,UAAU;IAAqB,EAAE,QAAQ;;EAE7F;;;;;AAMH,SAAgB,6BAAqE;AACnF,QAAO;EACL,MAAM;EACN,IAAI;EACJ,SAAS;EACT,UAAU;EACV,UAAU;EACV,cAAc,qBAAqB;EACnC,2BAA2B,EAAE;EAC7B,SAAS;AACP,UAAO;IAAE,UAAU;IAAgB,UAAU;IAAqB;;EAErE;;;;;;;;;AAUH,SAAgB,kBACd,UACA,SACA,SAG6B;AAC7B,QAAO,uBAAuB;EAC5B;EACA,OAAO;GACL,QAAQ,4BAA4B;GACpC,SAAS,4BAA4B,QAAQ;GAC7C,gBAAgB,SAAS,kBAAkB,EAAE;GAC9C;EACF,CAAC;;AAGJ,SAAgB,wBAAwB,SAQrC;AAQD,QAAO,0BAPO,wBAAwB;EACpC,QAAQ,4BAA4B;EACpC,SAAS,4BAA4B,mBAAmB,CAAC;EACzD,QAAQ,SAAS;EACjB,gBAAgB,SAAS,kBAAkB,EAAE;EAC9C,CAAC,CAEqC;;;;;;;;;AAUzC,SAAgB,oBAAgF;CAC9F,MAAM,gBAAgB,qBAAqB;AAK3C,eAAc,SACZ,MAAM;EACJ,QAAQ;EACR,aAAa,CAAC,OAAO;EACrB,SAAS,UAAkB;EAC3B,SAAS,SAAiB;EAC3B,CAAC,CACH;AA
ED,eAAc,SACZ,MAAM;EACJ,QAAQ;EACR,aAAa,CAAC,OAAO;EACrB,SAAS,UAAkB;EAC3B,SAAS,SAAiB;EAC3B,CAAC,CACH;AAED,eAAc,SACZ,MAAM;EACJ,QAAQ;EACR,aAAa,CAAC,cAAc;EAC5B,SAAS,UAAgB;EACzB,SAAS,SAAe;EAGxB,aAAa,UAAgB,MAAM,aAAa;EAChD,aAAa,SAAS;AACpB,OAAI,OAAO,SAAS,SAAU,OAAM,IAAI,MAAM,2BAA2B;AACzE,UAAO,IAAI,KAAK,KAAK;;EAExB,CAAC,CACH;AAED,QAAO;EACL,SAAS;GACP,IAAI;GACJ,QAAQ;GACR,cAAc,EAAE;GAChB,SAAS;AACP,WAAO;;GAET,sBAAsB;AACpB,WAAO;KACL,KAAK;KACL,QAAQ,CAAC,EAAE;KACZ;;GAEH,gBAAgB;GACjB;EACD,MAAM,KAAgB,KAAsE;GAC1F,MAAM,UAAU,KAAK,UAAU,IAAI;AACnC,UAAO,OAAO,OAAO;IAAE,KAAK;IAAS,QAAQ,IAAI,SAAS,CAAC,GAAG,IAAI,OAAO,GAAG,EAAE;IAAE,CAAC;;EAEpF;;AAGH,SAAgB,mBACd,UAKsB;CACtB,MAAM,EAAE,WAAW,GAAG,SAAS;CAC/B,MAAM,mBAAmB,SAAS,KAAK,kBAAkB,kBAAkB;AAE3E,QAAO;EACL,QAAQ,KAAK,aAAa;EAC1B,cAAc,KAAK,mBAAmB;EACtC,SAAS,KAAK,aACV;GAAE,GAAG,KAAK;GAAY,aAAa;GAAkB,GACrD;GAAE,aAAa;GAAkB,QAAQ,EAAE;GAAE;EACjD,QAAQ,KAAK,aAAa,EAAE;EAC5B,OAAO,KAAK,YAAY,EAAE;EAC1B,cAAc,KAAK,mBAAmB,EAAE;EACxC,gBAAgB,KAAK,qBAAqB,EAAE;EAC5C,MAAM,KAAK,WAAW,EAAE;EACxB,GAAI,YAAY,EAAE,WAAW,GAAG,EAAE;EAClC,aAAa,YAAY,KAAK,kBAAkB,qBAAqB;EACtE"}
package/package.json CHANGED
@@ -1,20 +1,19 @@
1
1
  {
2
2
  "name": "@prisma-next/sql-runtime",
3
- "version": "0.5.0-dev.4",
3
+ "version": "0.5.0-dev.40",
4
4
  "type": "module",
5
5
  "sideEffects": false,
6
6
  "description": "SQL runtime implementation for Prisma Next",
7
7
  "dependencies": {
8
8
  "arktype": "^2.1.26",
9
- "@prisma-next/contract": "0.5.0-dev.4",
10
- "@prisma-next/utils": "0.5.0-dev.4",
11
- "@prisma-next/ids": "0.5.0-dev.4",
12
- "@prisma-next/operations": "0.5.0-dev.4",
13
- "@prisma-next/runtime-executor": "0.5.0-dev.4",
14
- "@prisma-next/framework-components": "0.5.0-dev.4",
15
- "@prisma-next/sql-contract": "0.5.0-dev.4",
16
- "@prisma-next/sql-operations": "0.5.0-dev.4",
17
- "@prisma-next/sql-relational-core": "0.5.0-dev.4"
9
+ "@prisma-next/contract": "0.5.0-dev.40",
10
+ "@prisma-next/ids": "0.5.0-dev.40",
11
+ "@prisma-next/utils": "0.5.0-dev.40",
12
+ "@prisma-next/operations": "0.5.0-dev.40",
13
+ "@prisma-next/sql-contract": "0.5.0-dev.40",
14
+ "@prisma-next/framework-components": "0.5.0-dev.40",
15
+ "@prisma-next/sql-operations": "0.5.0-dev.40",
16
+ "@prisma-next/sql-relational-core": "0.5.0-dev.40"
18
17
  },
19
18
  "devDependencies": {
20
19
  "@types/pg": "8.16.0",
@@ -28,8 +27,7 @@
28
27
  },
29
28
  "files": [
30
29
  "dist",
31
- "src",
32
- "test"
30
+ "src"
33
31
  ],
34
32
  "exports": {
35
33
  ".": "./dist/index.mjs",
@@ -1,221 +1,342 @@
1
- import type { ExecutionPlan } from '@prisma-next/contract/types';
2
- import type { Codec, CodecRegistry } from '@prisma-next/sql-relational-core/ast';
1
+ import {
2
+ checkAborted,
3
+ isRuntimeError,
4
+ raceAgainstAbort,
5
+ runtimeError,
6
+ } from '@prisma-next/framework-components/runtime';
7
+ import type {
8
+ AnyQueryAst,
9
+ Codec,
10
+ CodecRegistry,
11
+ ContractCodecRegistry,
12
+ ProjectionItem,
13
+ SqlCodecCallContext,
14
+ } from '@prisma-next/sql-relational-core/ast';
15
+ import type { SqlExecutionPlan } from '@prisma-next/sql-relational-core/plan';
3
16
  import type { JsonSchemaValidatorRegistry } from '@prisma-next/sql-relational-core/query-lane-context';
4
17
  import { validateJsonValue } from './json-schema-validation';
5
18
 
6
- function resolveRowCodec(
7
- alias: string,
8
- plan: ExecutionPlan,
19
+ type ColumnRef = { table: string; column: string };
20
+
21
+ interface DecodeContext {
22
+ readonly aliases: ReadonlyArray<string> | undefined;
23
+ readonly codecs: ReadonlyMap<string, Codec>;
24
+ readonly columnRefs: ReadonlyMap<string, ColumnRef>;
25
+ readonly includeAliases: ReadonlySet<string>;
26
+ }
27
+
28
+ const WIRE_PREVIEW_LIMIT = 100;
29
+ const EMPTY_INCLUDE_ALIASES: ReadonlySet<string> = new Set<string>();
30
+
31
+ function isAstBackedPlan(
32
+ plan: SqlExecutionPlan,
33
+ ): plan is SqlExecutionPlan & { readonly ast: AnyQueryAst } {
34
+ return plan.ast !== undefined;
35
+ }
36
+
37
+ function projectionListFromAst(ast: AnyQueryAst): ReadonlyArray<ProjectionItem> | undefined {
38
+ if (ast.kind === 'select') {
39
+ return ast.projection;
40
+ }
41
+ return ast.returning;
42
+ }
43
+
44
+ /**
45
+ * Resolve the per-cell codec for a projection item.
46
+ *
47
+ * Phase B: when a `(table, column)` ref is available for the projection,
48
+ * prefer `contractCodecs.forColumn(table, column)` — that's the per-
49
+ * instance resolved codec materialized from the codec descriptor's
50
+ * factory at context-construction time (carries any per-instance state
51
+ * such as the compiled JSON-Schema validator). When the projection
52
+ * resolves to a non-`column-ref` expression (computed projections, raw
53
+ * SQL aliases) but still carries a codec id (ADR 205 stamps every
54
+ * `ProjectionItem` with the producer's codec id), fall back to the
55
+ * codec-id-keyed `forCodecId(codecId)` lookup, which itself falls back
56
+ * to the legacy `CodecRegistry` for codec ids the contract walk
57
+ * couldn't resolve.
58
+ *
59
+ * Codec-registry-unification spec § AC-4.
60
+ */
61
+ function resolveProjectionCodec(
62
+ item: ProjectionItem,
63
+ registry: CodecRegistry,
64
+ contractCodecs: ContractCodecRegistry | undefined,
65
+ ): Codec | undefined {
66
+ if (item.expr.kind === 'column-ref' && contractCodecs) {
67
+ const byColumn = contractCodecs.forColumn(item.expr.table, item.expr.column);
68
+ if (byColumn) return byColumn;
69
+ }
70
+ if (item.codecId) {
71
+ const fromContract = contractCodecs?.forCodecId(item.codecId);
72
+ if (fromContract) return fromContract;
73
+ return registry.get(item.codecId);
74
+ }
75
+ return undefined;
76
+ }
77
+
78
+ function buildDecodeContext(
79
+ plan: SqlExecutionPlan,
9
80
  registry: CodecRegistry,
10
- ): Codec | null {
11
- const planCodecId = plan.meta.annotations?.codecs?.[alias] as string | undefined;
12
- if (planCodecId) {
13
- const codec = registry.get(planCodecId);
81
+ contractCodecs: ContractCodecRegistry | undefined,
82
+ ): DecodeContext {
83
+ if (!isAstBackedPlan(plan)) {
84
+ return {
85
+ aliases: undefined,
86
+ codecs: new Map(),
87
+ columnRefs: new Map(),
88
+ includeAliases: EMPTY_INCLUDE_ALIASES,
89
+ };
90
+ }
91
+
92
+ const projection = projectionListFromAst(plan.ast);
93
+ if (!projection) {
94
+ return {
95
+ aliases: undefined,
96
+ codecs: new Map(),
97
+ columnRefs: new Map(),
98
+ includeAliases: EMPTY_INCLUDE_ALIASES,
99
+ };
100
+ }
101
+
102
+ const aliases: string[] = [];
103
+ const codecs = new Map<string, Codec>();
104
+ const columnRefs = new Map<string, ColumnRef>();
105
+ const includeAliases = new Set<string>();
106
+
107
+ for (const item of projection) {
108
+ aliases.push(item.alias);
109
+
110
+ const codec = resolveProjectionCodec(item, registry, contractCodecs);
14
111
  if (codec) {
15
- return codec;
112
+ codecs.set(item.alias, codec);
16
113
  }
17
- }
18
114
 
19
- if (plan.meta.projectionTypes) {
20
- const typeId = plan.meta.projectionTypes[alias];
21
- if (typeId) {
22
- const codec = registry.get(typeId);
23
- if (codec) {
24
- return codec;
25
- }
115
+ if (item.expr.kind === 'column-ref') {
116
+ columnRefs.set(item.alias, { table: item.expr.table, column: item.expr.column });
117
+ } else if (item.expr.kind === 'subquery' || item.expr.kind === 'json-array-agg') {
118
+ includeAliases.add(item.alias);
26
119
  }
27
120
  }
28
121
 
29
- return null;
122
+ return { aliases, codecs, columnRefs, includeAliases };
30
123
  }
31
124
 
32
- type ColumnRefIndex = Map<string, { table: string; column: string }>;
125
+ function previewWireValue(wireValue: unknown): string {
126
+ if (typeof wireValue === 'string') {
127
+ return wireValue.length > WIRE_PREVIEW_LIMIT
128
+ ? `${wireValue.substring(0, WIRE_PREVIEW_LIMIT)}...`
129
+ : wireValue;
130
+ }
131
+ return String(wireValue).substring(0, WIRE_PREVIEW_LIMIT);
132
+ }
33
133
 
34
- /**
35
- * Builds a lookup index from column name → { table, column } ref.
36
- * Called once per decodeRow invocation to avoid O(aliases × refs) linear scans.
37
- */
38
- function buildColumnRefIndex(plan: ExecutionPlan): ColumnRefIndex | null {
39
- const columns = plan.meta.refs?.columns;
40
- if (!columns) return null;
134
+ function isJsonSchemaValidationError(error: unknown): boolean {
135
+ return isRuntimeError(error) && error.code === 'RUNTIME.JSON_SCHEMA_VALIDATION_FAILED';
136
+ }
137
+
138
+ function wrapDecodeFailure(
139
+ error: unknown,
140
+ alias: string,
141
+ ref: ColumnRef | undefined,
142
+ codec: Codec,
143
+ wireValue: unknown,
144
+ ): never {
145
+ const message = error instanceof Error ? error.message : String(error);
146
+ const target = ref ? `${ref.table}.${ref.column}` : alias;
147
+ const wrapped = runtimeError(
148
+ 'RUNTIME.DECODE_FAILED',
149
+ `Failed to decode column ${target} with codec '${codec.id}': ${message}`,
150
+ {
151
+ ...(ref ? { table: ref.table, column: ref.column } : { alias }),
152
+ codec: codec.id,
153
+ wirePreview: previewWireValue(wireValue),
154
+ },
155
+ );
156
+ wrapped.cause = error;
157
+ throw wrapped;
158
+ }
159
+
160
+ function wrapIncludeAggregateFailure(error: unknown, alias: string, wireValue: unknown): never {
161
+ const message = error instanceof Error ? error.message : String(error);
162
+ const wrapped = runtimeError(
163
+ 'RUNTIME.DECODE_FAILED',
164
+ `Failed to parse JSON array for include alias '${alias}': ${message}`,
165
+ {
166
+ alias,
167
+ wirePreview: previewWireValue(wireValue),
168
+ },
169
+ );
170
+ wrapped.cause = error;
171
+ throw wrapped;
172
+ }
173
+
174
+ function decodeIncludeAggregate(alias: string, wireValue: unknown): unknown {
175
+ if (wireValue === null || wireValue === undefined) {
176
+ return [];
177
+ }
178
+
179
+ try {
180
+ let parsed: unknown;
181
+ if (typeof wireValue === 'string') {
182
+ parsed = JSON.parse(wireValue);
183
+ } else if (Array.isArray(wireValue)) {
184
+ parsed = wireValue;
185
+ } else {
186
+ parsed = JSON.parse(String(wireValue));
187
+ }
41
188
 
42
- const index: ColumnRefIndex = new Map();
43
- for (const ref of columns) {
44
- index.set(ref.column, ref);
189
+ if (!Array.isArray(parsed)) {
190
+ throw new Error(`Expected array for include alias '${alias}', got ${typeof parsed}`);
191
+ }
192
+
193
+ return parsed;
194
+ } catch (error) {
195
+ wrapIncludeAggregateFailure(error, alias, wireValue);
45
196
  }
46
- return index;
47
197
  }
48
198
 
49
- function parseProjectionRef(value: string): { table: string; column: string } | null {
50
- if (value.startsWith('include:') || value.startsWith('operation:')) {
199
+ /**
200
+ * Decodes a single field. Single-armed: every cell takes the same path —
201
+ * `codec.decode → await → JSON-Schema validate → return plain value` — so
202
+ * sync- and async-authored codecs are indistinguishable to callers.
203
+ *
204
+ * The row-level `rowCtx` is repackaged into a per-cell
205
+ * `SqlCodecCallContext` whose `column = { table, name }` is a structural
206
+ * projection of the per-cell `ColumnRef = { table, column }` resolved from
207
+ * the AST-backed `DecodeContext` (the same resolution `wrapDecodeFailure`
208
+ * uses for envelope construction — one resolution per cell, two consumers).
209
+ * Cells the runtime cannot resolve to a single underlying column (aggregate
210
+ * aliases, computed projections without a simple ref) get
211
+ * `column: undefined`, matching the spec contract that the runtime never
212
+ * silently defaults this field.
213
+ */
214
+ async function decodeField(
215
+ alias: string,
216
+ wireValue: unknown,
217
+ decodeCtx: DecodeContext,
218
+ jsonValidators: JsonSchemaValidatorRegistry | undefined,
219
+ rowCtx: SqlCodecCallContext,
220
+ ): Promise<unknown> {
221
+ if (wireValue === null) {
51
222
  return null;
52
223
  }
53
224
 
54
- const separatorIndex = value.indexOf('.');
55
- if (separatorIndex <= 0 || separatorIndex === value.length - 1) {
56
- return null;
225
+ const codec = decodeCtx.codecs.get(alias);
226
+ if (!codec) {
227
+ return wireValue;
57
228
  }
58
229
 
59
- return {
60
- table: value.slice(0, separatorIndex),
61
- column: value.slice(separatorIndex + 1),
62
- };
63
- }
230
+ const ref = decodeCtx.columnRefs.get(alias);
64
231
 
65
- function resolveColumnRefForAlias(
66
- alias: string,
67
- projection: ExecutionPlan['meta']['projection'],
68
- fallbackColumnRefIndex: ColumnRefIndex | null,
69
- ): { table: string; column: string } | undefined {
70
- if (projection && !Array.isArray(projection)) {
71
- const mappedRef = (projection as Record<string, string>)[alias];
72
- if (typeof mappedRef !== 'string') {
73
- return undefined;
232
+ // Per-cell ctx: the cell-level `column` is a `SqlColumnRef = { table, name }`
233
+ // projection of the resolved `ColumnRef = { table, column }` (same
234
+ // resolution `wrapDecodeFailure` uses below — no double work). Cells the
235
+ // runtime cannot resolve (aggregate aliases, computed projections without
236
+ // a simple ref) drop the `column` field entirely explicitly cleared so
237
+ // a previously-populated `rowCtx.column` cannot leak through to unrelated
238
+ // cells. Destructuring (rather than `column: undefined`) is required
239
+ // because `SqlCodecCallContext.column` is declared `column?: SqlColumnRef`
240
+ // under `exactOptionalPropertyTypes`.
241
+ let cellCtx: SqlCodecCallContext;
242
+ if (ref) {
243
+ cellCtx = { ...rowCtx, column: { table: ref.table, name: ref.column } };
244
+ } else {
245
+ const { column: _drop, ...rowCtxWithoutColumn } = rowCtx;
246
+ cellCtx = rowCtxWithoutColumn;
247
+ }
248
+
249
+ let decoded: unknown;
250
+ try {
251
+ decoded = await codec.decode(wireValue, cellCtx);
252
+ } catch (error) {
253
+ wrapDecodeFailure(error, alias, ref, codec, wireValue);
254
+ }
255
+
256
+ if (jsonValidators && ref) {
257
+ try {
258
+ validateJsonValue(jsonValidators, ref.table, ref.column, decoded, 'decode', codec.id);
259
+ } catch (error) {
260
+ if (isJsonSchemaValidationError(error)) throw error;
261
+ wrapDecodeFailure(error, alias, ref, codec, wireValue);
74
262
  }
75
- return parseProjectionRef(mappedRef) ?? undefined;
76
263
  }
77
264
 
78
- return fallbackColumnRefIndex?.get(alias);
265
+ return decoded;
79
266
  }
80
267
 
81
- export function decodeRow(
268
+ /**
269
+ * Decodes a row by dispatching all per-cell codec calls concurrently via
270
+ * `Promise.all`. Each cell follows the single-armed `decodeField` path.
271
+ * Failures are wrapped in `RUNTIME.DECODE_FAILED` with `{ table, column,
272
+ * codec }` (or `{ alias, codec }` when no column ref is resolvable) and the
273
+ * original error attached on `cause`.
274
+ *
275
+ * When `rowCtx.signal` is provided:
276
+ *
277
+ * - **Already-aborted at entry** short-circuits with `RUNTIME.ABORTED`
278
+ * (`{ phase: 'decode' }`) before any `codec.decode` call is made.
279
+ * - **Mid-flight aborts** race the per-cell `Promise.all` against the
280
+ * signal so the runtime returns promptly even when codec bodies ignore
281
+ * it. In-flight bodies that ignore the signal complete in the
282
+ * background (cooperative cancellation).
283
+ * - Existing `RUNTIME.DECODE_FAILED` envelopes from codec bodies pass
284
+ * through unchanged (no double wrap).
285
+ */
286
+ export async function decodeRow(
82
287
  row: Record<string, unknown>,
83
- plan: ExecutionPlan,
288
+ plan: SqlExecutionPlan,
84
289
  registry: CodecRegistry,
85
- jsonValidators?: JsonSchemaValidatorRegistry,
86
- ): Record<string, unknown> {
87
- const decoded: Record<string, unknown> = {};
88
- const projection = plan.meta.projection;
290
+ jsonValidators: JsonSchemaValidatorRegistry | undefined,
291
+ rowCtx: SqlCodecCallContext,
292
+ contractCodecs?: ContractCodecRegistry,
293
+ ): Promise<Record<string, unknown>> {
294
+ checkAborted(rowCtx, 'decode');
295
+ const signal = rowCtx.signal;
89
296
 
90
- // Fallback for plans that do not provide projection alias -> table.column mapping.
91
- const fallbackColumnRefIndex =
92
- jsonValidators && (!projection || Array.isArray(projection)) ? buildColumnRefIndex(plan) : null;
297
+ const decodeCtx = buildDecodeContext(plan, registry, contractCodecs);
93
298
 
94
- let aliases: readonly string[];
95
- if (projection && !Array.isArray(projection)) {
96
- aliases = Object.keys(projection);
97
- } else if (projection && Array.isArray(projection)) {
98
- aliases = projection;
99
- } else {
100
- aliases = Object.keys(row);
101
- }
102
-
103
- for (const alias of aliases) {
104
- const wireValue = row[alias];
105
-
106
- const projectionValue =
107
- projection && typeof projection === 'object' && !Array.isArray(projection)
108
- ? (projection as Record<string, string>)[alias]
109
- : undefined;
110
-
111
- if (typeof projectionValue === 'string' && projectionValue.startsWith('include:')) {
112
- if (wireValue === null || wireValue === undefined) {
113
- decoded[alias] = [];
114
- continue;
115
- }
299
+ const aliases = decodeCtx.aliases ?? Object.keys(row);
116
300
 
117
- try {
118
- let parsed: unknown;
119
- if (typeof wireValue === 'string') {
120
- parsed = JSON.parse(wireValue);
121
- } else if (Array.isArray(wireValue)) {
122
- parsed = wireValue;
123
- } else {
124
- parsed = JSON.parse(String(wireValue));
125
- }
126
-
127
- if (!Array.isArray(parsed)) {
128
- throw new Error(`Expected array for include alias '${alias}', got ${typeof parsed}`);
129
- }
130
-
131
- decoded[alias] = parsed;
132
- } catch (error) {
133
- const decodeError = new Error(
134
- `Failed to parse JSON array for include alias '${alias}': ${error instanceof Error ? error.message : String(error)}`,
135
- ) as Error & {
136
- code: string;
137
- category: string;
138
- severity: string;
139
- details?: Record<string, unknown>;
140
- };
141
- decodeError.code = 'RUNTIME.DECODE_FAILED';
142
- decodeError.category = 'RUNTIME';
143
- decodeError.severity = 'error';
144
- decodeError.details = {
301
+ if (decodeCtx.aliases !== undefined) {
302
+ for (const alias of decodeCtx.aliases) {
303
+ if (!Object.hasOwn(row, alias)) {
304
+ throw runtimeError('RUNTIME.DECODE_FAILED', `Row missing projection alias "${alias}"`, {
145
305
  alias,
146
- wirePreview:
147
- typeof wireValue === 'string' && wireValue.length > 100
148
- ? `${wireValue.substring(0, 100)}...`
149
- : String(wireValue).substring(0, 100),
150
- };
151
- throw decodeError;
306
+ expectedAliases: decodeCtx.aliases,
307
+ presentKeys: Object.keys(row),
308
+ });
152
309
  }
153
- continue;
154
310
  }
311
+ }
155
312
 
156
- if (wireValue === null || wireValue === undefined) {
157
- decoded[alias] = wireValue;
158
- continue;
159
- }
313
+ const tasks: Promise<unknown>[] = [];
314
+ const includeIndices: { index: number; alias: string; value: unknown }[] = [];
160
315
 
161
- const codec = resolveRowCodec(alias, plan, registry);
316
+ for (let i = 0; i < aliases.length; i++) {
317
+ const alias = aliases[i] as string;
318
+ const wireValue = row[alias];
162
319
 
163
- if (!codec) {
164
- decoded[alias] = wireValue;
320
+ if (decodeCtx.includeAliases.has(alias)) {
321
+ includeIndices.push({ index: i, alias, value: wireValue });
322
+ tasks.push(Promise.resolve(undefined));
165
323
  continue;
166
324
  }
167
325
 
168
- try {
169
- const decodedValue = codec.decode(wireValue);
170
-
171
- // Validate decoded JSON value against schema
172
- if (jsonValidators) {
173
- const ref = resolveColumnRefForAlias(alias, projection, fallbackColumnRefIndex);
174
- if (ref) {
175
- validateJsonValue(
176
- jsonValidators,
177
- ref.table,
178
- ref.column,
179
- decodedValue,
180
- 'decode',
181
- codec.id,
182
- );
183
- }
184
- }
326
+ tasks.push(decodeField(alias, wireValue, decodeCtx, jsonValidators, rowCtx));
327
+ }
185
328
 
186
- decoded[alias] = decodedValue;
187
- } catch (error) {
188
- // Re-throw JSON schema validation errors as-is
189
- if (
190
- error instanceof Error &&
191
- 'code' in error &&
192
- (error as Error & { code: string }).code === 'RUNTIME.JSON_SCHEMA_VALIDATION_FAILED'
193
- ) {
194
- throw error;
195
- }
329
+ const settled = await raceAgainstAbort(Promise.all(tasks), signal, 'decode');
196
330
 
197
- const decodeError = new Error(
198
- `Failed to decode row alias '${alias}' with codec '${codec.id}': ${error instanceof Error ? error.message : String(error)}`,
199
- ) as Error & {
200
- code: string;
201
- category: string;
202
- severity: string;
203
- details?: Record<string, unknown>;
204
- };
205
- decodeError.code = 'RUNTIME.DECODE_FAILED';
206
- decodeError.category = 'RUNTIME';
207
- decodeError.severity = 'error';
208
- decodeError.details = {
209
- alias,
210
- codec: codec.id,
211
- wirePreview:
212
- typeof wireValue === 'string' && wireValue.length > 100
213
- ? `${wireValue.substring(0, 100)}...`
214
- : String(wireValue).substring(0, 100),
215
- };
216
- throw decodeError;
217
- }
331
+ // Include aggregates are decoded synchronously after concurrent codec
332
+ // dispatch settles, so any decode failures upstream propagate first.
333
+ for (const entry of includeIndices) {
334
+ settled[entry.index] = decodeIncludeAggregate(entry.alias, entry.value);
218
335
  }
219
336
 
337
+ const decoded: Record<string, unknown> = {};
338
+ for (let i = 0; i < aliases.length; i++) {
339
+ decoded[aliases[i] as string] = settled[i];
340
+ }
220
341
  return decoded;
221
342
  }