@prisma-next/emitter 0.3.0-dev.5 → 0.3.0-dev.50

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -6,7 +6,7 @@ Contract emission engine that transforms authored data models into canonical JSO
6
6
 
7
7
  The emitter is the core of Prisma Next's contract-first architecture. It takes authored data models (from PSL or TypeScript builders) and produces two deterministic artifacts:
8
8
 
9
- 1. **`contract.json`** — Canonical JSON representation of the data contract with embedded `coreHash` and `profileHash`. Callers may add `_generated` metadata field to indicate it's a generated artifact (excluded from canonicalization/hashing).
9
+ 1. **`contract.json`** — Canonical JSON representation of the data contract with embedded `storageHash` and optional `executionHash`/`profileHash`. Callers may add `_generated` metadata field to indicate it's a generated artifact (excluded from canonicalization/hashing).
10
10
  2. **`contract.d.ts`** — TypeScript type definitions used by query builders and tooling (types-only, no runtime code). Includes warning header comments generated by target family hooks to indicate it's a generated file.
11
11
 
12
12
  The emitter is target-family-agnostic and uses a pluggable hook system (`TargetFamilyHook`) to handle family-specific validation and type generation. This keeps the core thin while allowing SQL, Document, and other target families to extend emission behavior.
@@ -19,7 +19,7 @@ Provide a deterministic, verifiable representation of the application's data con
19
19
 
20
20
  - **Parse**: Accept contract IR (Intermediate Representation) from authoring surfaces
21
21
  - **Validate**: Core structure validation plus family-specific type and structure validation via hooks
22
- - **Canonicalize**: Compute `coreHash` (schema meaning) and `profileHash` (capabilities/pins) from canonical JSON
22
+ - **Canonicalize**: Compute `storageHash` (schema meaning), `executionHash` (execution defaults), and `profileHash` (capabilities/pins) from canonical JSON
23
23
  - **Emit**: Generate `contract.json` and `contract.d.ts` with family-specific type generation
24
24
  - **Descriptor-Agnostic**: The emitter is completely agnostic to how descriptors are produced. It receives pre-assembled `OperationRegistry`, `codecTypeImports`, `operationTypeImports`, and `extensionIds` from the CLI or family helpers—no pack manifest parsing happens inside the emitter.
25
25
 
@@ -90,7 +90,8 @@ flowchart TD
90
90
  - **Note**: `TargetFamilyHook`, `ValidationContext`, and `TypesImportSpec` types are defined in `@prisma-next/contract/types` (shared plane) and re-exported from this package for backward compatibility.
91
91
 
92
92
  ### Hashing (`hashing.ts`)
93
- - `computeCoreHash`: SHA-256 of schema structure (models, storage, relations)
93
+ - `computeStorageHash`: SHA-256 of schema structure (models, storage, relations)
94
+ - `computeExecutionHash`: SHA-256 of execution defaults
94
95
  - `computeProfileHash`: SHA-256 of capabilities and adapter pins
95
96
 
96
97
  ### Canonicalization (`canonicalization.ts`)
@@ -156,7 +157,8 @@ const result = await emit(ir, {
156
157
 
157
158
  // result.contractJson: string (JSON) - canonical JSON without _generated metadata
158
159
  // result.contractDts: string (TypeScript definitions) - includes warning header
159
- // result.coreHash: string
160
+ // result.storageHash: string
161
+ // result.executionHash?: string
160
162
  // result.profileHash?: string
161
163
  ```
162
164
 
@@ -0,0 +1,3 @@
1
+ import { EmitOptions, EmitResult, emit } from "@prisma-next/core-control-plane/emission";
2
+ import { TargetFamilyHook, TypesImportSpec, ValidationContext } from "@prisma-next/contract/types";
3
+ export { type EmitOptions, type EmitResult, type TargetFamilyHook, type TypesImportSpec, type ValidationContext, emit };
@@ -0,0 +1,3 @@
1
+ import { emit } from "@prisma-next/core-control-plane/emission";
2
+
3
+ export { emit };
@@ -1,4 +1,7 @@
1
- import { type ContractIR } from '@prisma-next/contract/ir';
1
+ import { ContractIR } from "@prisma-next/contract/ir";
2
+
3
+ //#region test/utils.d.ts
4
+
2
5
  /**
3
6
  * Factory function for creating ContractIR objects in tests.
4
7
  * Provides sensible defaults and allows overriding specific fields.
@@ -7,8 +10,10 @@ import { type ContractIR } from '@prisma-next/contract/ir';
7
10
  * If a field is explicitly set to `undefined` in overrides, it will be omitted
8
11
  * from the result (useful for testing validation of missing fields).
9
12
  */
10
- export declare function createContractIR(overrides?: Partial<ContractIR> & {
11
- coreHash?: string;
12
- profileHash?: string;
13
+ declare function createContractIR(overrides?: Partial<ContractIR> & {
14
+ storageHash?: string;
15
+ profileHash?: string;
13
16
  }): ContractIR;
14
- //# sourceMappingURL=utils.d.ts.map
17
+ //#endregion
18
+ export { createContractIR };
19
+ //# sourceMappingURL=utils.d.mts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"utils.d.mts","names":[],"sources":["../../test/utils.ts"],"sourcesContent":[],"mappings":";;;;;;AAUA;;;;;;iBAAgB,gBAAA,aACH,QAAQ;;;IAClB"}
@@ -0,0 +1,59 @@
1
+ import { irHeader, irMeta } from "@prisma-next/contract/ir";
2
+
3
+ //#region test/utils.ts
4
+ /**
5
+ * Factory function for creating ContractIR objects in tests.
6
+ * Provides sensible defaults and allows overriding specific fields.
7
+ * Uses the emitter factories internally for consistency.
8
+ *
9
+ * If a field is explicitly set to `undefined` in overrides, it will be omitted
10
+ * from the result (useful for testing validation of missing fields).
11
+ */
12
+ function createContractIR(overrides = {}) {
13
+ const hasTarget = "target" in overrides;
14
+ const hasTargetFamily = "targetFamily" in overrides;
15
+ const hasStorageHash = "storageHash" in overrides;
16
+ const hasSchemaVersion = "schemaVersion" in overrides;
17
+ const hasModels = "models" in overrides;
18
+ const hasRelations = "relations" in overrides;
19
+ const hasStorage = "storage" in overrides;
20
+ const hasCapabilities = "capabilities" in overrides;
21
+ const hasExtensionPacks = "extensionPacks" in overrides;
22
+ const hasMeta = "meta" in overrides;
23
+ const hasSources = "sources" in overrides;
24
+ const headerOpts = {};
25
+ if (hasTarget && overrides.target !== void 0) headerOpts.target = overrides.target;
26
+ else if (!hasTarget) headerOpts.target = "postgres";
27
+ if (hasTargetFamily && overrides.targetFamily !== void 0) headerOpts.targetFamily = overrides.targetFamily;
28
+ else if (!hasTargetFamily) headerOpts.targetFamily = "sql";
29
+ if (hasStorageHash && overrides.storageHash !== void 0) headerOpts.storageHash = overrides.storageHash;
30
+ else if (!hasStorageHash) headerOpts.storageHash = "sha256:test";
31
+ if (overrides.profileHash !== void 0) headerOpts.profileHash = overrides.profileHash;
32
+ const header = irHeader(headerOpts);
33
+ const metaOpts = {};
34
+ if (hasCapabilities && overrides.capabilities !== void 0) metaOpts.capabilities = overrides.capabilities;
35
+ else if (!hasCapabilities) metaOpts.capabilities = {};
36
+ if (hasExtensionPacks && overrides.extensionPacks !== void 0) metaOpts.extensionPacks = overrides.extensionPacks;
37
+ else if (!hasExtensionPacks) metaOpts.extensionPacks = {};
38
+ if (hasMeta && overrides.meta !== void 0) metaOpts.meta = overrides.meta;
39
+ else if (!hasMeta) metaOpts.meta = {};
40
+ if (hasSources && overrides.sources !== void 0) metaOpts.sources = overrides.sources;
41
+ else if (!hasSources) metaOpts.sources = {};
42
+ const meta = irMeta(Object.keys(metaOpts).length > 0 ? metaOpts : void 0);
43
+ return {
44
+ schemaVersion: hasSchemaVersion && overrides.schemaVersion !== void 0 ? overrides.schemaVersion : hasSchemaVersion && overrides.schemaVersion === void 0 ? void 0 : header.schemaVersion,
45
+ target: header.target,
46
+ targetFamily: header.targetFamily,
47
+ capabilities: hasCapabilities && overrides.capabilities === void 0 ? void 0 : !hasCapabilities || overrides.capabilities !== void 0 ? meta.capabilities : {},
48
+ extensionPacks: hasExtensionPacks && overrides.extensionPacks === void 0 ? void 0 : !hasExtensionPacks || overrides.extensionPacks !== void 0 ? meta.extensionPacks : {},
49
+ meta: hasMeta && overrides.meta === void 0 ? void 0 : !hasMeta || overrides.meta !== void 0 ? meta.meta : {},
50
+ sources: hasSources && overrides.sources === void 0 ? void 0 : !hasSources || overrides.sources !== void 0 ? meta.sources : {},
51
+ storage: hasStorage && overrides.storage === void 0 ? void 0 : hasStorage && overrides.storage !== void 0 ? overrides.storage : !hasStorage ? { tables: {} } : {},
52
+ models: hasModels && overrides.models === void 0 ? void 0 : hasModels && overrides.models !== void 0 ? overrides.models : !hasModels ? {} : {},
53
+ relations: hasRelations && overrides.relations === void 0 ? void 0 : hasRelations && overrides.relations !== void 0 ? overrides.relations : !hasRelations ? {} : {}
54
+ };
55
+ }
56
+
57
+ //#endregion
58
+ export { createContractIR };
59
+ //# sourceMappingURL=utils.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"utils.mjs","names":["headerOpts: {\n target?: string;\n targetFamily?: string;\n storageHash?: string;\n profileHash?: string;\n }","metaOpts: {\n capabilities?: Record<string, Record<string, boolean>>;\n extensionPacks?: Record<string, unknown>;\n meta?: Record<string, unknown>;\n sources?: Record<string, unknown>;\n }"],"sources":["../../test/utils.ts"],"sourcesContent":["import { type ContractIR, irHeader, irMeta } from '@prisma-next/contract/ir';\n\n/**\n * Factory function for creating ContractIR objects in tests.\n * Provides sensible defaults and allows overriding specific fields.\n * Uses the emitter factories internally for consistency.\n *\n * If a field is explicitly set to `undefined` in overrides, it will be omitted\n * from the result (useful for testing validation of missing fields).\n */\nexport function createContractIR(\n overrides: Partial<ContractIR> & { storageHash?: string; profileHash?: string } = {},\n): ContractIR {\n // Check if fields are explicitly undefined (not just missing)\n const hasTarget = 'target' in overrides;\n const hasTargetFamily = 'targetFamily' in overrides;\n const hasStorageHash = 'storageHash' in overrides;\n const hasSchemaVersion = 'schemaVersion' in overrides;\n const hasModels = 'models' in overrides;\n const hasRelations = 'relations' in overrides;\n const hasStorage = 'storage' in overrides;\n const hasCapabilities = 'capabilities' in overrides;\n const hasExtensionPacks = 'extensionPacks' in overrides;\n const hasMeta = 'meta' in overrides;\n const hasSources = 'sources' in overrides;\n\n // Build header, omitting fields that are explicitly undefined\n const headerOpts: {\n target?: string;\n targetFamily?: string;\n storageHash?: string;\n profileHash?: string;\n } = {};\n\n if (hasTarget && overrides.target !== undefined) {\n headerOpts.target = overrides.target;\n } else if (!hasTarget) {\n headerOpts.target = 'postgres';\n }\n\n if (hasTargetFamily && overrides.targetFamily !== 
undefined) {\n headerOpts.targetFamily = overrides.targetFamily;\n } else if (!hasTargetFamily) {\n headerOpts.targetFamily = 'sql';\n }\n\n if (hasStorageHash && overrides.storageHash !== undefined) {\n headerOpts.storageHash = overrides.storageHash;\n } else if (!hasStorageHash) {\n headerOpts.storageHash = 'sha256:test';\n }\n\n // profileHash is not part of ContractIR, but we can accept it for header creation\n if (overrides.profileHash !== undefined) {\n headerOpts.profileHash = overrides.profileHash;\n }\n\n const header = irHeader(\n headerOpts as {\n target: string;\n targetFamily: string;\n storageHash: string;\n profileHash?: string;\n },\n );\n\n // Build meta, handling explicitly undefined fields\n // If a field is explicitly undefined, we'll omit it from the result later\n const metaOpts: {\n capabilities?: Record<string, Record<string, boolean>>;\n extensionPacks?: Record<string, unknown>;\n meta?: Record<string, unknown>;\n sources?: Record<string, unknown>;\n } = {};\n\n if (hasCapabilities && overrides.capabilities !== undefined) {\n metaOpts.capabilities = overrides.capabilities;\n } else if (!hasCapabilities) {\n metaOpts.capabilities = {};\n }\n\n if (hasExtensionPacks && overrides.extensionPacks !== undefined) {\n metaOpts.extensionPacks = overrides.extensionPacks;\n } else if (!hasExtensionPacks) {\n metaOpts.extensionPacks = {};\n }\n\n if (hasMeta && overrides.meta !== undefined) {\n metaOpts.meta = overrides.meta;\n } else if (!hasMeta) {\n metaOpts.meta = {};\n }\n\n if (hasSources && overrides.sources !== undefined) {\n metaOpts.sources = overrides.sources;\n } else if (!hasSources) {\n metaOpts.sources = {};\n }\n\n const meta = irMeta(Object.keys(metaOpts).length > 0 ? 
metaOpts : undefined);\n\n // Build result by constructing the object directly (ContractIR doesn't include storageHash/profileHash)\n // When fields are explicitly undefined, include them as undefined (tests use type assertions to bypass TS)\n const result = {\n schemaVersion:\n hasSchemaVersion && overrides.schemaVersion !== undefined\n ? overrides.schemaVersion\n : hasSchemaVersion && overrides.schemaVersion === undefined\n ? (undefined as unknown as string)\n : header.schemaVersion,\n target: header.target,\n targetFamily: header.targetFamily,\n // Only include meta fields if they're not explicitly undefined\n capabilities:\n hasCapabilities && overrides.capabilities === undefined\n ? (undefined as unknown as Record<string, Record<string, boolean>>)\n : !hasCapabilities || overrides.capabilities !== undefined\n ? meta.capabilities\n : ({} as Record<string, Record<string, boolean>>),\n extensionPacks:\n hasExtensionPacks && overrides.extensionPacks === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : !hasExtensionPacks || overrides.extensionPacks !== undefined\n ? meta.extensionPacks\n : ({} as Record<string, unknown>),\n meta:\n hasMeta && overrides.meta === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : !hasMeta || overrides.meta !== undefined\n ? meta.meta\n : ({} as Record<string, unknown>),\n sources:\n hasSources && overrides.sources === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : !hasSources || overrides.sources !== undefined\n ? meta.sources\n : ({} as Record<string, unknown>),\n // Only include family sections if they're not explicitly undefined\n storage:\n hasStorage && overrides.storage === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : hasStorage && overrides.storage !== undefined\n ? (overrides.storage as Record<string, unknown>)\n : !hasStorage\n ? 
({ tables: {} } as Record<string, unknown>)\n : ({} as Record<string, unknown>),\n models:\n hasModels && overrides.models === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : hasModels && overrides.models !== undefined\n ? (overrides.models as Record<string, unknown>)\n : !hasModels\n ? {}\n : ({} as Record<string, unknown>),\n relations:\n hasRelations && overrides.relations === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : hasRelations && overrides.relations !== undefined\n ? (overrides.relations as Record<string, unknown>)\n : !hasRelations\n ? {}\n : ({} as Record<string, unknown>),\n } as ContractIR;\n\n return result;\n}\n"],"mappings":";;;;;;;;;;;AAUA,SAAgB,iBACd,YAAkF,EAAE,EACxE;CAEZ,MAAM,YAAY,YAAY;CAC9B,MAAM,kBAAkB,kBAAkB;CAC1C,MAAM,iBAAiB,iBAAiB;CACxC,MAAM,mBAAmB,mBAAmB;CAC5C,MAAM,YAAY,YAAY;CAC9B,MAAM,eAAe,eAAe;CACpC,MAAM,aAAa,aAAa;CAChC,MAAM,kBAAkB,kBAAkB;CAC1C,MAAM,oBAAoB,oBAAoB;CAC9C,MAAM,UAAU,UAAU;CAC1B,MAAM,aAAa,aAAa;CAGhC,MAAMA,aAKF,EAAE;AAEN,KAAI,aAAa,UAAU,WAAW,OACpC,YAAW,SAAS,UAAU;UACrB,CAAC,UACV,YAAW,SAAS;AAGtB,KAAI,mBAAmB,UAAU,iBAAiB,OAChD,YAAW,eAAe,UAAU;UAC3B,CAAC,gBACV,YAAW,eAAe;AAG5B,KAAI,kBAAkB,UAAU,gBAAgB,OAC9C,YAAW,cAAc,UAAU;UAC1B,CAAC,eACV,YAAW,cAAc;AAI3B,KAAI,UAAU,gBAAgB,OAC5B,YAAW,cAAc,UAAU;CAGrC,MAAM,SAAS,SACb,WAMD;CAID,MAAMC,WAKF,EAAE;AAEN,KAAI,mBAAmB,UAAU,iBAAiB,OAChD,UAAS,eAAe,UAAU;UACzB,CAAC,gBACV,UAAS,eAAe,EAAE;AAG5B,KAAI,qBAAqB,UAAU,mBAAmB,OACpD,UAAS,iBAAiB,UAAU;UAC3B,CAAC,kBACV,UAAS,iBAAiB,EAAE;AAG9B,KAAI,WAAW,UAAU,SAAS,OAChC,UAAS,OAAO,UAAU;UACjB,CAAC,QACV,UAAS,OAAO,EAAE;AAGpB,KAAI,cAAc,UAAU,YAAY,OACtC,UAAS,UAAU,UAAU;UACpB,CAAC,WACV,UAAS,UAAU,EAAE;CAGvB,MAAM,OAAO,OAAO,OAAO,KAAK,SAAS,CAAC,SAAS,IAAI,WAAW,OAAU;AAiE5E,QA7De;EACb,eACE,oBAAoB,UAAU,kBAAkB,SAC5C,UAAU,gBACV,oBAAoB,UAAU,kBAAkB,SAC7C,SACD,OAAO;EACf,QAAQ,OAAO;EACf,cAAc,OAAO;EAErB,cACE,mBAAmB,UAAU,iBAAiB,SACzC,SACD,CAAC,mBAAmB,UAAU,iBAAiB,SAC7C,KAAK,eACJ,EAAE;EACX,gBACE,qBAAqB,UAAU,mBAAmB,SAC7C,SACD,CAAC,qBAAqB,UAAU,mBAAmB,SACjD,KAAK,
iBACJ,EAAE;EACX,MACE,WAAW,UAAU,SAAS,SACzB,SACD,CAAC,WAAW,UAAU,SAAS,SAC7B,KAAK,OACJ,EAAE;EACX,SACE,cAAc,UAAU,YAAY,SAC/B,SACD,CAAC,cAAc,UAAU,YAAY,SACnC,KAAK,UACJ,EAAE;EAEX,SACE,cAAc,UAAU,YAAY,SAC/B,SACD,cAAc,UAAU,YAAY,SACjC,UAAU,UACX,CAAC,aACE,EAAE,QAAQ,EAAE,EAAE,GACd,EAAE;EACb,QACE,aAAa,UAAU,WAAW,SAC7B,SACD,aAAa,UAAU,WAAW,SAC/B,UAAU,SACX,CAAC,YACC,EAAE,GACD,EAAE;EACb,WACE,gBAAgB,UAAU,cAAc,SACnC,SACD,gBAAgB,UAAU,cAAc,SACrC,UAAU,YACX,CAAC,eACC,EAAE,GACD,EAAE;EACd"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@prisma-next/emitter",
3
- "version": "0.3.0-dev.5",
3
+ "version": "0.3.0-dev.50",
4
4
  "type": "module",
5
5
  "sideEffects": false,
6
6
  "files": [
@@ -10,36 +10,42 @@
10
10
  ],
11
11
  "dependencies": {
12
12
  "arktype": "^2.0.0",
13
- "@prisma-next/contract": "0.3.0-dev.5",
14
- "@prisma-next/core-control-plane": "0.3.0-dev.5"
13
+ "@prisma-next/contract": "0.3.0-dev.50",
14
+ "@prisma-next/core-control-plane": "0.3.0-dev.50"
15
15
  },
16
16
  "devDependencies": {
17
17
  "@types/node": "24.10.4",
18
- "@vitest/coverage-v8": "4.0.16",
19
- "tsup": "8.5.1",
18
+ "tsdown": "0.18.4",
20
19
  "typescript": "5.9.3",
21
- "vitest": "4.0.16",
22
- "@prisma-next/operations": "0.3.0-dev.5",
23
- "@prisma-next/test-utils": "0.0.1"
20
+ "vitest": "4.0.17",
21
+ "@prisma-next/operations": "0.3.0-dev.50",
22
+ "@prisma-next/test-utils": "0.0.1",
23
+ "@prisma-next/tsconfig": "0.0.0",
24
+ "@prisma-next/tsdown": "0.0.0"
24
25
  },
25
26
  "exports": {
26
27
  ".": {
27
- "types": "./dist/src/exports/index.d.ts",
28
- "import": "./dist/exports/index.js"
28
+ "types": "./dist/exports/index.d.mts",
29
+ "import": "./dist/exports/index.mjs"
29
30
  },
30
31
  "./test/utils": {
31
- "types": "./dist/test/utils.d.ts",
32
- "import": "./dist/test/utils.js"
32
+ "types": "./dist/test/utils.d.mts",
33
+ "import": "./dist/test/utils.mjs"
33
34
  }
34
35
  },
36
+ "repository": {
37
+ "type": "git",
38
+ "url": "https://github.com/prisma/prisma-next.git",
39
+ "directory": "packages/1-framework/3-tooling/emitter"
40
+ },
35
41
  "scripts": {
36
- "build": "tsup --config tsup.config.ts && tsc --project tsconfig.build.json",
42
+ "build": "tsdown",
37
43
  "test": "vitest run",
38
44
  "test:coverage": "vitest run --coverage",
39
45
  "typecheck": "tsc --project tsconfig.json --noEmit",
40
- "lint": "biome check . --config-path ../../../../biome.json --error-on-warnings",
41
- "lint:fix": "biome check --write . --config-path ../../../../biome.json",
42
- "lint:fix:unsafe": "biome check --write --unsafe . --config-path ../../../../biome.json",
43
- "clean": "node ../../../../scripts/clean.mjs"
46
+ "lint": "biome check . --error-on-warnings",
47
+ "lint:fix": "biome check --write .",
48
+ "lint:fix:unsafe": "biome check --write --unsafe .",
49
+ "clean": "rm -rf dist dist-tsc dist-tsc-prod coverage .tmp-output"
44
50
  }
45
51
  }
@@ -6,7 +6,7 @@ describe('canonicalization', () => {
6
6
  it('orders top-level sections correctly', () => {
7
7
  const ir = createContractIR({
8
8
  capabilities: { postgres: { jsonAgg: true } },
9
- meta: { source: 'test' },
9
+ meta: { emitterVersion: 'test' },
10
10
  });
11
11
 
12
12
  const result = canonicalizeContract(ir);
@@ -17,6 +17,7 @@ describe('canonicalization', () => {
17
17
  const targetFamilyIndex = keys.indexOf('targetFamily');
18
18
  const targetIndex = keys.indexOf('target');
19
19
  const modelsIndex = keys.indexOf('models');
20
+ const relationsIndex = keys.indexOf('relations');
20
21
  const storageIndex = keys.indexOf('storage');
21
22
  const capabilitiesIndex = keys.indexOf('capabilities');
22
23
  const metaIndex = keys.indexOf('meta');
@@ -24,6 +25,8 @@ describe('canonicalization', () => {
24
25
  expect(schemaVersionIndex).toBeLessThan(targetFamilyIndex);
25
26
  expect(targetFamilyIndex).toBeLessThan(targetIndex);
26
27
  expect(targetIndex).toBeLessThan(modelsIndex);
28
+ expect(modelsIndex).toBeLessThan(relationsIndex);
29
+ expect(relationsIndex).toBeLessThan(storageIndex);
27
30
  expect(modelsIndex).toBeLessThan(storageIndex);
28
31
  expect(storageIndex).toBeLessThan(capabilitiesIndex);
29
32
  expect(capabilitiesIndex).toBeLessThan(metaIndex);
@@ -55,6 +58,73 @@ describe('canonicalization', () => {
55
58
  expect(email['nullable']).toBe(true);
56
59
  });
57
60
 
61
+ it.each([
62
+ { nullable: false },
63
+ { nullable: undefined },
64
+ ])('omits nullable:false for columns with defaults (nullable=$nullable)', ({ nullable }) => {
65
+ const ir = createContractIR({
66
+ storage: {
67
+ tables: {
68
+ user: {
69
+ columns: {
70
+ created_at: {
71
+ codecId: 'pg/timestamptz@1',
72
+ nativeType: 'timestamptz',
73
+ nullable,
74
+ default: { kind: 'function', expression: 'now()' },
75
+ },
76
+ updated_at: {
77
+ codecId: 'pg/timestamptz@1',
78
+ nativeType: 'timestamptz',
79
+ nullable: true,
80
+ },
81
+ },
82
+ },
83
+ },
84
+ },
85
+ });
86
+
87
+ const result = canonicalizeContract(ir);
88
+ const parsed = JSON.parse(result) as Record<string, unknown>;
89
+ const storage = parsed['storage'] as Record<string, unknown>;
90
+ const tables = storage['tables'] as Record<string, unknown>;
91
+ const user = tables['user'] as Record<string, unknown>;
92
+ const columns = user['columns'] as Record<string, unknown>;
93
+ const createdAt = columns['created_at'] as Record<string, unknown>;
94
+ const updatedAt = columns['updated_at'] as Record<string, unknown>;
95
+ expect(createdAt['nullable']).toBeUndefined();
96
+ expect(updatedAt['nullable']).toBe(true);
97
+ });
98
+
99
+ it('preserves nullable:true for columns with defaults', () => {
100
+ const ir = createContractIR({
101
+ storage: {
102
+ tables: {
103
+ user: {
104
+ columns: {
105
+ bio: {
106
+ codecId: 'pg/text@1',
107
+ nativeType: 'text',
108
+ nullable: true,
109
+ default: { kind: 'literal', value: '' },
110
+ },
111
+ },
112
+ },
113
+ },
114
+ },
115
+ });
116
+
117
+ const result = canonicalizeContract(ir);
118
+ const parsed = JSON.parse(result) as Record<string, unknown>;
119
+ const storage = parsed['storage'] as Record<string, unknown>;
120
+ const tables = storage['tables'] as Record<string, unknown>;
121
+ const user = tables['user'] as Record<string, unknown>;
122
+ const columns = user['columns'] as Record<string, unknown>;
123
+ const bio = columns['bio'] as Record<string, unknown>;
124
+ expect(bio['nullable']).toBe(true);
125
+ expect(bio['default']).toEqual({ kind: 'literal', value: '' });
126
+ });
127
+
58
128
  it('omits empty arrays and objects except required ones', () => {
59
129
  const ir = createContractIR();
60
130
 
@@ -66,14 +136,12 @@ describe('canonicalization', () => {
66
136
  tables: expect.anything(),
67
137
  },
68
138
  });
69
- // Required top-level fields (capabilities, extensionPacks, meta, relations, sources) are preserved even when empty
70
- // because they are required by ContractIR and needed for round-trip tests
139
+ // Required top-level fields (capabilities, extensionPacks, meta, relations) are preserved even when empty.
71
140
  expect(parsed).toMatchObject({
72
141
  capabilities: expect.anything(),
73
142
  extensionPacks: expect.anything(),
74
143
  meta: expect.anything(),
75
144
  relations: expect.anything(),
76
- sources: expect.anything(),
77
145
  });
78
146
  });
79
147
 
@@ -117,7 +185,7 @@ describe('canonicalization', () => {
117
185
  expect(result1).not.toBe(result2);
118
186
  });
119
187
 
120
- it('sorts non-semantic arrays by canonical name', () => {
188
+ it('sorts indexes by canonical name', () => {
121
189
  const ir = createContractIR({
122
190
  storage: {
123
191
  tables: {
@@ -144,6 +212,60 @@ describe('canonicalization', () => {
144
212
  expect(indexNames).toEqual(['user_email_idx', 'user_name_idx']);
145
213
  });
146
214
 
215
+ it('sorts uniques by canonical name', () => {
216
+ const ir = createContractIR({
217
+ storage: {
218
+ tables: {
219
+ user: {
220
+ columns: {
221
+ id: { codecId: 'pg/int4@1', nativeType: 'int4', nullable: false },
222
+ email: { codecId: 'pg/text@1', nativeType: 'text', nullable: false },
223
+ username: { codecId: 'pg/text@1', nativeType: 'text', nullable: false },
224
+ },
225
+ uniques: [
226
+ { columns: ['username'], name: 'user_username_key' },
227
+ { columns: ['email'], name: 'user_email_key' },
228
+ ],
229
+ },
230
+ },
231
+ },
232
+ });
233
+
234
+ const result = canonicalizeContract(ir);
235
+ const parsed = JSON.parse(result) as Record<string, unknown>;
236
+ const storage = parsed['storage'] as Record<string, unknown>;
237
+ const tables = storage['tables'] as Record<string, unknown>;
238
+ const user = tables['user'] as Record<string, unknown>;
239
+ const uniques = user['uniques'] as Array<{ name: string }>;
240
+ const uniqueNames = uniques.map((u) => u.name);
241
+ expect(uniqueNames).toEqual(['user_email_key', 'user_username_key']);
242
+ });
243
+
244
+ it('preserves column order in composite unique constraints', () => {
245
+ const ir = createContractIR({
246
+ storage: {
247
+ tables: {
248
+ user: {
249
+ columns: {
250
+ id: { codecId: 'pg/int4@1', nativeType: 'int4', nullable: false },
251
+ first_name: { codecId: 'pg/text@1', nativeType: 'text', nullable: false },
252
+ last_name: { codecId: 'pg/text@1', nativeType: 'text', nullable: false },
253
+ },
254
+ uniques: [{ columns: ['last_name', 'first_name'], name: 'user_name_key' }],
255
+ },
256
+ },
257
+ },
258
+ });
259
+
260
+ const result = canonicalizeContract(ir);
261
+ const parsed = JSON.parse(result) as Record<string, unknown>;
262
+ const storage = parsed['storage'] as Record<string, unknown>;
263
+ const tables = storage['tables'] as Record<string, unknown>;
264
+ const user = tables['user'] as Record<string, unknown>;
265
+ const uniques = user['uniques'] as Array<{ columns: string[] }>;
266
+ expect(uniques[0]!.columns).toEqual(['last_name', 'first_name']);
267
+ });
268
+
147
269
  it('sorts nested object keys lexicographically', () => {
148
270
  const ir = createContractIR({
149
271
  storage: {
@@ -128,7 +128,7 @@ describe('emitter integration', () => {
128
128
 
129
129
  const result = await emit(ir, options, mockSqlHook);
130
130
 
131
- expect(result.coreHash).toMatch(/^sha256:[a-f0-9]{64}$/);
131
+ expect(result.storageHash).toMatch(/^sha256:[a-f0-9]{64}$/);
132
132
  expect(result.contractDts).toContain('export type Contract');
133
133
  expect(result.contractDts).toContain('CodecTypes');
134
134
  expect(result.contractDts).toContain('LaneCodecTypes');
@@ -138,7 +138,7 @@ describe('emitter integration', () => {
138
138
  schemaVersion: '1',
139
139
  targetFamily: 'sql',
140
140
  target: 'postgres',
141
- coreHash: result.coreHash,
141
+ storageHash: result.storageHash,
142
142
  storage: {
143
143
  tables: {
144
144
  user: expect.anything(),
@@ -197,78 +197,82 @@ describe('emitter integration', () => {
197
197
  const result1 = await emit(ir, options, mockSqlHook);
198
198
  const result2 = await emit(ir, options, mockSqlHook);
199
199
 
200
- expect(result1.coreHash).toBe(result2.coreHash);
200
+ expect(result1.storageHash).toBe(result2.storageHash);
201
201
  expect(result1.contractDts).toBe(result2.contractDts);
202
202
  expect(result1.contractJson).toBe(result2.contractJson);
203
203
  });
204
204
 
205
- it('round-trip: IR → JSON → parse JSON → compare', async () => {
206
- const ir = createContractIR({
207
- models: {
208
- User: {
209
- storage: { table: 'user' },
210
- fields: {
211
- id: { column: 'id' },
212
- email: { column: 'email' },
205
+ it(
206
+ 'round-trip: IR → JSON → parse JSON → compare',
207
+ async () => {
208
+ const ir = createContractIR({
209
+ models: {
210
+ User: {
211
+ storage: { table: 'user' },
212
+ fields: {
213
+ id: { column: 'id' },
214
+ email: { column: 'email' },
215
+ },
216
+ relations: {},
213
217
  },
214
- relations: {},
215
218
  },
216
- },
217
- storage: {
218
- tables: {
219
- user: {
220
- columns: {
221
- id: { codecId: 'pg/int4@1', nativeType: 'int4', nullable: false },
222
- email: { codecId: 'pg/text@1', nativeType: 'text', nullable: false },
219
+ storage: {
220
+ tables: {
221
+ user: {
222
+ columns: {
223
+ id: { codecId: 'pg/int4@1', nativeType: 'int4', nullable: false },
224
+ email: { codecId: 'pg/text@1', nativeType: 'text', nullable: false },
225
+ },
226
+ primaryKey: { columns: ['id'] },
227
+ uniques: [],
228
+ indexes: [],
229
+ foreignKeys: [],
223
230
  },
224
- primaryKey: { columns: ['id'] },
225
- uniques: [],
226
- indexes: [],
227
- foreignKeys: [],
228
231
  },
229
232
  },
230
- },
231
- extensionPacks: {
232
- postgres: {
233
- version: '0.0.1',
233
+ extensionPacks: {
234
+ postgres: {
235
+ version: '0.0.1',
236
+ },
237
+ pg: {},
234
238
  },
235
- pg: {},
236
- },
237
- });
239
+ });
238
240
 
239
- // Create minimal test data (emitter tests don't load packs)
240
- const operationRegistry = createOperationRegistry();
241
- const codecTypeImports: TypesImportSpec[] = [];
242
- const operationTypeImports: TypesImportSpec[] = [];
243
- const extensionIds = ['postgres', 'pg'];
244
- const options: EmitOptions = {
245
- outputDir: '',
246
- operationRegistry,
247
- codecTypeImports,
248
- operationTypeImports,
249
- extensionIds,
250
- };
241
+ // Create minimal test data (emitter tests don't load packs)
242
+ const operationRegistry = createOperationRegistry();
243
+ const codecTypeImports: TypesImportSpec[] = [];
244
+ const operationTypeImports: TypesImportSpec[] = [];
245
+ const extensionIds = ['postgres', 'pg'];
246
+ const options: EmitOptions = {
247
+ outputDir: '',
248
+ operationRegistry,
249
+ codecTypeImports,
250
+ operationTypeImports,
251
+ extensionIds,
252
+ };
251
253
 
252
- const result1 = await emit(ir, options, mockSqlHook);
253
- const contractJson1 = JSON.parse(result1.contractJson) as Record<string, unknown>;
254
+ const result1 = await emit(ir, options, mockSqlHook);
255
+ const contractJson1 = JSON.parse(result1.contractJson) as Record<string, unknown>;
254
256
 
255
- const ir2 = createContractIR({
256
- schemaVersion: contractJson1['schemaVersion'] as string,
257
- targetFamily: contractJson1['targetFamily'] as string,
258
- target: contractJson1['target'] as string,
259
- models: contractJson1['models'] as Record<string, unknown>,
260
- relations: (contractJson1['relations'] as Record<string, unknown>) || {},
261
- storage: contractJson1['storage'] as Record<string, unknown>,
262
- extensionPacks: contractJson1['extensionPacks'] as Record<string, unknown>,
263
- capabilities:
264
- (contractJson1['capabilities'] as Record<string, Record<string, boolean>>) || {},
265
- meta: (contractJson1['meta'] as Record<string, unknown>) || {},
266
- sources: (contractJson1['sources'] as Record<string, unknown>) || {},
267
- });
257
+ const ir2 = createContractIR({
258
+ schemaVersion: contractJson1['schemaVersion'] as string,
259
+ targetFamily: contractJson1['targetFamily'] as string,
260
+ target: contractJson1['target'] as string,
261
+ models: contractJson1['models'] as Record<string, unknown>,
262
+ relations: (contractJson1['relations'] as Record<string, unknown>) || {},
263
+ storage: contractJson1['storage'] as Record<string, unknown>,
264
+ extensionPacks: contractJson1['extensionPacks'] as Record<string, unknown>,
265
+ capabilities:
266
+ (contractJson1['capabilities'] as Record<string, Record<string, boolean>>) || {},
267
+ meta: (contractJson1['meta'] as Record<string, unknown>) || {},
268
+ sources: (contractJson1['sources'] as Record<string, unknown>) || {},
269
+ });
268
270
 
269
- const result2 = await emit(ir2, options, mockSqlHook);
271
+ const result2 = await emit(ir2, options, mockSqlHook);
270
272
 
271
- expect(result1.contractJson).toBe(result2.contractJson);
272
- expect(result1.coreHash).toBe(result2.coreHash);
273
- });
273
+ expect(result1.contractJson).toBe(result2.contractJson);
274
+ expect(result1.storageHash).toBe(result2.storageHash);
275
+ },
276
+ timeouts.typeScriptCompilation,
277
+ );
274
278
  });
@@ -133,7 +133,7 @@ describe('emitter round-trip', () => {
133
133
  const result2 = await emit(ir2, options, mockSqlHook);
134
134
 
135
135
  expect(result1.contractJson).toBe(result2.contractJson);
136
- expect(result1.coreHash).toBe(result2.coreHash);
136
+ expect(result1.storageHash).toBe(result2.storageHash);
137
137
  },
138
138
  timeouts.typeScriptCompilation,
139
139
  );
@@ -230,7 +230,7 @@ describe('emitter round-trip', () => {
230
230
  const result2 = await emit(ir2, options, mockSqlHook);
231
231
 
232
232
  expect(result1.contractJson).toBe(result2.contractJson);
233
- expect(result1.coreHash).toBe(result2.coreHash);
233
+ expect(result1.storageHash).toBe(result2.storageHash);
234
234
  });
235
235
 
236
236
  it('round-trip with nullable fields', async () => {
@@ -289,7 +289,7 @@ describe('emitter round-trip', () => {
289
289
  const result2 = await emit(ir2, options, mockSqlHook);
290
290
 
291
291
  expect(result1.contractJson).toBe(result2.contractJson);
292
- expect(result1.coreHash).toBe(result2.coreHash);
292
+ expect(result1.storageHash).toBe(result2.storageHash);
293
293
 
294
294
  const parsed2 = JSON.parse(result2.contractJson) as Record<string, unknown>;
295
295
  const storage = parsed2['storage'] as Record<string, unknown>;
@@ -364,7 +364,7 @@ describe('emitter round-trip', () => {
364
364
  const result2 = await emit(ir2, options, mockSqlHook);
365
365
 
366
366
  expect(result1.contractJson).toBe(result2.contractJson);
367
- expect(result1.coreHash).toBe(result2.coreHash);
367
+ expect(result1.storageHash).toBe(result2.storageHash);
368
368
  expect(result1.profileHash).toBe(result2.profileHash);
369
369
  });
370
370
  });
@@ -1,8 +1,9 @@
1
1
  import type { ContractIR } from '@prisma-next/contract/ir';
2
2
  import type {
3
+ GenerateContractTypesOptions,
3
4
  TargetFamilyHook,
5
+ TypeRenderEntry,
4
6
  TypesImportSpec,
5
- ValidationContext,
6
7
  } from '@prisma-next/contract/types';
7
8
  import type { EmitOptions } from '@prisma-next/core-control-plane/emission';
8
9
  import { emit } from '@prisma-next/core-control-plane/emission';
@@ -13,7 +14,7 @@ import { createContractIR } from './utils';
13
14
 
14
15
  const mockSqlHook: TargetFamilyHook = {
15
16
  id: 'sql',
16
- validateTypes: (ir: ContractIR, _ctx: ValidationContext) => {
17
+ validateTypes: (ir: ContractIR) => {
17
18
  const storage = ir.storage as
18
19
  | { tables?: Record<string, { columns?: Record<string, { codecId?: string }> }> }
19
20
  | undefined;
@@ -45,11 +46,12 @@ const mockSqlHook: TargetFamilyHook = {
45
46
  throw new Error(`Expected targetFamily "sql", got "${ir.targetFamily}"`);
46
47
  }
47
48
  },
48
- generateContractTypes: (ir: ContractIR, _codecTypeImports, _operationTypeImports) => {
49
+ generateContractTypes: (ir: ContractIR, _codecTypeImports, _operationTypeImports, _hashes) => {
49
50
  // Access ir properties to satisfy lint rules, but we don't use them in the mock
50
51
  void ir;
51
52
  void _codecTypeImports;
52
53
  void _operationTypeImports;
54
+ void _hashes;
53
55
  return `// Generated contract types
54
56
  export type CodecTypes = Record<string, never>;
55
57
  export type LaneCodecTypes = CodecTypes;
@@ -109,7 +111,7 @@ describe('emitter', () => {
109
111
  };
110
112
 
111
113
  const result = await emit(ir, options, mockSqlHook);
112
- expect(result.coreHash).toMatch(/^sha256:[a-f0-9]{64}$/);
114
+ expect(result.storageHash).toMatch(/^sha256:[a-f0-9]{64}$/);
113
115
  expect(result.contractDts).toContain('export type Contract');
114
116
  expect(result.contractDts).toContain('CodecTypes');
115
117
 
@@ -570,10 +572,12 @@ describe('emitter', () => {
570
572
  await expect(emit(ir, options, mockSqlHook)).rejects.toThrow('ContractIR must have meta');
571
573
  });
572
574
 
573
- it('throws error when sources is missing', async () => {
575
+ it('omits sources from emitted contract artifact', async () => {
574
576
  const ir = createContractIR({
575
- sources: undefined as unknown as Record<string, unknown>,
576
- }) as ContractIR;
577
+ sources: {
578
+ schema: { sourceId: 'schema.prisma' },
579
+ },
580
+ });
577
581
 
578
582
  const operationRegistry = createOperationRegistry();
579
583
  const options: EmitOptions = {
@@ -584,24 +588,68 @@ describe('emitter', () => {
584
588
  extensionIds: [],
585
589
  };
586
590
 
587
- await expect(emit(ir, options, mockSqlHook)).rejects.toThrow('ContractIR must have sources');
591
+ const result = await emit(ir, options, mockSqlHook);
592
+ const contractJson = JSON.parse(result.contractJson) as Record<string, unknown>;
593
+ expect(contractJson).not.toHaveProperty('sources');
588
594
  });
589
595
 
590
- it('throws error when sources is not an object', async () => {
596
+ it('accepts meta keys when family validation allows them', async () => {
591
597
  const ir = createContractIR({
592
- sources: 'not-an-object' as unknown as Record<string, unknown>,
593
- }) as ContractIR;
598
+ meta: {
599
+ sourceId: 'schema.prisma',
600
+ schemaPath: '/tmp/schema.prisma',
601
+ source: 'psl',
602
+ },
603
+ });
594
604
 
595
- const operationRegistry = createOperationRegistry();
596
605
  const options: EmitOptions = {
597
606
  outputDir: '',
598
- operationRegistry,
607
+ operationRegistry: createOperationRegistry(),
608
+ codecTypeImports: [],
609
+ operationTypeImports: [],
610
+ extensionIds: [],
611
+ };
612
+
613
+ await expect(emit(ir, options, mockSqlHook)).resolves.toMatchObject({
614
+ contractJson: expect.any(String),
615
+ contractDts: expect.any(String),
616
+ });
617
+ });
618
+
619
+ it('accepts canonical section keys when family validation allows them', async () => {
620
+ const ir = createContractIR({
621
+ storage: {
622
+ tables: {
623
+ user: {
624
+ columns: {
625
+ id: {
626
+ codecId: 'pg/int4@1',
627
+ nativeType: 'int4',
628
+ nullable: false,
629
+ sourceId: 'schema.prisma',
630
+ },
631
+ },
632
+ primaryKey: { columns: ['id'] },
633
+ uniques: [],
634
+ indexes: [],
635
+ foreignKeys: [],
636
+ },
637
+ },
638
+ } as unknown as Record<string, unknown>,
639
+ });
640
+
641
+ const options: EmitOptions = {
642
+ outputDir: '',
643
+ operationRegistry: createOperationRegistry(),
599
644
  codecTypeImports: [],
600
645
  operationTypeImports: [],
601
646
  extensionIds: [],
602
647
  };
603
648
 
604
- await expect(emit(ir, options, mockSqlHook)).rejects.toThrow('ContractIR must have sources');
649
+ await expect(emit(ir, options, mockSqlHook)).resolves.toMatchObject({
650
+ contractJson: expect.any(String),
651
+ contractDts: expect.any(String),
652
+ });
605
653
  });
606
654
 
607
655
  it('emits contract even when extensionIds are not in contract.extensionPacks', async () => {
@@ -623,9 +671,10 @@ describe('emitter', () => {
623
671
  throw new Error(`Expected targetFamily "sql", got "${ir.targetFamily}"`);
624
672
  }
625
673
  },
626
- generateContractTypes: (_ir, _codecTypeImports, _operationTypeImports) => {
674
+ generateContractTypes: (_ir, _codecTypeImports, _operationTypeImports, _hashes) => {
627
675
  void _codecTypeImports;
628
676
  void _operationTypeImports;
677
+ void _hashes;
629
678
  return `// Generated contract types
630
679
  export type CodecTypes = Record<string, never>;
631
680
  export type LaneCodecTypes = CodecTypes;
@@ -647,4 +696,56 @@ export type Contract = unknown;
647
696
  expect(result.contractJson).toBeDefined();
648
697
  expect(result.contractDts).toBeDefined();
649
698
  });
699
+
700
+ it('passes parameterizedRenderers to generateContractTypes options', async () => {
701
+ const ir = createContractIR({
702
+ storage: {
703
+ tables: {},
704
+ },
705
+ });
706
+
707
+ let receivedOptions: GenerateContractTypesOptions | undefined;
708
+
709
+ const mockHookCapturingOptions: TargetFamilyHook = {
710
+ id: 'sql',
711
+ validateTypes: () => {},
712
+ validateStructure: () => {},
713
+ generateContractTypes: (_ir, _codecTypeImports, _operationTypeImports, _hashes, options) => {
714
+ receivedOptions = options;
715
+ return `// Generated contract types
716
+ export type CodecTypes = Record<string, never>;
717
+ export type LaneCodecTypes = CodecTypes;
718
+ export type Contract = unknown;
719
+ `;
720
+ },
721
+ };
722
+
723
+ const vectorRenderer: TypeRenderEntry = {
724
+ codecId: 'pg/vector@1',
725
+ render: (params) => `Vector<${params['length']}>`,
726
+ };
727
+
728
+ const parameterizedRenderers = new Map<string, TypeRenderEntry>();
729
+ parameterizedRenderers.set('pg/vector@1', vectorRenderer);
730
+
731
+ const options: EmitOptions = {
732
+ outputDir: '',
733
+ operationRegistry: createOperationRegistry(),
734
+ codecTypeImports: [],
735
+ operationTypeImports: [],
736
+ extensionIds: [],
737
+ parameterizedRenderers,
738
+ };
739
+
740
+ await emit(ir, options, mockHookCapturingOptions);
741
+
742
+ expect(receivedOptions).toBeDefined();
743
+ expect(receivedOptions?.parameterizedRenderers).toBeDefined();
744
+ expect(receivedOptions?.parameterizedRenderers?.size).toBe(1);
745
+
746
+ const entry = receivedOptions?.parameterizedRenderers?.get('pg/vector@1');
747
+ expect(entry).toBeDefined();
748
+ expect(entry?.codecId).toBe('pg/vector@1');
749
+ expect(entry?.render({ length: 1536 }, { codecTypesName: 'CodecTypes' })).toBe('Vector<1536>');
750
+ });
650
751
  });
@@ -7,13 +7,13 @@ describe('emitter factories', () => {
7
7
  const header = irHeader({
8
8
  target: 'postgres',
9
9
  targetFamily: 'sql',
10
- coreHash: 'sha256:abc123',
10
+ storageHash: 'sha256:abc123',
11
11
  });
12
12
  expect(header).toEqual({
13
13
  schemaVersion: '1',
14
14
  target: 'postgres',
15
15
  targetFamily: 'sql',
16
- coreHash: 'sha256:abc123',
16
+ storageHash: 'sha256:abc123',
17
17
  });
18
18
  });
19
19
 
@@ -21,14 +21,14 @@ describe('emitter factories', () => {
21
21
  const header = irHeader({
22
22
  target: 'postgres',
23
23
  targetFamily: 'sql',
24
- coreHash: 'sha256:abc123',
24
+ storageHash: 'sha256:abc123',
25
25
  profileHash: 'sha256:def456',
26
26
  });
27
27
  expect(header).toEqual({
28
28
  schemaVersion: '1',
29
29
  target: 'postgres',
30
30
  targetFamily: 'sql',
31
- coreHash: 'sha256:abc123',
31
+ storageHash: 'sha256:abc123',
32
32
  profileHash: 'sha256:def456',
33
33
  });
34
34
  });
@@ -37,7 +37,7 @@ describe('emitter factories', () => {
37
37
  const header = irHeader({
38
38
  target: 'mongodb',
39
39
  targetFamily: 'document',
40
- coreHash: 'sha256:xyz789',
40
+ storageHash: 'sha256:xyz789',
41
41
  });
42
42
  expect(header.targetFamily).toBe('document');
43
43
  expect(header.target).toBe('mongodb');
@@ -149,7 +149,7 @@ describe('emitter factories', () => {
149
149
  const header = irHeader({
150
150
  target: 'postgres',
151
151
  targetFamily: 'sql',
152
- coreHash: 'sha256:abc123',
152
+ storageHash: 'sha256:abc123',
153
153
  });
154
154
  const meta = irMeta({
155
155
  capabilities: {
@@ -171,7 +171,7 @@ describe('emitter factories', () => {
171
171
  expect(ir.schemaVersion).toBe('1');
172
172
  expect(ir.target).toBe('postgres');
173
173
  expect(ir.targetFamily).toBe('sql');
174
- // Note: coreHash is not part of ContractIR (it's computed by emitter)
174
+ // Note: storageHash is not part of ContractIR (it's computed by emitter)
175
175
  expect(ir.storage).toEqual(storage);
176
176
  expect(ir.models).toEqual(models);
177
177
  expect(ir.relations).toEqual(relations);
@@ -187,7 +187,7 @@ describe('emitter factories', () => {
187
187
  const header = irHeader({
188
188
  target: 'postgres',
189
189
  targetFamily: 'sql',
190
- coreHash: 'sha256:abc123',
190
+ storageHash: 'sha256:abc123',
191
191
  profileHash: 'sha256:def456',
192
192
  });
193
193
  const meta = irMeta({});
@@ -213,7 +213,7 @@ describe('emitter factories', () => {
213
213
  const header = irHeader({
214
214
  target: 'postgres',
215
215
  targetFamily: 'sql',
216
- coreHash: 'sha256:abc123',
216
+ storageHash: 'sha256:abc123',
217
217
  });
218
218
  const meta = irMeta({
219
219
  capabilities: {
@@ -251,7 +251,7 @@ describe('emitter factories', () => {
251
251
  const header = irHeader({
252
252
  target: 'mongodb',
253
253
  targetFamily: 'document',
254
- coreHash: 'sha256:xyz789',
254
+ storageHash: 'sha256:xyz789',
255
255
  });
256
256
  const meta = irMeta({});
257
257
  const storage = { document: { collections: {} } };
@@ -1,8 +1,8 @@
1
- import { computeCoreHash, computeProfileHash } from '@prisma-next/core-control-plane/emission';
1
+ import { computeProfileHash, computeStorageHash } from '@prisma-next/core-control-plane/emission';
2
2
  import { describe, expect, it } from 'vitest';
3
3
 
4
4
  describe('hashing', () => {
5
- it('computes core hash', () => {
5
+ it('computes storage hash', () => {
6
6
  const contract = {
7
7
  schemaVersion: '1',
8
8
  targetFamily: 'sql',
@@ -16,7 +16,7 @@ describe('hashing', () => {
16
16
  sources: {},
17
17
  };
18
18
 
19
- const hash = computeCoreHash(contract);
19
+ const hash = computeStorageHash(contract);
20
20
  expect(hash).toMatch(/^sha256:[a-f0-9]{64}$/);
21
21
  });
22
22
 
@@ -52,8 +52,8 @@ describe('hashing', () => {
52
52
  sources: {},
53
53
  };
54
54
 
55
- const hash1 = computeCoreHash(contract);
56
- const hash2 = computeCoreHash(contract);
55
+ const hash1 = computeStorageHash(contract);
56
+ const hash2 = computeStorageHash(contract);
57
57
  expect(hash1).toBe(hash2);
58
58
  });
59
59
  });
package/test/utils.ts CHANGED
@@ -9,12 +9,12 @@ import { type ContractIR, irHeader, irMeta } from '@prisma-next/contract/ir';
9
9
  * from the result (useful for testing validation of missing fields).
10
10
  */
11
11
  export function createContractIR(
12
- overrides: Partial<ContractIR> & { coreHash?: string; profileHash?: string } = {},
12
+ overrides: Partial<ContractIR> & { storageHash?: string; profileHash?: string } = {},
13
13
  ): ContractIR {
14
14
  // Check if fields are explicitly undefined (not just missing)
15
15
  const hasTarget = 'target' in overrides;
16
16
  const hasTargetFamily = 'targetFamily' in overrides;
17
- const hasCoreHash = 'coreHash' in overrides;
17
+ const hasStorageHash = 'storageHash' in overrides;
18
18
  const hasSchemaVersion = 'schemaVersion' in overrides;
19
19
  const hasModels = 'models' in overrides;
20
20
  const hasRelations = 'relations' in overrides;
@@ -28,7 +28,7 @@ export function createContractIR(
28
28
  const headerOpts: {
29
29
  target?: string;
30
30
  targetFamily?: string;
31
- coreHash?: string;
31
+ storageHash?: string;
32
32
  profileHash?: string;
33
33
  } = {};
34
34
 
@@ -44,10 +44,10 @@ export function createContractIR(
44
44
  headerOpts.targetFamily = 'sql';
45
45
  }
46
46
 
47
- if (hasCoreHash && overrides.coreHash !== undefined) {
48
- headerOpts.coreHash = overrides.coreHash;
49
- } else if (!hasCoreHash) {
50
- headerOpts.coreHash = 'sha256:test';
47
+ if (hasStorageHash && overrides.storageHash !== undefined) {
48
+ headerOpts.storageHash = overrides.storageHash;
49
+ } else if (!hasStorageHash) {
50
+ headerOpts.storageHash = 'sha256:test';
51
51
  }
52
52
 
53
53
  // profileHash is not part of ContractIR, but we can accept it for header creation
@@ -59,7 +59,7 @@ export function createContractIR(
59
59
  headerOpts as {
60
60
  target: string;
61
61
  targetFamily: string;
62
- coreHash: string;
62
+ storageHash: string;
63
63
  profileHash?: string;
64
64
  },
65
65
  );
@@ -99,7 +99,7 @@ export function createContractIR(
99
99
 
100
100
  const meta = irMeta(Object.keys(metaOpts).length > 0 ? metaOpts : undefined);
101
101
 
102
- // Build result by constructing the object directly (ContractIR doesn't include coreHash/profileHash)
102
+ // Build result by constructing the object directly (ContractIR doesn't include storageHash/profileHash)
103
103
  // When fields are explicitly undefined, include them as undefined (tests use type assertions to bypass TS)
104
104
  const result = {
105
105
  schemaVersion:
@@ -1,6 +0,0 @@
1
- // src/exports/index.ts
2
- import { emit } from "@prisma-next/core-control-plane/emission";
3
- export {
4
- emit
5
- };
6
- //# sourceMappingURL=index.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../../src/exports/index.ts"],"sourcesContent":["// Re-export types from @prisma-next/contract for backward compatibility\nexport type {\n TargetFamilyHook,\n TypesImportSpec,\n ValidationContext,\n} from '@prisma-next/contract/types';\nexport type { EmitOptions, EmitResult } from '@prisma-next/core-control-plane/emission';\n// Re-export emit function and types from core-control-plane\nexport { emit } from '@prisma-next/core-control-plane/emission';\n"],"mappings":";AAQA,SAAS,YAAY;","names":[]}
@@ -1,4 +0,0 @@
1
- export type { TargetFamilyHook, TypesImportSpec, ValidationContext, } from '@prisma-next/contract/types';
2
- export type { EmitOptions, EmitResult } from '@prisma-next/core-control-plane/emission';
3
- export { emit } from '@prisma-next/core-control-plane/emission';
4
- //# sourceMappingURL=index.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/exports/index.ts"],"names":[],"mappings":"AACA,YAAY,EACV,gBAAgB,EAChB,eAAe,EACf,iBAAiB,GAClB,MAAM,6BAA6B,CAAC;AACrC,YAAY,EAAE,WAAW,EAAE,UAAU,EAAE,MAAM,0CAA0C,CAAC;AAExF,OAAO,EAAE,IAAI,EAAE,MAAM,0CAA0C,CAAC"}
@@ -1,2 +0,0 @@
1
- export type { TargetFamilyHook, TypesImportSpec, ValidationContext, } from '@prisma-next/contract/types';
2
- //# sourceMappingURL=target-family.d.ts.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"target-family.d.ts","sourceRoot":"","sources":["../../src/target-family.ts"],"names":[],"mappings":"AAEA,YAAY,EACV,gBAAgB,EAChB,eAAe,EACf,iBAAiB,GAClB,MAAM,6BAA6B,CAAC"}
@@ -1 +0,0 @@
1
- {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../test/utils.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,UAAU,EAAoB,MAAM,0BAA0B,CAAC;AAE7E;;;;;;;GAOG;AACH,wBAAgB,gBAAgB,CAC9B,SAAS,GAAE,OAAO,CAAC,UAAU,CAAC,GAAG;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;IAAC,WAAW,CAAC,EAAE,MAAM,CAAA;CAAO,GAChF,UAAU,CAyJZ"}
@@ -1,78 +0,0 @@
1
- // test/utils.ts
2
- import { irHeader, irMeta } from "@prisma-next/contract/ir";
3
- function createContractIR(overrides = {}) {
4
- const hasTarget = "target" in overrides;
5
- const hasTargetFamily = "targetFamily" in overrides;
6
- const hasCoreHash = "coreHash" in overrides;
7
- const hasSchemaVersion = "schemaVersion" in overrides;
8
- const hasModels = "models" in overrides;
9
- const hasRelations = "relations" in overrides;
10
- const hasStorage = "storage" in overrides;
11
- const hasCapabilities = "capabilities" in overrides;
12
- const hasExtensionPacks = "extensionPacks" in overrides;
13
- const hasMeta = "meta" in overrides;
14
- const hasSources = "sources" in overrides;
15
- const headerOpts = {};
16
- if (hasTarget && overrides.target !== void 0) {
17
- headerOpts.target = overrides.target;
18
- } else if (!hasTarget) {
19
- headerOpts.target = "postgres";
20
- }
21
- if (hasTargetFamily && overrides.targetFamily !== void 0) {
22
- headerOpts.targetFamily = overrides.targetFamily;
23
- } else if (!hasTargetFamily) {
24
- headerOpts.targetFamily = "sql";
25
- }
26
- if (hasCoreHash && overrides.coreHash !== void 0) {
27
- headerOpts.coreHash = overrides.coreHash;
28
- } else if (!hasCoreHash) {
29
- headerOpts.coreHash = "sha256:test";
30
- }
31
- if (overrides.profileHash !== void 0) {
32
- headerOpts.profileHash = overrides.profileHash;
33
- }
34
- const header = irHeader(
35
- headerOpts
36
- );
37
- const metaOpts = {};
38
- if (hasCapabilities && overrides.capabilities !== void 0) {
39
- metaOpts.capabilities = overrides.capabilities;
40
- } else if (!hasCapabilities) {
41
- metaOpts.capabilities = {};
42
- }
43
- if (hasExtensionPacks && overrides.extensionPacks !== void 0) {
44
- metaOpts.extensionPacks = overrides.extensionPacks;
45
- } else if (!hasExtensionPacks) {
46
- metaOpts.extensionPacks = {};
47
- }
48
- if (hasMeta && overrides.meta !== void 0) {
49
- metaOpts.meta = overrides.meta;
50
- } else if (!hasMeta) {
51
- metaOpts.meta = {};
52
- }
53
- if (hasSources && overrides.sources !== void 0) {
54
- metaOpts.sources = overrides.sources;
55
- } else if (!hasSources) {
56
- metaOpts.sources = {};
57
- }
58
- const meta = irMeta(Object.keys(metaOpts).length > 0 ? metaOpts : void 0);
59
- const result = {
60
- schemaVersion: hasSchemaVersion && overrides.schemaVersion !== void 0 ? overrides.schemaVersion : hasSchemaVersion && overrides.schemaVersion === void 0 ? void 0 : header.schemaVersion,
61
- target: header.target,
62
- targetFamily: header.targetFamily,
63
- // Only include meta fields if they're not explicitly undefined
64
- capabilities: hasCapabilities && overrides.capabilities === void 0 ? void 0 : !hasCapabilities || overrides.capabilities !== void 0 ? meta.capabilities : {},
65
- extensionPacks: hasExtensionPacks && overrides.extensionPacks === void 0 ? void 0 : !hasExtensionPacks || overrides.extensionPacks !== void 0 ? meta.extensionPacks : {},
66
- meta: hasMeta && overrides.meta === void 0 ? void 0 : !hasMeta || overrides.meta !== void 0 ? meta.meta : {},
67
- sources: hasSources && overrides.sources === void 0 ? void 0 : !hasSources || overrides.sources !== void 0 ? meta.sources : {},
68
- // Only include family sections if they're not explicitly undefined
69
- storage: hasStorage && overrides.storage === void 0 ? void 0 : hasStorage && overrides.storage !== void 0 ? overrides.storage : !hasStorage ? { tables: {} } : {},
70
- models: hasModels && overrides.models === void 0 ? void 0 : hasModels && overrides.models !== void 0 ? overrides.models : !hasModels ? {} : {},
71
- relations: hasRelations && overrides.relations === void 0 ? void 0 : hasRelations && overrides.relations !== void 0 ? overrides.relations : !hasRelations ? {} : {}
72
- };
73
- return result;
74
- }
75
- export {
76
- createContractIR
77
- };
78
- //# sourceMappingURL=utils.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../../test/utils.ts"],"sourcesContent":["import { type ContractIR, irHeader, irMeta } from '@prisma-next/contract/ir';\n\n/**\n * Factory function for creating ContractIR objects in tests.\n * Provides sensible defaults and allows overriding specific fields.\n * Uses the emitter factories internally for consistency.\n *\n * If a field is explicitly set to `undefined` in overrides, it will be omitted\n * from the result (useful for testing validation of missing fields).\n */\nexport function createContractIR(\n overrides: Partial<ContractIR> & { coreHash?: string; profileHash?: string } = {},\n): ContractIR {\n // Check if fields are explicitly undefined (not just missing)\n const hasTarget = 'target' in overrides;\n const hasTargetFamily = 'targetFamily' in overrides;\n const hasCoreHash = 'coreHash' in overrides;\n const hasSchemaVersion = 'schemaVersion' in overrides;\n const hasModels = 'models' in overrides;\n const hasRelations = 'relations' in overrides;\n const hasStorage = 'storage' in overrides;\n const hasCapabilities = 'capabilities' in overrides;\n const hasExtensionPacks = 'extensionPacks' in overrides;\n const hasMeta = 'meta' in overrides;\n const hasSources = 'sources' in overrides;\n\n // Build header, omitting fields that are explicitly undefined\n const headerOpts: {\n target?: string;\n targetFamily?: string;\n coreHash?: string;\n profileHash?: string;\n } = {};\n\n if (hasTarget && overrides.target !== undefined) {\n headerOpts.target = overrides.target;\n } else if (!hasTarget) {\n headerOpts.target = 'postgres';\n }\n\n if (hasTargetFamily && overrides.targetFamily !== undefined) {\n headerOpts.targetFamily = overrides.targetFamily;\n } else if (!hasTargetFamily) {\n headerOpts.targetFamily = 'sql';\n }\n\n if (hasCoreHash && overrides.coreHash !== undefined) {\n headerOpts.coreHash = overrides.coreHash;\n } else if (!hasCoreHash) {\n headerOpts.coreHash = 'sha256:test';\n }\n\n // profileHash is not part of 
ContractIR, but we can accept it for header creation\n if (overrides.profileHash !== undefined) {\n headerOpts.profileHash = overrides.profileHash;\n }\n\n const header = irHeader(\n headerOpts as {\n target: string;\n targetFamily: string;\n coreHash: string;\n profileHash?: string;\n },\n );\n\n // Build meta, handling explicitly undefined fields\n // If a field is explicitly undefined, we'll omit it from the result later\n const metaOpts: {\n capabilities?: Record<string, Record<string, boolean>>;\n extensionPacks?: Record<string, unknown>;\n meta?: Record<string, unknown>;\n sources?: Record<string, unknown>;\n } = {};\n\n if (hasCapabilities && overrides.capabilities !== undefined) {\n metaOpts.capabilities = overrides.capabilities;\n } else if (!hasCapabilities) {\n metaOpts.capabilities = {};\n }\n\n if (hasExtensionPacks && overrides.extensionPacks !== undefined) {\n metaOpts.extensionPacks = overrides.extensionPacks;\n } else if (!hasExtensionPacks) {\n metaOpts.extensionPacks = {};\n }\n\n if (hasMeta && overrides.meta !== undefined) {\n metaOpts.meta = overrides.meta;\n } else if (!hasMeta) {\n metaOpts.meta = {};\n }\n\n if (hasSources && overrides.sources !== undefined) {\n metaOpts.sources = overrides.sources;\n } else if (!hasSources) {\n metaOpts.sources = {};\n }\n\n const meta = irMeta(Object.keys(metaOpts).length > 0 ? metaOpts : undefined);\n\n // Build result by constructing the object directly (ContractIR doesn't include coreHash/profileHash)\n // When fields are explicitly undefined, include them as undefined (tests use type assertions to bypass TS)\n const result = {\n schemaVersion:\n hasSchemaVersion && overrides.schemaVersion !== undefined\n ? overrides.schemaVersion\n : hasSchemaVersion && overrides.schemaVersion === undefined\n ? 
(undefined as unknown as string)\n : header.schemaVersion,\n target: header.target,\n targetFamily: header.targetFamily,\n // Only include meta fields if they're not explicitly undefined\n capabilities:\n hasCapabilities && overrides.capabilities === undefined\n ? (undefined as unknown as Record<string, Record<string, boolean>>)\n : !hasCapabilities || overrides.capabilities !== undefined\n ? meta.capabilities\n : ({} as Record<string, Record<string, boolean>>),\n extensionPacks:\n hasExtensionPacks && overrides.extensionPacks === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : !hasExtensionPacks || overrides.extensionPacks !== undefined\n ? meta.extensionPacks\n : ({} as Record<string, unknown>),\n meta:\n hasMeta && overrides.meta === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : !hasMeta || overrides.meta !== undefined\n ? meta.meta\n : ({} as Record<string, unknown>),\n sources:\n hasSources && overrides.sources === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : !hasSources || overrides.sources !== undefined\n ? meta.sources\n : ({} as Record<string, unknown>),\n // Only include family sections if they're not explicitly undefined\n storage:\n hasStorage && overrides.storage === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : hasStorage && overrides.storage !== undefined\n ? (overrides.storage as Record<string, unknown>)\n : !hasStorage\n ? ({ tables: {} } as Record<string, unknown>)\n : ({} as Record<string, unknown>),\n models:\n hasModels && overrides.models === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : hasModels && overrides.models !== undefined\n ? (overrides.models as Record<string, unknown>)\n : !hasModels\n ? {}\n : ({} as Record<string, unknown>),\n relations:\n hasRelations && overrides.relations === undefined\n ? (undefined as unknown as Record<string, unknown>)\n : hasRelations && overrides.relations !== undefined\n ? 
(overrides.relations as Record<string, unknown>)\n : !hasRelations\n ? {}\n : ({} as Record<string, unknown>),\n } as ContractIR;\n\n return result;\n}\n"],"mappings":";AAAA,SAA0B,UAAU,cAAc;AAU3C,SAAS,iBACd,YAA+E,CAAC,GACpE;AAEZ,QAAM,YAAY,YAAY;AAC9B,QAAM,kBAAkB,kBAAkB;AAC1C,QAAM,cAAc,cAAc;AAClC,QAAM,mBAAmB,mBAAmB;AAC5C,QAAM,YAAY,YAAY;AAC9B,QAAM,eAAe,eAAe;AACpC,QAAM,aAAa,aAAa;AAChC,QAAM,kBAAkB,kBAAkB;AAC1C,QAAM,oBAAoB,oBAAoB;AAC9C,QAAM,UAAU,UAAU;AAC1B,QAAM,aAAa,aAAa;AAGhC,QAAM,aAKF,CAAC;AAEL,MAAI,aAAa,UAAU,WAAW,QAAW;AAC/C,eAAW,SAAS,UAAU;AAAA,EAChC,WAAW,CAAC,WAAW;AACrB,eAAW,SAAS;AAAA,EACtB;AAEA,MAAI,mBAAmB,UAAU,iBAAiB,QAAW;AAC3D,eAAW,eAAe,UAAU;AAAA,EACtC,WAAW,CAAC,iBAAiB;AAC3B,eAAW,eAAe;AAAA,EAC5B;AAEA,MAAI,eAAe,UAAU,aAAa,QAAW;AACnD,eAAW,WAAW,UAAU;AAAA,EAClC,WAAW,CAAC,aAAa;AACvB,eAAW,WAAW;AAAA,EACxB;AAGA,MAAI,UAAU,gBAAgB,QAAW;AACvC,eAAW,cAAc,UAAU;AAAA,EACrC;AAEA,QAAM,SAAS;AAAA,IACb;AAAA,EAMF;AAIA,QAAM,WAKF,CAAC;AAEL,MAAI,mBAAmB,UAAU,iBAAiB,QAAW;AAC3D,aAAS,eAAe,UAAU;AAAA,EACpC,WAAW,CAAC,iBAAiB;AAC3B,aAAS,eAAe,CAAC;AAAA,EAC3B;AAEA,MAAI,qBAAqB,UAAU,mBAAmB,QAAW;AAC/D,aAAS,iBAAiB,UAAU;AAAA,EACtC,WAAW,CAAC,mBAAmB;AAC7B,aAAS,iBAAiB,CAAC;AAAA,EAC7B;AAEA,MAAI,WAAW,UAAU,SAAS,QAAW;AAC3C,aAAS,OAAO,UAAU;AAAA,EAC5B,WAAW,CAAC,SAAS;AACnB,aAAS,OAAO,CAAC;AAAA,EACnB;AAEA,MAAI,cAAc,UAAU,YAAY,QAAW;AACjD,aAAS,UAAU,UAAU;AAAA,EAC/B,WAAW,CAAC,YAAY;AACtB,aAAS,UAAU,CAAC;AAAA,EACtB;AAEA,QAAM,OAAO,OAAO,OAAO,KAAK,QAAQ,EAAE,SAAS,IAAI,WAAW,MAAS;AAI3E,QAAM,SAAS;AAAA,IACb,eACE,oBAAoB,UAAU,kBAAkB,SAC5C,UAAU,gBACV,oBAAoB,UAAU,kBAAkB,SAC7C,SACD,OAAO;AAAA,IACf,QAAQ,OAAO;AAAA,IACf,cAAc,OAAO;AAAA;AAAA,IAErB,cACE,mBAAmB,UAAU,iBAAiB,SACzC,SACD,CAAC,mBAAmB,UAAU,iBAAiB,SAC7C,KAAK,eACJ,CAAC;AAAA,IACV,gBACE,qBAAqB,UAAU,mBAAmB,SAC7C,SACD,CAAC,qBAAqB,UAAU,mBAAmB,SACjD,KAAK,iBACJ,CAAC;AAAA,IACV,MACE,WAAW,UAAU,SAAS,SACzB,SACD,CAAC,WAAW,UAAU,SAAS,SAC7B,KAAK,OACJ,CAAC;AAAA,IACV,SACE,cAAc,UAAU,YAAY,SAC/B,SACD,CAAC,cAAc,UAAU,YAAY,SACnC,KAAK,UACJ,CAAC;AAAA;AAAA,IAEV,SACE,cAAc,UAAU,YAAY,SAC/B,SACD,cAAc,UAAU,YAAY,SACjC,UAAU
,UACX,CAAC,aACE,EAAE,QAAQ,CAAC,EAAE,IACb,CAAC;AAAA,IACZ,QACE,aAAa,UAAU,WAAW,SAC7B,SACD,aAAa,UAAU,WAAW,SAC/B,UAAU,SACX,CAAC,YACC,CAAC,IACA,CAAC;AAAA,IACZ,WACE,gBAAgB,UAAU,cAAc,SACnC,SACD,gBAAgB,UAAU,cAAc,SACrC,UAAU,YACX,CAAC,eACC,CAAC,IACA,CAAC;AAAA,EACd;AAEA,SAAO;AACT;","names":[]}