@prisma-next/migration-tools 0.5.0-dev.66 → 0.5.0-dev.67

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/dist/{constants-B87kJAGj.mjs → constants-DWV9_o2Z.mjs} +2 -2
  2. package/dist/{constants-B87kJAGj.mjs.map → constants-DWV9_o2Z.mjs.map} +1 -1
  3. package/dist/{errors-DQsXvidG.mjs → errors-5KVuWV_5.mjs} +2 -3
  4. package/dist/{errors-DQsXvidG.mjs.map → errors-5KVuWV_5.mjs.map} +1 -1
  5. package/dist/exports/constants.d.mts.map +1 -1
  6. package/dist/exports/constants.mjs +2 -3
  7. package/dist/exports/errors.d.mts.map +1 -1
  8. package/dist/exports/errors.mjs +2 -3
  9. package/dist/exports/graph.d.mts +1 -1
  10. package/dist/exports/graph.mjs +1 -1
  11. package/dist/exports/hash.d.mts +2 -2
  12. package/dist/exports/hash.d.mts.map +1 -1
  13. package/dist/exports/hash.mjs +2 -3
  14. package/dist/exports/invariants.d.mts +1 -2
  15. package/dist/exports/invariants.d.mts.map +1 -1
  16. package/dist/exports/invariants.mjs +2 -4
  17. package/dist/exports/io.d.mts +1 -1
  18. package/dist/exports/io.d.mts.map +1 -1
  19. package/dist/exports/io.mjs +2 -5
  20. package/dist/exports/metadata.d.mts +1 -1
  21. package/dist/exports/metadata.mjs +1 -1
  22. package/dist/exports/migration-graph.d.mts +2 -2
  23. package/dist/exports/migration-graph.d.mts.map +1 -1
  24. package/dist/exports/migration-graph.mjs +7 -10
  25. package/dist/exports/migration-graph.mjs.map +1 -1
  26. package/dist/exports/migration-ts.d.mts.map +1 -1
  27. package/dist/exports/migration-ts.mjs +1 -3
  28. package/dist/exports/migration-ts.mjs.map +1 -1
  29. package/dist/exports/migration.d.mts +1 -1
  30. package/dist/exports/migration.d.mts.map +1 -1
  31. package/dist/exports/migration.mjs +5 -6
  32. package/dist/exports/migration.mjs.map +1 -1
  33. package/dist/exports/package.d.mts +1 -1
  34. package/dist/exports/package.mjs +1 -1
  35. package/dist/exports/refs.d.mts.map +1 -1
  36. package/dist/exports/refs.mjs +2 -3
  37. package/dist/exports/refs.mjs.map +1 -1
  38. package/dist/exports/spaces.d.mts +0 -1
  39. package/dist/exports/spaces.d.mts.map +1 -1
  40. package/dist/exports/spaces.mjs +4 -12
  41. package/dist/exports/spaces.mjs.map +1 -1
  42. package/dist/{graph-Czaj8O2q.d.mts → graph-4dIUm90i.d.mts} +1 -1
  43. package/dist/graph-4dIUm90i.d.mts.map +1 -0
  44. package/dist/{hash-G0bAfIGh.mjs → hash-By50zM_E.mjs} +2 -4
  45. package/dist/hash-By50zM_E.mjs.map +1 -0
  46. package/dist/{invariants-4Avb_Yhy.mjs → invariants-CkLSBcMu.mjs} +3 -4
  47. package/dist/{invariants-4Avb_Yhy.mjs.map → invariants-CkLSBcMu.mjs.map} +1 -1
  48. package/dist/{io-CDJaWGbt.mjs → io-TX8RPDeh.mjs} +11 -13
  49. package/dist/io-TX8RPDeh.mjs.map +1 -0
  50. package/dist/{op-schema-BiF1ZYqH.mjs → op-schema-D5qkXfEf.mjs} +2 -3
  51. package/dist/{op-schema-BiF1ZYqH.mjs.map → op-schema-D5qkXfEf.mjs.map} +1 -1
  52. package/dist/{package-B3Yl6DTr.d.mts → package-BjiZ7KDy.d.mts} +1 -1
  53. package/dist/package-BjiZ7KDy.d.mts.map +1 -0
  54. package/package.json +7 -7
  55. package/dist/graph-Czaj8O2q.d.mts.map +0 -1
  56. package/dist/hash-G0bAfIGh.mjs.map +0 -1
  57. package/dist/io-CDJaWGbt.mjs.map +0 -1
  58. package/dist/package-B3Yl6DTr.d.mts.map +0 -1
  59. package/dist/{metadata-CSjwljJx.d.mts → metadata-th_MvOTT.d.mts} +0 -0
@@ -1 +1 @@
- {"version":3,"file":"migration-ts.mjs","names":[],"sources":["../../src/migration-ts.ts","../../src/runtime-detection.ts"],"sourcesContent":["/**\n * Utilities for reading/writing `migration.ts` files.\n *\n * Rendering migration.ts source is the target's responsibility — the CLI\n * obtains source strings from a planner's `plan.renderTypeScript()`. The\n * helper here is limited to file I/O: writing the returned source with the\n * right executable bit and probing for existence.\n */\n\nimport { stat, writeFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { format } from 'prettier';\n\nconst MIGRATION_TS_FILE = 'migration.ts';\n\n/**\n * Writes a pre-rendered `migration.ts` source string to the given package\n * directory. If the source begins with a shebang, the file is written with\n * executable permissions (0o755) so it can be run directly via\n * `./migration.ts` — the rendered scaffold ends with\n * `MigrationCLI.run(import.meta.url, M)` from\n * `@prisma-next/cli/migration-cli` (re-exported by the postgres facade),\n * which guards on the entrypoint and serializes when the file is the main\n * module.\n *\n * The source is run through prettier before writing so migration renderers\n * can produce structurally-correct but loosely-indented source and rely on\n * a single canonical format on disk. Matches what `@prisma-next/emitter`\n * already does for generated `contract.d.ts`.\n */\nexport async function writeMigrationTs(packageDir: string, content: string): Promise<void> {\n const formatted = await formatMigrationTsSource(content);\n const isExecutable = formatted.startsWith('#!');\n await writeFile(\n join(packageDir, MIGRATION_TS_FILE),\n formatted,\n isExecutable ? { mode: 0o755 } : undefined,\n );\n}\n\nasync function formatMigrationTsSource(source: string): Promise<string> {\n return format(source, {\n parser: 'typescript',\n singleQuote: true,\n semi: true,\n printWidth: 100,\n });\n}\n\n/**\n * Checks whether a migration.ts file exists in the package directory.\n */\nexport async function hasMigrationTs(packageDir: string): Promise<boolean> {\n try {\n const s = await stat(join(packageDir, MIGRATION_TS_FILE));\n return s.isFile();\n } catch {\n return false;\n }\n}\n","export type ScaffoldRuntime = 'node' | 'bun' | 'deno';\n\nexport function detectScaffoldRuntime(): ScaffoldRuntime {\n if (typeof (globalThis as { Bun?: unknown }).Bun !== 'undefined') return 'bun';\n if (typeof (globalThis as { Deno?: unknown }).Deno !== 'undefined') return 'deno';\n return 'node';\n}\n\nexport function shebangLineFor(runtime: ScaffoldRuntime): string {\n switch (runtime) {\n case 'bun':\n return '#!/usr/bin/env -S bun';\n case 'deno':\n return '#!/usr/bin/env -S deno run -A';\n case 'node':\n return '#!/usr/bin/env -S node';\n }\n}\n"],"mappings":";;;;;;;;;;;;;AAaA,MAAM,oBAAoB;;;;;;;;;;;;;;;;AAiB1B,eAAsB,iBAAiB,YAAoB,SAAgC;CACzF,MAAM,YAAY,MAAM,wBAAwB,QAAQ;CACxD,MAAM,eAAe,UAAU,WAAW,KAAK;AAC/C,OAAM,UACJ,KAAK,YAAY,kBAAkB,EACnC,WACA,eAAe,EAAE,MAAM,KAAO,GAAG,OAClC;;AAGH,eAAe,wBAAwB,QAAiC;AACtE,QAAO,OAAO,QAAQ;EACpB,QAAQ;EACR,aAAa;EACb,MAAM;EACN,YAAY;EACb,CAAC;;;;;AAMJ,eAAsB,eAAe,YAAsC;AACzE,KAAI;AAEF,UADU,MAAM,KAAK,KAAK,YAAY,kBAAkB,CAAC,EAChD,QAAQ;SACX;AACN,SAAO;;;;;;ACvDX,SAAgB,wBAAyC;AACvD,KAAI,OAAQ,WAAiC,QAAQ,YAAa,QAAO;AACzE,KAAI,OAAQ,WAAkC,SAAS,YAAa,QAAO;AAC3E,QAAO;;AAGT,SAAgB,eAAe,SAAkC;AAC/D,SAAQ,SAAR;EACE,KAAK,MACH,QAAO;EACT,KAAK,OACH,QAAO;EACT,KAAK,OACH,QAAO"}
+ {"version":3,"file":"migration-ts.mjs","names":[],"sources":["../../src/migration-ts.ts","../../src/runtime-detection.ts"],"sourcesContent":["/**\n * Utilities for reading/writing `migration.ts` files.\n *\n * Rendering migration.ts source is the target's responsibility — the CLI\n * obtains source strings from a planner's `plan.renderTypeScript()`. The\n * helper here is limited to file I/O: writing the returned source with the\n * right executable bit and probing for existence.\n */\n\nimport { stat, writeFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { format } from 'prettier';\n\nconst MIGRATION_TS_FILE = 'migration.ts';\n\n/**\n * Writes a pre-rendered `migration.ts` source string to the given package\n * directory. If the source begins with a shebang, the file is written with\n * executable permissions (0o755) so it can be run directly via\n * `./migration.ts` — the rendered scaffold ends with\n * `MigrationCLI.run(import.meta.url, M)` from\n * `@prisma-next/cli/migration-cli` (re-exported by the postgres facade),\n * which guards on the entrypoint and serializes when the file is the main\n * module.\n *\n * The source is run through prettier before writing so migration renderers\n * can produce structurally-correct but loosely-indented source and rely on\n * a single canonical format on disk. Matches what `@prisma-next/emitter`\n * already does for generated `contract.d.ts`.\n */\nexport async function writeMigrationTs(packageDir: string, content: string): Promise<void> {\n const formatted = await formatMigrationTsSource(content);\n const isExecutable = formatted.startsWith('#!');\n await writeFile(\n join(packageDir, MIGRATION_TS_FILE),\n formatted,\n isExecutable ? { mode: 0o755 } : undefined,\n );\n}\n\nasync function formatMigrationTsSource(source: string): Promise<string> {\n return format(source, {\n parser: 'typescript',\n singleQuote: true,\n semi: true,\n printWidth: 100,\n });\n}\n\n/**\n * Checks whether a migration.ts file exists in the package directory.\n */\nexport async function hasMigrationTs(packageDir: string): Promise<boolean> {\n try {\n const s = await stat(join(packageDir, MIGRATION_TS_FILE));\n return s.isFile();\n } catch {\n return false;\n }\n}\n","export type ScaffoldRuntime = 'node' | 'bun' | 'deno';\n\nexport function detectScaffoldRuntime(): ScaffoldRuntime {\n if (typeof (globalThis as { Bun?: unknown }).Bun !== 'undefined') return 'bun';\n if (typeof (globalThis as { Deno?: unknown }).Deno !== 'undefined') return 'deno';\n return 'node';\n}\n\nexport function shebangLineFor(runtime: ScaffoldRuntime): string {\n switch (runtime) {\n case 'bun':\n return '#!/usr/bin/env -S bun';\n case 'deno':\n return '#!/usr/bin/env -S deno run -A';\n case 'node':\n return '#!/usr/bin/env -S node';\n }\n}\n"],"mappings":";;;;;;;;;;;;AAaA,MAAM,oBAAoB;;;;;;;;;;;;;;;;AAiB1B,eAAsB,iBAAiB,YAAoB,SAAgC;CACzF,MAAM,YAAY,MAAM,wBAAwB,QAAQ;CACxD,MAAM,eAAe,UAAU,WAAW,KAAK;CAC/C,MAAM,UACJ,KAAK,YAAY,kBAAkB,EACnC,WACA,eAAe,EAAE,MAAM,KAAO,GAAG,KAAA,EAClC;;AAGH,eAAe,wBAAwB,QAAiC;CACtE,OAAO,OAAO,QAAQ;EACpB,QAAQ;EACR,aAAa;EACb,MAAM;EACN,YAAY;EACb,CAAC;;;;;AAMJ,eAAsB,eAAe,YAAsC;CACzE,IAAI;EAEF,QAAO,MADS,KAAK,KAAK,YAAY,kBAAkB,CAAC,EAChD,QAAQ;SACX;EACN,OAAO;;;;;ACvDX,SAAgB,wBAAyC;CACvD,IAAI,OAAQ,WAAiC,QAAQ,aAAa,OAAO;CACzE,IAAI,OAAQ,WAAkC,SAAS,aAAa,OAAO;CAC3E,OAAO;;AAGT,SAAgB,eAAe,SAAkC;CAC/D,QAAQ,SAAR;EACE,KAAK,OACH,OAAO;EACT,KAAK,QACH,OAAO;EACT,KAAK,QACH,OAAO"}
@@ -1,4 +1,4 @@
- import { n as MigrationMetadata$1 } from "../metadata-CSjwljJx.mjs";
+ import { n as MigrationMetadata$1 } from "../metadata-th_MvOTT.mjs";
  import { ControlStack, MigrationPlan, MigrationPlanOperation } from "@prisma-next/framework-components/control";
 
  //#region src/migration-base.d.ts
@@ -1 +1 @@
- {"version":3,"file":"migration.d.mts","names":[],"sources":["../../src/migration-base.ts"],"sourcesContent":[],"mappings":";;;;UAiBiB,aAAA;;EAAA,SAAA,EAAA,EAAA,MAAa;EA0BR,SAAA,MAAS,CAAA,EAAA,SAAA,MAAA,EAAA;;;;;;;;;;;AAIlB,uBAJS,SAIT,CAAA,mBAHQ,sBAGR,GAHiC,sBAGjC,EAAA,kBAAA,MAAA,GAAA,MAAA,EAAA,kBAAA,MAAA,GAAA,MAAA,CAAA,YAAA,aAAA,CAAA;EAAa,kBAAA,QAAA,EAAA,MAAA;EAmDV;AAsBhB;AAsHA;;;;;;;4BAlL4B,aAAa,WAAW;sBAE9B,aAAa,WAAW;;;;;;;sCAUR;;;;;;uBAOf;;;;;;;;;;;;;;;iBAmBP,kBAAA;;;;;;;;;;;;UAsBC,kBAAA;;qBAEI;;;;;;;;;;;;iBAoHL,uBAAA,WACJ,qBACA,QAAQ,8BACjB"}
+ {"version":3,"file":"migration.d.mts","names":[],"sources":["../../src/migration-base.ts"],"mappings":";;;;UAiBiB,aAAA;EAAA,SACN,IAAA;EAAA,SACA,EAAA;EAAA,SACA,MAAA;AAAA;;;;;;;AAuBX;;;uBAAsB,SAAA,oBACD,sBAAA,GAAyB,sBAAA,mFAGjC,aAAA;EAAA,kBAEO,QAAA;EAWqB;;;;;;;;;EAAA,mBAApB,KAAA,EAAO,YAAA,CAAa,SAAA,EAAW,SAAA;cAEtC,KAAA,GAAQ,YAAA,CAAa,SAAA,EAAW,SAAA;EAlB5C;;;;;;EAAA,aA4Ba,UAAA,CAAA,YAAuB,UAAA;EAZjB;;;;;EAAA,SAmBV,QAAA,CAAA,GAAY,aAAA;EAAA,IAEjB,MAAA,CAAA;IAAA,SAAqB,WAAA;EAAA;EAAA,IAKrB,WAAA,CAAA;IAAA,SAA0B,WAAA;EAAA;AAAA;;;;;;;AAYhC;iBAAgB,kBAAA,CAAmB,aAAA;;;;AAsBnC;;;;;;;;UAAiB,kBAAA;EAAA,SACN,OAAA;EAAA,SACA,QAAA,EAAU,mBAAA;EAAA,SACV,YAAA;AAAA;;;;;;;;;;iBAmHK,uBAAA,CACd,QAAA,EAAU,SAAA,EACV,QAAA,EAAU,OAAA,CAAQ,mBAAA,WACjB,kBAAA"}
@@ -1,12 +1,11 @@
- import { T as errorStaleContractBookends, d as errorInvalidOperationEntry } from "../errors-DQsXvidG.mjs";
- import { t as computeMigrationHash } from "../hash-G0bAfIGh.mjs";
- import { t as deriveProvidedInvariants } from "../invariants-4Avb_Yhy.mjs";
- import { t as MigrationOpSchema } from "../op-schema-BiF1ZYqH.mjs";
+ import { T as errorStaleContractBookends, d as errorInvalidOperationEntry } from "../errors-5KVuWV_5.mjs";
+ import { t as computeMigrationHash } from "../hash-By50zM_E.mjs";
+ import { t as deriveProvidedInvariants } from "../invariants-CkLSBcMu.mjs";
+ import { t as MigrationOpSchema } from "../op-schema-D5qkXfEf.mjs";
  import { ifDefined } from "@prisma-next/utils/defined";
  import { type } from "arktype";
  import { realpathSync } from "node:fs";
  import { fileURLToPath } from "node:url";
-
  //#region src/migration-base.ts
  const MigrationMetaSchema = type({
  from: "string > 0 | null",
@@ -171,7 +170,7 @@ function buildMigrationArtifacts(instance, existing) {
  metadataJson: JSON.stringify(metadata, null, 2)
  };
  }
-
  //#endregion
  export { Migration, buildMigrationArtifacts, isDirectEntrypoint };
+
  //# sourceMappingURL=migration.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"migration.mjs","names":["baseMetadata: Omit<MigrationMetadata, 'migrationHash'>"],"sources":["../../src/migration-base.ts"],"sourcesContent":["import { realpathSync } from 'node:fs';\nimport { fileURLToPath } from 'node:url';\nimport type { Contract } from '@prisma-next/contract/types';\nimport type {\n ControlStack,\n MigrationPlan,\n MigrationPlanOperation,\n} from '@prisma-next/framework-components/control';\nimport { ifDefined } from '@prisma-next/utils/defined';\nimport { type } from 'arktype';\nimport { errorInvalidOperationEntry, errorStaleContractBookends } from './errors';\nimport { computeMigrationHash } from './hash';\nimport { deriveProvidedInvariants } from './invariants';\nimport type { MigrationHints, MigrationMetadata } from './metadata';\nimport { MigrationOpSchema } from './op-schema';\nimport type { MigrationOps } from './package';\n\nexport interface MigrationMeta {\n readonly from: string | null;\n readonly to: string;\n readonly labels?: readonly string[];\n}\n\n// `from` rejects empty strings to mirror `MigrationMetadataSchema` in\n// `./io.ts`. Without this match, an authored migration could `describe()` with\n// `from: ''` and pass `buildMigrationArtifacts`'s validation, only to have\n// `readMigrationPackage` reject the resulting `migration.json` later — the\n// two validators must agree on the legal value space.\nconst MigrationMetaSchema = type({\n from: 'string > 0 | null',\n to: 'string',\n 'labels?': type('string').array(),\n});\n\n/**\n * Base class for migrations.\n *\n * A `Migration` subclass is itself a `MigrationPlan`: CLI commands and the\n * runner can consume it directly via `targetId`, `operations`, `origin`, and\n * `destination`. The metadata-shaped inputs come from `describe()`, which\n * every migration must implement — `migration.json` is required for a\n * migration to be valid.\n */\nexport abstract class Migration<\n TOperation extends MigrationPlanOperation = MigrationPlanOperation,\n TFamilyId extends string = string,\n TTargetId extends string = string,\n> implements MigrationPlan\n{\n abstract readonly targetId: string;\n\n /**\n * Assembled `ControlStack` injected by the orchestrator (`runMigration`).\n *\n * Subclasses (e.g. `PostgresMigration`) read the stack to materialize their\n * adapter once per instance. Optional at the abstract level so unit tests can\n * construct `Migration` instances purely for `operations` / `describe`\n * assertions without needing a real stack; concrete subclasses that need the\n * stack at runtime should narrow the parameter to required.\n */\n protected readonly stack: ControlStack<TFamilyId, TTargetId> | undefined;\n\n constructor(stack?: ControlStack<TFamilyId, TTargetId>) {\n this.stack = stack;\n }\n\n /**\n * Ordered list of operations this migration performs.\n *\n * Implemented as a getter so that subclasses can either precompute the list\n * in their constructor or build it lazily per access.\n */\n abstract get operations(): readonly TOperation[];\n\n /**\n * Metadata inputs used to build `migration.json` and to derive the plan's\n * origin/destination identities. Every migration must provide this —\n * omitting it would produce an invalid on-disk migration package.\n */\n abstract describe(): MigrationMeta;\n\n get origin(): { readonly storageHash: string } | null {\n const from = this.describe().from;\n return from === null ? 
null : { storageHash: from };\n }\n\n get destination(): { readonly storageHash: string } {\n return { storageHash: this.describe().to };\n }\n}\n\n/**\n * Returns true when `import.meta.url` resolves to the same file that was\n * invoked as the node entrypoint (`process.argv[1]`). Used by\n * `MigrationCLI.run` (in `@prisma-next/cli/migration-cli`) to no-op when\n * the migration module is being imported (e.g. by another script) rather\n * than executed directly.\n */\nexport function isDirectEntrypoint(importMetaUrl: string): boolean {\n const metaFilename = fileURLToPath(importMetaUrl);\n const argv1 = process.argv[1];\n if (!argv1) return false;\n try {\n return realpathSync(metaFilename) === realpathSync(argv1);\n } catch {\n return false;\n }\n}\n\n/**\n * In-memory artifacts produced from a `Migration` instance: the\n * serialized `ops.json` body, the `migration.json` metadata object, and\n * its serialized form. Returned by `buildMigrationArtifacts` so callers\n * (today: `MigrationCLI.run` in `@prisma-next/cli/migration-cli`) can\n * decide how to persist them — write to disk, print in dry-run, ship\n * over the wire — without coupling artifact construction to file I/O.\n *\n * `metadataJson` is `JSON.stringify(metadata, null, 2)` — the canonical\n * on-disk shape that the arktype loader-schema in `./io` validates.\n */\nexport interface MigrationArtifacts {\n readonly opsJson: string;\n readonly metadata: MigrationMetadata;\n readonly metadataJson: string;\n}\n\n/**\n * Build the attested metadata from `describe()`-derived metadata, the\n * operations list, and the previously-scaffolded metadata (if any).\n *\n * When a `migration.json` already exists for this package (the common\n * case: it was scaffolded by `migration plan`), preserve the contract\n * bookends, hints, labels, and `createdAt` set there — those fields are\n * owned by the CLI scaffolder, not the authored class. Only the\n * `describe()`-derived fields (`from`, `to`) and the operations\n * change as the author iterates. When no metadata exists yet (a bare\n * `migration.ts` run from scratch), synthesize a minimal but\n * schema-conformant record so the resulting package can still be read,\n * verified, and applied.\n *\n * The `migrationHash` is recomputed against the current metadata + ops so\n * the on-disk artifacts are always fully attested.\n */\nfunction buildAttestedMetadata(\n meta: MigrationMeta,\n ops: MigrationOps,\n existing: Partial<MigrationMetadata> | null,\n): MigrationMetadata {\n assertBookendsMatchMeta(meta, existing);\n\n const baseMetadata: Omit<MigrationMetadata, 'migrationHash'> = {\n from: meta.from,\n to: meta.to,\n labels: meta.labels ?? existing?.labels ?? [],\n providedInvariants: deriveProvidedInvariants(ops),\n createdAt: existing?.createdAt ?? new Date().toISOString(),\n fromContract: existing?.fromContract ?? null,\n // When no scaffolded metadata exists we synthesize a minimal contract\n // stub so the package is still readable end-to-end. The cast is\n // intentional: only the storage bookend matters for hash computation\n // (everything else is stripped by `computeMigrationHash`), and a real\n // contract bookend would only be available after `migration plan`.\n toContract: existing?.toContract ?? 
({ storage: { storageHash: meta.to } } as Contract),\n hints: normalizeHints(existing?.hints),\n ...ifDefined('authorship', existing?.authorship),\n };\n\n const migrationHash = computeMigrationHash(baseMetadata, ops);\n return { ...baseMetadata, migrationHash };\n}\n\n/**\n * Verify each preserved contract bookend in `existing` agrees with the\n * corresponding side of `describe()`'s output. A mismatch indicates the\n * migration's `describe()` was edited after `migration plan` scaffolded\n * the package, leaving a self-inconsistent manifest. Failing fast at\n * write-time turns a silent foot-gun into an actionable diagnostic.\n *\n * Skipped when a side's `existing.<side>Contract` is null/absent (the\n * synthesis path stays open for origin-less initial migrations and for\n * bare `migration.ts` runs from scratch). When a bookend is *present*\n * but its `storage.storageHash` is missing, that's treated as a\n * mismatch — a malformed bookend is not equivalent to \"no bookend\".\n *\n * This check is paired with TML-2274, which removes `fromContract` /\n * `toContract` from the manifest entirely; once that lands, this\n * function and its error code are deleted.\n */\nfunction assertBookendsMatchMeta(\n meta: MigrationMeta,\n existing: Partial<MigrationMetadata> | null,\n): void {\n if (existing?.fromContract != null) {\n const contractHash = existing.fromContract.storage?.storageHash ?? '';\n if (contractHash !== meta.from) {\n throw errorStaleContractBookends({\n side: 'from',\n metaHash: meta.from,\n contractHash,\n });\n }\n }\n if (existing?.toContract != null) {\n const contractHash = existing.toContract.storage?.storageHash ?? '';\n if (contractHash !== meta.to) {\n throw errorStaleContractBookends({\n side: 'to',\n metaHash: meta.to,\n contractHash,\n });\n }\n }\n}\n\n/**\n * Project `existing.hints` down to the known `MigrationHints` shape, dropping\n * any legacy keys that may linger in metadata scaffolded by older CLI\n * versions (e.g. `planningStrategy`). Picking fields explicitly instead of\n * spreading keeps refreshed `migration.json` files schema-clean regardless\n * of what was on disk before.\n */\nfunction normalizeHints(existing: MigrationHints | undefined): MigrationHints {\n return {\n used: existing?.used ?? [],\n applied: existing?.applied ?? [],\n plannerVersion: existing?.plannerVersion ?? '2.0.0',\n };\n}\n\n/**\n * Pure conversion from a `Migration` instance (plus the previously\n * scaffolded metadata, when one exists on disk) to the in-memory\n * artifacts that downstream tooling persists. 
Owns metadata validation,\n * metadata synthesis/preservation, hint normalization, and the\n * content-addressed `migrationHash` computation, but performs no file I/O\n * — callers handle reads (to source `existing`) and writes (to persist\n * `opsJson` / `metadataJson`).\n */\nexport function buildMigrationArtifacts(\n instance: Migration,\n existing: Partial<MigrationMetadata> | null,\n): MigrationArtifacts {\n const ops = instance.operations;\n if (!Array.isArray(ops)) {\n throw new Error('operations must be an array');\n }\n\n for (let index = 0; index < ops.length; index++) {\n const result = MigrationOpSchema(ops[index]);\n if (result instanceof type.errors) {\n throw errorInvalidOperationEntry(index, result.summary);\n }\n }\n\n const rawMeta: unknown = instance.describe();\n const parsed = MigrationMetaSchema(rawMeta);\n if (parsed instanceof type.errors) {\n throw new Error(`describe() returned invalid metadata: ${parsed.summary}`);\n }\n\n const metadata = buildAttestedMetadata(parsed, ops, existing);\n\n return {\n opsJson: JSON.stringify(ops, null, 2),\n metadata,\n metadataJson: JSON.stringify(metadata, null, 2),\n };\n}\n"],"mappings":";;;;;;;;;;AA4BA,MAAM,sBAAsB,KAAK;CAC/B,MAAM;CACN,IAAI;CACJ,WAAW,KAAK,SAAS,CAAC,OAAO;CAClC,CAAC;;;;;;;;;;AAWF,IAAsB,YAAtB,MAKA;;;;;;;;;;CAYE,AAAmB;CAEnB,YAAY,OAA4C;AACtD,OAAK,QAAQ;;CAkBf,IAAI,SAAkD;EACpD,MAAM,OAAO,KAAK,UAAU,CAAC;AAC7B,SAAO,SAAS,OAAO,OAAO,EAAE,aAAa,MAAM;;CAGrD,IAAI,cAAgD;AAClD,SAAO,EAAE,aAAa,KAAK,UAAU,CAAC,IAAI;;;;;;;;;;AAW9C,SAAgB,mBAAmB,eAAgC;CACjE,MAAM,eAAe,cAAc,cAAc;CACjD,MAAM,QAAQ,QAAQ,KAAK;AAC3B,KAAI,CAAC,MAAO,QAAO;AACnB,KAAI;AACF,SAAO,aAAa,aAAa,KAAK,aAAa,MAAM;SACnD;AACN,SAAO;;;;;;;;;;;;;;;;;;;;AAsCX,SAAS,sBACP,MACA,KACA,UACmB;AACnB,yBAAwB,MAAM,SAAS;CAEvC,MAAMA,eAAyD;EAC7D,MAAM,KAAK;EACX,IAAI,KAAK;EACT,QAAQ,KAAK,UAAU,UAAU,UAAU,EAAE;EAC7C,oBAAoB,yBAAyB,IAAI;EACjD,WAAW,UAAU,8BAAa,IAAI,MAAM,EAAC,aAAa;EAC1D,cAAc,UAAU,gBAAgB;EAMxC,YAAY,UAAU,cAAe,EAAE,SAAS,EAAE,aAAa,KAAK,IAAI,EAAE;EAC1E,OAAO,eAAe,UAAU,MAAM;EACtC,GAAG,UAAU,cAAc,UAAU,WAAW;EACjD;CAED,MAAM,gBAAgB,qBAAqB,cAAc,IAAI;AAC7D,QAAO;EAAE,GAAG;EAAc;EAAe;;;;;;;;;;;;;;;;;;;AAoB3C,SAAS,wBACP,MACA,UACM;AACN,KAAI,UAAU,gBAAgB,MAAM;EAClC,MAAM,eAAe,SAAS,aAAa,SAAS,eAAe;AACnE,MAAI,iBAAiB,KAAK,KACxB,OAAM,2BAA2B;GAC/B,MAAM;GACN,UAAU,KAAK;GACf;GACD,CAAC;;AAGN,KAAI,UAAU,cAAc,MAAM;EAChC,MAAM,eAAe,SAAS,WAAW,SAAS,eAAe;AACjE,MAAI,iBAAiB,KAAK,GACxB,OAAM,2BAA2B;GAC/B,MAAM;GACN,UAAU,KAAK;GACf;GACD,CAAC;;;;;;;;;;AAYR,SAAS,eAAe,UAAsD;AAC5E,QAAO;EACL,MAAM,UAAU,QAAQ,EAAE;EAC1B,SAAS,UAAU,WAAW,EAAE;EAChC,gBAAgB,UAAU,kBAAkB;EAC7C;;;;;;;;;;;AAYH,SAAgB,wBACd,UACA,UACoB;CACpB,MAAM,MAAM,SAAS;AACrB,KAAI,CAAC,MAAM,QAAQ,IAAI,CACrB,OAAM,IAAI,MAAM,8BAA8B;AAGhD,MAAK,IAAI,QAAQ,GAAG,QAAQ,IAAI,QAAQ,SAAS;EAC/C,MAAM,SAAS,kBAAkB,IAAI,OAAO;AAC5C,MAAI,kBAAkB,KAAK,OACzB,OAAM,2BAA2B,OAAO,OAAO,QAAQ;;CAK3D,MAAM,SAAS,oBADU,SAAS,UAAU,CACD;AAC3C,KAAI,kBAAkB,KAAK,OACzB,OAAM,IAAI,MAAM,yCAAyC,OAAO,UAAU;CAG5E,MAAM,WAAW,sBAAsB,QAAQ,KAAK,SAAS;AAE7D,QAAO;EACL,SAAS,KAAK,UAAU,KAAK,MAAM,EAAE;EACrC;EACA,cAAc,KAAK,UAAU,UAAU,MAAM,EAAE;EAChD"}
+ {"version":3,"file":"migration.mjs","names":[],"sources":["../../src/migration-base.ts"],"sourcesContent":["import { realpathSync } from 'node:fs';\nimport { fileURLToPath } from 'node:url';\nimport type { Contract } from '@prisma-next/contract/types';\nimport type {\n ControlStack,\n MigrationPlan,\n MigrationPlanOperation,\n} from '@prisma-next/framework-components/control';\nimport { ifDefined } from '@prisma-next/utils/defined';\nimport { type } from 'arktype';\nimport { errorInvalidOperationEntry, errorStaleContractBookends } from './errors';\nimport { computeMigrationHash } from './hash';\nimport { deriveProvidedInvariants } from './invariants';\nimport type { MigrationHints, MigrationMetadata } from './metadata';\nimport { MigrationOpSchema } from './op-schema';\nimport type { MigrationOps } from './package';\n\nexport interface MigrationMeta {\n readonly from: string | null;\n readonly to: string;\n readonly labels?: readonly string[];\n}\n\n// `from` rejects empty strings to mirror `MigrationMetadataSchema` in\n// `./io.ts`. Without this match, an authored migration could `describe()` with\n// `from: ''` and pass `buildMigrationArtifacts`'s validation, only to have\n// `readMigrationPackage` reject the resulting `migration.json` later — the\n// two validators must agree on the legal value space.\nconst MigrationMetaSchema = type({\n from: 'string > 0 | null',\n to: 'string',\n 'labels?': type('string').array(),\n});\n\n/**\n * Base class for migrations.\n *\n * A `Migration` subclass is itself a `MigrationPlan`: CLI commands and the\n * runner can consume it directly via `targetId`, `operations`, `origin`, and\n * `destination`. The metadata-shaped inputs come from `describe()`, which\n * every migration must implement — `migration.json` is required for a\n * migration to be valid.\n */\nexport abstract class Migration<\n TOperation extends MigrationPlanOperation = MigrationPlanOperation,\n TFamilyId extends string = string,\n TTargetId extends string = string,\n> implements MigrationPlan\n{\n abstract readonly targetId: string;\n\n /**\n * Assembled `ControlStack` injected by the orchestrator (`runMigration`).\n *\n * Subclasses (e.g. `PostgresMigration`) read the stack to materialize their\n * adapter once per instance. Optional at the abstract level so unit tests can\n * construct `Migration` instances purely for `operations` / `describe`\n * assertions without needing a real stack; concrete subclasses that need the\n * stack at runtime should narrow the parameter to required.\n */\n protected readonly stack: ControlStack<TFamilyId, TTargetId> | undefined;\n\n constructor(stack?: ControlStack<TFamilyId, TTargetId>) {\n this.stack = stack;\n }\n\n /**\n * Ordered list of operations this migration performs.\n *\n * Implemented as a getter so that subclasses can either precompute the list\n * in their constructor or build it lazily per access.\n */\n abstract get operations(): readonly TOperation[];\n\n /**\n * Metadata inputs used to build `migration.json` and to derive the plan's\n * origin/destination identities. Every migration must provide this —\n * omitting it would produce an invalid on-disk migration package.\n */\n abstract describe(): MigrationMeta;\n\n get origin(): { readonly storageHash: string } | null {\n const from = this.describe().from;\n return from === null ? 
null : { storageHash: from };\n }\n\n get destination(): { readonly storageHash: string } {\n return { storageHash: this.describe().to };\n }\n}\n\n/**\n * Returns true when `import.meta.url` resolves to the same file that was\n * invoked as the node entrypoint (`process.argv[1]`). Used by\n * `MigrationCLI.run` (in `@prisma-next/cli/migration-cli`) to no-op when\n * the migration module is being imported (e.g. by another script) rather\n * than executed directly.\n */\nexport function isDirectEntrypoint(importMetaUrl: string): boolean {\n const metaFilename = fileURLToPath(importMetaUrl);\n const argv1 = process.argv[1];\n if (!argv1) return false;\n try {\n return realpathSync(metaFilename) === realpathSync(argv1);\n } catch {\n return false;\n }\n}\n\n/**\n * In-memory artifacts produced from a `Migration` instance: the\n * serialized `ops.json` body, the `migration.json` metadata object, and\n * its serialized form. Returned by `buildMigrationArtifacts` so callers\n * (today: `MigrationCLI.run` in `@prisma-next/cli/migration-cli`) can\n * decide how to persist them — write to disk, print in dry-run, ship\n * over the wire — without coupling artifact construction to file I/O.\n *\n * `metadataJson` is `JSON.stringify(metadata, null, 2)` — the canonical\n * on-disk shape that the arktype loader-schema in `./io` validates.\n */\nexport interface MigrationArtifacts {\n readonly opsJson: string;\n readonly metadata: MigrationMetadata;\n readonly metadataJson: string;\n}\n\n/**\n * Build the attested metadata from `describe()`-derived metadata, the\n * operations list, and the previously-scaffolded metadata (if any).\n *\n * When a `migration.json` already exists for this package (the common\n * case: it was scaffolded by `migration plan`), preserve the contract\n * bookends, hints, labels, and `createdAt` set there — those fields are\n * owned by the CLI scaffolder, not the authored class. Only the\n * `describe()`-derived fields (`from`, `to`) and the operations\n * change as the author iterates. When no metadata exists yet (a bare\n * `migration.ts` run from scratch), synthesize a minimal but\n * schema-conformant record so the resulting package can still be read,\n * verified, and applied.\n *\n * The `migrationHash` is recomputed against the current metadata + ops so\n * the on-disk artifacts are always fully attested.\n */\nfunction buildAttestedMetadata(\n meta: MigrationMeta,\n ops: MigrationOps,\n existing: Partial<MigrationMetadata> | null,\n): MigrationMetadata {\n assertBookendsMatchMeta(meta, existing);\n\n const baseMetadata: Omit<MigrationMetadata, 'migrationHash'> = {\n from: meta.from,\n to: meta.to,\n labels: meta.labels ?? existing?.labels ?? [],\n providedInvariants: deriveProvidedInvariants(ops),\n createdAt: existing?.createdAt ?? new Date().toISOString(),\n fromContract: existing?.fromContract ?? null,\n // When no scaffolded metadata exists we synthesize a minimal contract\n // stub so the package is still readable end-to-end. The cast is\n // intentional: only the storage bookend matters for hash computation\n // (everything else is stripped by `computeMigrationHash`), and a real\n // contract bookend would only be available after `migration plan`.\n toContract: existing?.toContract ?? 
({ storage: { storageHash: meta.to } } as Contract),\n hints: normalizeHints(existing?.hints),\n ...ifDefined('authorship', existing?.authorship),\n };\n\n const migrationHash = computeMigrationHash(baseMetadata, ops);\n return { ...baseMetadata, migrationHash };\n}\n\n/**\n * Verify each preserved contract bookend in `existing` agrees with the\n * corresponding side of `describe()`'s output. A mismatch indicates the\n * migration's `describe()` was edited after `migration plan` scaffolded\n * the package, leaving a self-inconsistent manifest. Failing fast at\n * write-time turns a silent foot-gun into an actionable diagnostic.\n *\n * Skipped when a side's `existing.<side>Contract` is null/absent (the\n * synthesis path stays open for origin-less initial migrations and for\n * bare `migration.ts` runs from scratch). When a bookend is *present*\n * but its `storage.storageHash` is missing, that's treated as a\n * mismatch — a malformed bookend is not equivalent to \"no bookend\".\n *\n * This check is paired with TML-2274, which removes `fromContract` /\n * `toContract` from the manifest entirely; once that lands, this\n * function and its error code are deleted.\n */\nfunction assertBookendsMatchMeta(\n meta: MigrationMeta,\n existing: Partial<MigrationMetadata> | null,\n): void {\n if (existing?.fromContract != null) {\n const contractHash = existing.fromContract.storage?.storageHash ?? '';\n if (contractHash !== meta.from) {\n throw errorStaleContractBookends({\n side: 'from',\n metaHash: meta.from,\n contractHash,\n });\n }\n }\n if (existing?.toContract != null) {\n const contractHash = existing.toContract.storage?.storageHash ?? '';\n if (contractHash !== meta.to) {\n throw errorStaleContractBookends({\n side: 'to',\n metaHash: meta.to,\n contractHash,\n });\n }\n }\n}\n\n/**\n * Project `existing.hints` down to the known `MigrationHints` shape, dropping\n * any legacy keys that may linger in metadata scaffolded by older CLI\n * versions (e.g. `planningStrategy`). Picking fields explicitly instead of\n * spreading keeps refreshed `migration.json` files schema-clean regardless\n * of what was on disk before.\n */\nfunction normalizeHints(existing: MigrationHints | undefined): MigrationHints {\n return {\n used: existing?.used ?? [],\n applied: existing?.applied ?? [],\n plannerVersion: existing?.plannerVersion ?? '2.0.0',\n };\n}\n\n/**\n * Pure conversion from a `Migration` instance (plus the previously\n * scaffolded metadata, when one exists on disk) to the in-memory\n * artifacts that downstream tooling persists. 
Owns metadata validation,\n * metadata synthesis/preservation, hint normalization, and the\n * content-addressed `migrationHash` computation, but performs no file I/O\n * — callers handle reads (to source `existing`) and writes (to persist\n * `opsJson` / `metadataJson`).\n */\nexport function buildMigrationArtifacts(\n instance: Migration,\n existing: Partial<MigrationMetadata> | null,\n): MigrationArtifacts {\n const ops = instance.operations;\n if (!Array.isArray(ops)) {\n throw new Error('operations must be an array');\n }\n\n for (let index = 0; index < ops.length; index++) {\n const result = MigrationOpSchema(ops[index]);\n if (result instanceof type.errors) {\n throw errorInvalidOperationEntry(index, result.summary);\n }\n }\n\n const rawMeta: unknown = instance.describe();\n const parsed = MigrationMetaSchema(rawMeta);\n if (parsed instanceof type.errors) {\n throw new Error(`describe() returned invalid metadata: ${parsed.summary}`);\n }\n\n const metadata = buildAttestedMetadata(parsed, ops, existing);\n\n return {\n opsJson: JSON.stringify(ops, null, 2),\n metadata,\n metadataJson: JSON.stringify(metadata, null, 2),\n };\n}\n"],"mappings":";;;;;;;;;AA4BA,MAAM,sBAAsB,KAAK;CAC/B,MAAM;CACN,IAAI;CACJ,WAAW,KAAK,SAAS,CAAC,OAAO;CAClC,CAAC;;;;;;;;;;AAWF,IAAsB,YAAtB,MAKA;;;;;;;;;;CAYE;CAEA,YAAY,OAA4C;EACtD,KAAK,QAAQ;;CAkBf,IAAI,SAAkD;EACpD,MAAM,OAAO,KAAK,UAAU,CAAC;EAC7B,OAAO,SAAS,OAAO,OAAO,EAAE,aAAa,MAAM;;CAGrD,IAAI,cAAgD;EAClD,OAAO,EAAE,aAAa,KAAK,UAAU,CAAC,IAAI;;;;;;;;;;AAW9C,SAAgB,mBAAmB,eAAgC;CACjE,MAAM,eAAe,cAAc,cAAc;CACjD,MAAM,QAAQ,QAAQ,KAAK;CAC3B,IAAI,CAAC,OAAO,OAAO;CACnB,IAAI;EACF,OAAO,aAAa,aAAa,KAAK,aAAa,MAAM;SACnD;EACN,OAAO;;;;;;;;;;;;;;;;;;;;AAsCX,SAAS,sBACP,MACA,KACA,UACmB;CACnB,wBAAwB,MAAM,SAAS;CAEvC,MAAM,eAAyD;EAC7D,MAAM,KAAK;EACX,IAAI,KAAK;EACT,QAAQ,KAAK,UAAU,UAAU,UAAU,EAAE;EAC7C,oBAAoB,yBAAyB,IAAI;EACjD,WAAW,UAAU,8BAAa,IAAI,MAAM,EAAC,aAAa;EAC1D,cAAc,UAAU,gBAAgB;EAMxC,YAAY,UAAU,cAAe,EAAE,SAAS,EAAE,aAAa,KAAK,IAAI,EAAE;EAC1E,OAAO,eAAe,UAAU,MAAM;EACtC,GAAG,UAAU,cAAc,UAAU,WAAW;EACjD;CAED,MAAM,gBAAgB,qBAAqB,cAAc,IAAI;CAC7D,OAAO;EAAE,GAAG;EAAc;EAAe;;;;;;;;;;;;;;;;;;;AAoB3C,SAAS,wBACP,MACA,UACM;CACN,IAAI,UAAU,gBAAgB,MAAM;EAClC,MAAM,eAAe,SAAS,aAAa,SAAS,eAAe;EACnE,IAAI,iBAAiB,KAAK,MACxB,MAAM,2BAA2B;GAC/B,MAAM;GACN,UAAU,KAAK;GACf;GACD,CAAC;;CAGN,IAAI,UAAU,cAAc,MAAM;EAChC,MAAM,eAAe,SAAS,WAAW,SAAS,eAAe;EACjE,IAAI,iBAAiB,KAAK,IACxB,MAAM,2BAA2B;GAC/B,MAAM;GACN,UAAU,KAAK;GACf;GACD,CAAC;;;;;;;;;;AAYR,SAAS,eAAe,UAAsD;CAC5E,OAAO;EACL,MAAM,UAAU,QAAQ,EAAE;EAC1B,SAAS,UAAU,WAAW,EAAE;EAChC,gBAAgB,UAAU,kBAAkB;EAC7C;;;;;;;;;;;AAYH,SAAgB,wBACd,UACA,UACoB;CACpB,MAAM,MAAM,SAAS;CACrB,IAAI,CAAC,MAAM,QAAQ,IAAI,EACrB,MAAM,IAAI,MAAM,8BAA8B;CAGhD,KAAK,IAAI,QAAQ,GAAG,QAAQ,IAAI,QAAQ,SAAS;EAC/C,MAAM,SAAS,kBAAkB,IAAI,OAAO;EAC5C,IAAI,kBAAkB,KAAK,QACzB,MAAM,2BAA2B,OAAO,OAAO,QAAQ;;CAK3D,MAAM,SAAS,oBADU,SAAS,UACQ,CAAC;CAC3C,IAAI,kBAAkB,KAAK,QACzB,MAAM,IAAI,MAAM,yCAAyC,OAAO,UAAU;CAG5E,MAAM,WAAW,sBAAsB,QAAQ,KAAK,SAAS;CAE7D,OAAO;EACL,SAAS,KAAK,UAAU,KAAK,MAAM,EAAE;EACrC;EACA,cAAc,KAAK,UAAU,UAAU,MAAM,EAAE;EAChD"}
@@ -1,3 +1,3 @@
- import { n as OnDiskMigrationPackage, t as MigrationOps } from "../package-B3Yl6DTr.mjs";
+ import { n as OnDiskMigrationPackage, t as MigrationOps } from "../package-BjiZ7KDy.mjs";
  import { MigrationPackage } from "@prisma-next/framework-components/control";
  export { type MigrationOps, type MigrationPackage, type OnDiskMigrationPackage };
@@ -1 +1 @@
- export { };
+ export {};
@@ -1 +1 @@
- {"version":3,"file":"refs.d.mts","names":[],"sources":["../../src/refs.ts"],"sourcesContent":[],"mappings":";UAUiB,QAAA;EAAA,SAAA,IAAQ,EAAA,MAAA;EAKb,SAAI,UAAA,EAAA,SAAA,MAAA,EAAA;;AAAY,KAAhB,IAAA,GAAO,QAAS,CAAA,MAAA,CAAA,MAAA,EAAe,QAAf,CAAA,CAAA;AAAT,iBAKH,eAAA,CALG,IAAA,EAAA,MAAA,CAAA,EAAA,OAAA;AAAQ,iBAaX,gBAAA,CAbW,KAAA,EAAA,MAAA,CAAA,EAAA,OAAA;AAKX,iBA8BM,OAAA,CA9BS,OAAA,EAAA,MAAA,EAAA,IAAA,EAAA,MAAA,CAAA,EA8B+B,OA9B/B,CA8BuC,QA9BvC,CAAA;AAQf,iBAyDM,QAAA,CAzDU,OAAA,EAAA,MAAA,CAAA,EAyDiB,OAzDjB,CAyDyB,IAzDzB,CAAA;AAsBV,iBAsFA,QAAA,CAtFgD,OAAR,EAAA,MAAO,EAAA,IAAA,EAAA,MAAA,EAAA,KAAA,EAsFA,QAtFA,CAAA,EAsFW,OAtFX,CAAA,IAAA,CAAA;AAmC/C,iBAuEA,SAAA,CAvE2B,OAAO,EAAA,MAAA,EAAA,IAAA,EAAA,MAAA,CAAA,EAuEQ,OAvER,CAAA,IAAA,CAAA;AAmDlC,iBA0DN,UAAA,CA1DqD,IAAW,EA0D/C,IA1DsD,EAAA,IAAA,EAAA,MAAA,CAAA,EA0DjC,QA1DiC"}
+ {"version":3,"file":"refs.d.mts","names":[],"sources":["../../src/refs.ts"],"mappings":";UAUiB,QAAA;EAAA,SACN,IAAA;EAAA,SACA,UAAA;AAAA;AAAA,KAGC,IAAA,GAAO,QAAA,CAAS,MAAA,SAAe,QAAA;AAAA,iBAK3B,eAAA,CAAgB,IAAA;AAAA,iBAQhB,gBAAA,CAAiB,KAAA;AAAA,iBAsBX,OAAA,CAAQ,OAAA,UAAiB,IAAA,WAAe,OAAA,CAAQ,QAAA;AAAA,iBAmChD,QAAA,CAAS,OAAA,WAAkB,OAAA,CAAQ,IAAA;AAAA,iBAmDnC,QAAA,CAAS,OAAA,UAAiB,IAAA,UAAc,KAAA,EAAO,QAAA,GAAW,OAAA;AAAA,iBAoB1D,SAAA,CAAU,OAAA,UAAiB,IAAA,WAAe,OAAA;AAAA,iBAsChD,UAAA,CAAW,IAAA,EAAM,IAAA,EAAM,IAAA,WAAe,QAAA"}
@@ -1,8 +1,7 @@
- import { f as errorInvalidRefFile, m as errorInvalidRefValue, p as errorInvalidRefName, t as MigrationToolsError } from "../errors-DQsXvidG.mjs";
+ import { f as errorInvalidRefFile, m as errorInvalidRefValue, p as errorInvalidRefName, t as MigrationToolsError } from "../errors-5KVuWV_5.mjs";
  import { dirname, join, relative } from "pathe";
  import { mkdir, readFile, readdir, rename, rmdir, unlink, writeFile } from "node:fs/promises";
  import { type } from "arktype";
-
  //#region src/refs.ts
  const REF_NAME_PATTERN = /^[a-z0-9]([a-z0-9-]*[a-z0-9])?(\/[a-z0-9]([a-z0-9-]*[a-z0-9])?)*$/;
  const REF_VALUE_PATTERN = /^sha256:(empty|[0-9a-f]{64})$/;
@@ -143,7 +142,7 @@ function resolveRef(refs, name) {
  });
  return refs[name];
  }
-
  //#endregion
  export { deleteRef, readRef, readRefs, resolveRef, validateRefName, validateRefValue, writeRef };
+
  //# sourceMappingURL=refs.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"refs.mjs","names":["raw: string","parsed: unknown","entries: string[]","refs: Record<string, RefEntry>"],"sources":["../../src/refs.ts"],"sourcesContent":["import { mkdir, readdir, readFile, rename, rmdir, unlink, writeFile } from 'node:fs/promises';\nimport { type } from 'arktype';\nimport { dirname, join, relative } from 'pathe';\nimport {\n errorInvalidRefFile,\n errorInvalidRefName,\n errorInvalidRefValue,\n MigrationToolsError,\n} from './errors';\n\nexport interface RefEntry {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\nexport type Refs = Readonly<Record<string, RefEntry>>;\n\nconst REF_NAME_PATTERN = /^[a-z0-9]([a-z0-9-]*[a-z0-9])?(\\/[a-z0-9]([a-z0-9-]*[a-z0-9])?)*$/;\nconst REF_VALUE_PATTERN = /^sha256:(empty|[0-9a-f]{64})$/;\n\nexport function validateRefName(name: string): boolean {\n if (name.length === 0) return false;\n if (name.includes('..')) return false;\n if (name.includes('//')) return false;\n if (name.startsWith('.')) return false;\n return REF_NAME_PATTERN.test(name);\n}\n\nexport function validateRefValue(value: string): boolean {\n return REF_VALUE_PATTERN.test(value);\n}\n\nconst RefEntrySchema = type({\n hash: 'string',\n invariants: 'string[]',\n}).narrow((entry, ctx) => {\n if (!validateRefValue(entry.hash))\n return ctx.mustBe(`a valid contract hash (got \"${entry.hash}\")`);\n return true;\n});\n\nfunction refFilePath(refsDir: string, name: string): string {\n return join(refsDir, `${name}.json`);\n}\n\nfunction refNameFromPath(refsDir: string, filePath: string): string {\n const rel = relative(refsDir, filePath);\n return rel.replace(/\\.json$/, '');\n}\n\nexport async function readRef(refsDir: string, name: string): Promise<RefEntry> {\n if (!validateRefName(name)) {\n throw errorInvalidRefName(name);\n }\n\n const filePath = refFilePath(refsDir, name);\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n if (error instanceof Error && (error as { code?: string }).code === 'ENOENT') {\n throw new MigrationToolsError('MIGRATION.UNKNOWN_REF', `Unknown ref \"${name}\"`, {\n why: `No ref file found at \"${filePath}\".`,\n fix: `Create the ref with: prisma-next migration ref set ${name} <hash>`,\n details: { refName: name, filePath },\n });\n }\n throw error;\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch {\n throw errorInvalidRefFile(filePath, 'Failed to parse as JSON');\n }\n\n const result = RefEntrySchema(parsed);\n if (result instanceof type.errors) {\n throw errorInvalidRefFile(filePath, result.summary);\n }\n\n return result;\n}\n\nexport async function readRefs(refsDir: string): Promise<Refs> {\n let entries: string[];\n try {\n entries = await readdir(refsDir, { recursive: true, encoding: 'utf-8' });\n } catch (error) {\n if (error instanceof Error && (error as { code?: string }).code === 'ENOENT') {\n return {};\n }\n throw error;\n }\n\n const jsonFiles = entries.filter((entry) => entry.endsWith('.json'));\n const refs: Record<string, RefEntry> = {};\n\n for (const jsonFile of jsonFiles) {\n const filePath = join(refsDir, jsonFile);\n const name = refNameFromPath(refsDir, filePath);\n\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n // Tolerate the TOCTOU race between `readdir` and `readFile` (ENOENT) and\n // benign EISDIR if a directory happens to end in `.json`. 
Anything else\n // (EACCES, EIO, EMFILE, …) is a real failure and propagates so the CLI\n // surfaces it rather than silently dropping the ref.\n const code = error instanceof Error ? (error as { code?: string }).code : undefined;\n if (code === 'ENOENT' || code === 'EISDIR') {\n continue;\n }\n throw error;\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch {\n throw errorInvalidRefFile(filePath, 'Failed to parse as JSON');\n }\n\n const result = RefEntrySchema(parsed);\n if (result instanceof type.errors) {\n throw errorInvalidRefFile(filePath, result.summary);\n }\n\n refs[name] = result;\n }\n\n return refs;\n}\n\nexport async function writeRef(refsDir: string, name: string, entry: RefEntry): Promise<void> {\n if (!validateRefName(name)) {\n throw errorInvalidRefName(name);\n }\n if (!validateRefValue(entry.hash)) {\n throw errorInvalidRefValue(entry.hash);\n }\n\n const filePath = refFilePath(refsDir, name);\n const dir = dirname(filePath);\n await mkdir(dir, { recursive: true });\n\n const tmpPath = join(dir, `.${name.split('/').pop()}.json.${Date.now()}.tmp`);\n await writeFile(\n tmpPath,\n `${JSON.stringify({ hash: entry.hash, invariants: [...entry.invariants] }, null, 2)}\\n`,\n );\n await rename(tmpPath, filePath);\n}\n\nexport async function deleteRef(refsDir: string, name: string): Promise<void> {\n if (!validateRefName(name)) {\n throw errorInvalidRefName(name);\n }\n\n const filePath = refFilePath(refsDir, name);\n try {\n await unlink(filePath);\n } catch (error) {\n if (error instanceof Error && (error as { code?: string }).code === 'ENOENT') {\n throw new MigrationToolsError('MIGRATION.UNKNOWN_REF', `Unknown ref \"${name}\"`, {\n why: `No ref file found at \"${filePath}\".`,\n fix: 'Run `prisma-next migration ref list` to see available refs.',\n details: { refName: name, filePath },\n });\n }\n throw error;\n }\n\n // Clean empty parent directories up to refsDir. Stop walking on the expected\n // \"directory has siblings\" signal (ENOTEMPTY on Linux, EEXIST on some BSDs)\n // and on ENOENT (concurrent removal). Anything else (EACCES, EIO, …) is a\n // real failure and propagates.\n let dir = dirname(filePath);\n while (dir !== refsDir && dir.startsWith(refsDir)) {\n try {\n await rmdir(dir);\n dir = dirname(dir);\n } catch (error) {\n const code = error instanceof Error ? (error as { code?: string }).code : undefined;\n if (code === 'ENOTEMPTY' || code === 'EEXIST' || code === 'ENOENT') {\n break;\n }\n throw error;\n }\n }\n}\n\nexport function resolveRef(refs: Refs, name: string): RefEntry {\n if (!validateRefName(name)) {\n throw errorInvalidRefName(name);\n }\n\n // Object.hasOwn gate: plain-object `refs` would otherwise let\n // `refs['constructor']` return Object.prototype.constructor and bypass the\n // UNKNOWN_REF throw. validateRefName accepts `\"constructor\"` as a name shape.\n if (!Object.hasOwn(refs, name)) {\n throw new MigrationToolsError('MIGRATION.UNKNOWN_REF', `Unknown ref \"${name}\"`, {\n why: `No ref named \"${name}\" exists.`,\n fix: `Available refs: ${Object.keys(refs).join(', ') || '(none)'}. 
Create a ref with: prisma-next migration ref set ${name} <hash>`,\n details: { refName: name, availableRefs: Object.keys(refs) },\n });\n }\n\n // biome-ignore lint/style/noNonNullAssertion: Object.hasOwn gate above guarantees this is defined\n return refs[name]!;\n}\n"],"mappings":";;;;;;AAiBA,MAAM,mBAAmB;AACzB,MAAM,oBAAoB;AAE1B,SAAgB,gBAAgB,MAAuB;AACrD,KAAI,KAAK,WAAW,EAAG,QAAO;AAC9B,KAAI,KAAK,SAAS,KAAK,CAAE,QAAO;AAChC,KAAI,KAAK,SAAS,KAAK,CAAE,QAAO;AAChC,KAAI,KAAK,WAAW,IAAI,CAAE,QAAO;AACjC,QAAO,iBAAiB,KAAK,KAAK;;AAGpC,SAAgB,iBAAiB,OAAwB;AACvD,QAAO,kBAAkB,KAAK,MAAM;;AAGtC,MAAM,iBAAiB,KAAK;CAC1B,MAAM;CACN,YAAY;CACb,CAAC,CAAC,QAAQ,OAAO,QAAQ;AACxB,KAAI,CAAC,iBAAiB,MAAM,KAAK,CAC/B,QAAO,IAAI,OAAO,+BAA+B,MAAM,KAAK,IAAI;AAClE,QAAO;EACP;AAEF,SAAS,YAAY,SAAiB,MAAsB;AAC1D,QAAO,KAAK,SAAS,GAAG,KAAK,OAAO;;AAGtC,SAAS,gBAAgB,SAAiB,UAA0B;AAElE,QADY,SAAS,SAAS,SAAS,CAC5B,QAAQ,WAAW,GAAG;;AAGnC,eAAsB,QAAQ,SAAiB,MAAiC;AAC9E,KAAI,CAAC,gBAAgB,KAAK,CACxB,OAAM,oBAAoB,KAAK;CAGjC,MAAM,WAAW,YAAY,SAAS,KAAK;CAC3C,IAAIA;AACJ,KAAI;AACF,QAAM,MAAM,SAAS,UAAU,QAAQ;UAChC,OAAO;AACd,MAAI,iBAAiB,SAAU,MAA4B,SAAS,SAClE,OAAM,IAAI,oBAAoB,yBAAyB,gBAAgB,KAAK,IAAI;GAC9E,KAAK,yBAAyB,SAAS;GACvC,KAAK,sDAAsD,KAAK;GAChE,SAAS;IAAE,SAAS;IAAM;IAAU;GACrC,CAAC;AAEJ,QAAM;;CAGR,IAAIC;AACJ,KAAI;AACF,WAAS,KAAK,MAAM,IAAI;SAClB;AACN,QAAM,oBAAoB,UAAU,0BAA0B;;CAGhE,MAAM,SAAS,eAAe,OAAO;AACrC,KAAI,kBAAkB,KAAK,OACzB,OAAM,oBAAoB,UAAU,OAAO,QAAQ;AAGrD,QAAO;;AAGT,eAAsB,SAAS,SAAgC;CAC7D,IAAIC;AACJ,KAAI;AACF,YAAU,MAAM,QAAQ,SAAS;GAAE,WAAW;GAAM,UAAU;GAAS,CAAC;UACjE,OAAO;AACd,MAAI,iBAAiB,SAAU,MAA4B,SAAS,SAClE,QAAO,EAAE;AAEX,QAAM;;CAGR,MAAM,YAAY,QAAQ,QAAQ,UAAU,MAAM,SAAS,QAAQ,CAAC;CACpE,MAAMC,OAAiC,EAAE;AAEzC,MAAK,MAAM,YAAY,WAAW;EAChC,MAAM,WAAW,KAAK,SAAS,SAAS;EACxC,MAAM,OAAO,gBAAgB,SAAS,SAAS;EAE/C,IAAIH;AACJ,MAAI;AACF,SAAM,MAAM,SAAS,UAAU,QAAQ;WAChC,OAAO;GAKd,MAAM,OAAO,iBAAiB,QAAS,MAA4B,OAAO;AAC1E,OAAI,SAAS,YAAY,SAAS,SAChC;AAEF,SAAM;;EAGR,IAAIC;AACJ,MAAI;AACF,YAAS,KAAK,MAAM,IAAI;UAClB;AACN,SAAM,oBAAoB,UAAU,0BAA0B;;EAGhE,MAAM,SAAS,eAAe,OAAO;AACrC,MAAI,kBAAkB,KAAK,OACzB,OAAM,oBAAoB,UAAU,OAAO,QAAQ;AAGrD,OAAK,QAAQ;;AAGf,QAAO;;AAGT,eAAsB,SAAS,SAAiB,MAAc,OAAgC;AAC5F,KAAI,CAAC,gBAAgB,KAAK,CACxB,OAAM,oBAAoB,KAAK;AAEjC,KAAI,CAAC,iBAAiB,MAAM,KAAK,CAC/B,OAAM,qBAAqB,MAAM,KAAK;CAGxC,MAAM,WAAW,YAAY,SAAS,KAAK;CAC3C,MAAM,MAAM,QAAQ,SAAS;AAC7B,OAAM,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;CAErC,MAAM,UAAU,KAAK,KAAK,IAAI,KAAK,MAAM,IAAI,CAAC,KAAK,CAAC,QAAQ,KAAK,KAAK,CAAC,MAAM;AAC7E,OAAM,UACJ,SACA,GAAG,KAAK,UAAU;EAAE,MAAM,MAAM;EAAM,YAAY,CAAC,GAAG,MAAM,WAAW;EAAE,EAAE,MAAM,EAAE,CAAC,IACrF;AACD,OAAM,OAAO,SAAS,SAAS;;AAGjC,eAAsB,UAAU,SAAiB,MAA6B;AAC5E,KAAI,CAAC,gBAAgB,KAAK,CACxB,OAAM,oBAAoB,KAAK;CAGjC,MAAM,WAAW,YAAY,SAAS,KAAK;AAC3C,KAAI;AACF,QAAM,OAAO,SAAS;UACf,OAAO;AACd,MAAI,iBAAiB,SAAU,MAA4B,SAAS,SAClE,OAAM,IAAI,oBAAoB,yBAAyB,gBAAgB,KAAK,IAAI;GAC9E,KAAK,yBAAyB,SAAS;GACvC,KAAK;GACL,SAAS;IAAE,SAAS;IAAM;IAAU;GACrC,CAAC;AAEJ,QAAM;;CAOR,IAAI,MAAM,QAAQ,SAAS;AAC3B,QAAO,QAAQ,WAAW,IAAI,WAAW,QAAQ,CAC/C,KAAI;AACF,QAAM,MAAM,IAAI;AAChB,QAAM,QAAQ,IAAI;UACX,OAAO;EACd,MAAM,OAAO,iBAAiB,QAAS,MAA4B,OAAO;AAC1E,MAAI,SAAS,eAAe,SAAS,YAAY,SAAS,SACxD;AAEF,QAAM;;;AAKZ,SAAgB,WAAW,MAAY,MAAwB;AAC7D,KAAI,CAAC,gBAAgB,KAAK,CACxB,OAAM,oBAAoB,KAAK;AAMjC,KAAI,CAAC,OAAO,OAAO,MAAM,KAAK,CAC5B,OAAM,IAAI,oBAAoB,yBAAyB,gBAAgB,KAAK,IAAI;EAC9E,KAAK,iBAAiB,KAAK;EAC3B,KAAK,mBAAmB,OAAO,KAAK,KAAK,CAAC,KAAK,KAAK,IAAI,SAAS,qDAAqD,KAAK;EAC3H,SAAS;GAAE,SAAS;GAAM,eAAe,OAAO,KAAK,KAAK;GAAE;EAC7D,CAAC;AAIJ,QAAO,KAAK"}
+ {"version":3,"file":"refs.mjs","names":[],"sources":["../../src/refs.ts"],"sourcesContent":["import { mkdir, readdir, readFile, rename, rmdir, unlink, writeFile } from 'node:fs/promises';\nimport { type } from 'arktype';\nimport { dirname, join, relative } from 'pathe';\nimport {\n errorInvalidRefFile,\n errorInvalidRefName,\n errorInvalidRefValue,\n MigrationToolsError,\n} from './errors';\n\nexport interface RefEntry {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\nexport type Refs = Readonly<Record<string, RefEntry>>;\n\nconst REF_NAME_PATTERN = /^[a-z0-9]([a-z0-9-]*[a-z0-9])?(\\/[a-z0-9]([a-z0-9-]*[a-z0-9])?)*$/;\nconst REF_VALUE_PATTERN = /^sha256:(empty|[0-9a-f]{64})$/;\n\nexport function validateRefName(name: string): boolean {\n if (name.length === 0) return false;\n if (name.includes('..')) return false;\n if (name.includes('//')) return false;\n if (name.startsWith('.')) return false;\n return REF_NAME_PATTERN.test(name);\n}\n\nexport function validateRefValue(value: string): boolean {\n return REF_VALUE_PATTERN.test(value);\n}\n\nconst RefEntrySchema = type({\n hash: 'string',\n invariants: 'string[]',\n}).narrow((entry, ctx) => {\n if (!validateRefValue(entry.hash))\n return ctx.mustBe(`a valid contract hash (got \"${entry.hash}\")`);\n return true;\n});\n\nfunction refFilePath(refsDir: string, name: string): string {\n return join(refsDir, `${name}.json`);\n}\n\nfunction refNameFromPath(refsDir: string, filePath: string): string {\n const rel = relative(refsDir, filePath);\n return rel.replace(/\\.json$/, '');\n}\n\nexport async function readRef(refsDir: string, name: string): Promise<RefEntry> {\n if (!validateRefName(name)) {\n throw errorInvalidRefName(name);\n }\n\n const filePath = refFilePath(refsDir, name);\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n if (error instanceof Error && (error as { code?: string }).code === 'ENOENT') {\n throw new MigrationToolsError('MIGRATION.UNKNOWN_REF', `Unknown ref \"${name}\"`, {\n why: `No ref file found at \"${filePath}\".`,\n fix: `Create the ref with: prisma-next migration ref set ${name} <hash>`,\n details: { refName: name, filePath },\n });\n }\n throw error;\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch {\n throw errorInvalidRefFile(filePath, 'Failed to parse as JSON');\n }\n\n const result = RefEntrySchema(parsed);\n if (result instanceof type.errors) {\n throw errorInvalidRefFile(filePath, result.summary);\n }\n\n return result;\n}\n\nexport async function readRefs(refsDir: string): Promise<Refs> {\n let entries: string[];\n try {\n entries = await readdir(refsDir, { recursive: true, encoding: 'utf-8' });\n } catch (error) {\n if (error instanceof Error && (error as { code?: string }).code === 'ENOENT') {\n return {};\n }\n throw error;\n }\n\n const jsonFiles = entries.filter((entry) => entry.endsWith('.json'));\n const refs: Record<string, RefEntry> = {};\n\n for (const jsonFile of jsonFiles) {\n const filePath = join(refsDir, jsonFile);\n const name = refNameFromPath(refsDir, filePath);\n\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n // Tolerate the TOCTOU race between `readdir` and `readFile` (ENOENT) and\n // benign EISDIR if a directory happens to end in `.json`. Anything else\n // (EACCES, EIO, EMFILE, …) is a real failure and propagates so the CLI\n // surfaces it rather than silently dropping the ref.\n const code = error instanceof Error ? 
(error as { code?: string }).code : undefined;\n if (code === 'ENOENT' || code === 'EISDIR') {\n continue;\n }\n throw error;\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch {\n throw errorInvalidRefFile(filePath, 'Failed to parse as JSON');\n }\n\n const result = RefEntrySchema(parsed);\n if (result instanceof type.errors) {\n throw errorInvalidRefFile(filePath, result.summary);\n }\n\n refs[name] = result;\n }\n\n return refs;\n}\n\nexport async function writeRef(refsDir: string, name: string, entry: RefEntry): Promise<void> {\n if (!validateRefName(name)) {\n throw errorInvalidRefName(name);\n }\n if (!validateRefValue(entry.hash)) {\n throw errorInvalidRefValue(entry.hash);\n }\n\n const filePath = refFilePath(refsDir, name);\n const dir = dirname(filePath);\n await mkdir(dir, { recursive: true });\n\n const tmpPath = join(dir, `.${name.split('/').pop()}.json.${Date.now()}.tmp`);\n await writeFile(\n tmpPath,\n `${JSON.stringify({ hash: entry.hash, invariants: [...entry.invariants] }, null, 2)}\\n`,\n );\n await rename(tmpPath, filePath);\n}\n\nexport async function deleteRef(refsDir: string, name: string): Promise<void> {\n if (!validateRefName(name)) {\n throw errorInvalidRefName(name);\n }\n\n const filePath = refFilePath(refsDir, name);\n try {\n await unlink(filePath);\n } catch (error) {\n if (error instanceof Error && (error as { code?: string }).code === 'ENOENT') {\n throw new MigrationToolsError('MIGRATION.UNKNOWN_REF', `Unknown ref \"${name}\"`, {\n why: `No ref file found at \"${filePath}\".`,\n fix: 'Run `prisma-next migration ref list` to see available refs.',\n details: { refName: name, filePath },\n });\n }\n throw error;\n }\n\n // Clean empty parent directories up to refsDir. Stop walking on the expected\n // \"directory has siblings\" signal (ENOTEMPTY on Linux, EEXIST on some BSDs)\n // and on ENOENT (concurrent removal). Anything else (EACCES, EIO, …) is a\n // real failure and propagates.\n let dir = dirname(filePath);\n while (dir !== refsDir && dir.startsWith(refsDir)) {\n try {\n await rmdir(dir);\n dir = dirname(dir);\n } catch (error) {\n const code = error instanceof Error ? (error as { code?: string }).code : undefined;\n if (code === 'ENOTEMPTY' || code === 'EEXIST' || code === 'ENOENT') {\n break;\n }\n throw error;\n }\n }\n}\n\nexport function resolveRef(refs: Refs, name: string): RefEntry {\n if (!validateRefName(name)) {\n throw errorInvalidRefName(name);\n }\n\n // Object.hasOwn gate: plain-object `refs` would otherwise let\n // `refs['constructor']` return Object.prototype.constructor and bypass the\n // UNKNOWN_REF throw. validateRefName accepts `\"constructor\"` as a name shape.\n if (!Object.hasOwn(refs, name)) {\n throw new MigrationToolsError('MIGRATION.UNKNOWN_REF', `Unknown ref \"${name}\"`, {\n why: `No ref named \"${name}\" exists.`,\n fix: `Available refs: ${Object.keys(refs).join(', ') || '(none)'}. 
Create a ref with: prisma-next migration ref set ${name} <hash>`,\n details: { refName: name, availableRefs: Object.keys(refs) },\n });\n }\n\n // biome-ignore lint/style/noNonNullAssertion: Object.hasOwn gate above guarantees this is defined\n return refs[name]!;\n}\n"],"mappings":";;;;;AAiBA,MAAM,mBAAmB;AACzB,MAAM,oBAAoB;AAE1B,SAAgB,gBAAgB,MAAuB;CACrD,IAAI,KAAK,WAAW,GAAG,OAAO;CAC9B,IAAI,KAAK,SAAS,KAAK,EAAE,OAAO;CAChC,IAAI,KAAK,SAAS,KAAK,EAAE,OAAO;CAChC,IAAI,KAAK,WAAW,IAAI,EAAE,OAAO;CACjC,OAAO,iBAAiB,KAAK,KAAK;;AAGpC,SAAgB,iBAAiB,OAAwB;CACvD,OAAO,kBAAkB,KAAK,MAAM;;AAGtC,MAAM,iBAAiB,KAAK;CAC1B,MAAM;CACN,YAAY;CACb,CAAC,CAAC,QAAQ,OAAO,QAAQ;CACxB,IAAI,CAAC,iBAAiB,MAAM,KAAK,EAC/B,OAAO,IAAI,OAAO,+BAA+B,MAAM,KAAK,IAAI;CAClE,OAAO;EACP;AAEF,SAAS,YAAY,SAAiB,MAAsB;CAC1D,OAAO,KAAK,SAAS,GAAG,KAAK,OAAO;;AAGtC,SAAS,gBAAgB,SAAiB,UAA0B;CAElE,OADY,SAAS,SAAS,SACpB,CAAC,QAAQ,WAAW,GAAG;;AAGnC,eAAsB,QAAQ,SAAiB,MAAiC;CAC9E,IAAI,CAAC,gBAAgB,KAAK,EACxB,MAAM,oBAAoB,KAAK;CAGjC,MAAM,WAAW,YAAY,SAAS,KAAK;CAC3C,IAAI;CACJ,IAAI;EACF,MAAM,MAAM,SAAS,UAAU,QAAQ;UAChC,OAAO;EACd,IAAI,iBAAiB,SAAU,MAA4B,SAAS,UAClE,MAAM,IAAI,oBAAoB,yBAAyB,gBAAgB,KAAK,IAAI;GAC9E,KAAK,yBAAyB,SAAS;GACvC,KAAK,sDAAsD,KAAK;GAChE,SAAS;IAAE,SAAS;IAAM;IAAU;GACrC,CAAC;EAEJ,MAAM;;CAGR,IAAI;CACJ,IAAI;EACF,SAAS,KAAK,MAAM,IAAI;SAClB;EACN,MAAM,oBAAoB,UAAU,0BAA0B;;CAGhE,MAAM,SAAS,eAAe,OAAO;CACrC,IAAI,kBAAkB,KAAK,QACzB,MAAM,oBAAoB,UAAU,OAAO,QAAQ;CAGrD,OAAO;;AAGT,eAAsB,SAAS,SAAgC;CAC7D,IAAI;CACJ,IAAI;EACF,UAAU,MAAM,QAAQ,SAAS;GAAE,WAAW;GAAM,UAAU;GAAS,CAAC;UACjE,OAAO;EACd,IAAI,iBAAiB,SAAU,MAA4B,SAAS,UAClE,OAAO,EAAE;EAEX,MAAM;;CAGR,MAAM,YAAY,QAAQ,QAAQ,UAAU,MAAM,SAAS,QAAQ,CAAC;CACpE,MAAM,OAAiC,EAAE;CAEzC,KAAK,MAAM,YAAY,WAAW;EAChC,MAAM,WAAW,KAAK,SAAS,SAAS;EACxC,MAAM,OAAO,gBAAgB,SAAS,SAAS;EAE/C,IAAI;EACJ,IAAI;GACF,MAAM,MAAM,SAAS,UAAU,QAAQ;WAChC,OAAO;GAKd,MAAM,OAAO,iBAAiB,QAAS,MAA4B,OAAO,KAAA;GAC1E,IAAI,SAAS,YAAY,SAAS,UAChC;GAEF,MAAM;;EAGR,IAAI;EACJ,IAAI;GACF,SAAS,KAAK,MAAM,IAAI;UAClB;GACN,MAAM,oBAAoB,UAAU,0BAA0B;;EAGhE,MAAM,SAAS,eAAe,OAAO;EACrC,IAAI,kBAAkB,KAAK,QACzB,MAAM,oBAAoB,UAAU,OAAO,QAAQ;EAGrD,KAAK,QAAQ;;CAGf,OAAO;;AAGT,eAAsB,SAAS,SAAiB,MAAc,OAAgC;CAC5F,IAAI,CAAC,gBAAgB,KAAK,EACxB,MAAM,oBAAoB,KAAK;CAEjC,IAAI,CAAC,iBAAiB,MAAM,KAAK,EAC/B,MAAM,qBAAqB,MAAM,KAAK;CAGxC,MAAM,WAAW,YAAY,SAAS,KAAK;CAC3C,MAAM,MAAM,QAAQ,SAAS;CAC7B,MAAM,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;CAErC,MAAM,UAAU,KAAK,KAAK,IAAI,KAAK,MAAM,IAAI,CAAC,KAAK,CAAC,QAAQ,KAAK,KAAK,CAAC,MAAM;CAC7E,MAAM,UACJ,SACA,GAAG,KAAK,UAAU;EAAE,MAAM,MAAM;EAAM,YAAY,CAAC,GAAG,MAAM,WAAW;EAAE,EAAE,MAAM,EAAE,CAAC,IACrF;CACD,MAAM,OAAO,SAAS,SAAS;;AAGjC,eAAsB,UAAU,SAAiB,MAA6B;CAC5E,IAAI,CAAC,gBAAgB,KAAK,EACxB,MAAM,oBAAoB,KAAK;CAGjC,MAAM,WAAW,YAAY,SAAS,KAAK;CAC3C,IAAI;EACF,MAAM,OAAO,SAAS;UACf,OAAO;EACd,IAAI,iBAAiB,SAAU,MAA4B,SAAS,UAClE,MAAM,IAAI,oBAAoB,yBAAyB,gBAAgB,KAAK,IAAI;GAC9E,KAAK,yBAAyB,SAAS;GACvC,KAAK;GACL,SAAS;IAAE,SAAS;IAAM;IAAU;GACrC,CAAC;EAEJ,MAAM;;CAOR,IAAI,MAAM,QAAQ,SAAS;CAC3B,OAAO,QAAQ,WAAW,IAAI,WAAW,QAAQ,EAC/C,IAAI;EACF,MAAM,MAAM,IAAI;EAChB,MAAM,QAAQ,IAAI;UACX,OAAO;EACd,MAAM,OAAO,iBAAiB,QAAS,MAA4B,OAAO,KAAA;EAC1E,IAAI,SAAS,eAAe,SAAS,YAAY,SAAS,UACxD;EAEF,MAAM;;;AAKZ,SAAgB,WAAW,MAAY,MAAwB;CAC7D,IAAI,CAAC,gBAAgB,KAAK,EACxB,MAAM,oBAAoB,KAAK;CAMjC,IAAI,CAAC,OAAO,OAAO,MAAM,KAAK,EAC5B,MAAM,IAAI,oBAAoB,yBAAyB,gBAAgB,KAAK,IAAI;EAC9E,KAAK,iBAAiB,KAAK;EAC3B,KAAK,mBAAmB,OAAO,KAAK,KAAK,CAAC,KAAK,KAAK,IAAI,SAAS,qDAAqD,KAAK;EAC3H,SAAS;GAAE,SAAS;GAAM,eAAe,OAAO,KAAK,KAAK;GAAE;EAC7D,CAAC;CAIJ,OAAO,KAAK"}
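The rewritten `refs.mjs` above carries the ref-name and ref-value rules from `src/refs.ts`. As a quick orientation for readers of this diff, the two checks reduce to the following standalone sketch; the helper names here are illustrative and are not exports of the package:

```ts
// Sketch of the ref-name / ref-value rules embedded in src/refs.ts above.
// `isRefName` / `isRefValue` are illustrative names, not package exports.
const REF_NAME = /^[a-z0-9]([a-z0-9-]*[a-z0-9])?(\/[a-z0-9]([a-z0-9-]*[a-z0-9])?)*$/;
const REF_VALUE = /^sha256:(empty|[0-9a-f]{64})$/;

function isRefName(name: string): boolean {
  // Traversal-shaped names are rejected before the pattern check, as the module does.
  if (name.length === 0 || name.includes('..') || name.includes('//')) return false;
  if (name.startsWith('.')) return false;
  return REF_NAME.test(name);
}

function isRefValue(value: string): boolean {
  return REF_VALUE.test(value);
}

// e.g. isRefName('feature/add-users') === true, isRefName('../escape') === false,
//      isRefValue('sha256:empty') === true,     isRefValue('md5:abc') === false.
```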
@@ -1,7 +1,6 @@
  import { APP_SPACE_ID } from "@prisma-next/framework-components/control";

  //#region src/concatenate-space-apply-inputs.d.ts
-
  /**
  * Per-space input the runner consumes when applying a migration.
  *
@@ -1 +1 @@
- {"version":3,"file":"spaces.d.mts","names":[],"sources":["../../src/concatenate-space-apply-inputs.ts","../../src/detect-space-contract-drift.ts","../../src/emit-pinned-space-artefacts.ts","../../src/plan-all-spaces.ts","../../src/read-pinned-contract-hash.ts","../../src/space-layout.ts","../../src/verify-contract-spaces.ts"],"sourcesContent":[],"mappings":";;;;;;;AAuBA;AAsCA;;;;;;;;;AC5CA;AAyBA;AA0BA;;;;ACzDA;AAyBiB,UFbA,eEayB,CAAA,GAAA,CAAA,CAAA;EA8BpB,SAAA,OAAA,EAAA,MAAA;;;;ECjDL,SAAA,IAAA,EAAA,SHWS,GGXK,EAAA;AAM/B;AAkCA;;;;;;;;;;;;AClBA;;;;AC1BA;AAWA;AAIA;AAoBA;;;;ACrBA;AA2CA;AAUA;AAKA;;;;AA8BkD,iBNtDlC,2BMsDkC,CAAA,GAAA,CAAA,CAAA,MAAA,EAAA,SNrD/B,eMqD+B,CNrDf,GMqDe,CAAA,EAAA,CAAA,EAAA,SNpDtC,eMoDsC,CNpDtB,GMoDsB,CAAA,EAAA;;;;;;AN5FlD;AAsCA;;;;;;;;;AC5CA;AAyBA;AA0BA;;UAnDiB,8BAAA;;ECNA,SAAA,UAAA,EAAkB,MAAA,GAAA,IAAA;AAyBnC;AA8BA;;;;ACjDA;AAMA;AAkCA;;;;;;;;;;;;AClBA;;KHGY,wBAAA;;EI7BA,SAAA,OAAY,EAAA,MAAA;EAWR,SAAA,cAAc,EAAA,MAA8B;EAI5C,SAAA,UAAA,EAAkB,MAAA,GAAA,IAAsC;AAoBxE,CAAA;;;;ACrBA;AA2CA;AAUA;AAKA;;;;;;;AAiCA;AA+BA;AAqCA;;;;iBLtHgB,wBAAA,0BAEN,iCACP;;;;;;ADhDH;AAsCA;AACmC,UEnDlB,kBAAA,CFmDkB;EAAhB,SAAA,IAAA,EAAA,MAAA;EACS,SAAA,UAAA,EAAA,SAAA,MAAA,EAAA;;;;;;AC9C5B;AAyBA;AA0BA;;;;ACzDA;AAyBA;AA8BA;;;;ACjDA;AAMA;AAkCA;;AACmB,UDtBF,yBAAA,CCsBE;EACiB,SAAA,QAAA,EAAA,OAAA;EAAf,SAAA,WAAA,EAAA,MAAA;EAAuC,SAAA,OAAA,EDpBxC,kBCoBwC;;;;;;;ACpB5D;;;;AC1BA;AAWA;AAIA;AAoBA;;;;ACrBA;AA2CA;AAUA;AAKA;;;;;AA8B8B,iBJjDR,wBAAA,CIiDQ,oBAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,EAAA,MAAA,EJ9CpB,yBI8CoB,CAAA,EJ7C3B,OI6C2B,CAAA,IAAA,CAAA;;;;;;AN5F9B;AAsCA;;;;;;;;;AC5CA;AAyBA;AA0BgB,UEnDC,cFmDuB,CAAA,SAAA,CAAA,CAE9B;;0BEnDgB;wBACF;ADTxB;AAyBiB,UCbA,eDayB,CAAA,QAAA,CAAA,CAAA;EA8BpB,SAAA,OAAA,EAAA,MAAA;uCCzCiB;;;AARvC;AAMA;AAkCA;;;;;;;;;;;;AClBA;;;;AC1BA;AAWA;AAIA;AAoBA;;;;ACrBA;AA2CA;AAUA;AAKiB,iBH5BD,aG4B2B,CAAA,SAAA,EAAA,QAAA,CAAA,CAAA,MAAA,EAAA,SH3BxB,cG2BwB,CH3BT,SG2BS,CAAA,EAAA,EAAA,SAAA,EAAA,CAAA,KAAA,EH1BtB,cG0BsB,CH1BP,SG0BO,CAAA,EAAA,GAAA,SH1BiB,QG0BjB,EAAA,CAAA,EAAA,SHzB/B,eGyB+B,CHzBf,QGyBe,CAAA,EAAA;;;;;;AN9D3C;AAsCA;;;;;;;;;AC5CA;AAyBA;AA0BA;;;;ACzDA;AAyBA;AA8BA;;;;ACjDA;AAMA;AAkCA;;;AAEoC,iBCpBd,sBAAA,CDoBc,oBAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,CAAA,ECjBjC,ODiBiC,CAAA,MAAA,GAAA,IAAA,CAAA;;;;AHpCpC;AAsCA;;;;;AAE2B,KKlDf,YAAA,GLkDe,MAAA,GAAA;;;iBKvCX,cAAA,8BAA4C;AJP3C,iBIWD,kBAAA,CJX+B,OAAA,EAAA,MAAA,CAAA,EAAA,QAAA,OAAA,IIWyB,YJXzB;AAyB/C;AA0BA;;;;ACzDA;AAyBA;AA8BA;;;;ACjDA;AAMA;AAkCA;AACkC,iBEVlB,uBAAA,CFUkB,oBAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,CAAA,EAAA,MAAA;;;;;;AHnClC;AAsCA;;;;;;;;;AC5CA;AAyBA;AA0BA;;;iBKzCsB,0BAAA,gCAEnB;AJlBH;AAyBA;AA8BA;;;;ACjDiB,UGqDA,qBAAA,CHrDc;EAMd,SAAA,IAAA,EAAA,MAAe;EAkChB,SAAA,UAAa,EAAA,SAAA,MAAA,EAAA;;;;;;;AAGjB,UGoBK,iBAAA,CHpBL;EAAe,SAAA,IAAA,EAAA,MAAA;;;UGyBV,0BAAA;EF9CK;;;;AC1BtB;AAWA;AAIA;EAoBgB,SAAA,YAAA,EC6CS,WD7Cc,CAAA,MAAA,CAAA;;;;ACrBvC;AA2CA;EAUiB,SAAA,gBAAiB,EAAA,SAAA,MAAA,EAAA;EAKjB;;;;;;;EAiCL,SAAA,mBAAsB,EATF,WASE,CAAA,MAAA,EATkB,qBASlB,CAAA;EA+BtB;AAqCZ;;;8BAvE8B,oBAAoB;;KAGtC,sBAAA;;;;;;;;;;;;;;;;;;;;;;;;;KA+BA,0BAAA;;;;gCAE4C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBAmCxC,oBAAA,SACN,6BACP"}
+ {"version":3,"file":"spaces.d.mts","names":[],"sources":["../../src/concatenate-space-apply-inputs.ts","../../src/detect-space-contract-drift.ts","../../src/emit-pinned-space-artefacts.ts","../../src/plan-all-spaces.ts","../../src/read-pinned-contract-hash.ts","../../src/space-layout.ts","../../src/verify-contract-spaces.ts"],"mappings":";;;;;;AAuBA;;;;;;;;;;;;;AAsCA;;;;UAtCiB,eAAA;EAAA,SACN,OAAA;EAAA,SACA,kBAAA;EAAA,SACA,iBAAA;EAAA,SACA,uBAAA;EAAA,SACA,IAAA,WAAe,GAAA;AAAA;;;;;;;;;;ACX1B;;;;;AAyBA;;;;;;;;;;AA0BA;;;;;;iBDPgB,2BAAA,KAAA,CACd,MAAA,WAAiB,eAAA,CAAgB,GAAA,eACvB,eAAA,CAAgB,GAAA;;;;;;AAxC5B;;;;;;;;;;;;;AAsCA;UC5CiB,8BAAA;EAAA,SACN,cAAA;EAAA,SACA,UAAA;AAAA;;;;;;;;;;;;;;;;AAFX;;;;;KAyBY,wBAAA;EAAA,SACD,IAAA;EAAA,SACA,OAAA;EAAA,SACA,cAAA;EAAA,SACA,UAAA;AAAA;;;;;AAsBX;;;;;;;;;;;;;ACzDA;;iBDyDgB,wBAAA,CACd,OAAA,UACA,MAAA,EAAQ,8BAAA,GACP,wBAAA;;;;;;ADhDH;;UEZiB,kBAAA;EAAA,SACN,IAAA;EAAA,SACA,UAAA;AAAA;;;;;;;;AFgDX;;;;;;;;;;;;;UEzBiB,yBAAA;EAAA,SACN,QAAA;EAAA,SACA,WAAA;EAAA,SACA,OAAA,EAAS,kBAAA;AAAA;;;ADtBpB;;;;;AAyBA;;;;;;;;;;AA0BA;;;;;;;iBCFsB,wBAAA,CACpB,oBAAA,UACA,OAAA,UACA,MAAA,EAAQ,yBAAA,GACP,OAAA;;;;;;AF/CH;;;;;;;;;;;;UGNiB,cAAA;EAAA,SACN,OAAA;EAAA,SACA,aAAA,EAAe,SAAA;EAAA,SACf,WAAA,EAAa,SAAA;AAAA;AAAA,UAGP,eAAA;EAAA,SACN,OAAA;EAAA,SACA,iBAAA,WAA4B,QAAA;AAAA;;;;;;;;;;;;;AFRvC;;;;;AAyBA;;;;;;;;;;AA0BA;;iBEXgB,aAAA,qBAAA,CACd,MAAA,WAAiB,cAAA,CAAe,SAAA,KAChC,SAAA,GAAY,KAAA,EAAO,cAAA,CAAe,SAAA,eAAwB,QAAA,cAChD,eAAA,CAAgB,QAAA;;;;;;AHrC5B;;;;;;;;;;;;;AAsCA;;;;;;;;;;;;;;iBItBsB,sBAAA,CACpB,oBAAA,UACA,OAAA,WACC,OAAA;;;;AJnBH;;;;;;KKVY,YAAA;EAAA,SAAmC,OAAA;AAAA;AAAA,iBAW/B,cAAA,CAAe,OAAA,WAAkB,OAAA,IAAW,YAAA;AAAA,iBAI5C,kBAAA,CAAmB,OAAA,mBAA0B,OAAA,IAAW,YAAA;;;ALiCxE;;;;;;;;;;;;iBKbgB,uBAAA,CAAwB,oBAAA,UAA8B,OAAA;;;;;;ALzBtE;;;;;;;;;;;;;AAsCA;;iBMlCsB,0BAAA,CACpB,oBAAA,WACC,OAAA;;;;;;;UAyCc,qBAAA;EAAA,SACN,IAAA;EAAA,SACA,UAAA;AAAA;;;;;;UAQM,iBAAA;EAAA,SACN,IAAA;EAAA,SACA,UAAA;AAAA;AAAA,UAGM,0BAAA;ELnEN;;AAwBX;;;;;EAxBW,SK2EA,YAAA,EAAc,WAAA;ELhDd;;;;AAuBX;EAvBW,SKuDA,gBAAA;;;;;;;;WASA,mBAAA,EAAqB,WAAA,SAAoB,qBAAA;;;;AJlGpD;WIwGW,iBAAA,EAAmB,WAAA,SAAoB,iBAAA;AAAA;AAAA,KAGtC,sBAAA;EAAA,SAEG,IAAA;EAAA,SACA,OAAA;EAAA,SACA,WAAA;AAAA;EAAA,SAGA,IAAA;EAAA,SACA,OAAA;EAAA,SACA,WAAA;AAAA;EAAA,SAGA,IAAA;EAAA,SACA,OAAA;EAAA,SACA,WAAA;AAAA;EAAA,SAGA,IAAA;EAAA,SACA,OAAA;EAAA,SACA,UAAA;EAAA,SACA,UAAA;EAAA,SACA,WAAA;AAAA;EAAA,SAGA,IAAA;EAAA,SACA,OAAA;EAAA,SACA,gBAAA;EAAA,SACA,gBAAA;EAAA,SACA,WAAA;AAAA;AAAA,KAGH,0BAAA;EAAA,SACG,EAAA;AAAA;EAAA,SACA,EAAA;EAAA,SAAoB,UAAA,WAAqB,sBAAA;AAAA;;;;;;;;AHhIxD;;;;;;;;;;AAkCA;;;;;;;;;;;;;;;;iBGiIgB,oBAAA,CACd,MAAA,EAAQ,0BAAA,GACP,0BAAA"}
@@ -1,11 +1,9 @@
- import { S as errorPinnedArtefactsAppSpace, f as errorInvalidRefFile, g as errorInvalidSpaceId, l as errorInvalidJson, o as errorDuplicateSpaceId } from "../errors-DQsXvidG.mjs";
- import { r as canonicalizeJson } from "../hash-G0bAfIGh.mjs";
- import "../invariants-4Avb_Yhy.mjs";
- import { t as MANIFEST_FILE } from "../io-CDJaWGbt.mjs";
+ import { S as errorPinnedArtefactsAppSpace, f as errorInvalidRefFile, g as errorInvalidSpaceId, l as errorInvalidJson, o as errorDuplicateSpaceId } from "../errors-5KVuWV_5.mjs";
+ import { r as canonicalizeJson } from "../hash-By50zM_E.mjs";
+ import { t as MANIFEST_FILE } from "../io-TX8RPDeh.mjs";
  import { join } from "pathe";
  import { mkdir, readFile, readdir, stat, writeFile } from "node:fs/promises";
  import { APP_SPACE_ID } from "@prisma-next/framework-components/control";
-
  //#region src/space-layout.ts
  /**
  * Pattern a contract-space identifier must match. The constraint is
@@ -40,7 +38,6 @@ function spaceMigrationDirectory(projectMigrationsDir, spaceId) {
  assertValidSpaceId(spaceId);
  return join(projectMigrationsDir, spaceId);
  }
-
  //#endregion
  //#region src/concatenate-space-apply-inputs.ts
  /**
@@ -90,7 +87,6 @@ function concatenateSpaceApplyInputs(inputs) {
  });
  return appSpace ? [...extensions, appSpace] : extensions;
  }
-
  //#endregion
  //#region src/detect-space-contract-drift.ts
  /**
@@ -132,7 +128,6 @@ function detectSpaceContractDrift(spaceId, inputs) {
  pinnedHash: inputs.pinnedHash
  };
  }
-
  //#endregion
  //#region src/emit-pinned-space-artefacts.ts
  /**
@@ -173,7 +168,6 @@ async function emitPinnedSpaceArtefacts(projectMigrationsDir, spaceId, inputs) {
  });
  await writeFile(join(dir, "refs", "head.json"), `${headJson}\n`);
  }
-
  //#endregion
  //#region src/plan-all-spaces.ts
  /**
@@ -220,7 +214,6 @@ function planAllSpaces(inputs, planSpace) {
  migrationPackages: planSpace(input)
  }));
  }
-
  //#endregion
  //#region src/read-pinned-contract-hash.ts
  function hasErrnoCode$1(error, code) {
@@ -276,7 +269,6 @@ async function readPinnedContractHash(projectMigrationsDir, spaceId) {
  if (typeof parsed !== "object" || parsed === null || typeof parsed.hash !== "string") throw errorInvalidRefFile(filePath, "expected an object with a string `hash` field");
  return parsed.hash;
  }
-
  //#endregion
  //#region src/verify-contract-spaces.ts
  function hasErrnoCode(error, code) {
@@ -427,7 +419,7 @@ function verifyContractSpaces(inputs) {
  violations
  };
  }
-
  //#endregion
  export { APP_SPACE_ID, assertValidSpaceId, concatenateSpaceApplyInputs, detectSpaceContractDrift, emitPinnedSpaceArtefacts, isValidSpaceId, listPinnedSpaceDirectories, planAllSpaces, readPinnedContractHash, spaceMigrationDirectory, verifyContractSpaces };
+
  //# sourceMappingURL=spaces.mjs.map
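The `concatenate-space-apply-inputs` region above encodes the cross-space apply order: extension spaces first, alphabetically by `spaceId`, then the app space, with duplicate ids rejected before any output is produced. A minimal standalone sketch of that ordering rule, using the literal `'app'` in place of `APP_SPACE_ID` and an illustrative `orderSpaces` name:

```ts
// Sketch of the ordering implemented by concatenateSpaceApplyInputs in
// spaces.mjs above; `orderSpaces` and the literal 'app' are illustrative.
interface ApplyInputLike {
  readonly spaceId: string;
}

function orderSpaces<T extends ApplyInputLike>(inputs: readonly T[]): readonly T[] {
  // Duplicate space ids are rejected up front.
  const seen = new Set<string>();
  for (const input of inputs) {
    if (seen.has(input.spaceId)) throw new Error(`duplicate space id "${input.spaceId}"`);
    seen.add(input.spaceId);
  }
  // Extension spaces sort alphabetically; the app space, if present, goes last.
  const extensions = inputs.filter((i) => i.spaceId !== 'app');
  extensions.sort((a, b) => (a.spaceId < b.spaceId ? -1 : a.spaceId > b.spaceId ? 1 : 0));
  const app = inputs.find((i) => i.spaceId === 'app');
  return app ? [...extensions, app] : extensions;
}

// orderSpaces([{ spaceId: 'app' }, { spaceId: 'geo' }, { spaceId: 'audit' }])
//   yields audit, geo, app — independent of the input order.
```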
@@ -1 +1 @@
- {"version":3,"file":"spaces.mjs","names":["extensions: SpaceApplyInput<TOp>[]","appSpace: SpaceApplyInput<TOp> | undefined","hasErrnoCode","raw: string","parsed: unknown","entries: { readonly name: string; readonly isDirectory: boolean }[]","violations: SpaceVerifierViolation[]","kindOrder: Record<SpaceVerifierViolation['kind'], number>"],"sources":["../../src/space-layout.ts","../../src/concatenate-space-apply-inputs.ts","../../src/detect-space-contract-drift.ts","../../src/emit-pinned-space-artefacts.ts","../../src/plan-all-spaces.ts","../../src/read-pinned-contract-hash.ts","../../src/verify-contract-spaces.ts"],"sourcesContent":["import { APP_SPACE_ID } from '@prisma-next/framework-components/control';\nimport { join } from 'pathe';\nimport { errorInvalidSpaceId } from './errors';\n\nexport { APP_SPACE_ID };\n\n/**\n * Branded string carrying a compile-time guarantee that the value has\n * been validated by {@link assertValidSpaceId}. Downstream filesystem\n * helpers (e.g. {@link spaceMigrationDirectory}) accept this type to\n * make \"validated\" tracking visible at the type level rather than\n * relying purely on a runtime check.\n */\nexport type ValidSpaceId = string & { readonly __brand: 'ValidSpaceId' };\n\n/**\n * Pattern a contract-space identifier must match. The constraint is\n * filesystem-friendly: lowercase letters / digits / hyphen / underscore,\n * starts with a letter, max 64 characters.\n *\n * @see specs/framework-mechanism.spec.md § 3.\n */\nconst SPACE_ID_PATTERN = /^[a-z][a-z0-9_-]{0,63}$/;\n\nexport function isValidSpaceId(spaceId: string): spaceId is ValidSpaceId {\n return SPACE_ID_PATTERN.test(spaceId);\n}\n\nexport function assertValidSpaceId(spaceId: string): asserts spaceId is ValidSpaceId {\n if (!isValidSpaceId(spaceId)) {\n throw errorInvalidSpaceId(spaceId);\n }\n}\n\n/**\n * Resolve the migrations subdirectory for a given contract space.\n *\n * - **App space** (`spaceId === APP_SPACE_ID`) keeps today's layout: the\n * project's `migrations/` directory is the migrations directory, no\n * subdirectory.\n * - **Extension space** lands under `<projectMigrationsDir>/<spaceId>/`.\n * The space id is validated against {@link SPACE_ID_PATTERN} because\n * it becomes a filesystem directory name verbatim.\n *\n * `projectMigrationsDir` is the project's top-level `migrations/`\n * directory; the helper does not assume anything about its absolute /\n * relative shape and is symmetric with `pathe.join`.\n */\nexport function spaceMigrationDirectory(projectMigrationsDir: string, spaceId: string): string {\n if (spaceId === APP_SPACE_ID) {\n return projectMigrationsDir;\n }\n assertValidSpaceId(spaceId);\n return join(projectMigrationsDir, spaceId);\n}\n","import { errorDuplicateSpaceId } from './errors';\nimport { APP_SPACE_ID } from './space-layout';\n\n/**\n * Per-space input the runner consumes when applying a migration.\n *\n * The shape is target-agnostic: callers (today the SQL family; later\n * any other family) bind `TOp` to their own per-target operation type\n * (e.g. `SqlMigrationPlanOperation<TTargetDetails>` for the SQL family)\n * and the helper preserves it through the concatenation.\n *\n * - `migrationDirectory` is the on-disk migration directory for the\n * space — `<projectRoot>/migrations` for `'app'` and\n * `<projectRoot>/migrations/<space-id>` for an extension space.\n * - `currentMarkerHash` and `currentMarkerInvariants` are the values\n * read from the `prisma_contract.marker` row keyed by `space = <space-id>`\n * (T1.1). 
`null` hash = no marker row yet.\n * - `path` is the per-space operation list resolved from\n * `findPathWithDecision(currentMarker, ref.hash, effectiveRequired)`\n * per ADR 208, materialised against the on-disk migration packages.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Runner.\n */\nexport interface SpaceApplyInput<TOp> {\n readonly spaceId: string;\n readonly migrationDirectory: string;\n readonly currentMarkerHash: string | null;\n readonly currentMarkerInvariants: readonly string[];\n readonly path: readonly TOp[];\n}\n\n/**\n * Order a set of per-space apply inputs into the canonical cross-space\n * sequence the runner applies under a single transaction.\n *\n * Cross-space ordering convention (sub-spec § 4):\n *\n * 1. **Extension spaces first**, alphabetically by `spaceId`.\n * 2. **App space last** — only one `'app'` entry expected, at most.\n *\n * Rationale: extensions install their own structural objects (types,\n * functions, helper tables) before the app's structural ops reference\n * them. Putting app-space last lets app-space ops freely depend on any\n * extension-space declaration in the same transaction.\n *\n * Determinism (NFR6): the output order is independent of the input\n * order, so two callers with the same set of `extensionPacks` produce\n * identical apply sequences.\n *\n * Atomicity: rejects duplicate `spaceId`s with\n * `MIGRATION.DUPLICATE_SPACE_ID` before producing any output. This\n * mirrors {@link import('./plan-all-spaces').planAllSpaces} so the\n * planner-side and runner-side helpers reject malformed inputs the same\n * way (callers don't need a separate dedup pass).\n *\n * Synchronous, pure, no I/O: callers resolve marker rows and `path`\n * before invoking this helper. The actual DB application — driving the\n * transaction, committing marker writes, recording the per-space marker\n * rows — happens at the SQL-family consumption site (per the\n * helper-location convention from R3).\n */\nexport function concatenateSpaceApplyInputs<TOp>(\n inputs: readonly SpaceApplyInput<TOp>[],\n): readonly SpaceApplyInput<TOp>[] {\n const seen = new Set<string>();\n for (const input of inputs) {\n if (seen.has(input.spaceId)) {\n throw errorDuplicateSpaceId(input.spaceId);\n }\n seen.add(input.spaceId);\n }\n\n const extensions: SpaceApplyInput<TOp>[] = [];\n let appSpace: SpaceApplyInput<TOp> | undefined;\n for (const input of inputs) {\n if (input.spaceId === APP_SPACE_ID) {\n appSpace = input;\n } else {\n extensions.push(input);\n }\n }\n\n extensions.sort((a, b) => {\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return appSpace ? [...extensions, appSpace] : extensions;\n}\n","/**\n * Inputs for {@link detectSpaceContractDrift}.\n *\n * Both hashes are produced by the caller (the SQL-family wiring at the\n * consumption site) using the canonical contract hashing pipeline.\n * Keeping the helper pure lets `migration-tools` stay framework-neutral\n * — the SQL family already speaks `Contract<SqlStorage>`, the Mongo\n * family speaks its own contract type, and both reduce to a hash string\n * before drift detection runs.\n *\n * `pinnedHash` is `null` when no pinned `contract.json` exists yet for\n * the space (the descriptor declares an extension that has never been\n * emitted into the user's repo). 
That's the \"first emit\" case — no\n * drift to surface; the migrate emit will create the pinned files.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n */\nexport interface DetectSpaceContractDriftInputs {\n readonly descriptorHash: string;\n readonly pinnedHash: string | null;\n}\n\n/**\n * Result discriminant for {@link detectSpaceContractDrift}.\n *\n * - `noDrift`: descriptor hash and pinned hash agree byte-for-byte.\n * The migrate emit can proceed with no warning.\n * - `firstEmit`: no pinned `contract.json` on disk yet. The extension\n * was just added to `extensionPacks`; this run will create the\n * pinned files. No warning either — the user's intent is to install\n * the extension, not to \"drift\" from a state they haven't pinned.\n * - `drift`: descriptor hash differs from pinned hash. The caller\n * surfaces a non-fatal warning naming the extension and the\n * diff direction (descriptor → pinned). The migrate emit proceeds\n * normally so the bump is materialised this run; the warning just\n * confirms the bump is being captured.\n *\n * `spaceId`, `descriptorHash`, and `pinnedHash` are threaded through\n * verbatim so the caller (logger / TerminalUI / strict-mode envelope)\n * has everything it needs to format the warning message without\n * re-reading the descriptor or the pinned file.\n */\nexport type SpaceContractDriftResult = {\n readonly kind: 'noDrift' | 'firstEmit' | 'drift';\n readonly spaceId: string;\n readonly descriptorHash: string;\n readonly pinnedHash: string | null;\n};\n\n/**\n * Pure drift-detection primitive for a single contract space.\n *\n * Runs once per loaded extension space, just before computing the\n * `priorContract` that feeds {@link import('./plan-all-spaces').planAllSpaces}.\n * Hash equality is byte-for-byte (no normalisation) — both sides are\n * already canonical hashes produced by the same pipeline, so any\n * difference is meaningful drift.\n *\n * Synchronous, pure, no I/O. The caller (SQL family in M2 R1) reads\n * the pinned `contract.json` and computes its hash, then invokes this\n * helper alongside the descriptor's `headRef.hash`. 
Composes naturally\n * with {@link import('./read-pinned-contract-hash').readPinnedContractHash}\n * which provides the read-side primitive.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n * @see specs/framework-mechanism.spec.md AM7 — drift warning surfaces\n * the extension name and the diff direction.\n */\nexport function detectSpaceContractDrift(\n spaceId: string,\n inputs: DetectSpaceContractDriftInputs,\n): SpaceContractDriftResult {\n if (inputs.pinnedHash === null) {\n return {\n kind: 'firstEmit',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: null,\n };\n }\n if (inputs.descriptorHash === inputs.pinnedHash) {\n return {\n kind: 'noDrift',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: inputs.pinnedHash,\n };\n }\n return {\n kind: 'drift',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: inputs.pinnedHash,\n };\n}\n","import { mkdir, writeFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { canonicalizeJson } from './canonicalize-json';\nimport { errorPinnedArtefactsAppSpace } from './errors';\nimport { APP_SPACE_ID, assertValidSpaceId } from './space-layout';\n\n/**\n * Pinned head reference for a contract space — `(hash, invariants)`.\n * Mirrors {@link import('./refs').RefEntry} but is redeclared locally so\n * callers can construct the input without depending on the refs module.\n */\nexport interface PinnedSpaceHeadRef {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\n/**\n * Inputs for {@link emitPinnedSpaceArtefacts}.\n *\n * - `contract` is the canonical contract value the framework just emitted\n * for the space; it is serialised through {@link canonicalizeJson}, so\n * it must be a JSON-compatible value (objects / arrays / primitives).\n * Typed as `unknown` rather than the SQL-family `Contract<SqlStorage>`\n * to keep `migration-tools` framework-neutral; SQL-family callers pass\n * their typed value through unchanged.\n *\n * - `contractDts` is the pre-rendered `.d.ts` text. Rendering happens in\n * the SQL family (which owns the codec / typemap input the renderer\n * needs), so this helper accepts the text verbatim and writes it out\n * without further transformation.\n *\n * - `headRef` is the pinned head reference for the space.\n * `invariants` are sorted alphabetically before serialisation so two\n * callers passing the same set in different orders produce\n * byte-identical `refs/head.json`.\n */\nexport interface PinnedSpaceArtefactInputs {\n readonly contract: unknown;\n readonly contractDts: string;\n readonly headRef: PinnedSpaceHeadRef;\n}\n\n/**\n * Emit the pinned per-space artefacts (`contract.json`, `contract.d.ts`,\n * `refs/head.json`) under `<projectMigrationsDir>/<spaceId>/`.\n *\n * Always-overwrite: the framework owns these files; running `migrate`\n * twice with the same inputs is a no-op observably (idempotent), but the\n * helper does not check pre-existing contents — re-emit always wins.\n *\n * Path layout matches the convention in\n * [`spaceMigrationDirectory`](./space-layout.ts), with two restrictions\n * specific to pinned artefacts:\n *\n * - Rejects the app space (`spaceId === APP_SPACE_ID`): the app space's\n * canonical `contract.json` lives at the project root, not under\n * `migrations/`. 
Callers that want to emit it use the app-space\n * contract emit pipeline.\n * - Validates `spaceId` against `[a-z][a-z0-9_-]{0,63}` via\n * {@link assertValidSpaceId} for the same filesystem-safety reasons.\n *\n * The migrations directory and space subdirectory are created if they\n * do not yet exist (`mkdir { recursive: true }`).\n *\n * @see specs/framework-mechanism.spec.md § 3 — Pinned artefact emission (T1.8).\n */\nexport async function emitPinnedSpaceArtefacts(\n projectMigrationsDir: string,\n spaceId: string,\n inputs: PinnedSpaceArtefactInputs,\n): Promise<void> {\n if (spaceId === APP_SPACE_ID) {\n throw errorPinnedArtefactsAppSpace();\n }\n assertValidSpaceId(spaceId);\n\n const dir = join(projectMigrationsDir, spaceId);\n await mkdir(join(dir, 'refs'), { recursive: true });\n\n await writeFile(join(dir, 'contract.json'), `${canonicalizeJson(inputs.contract)}\\n`);\n await writeFile(join(dir, 'contract.d.ts'), inputs.contractDts);\n\n const sortedInvariants = [...inputs.headRef.invariants].sort();\n const headJson = canonicalizeJson({\n hash: inputs.headRef.hash,\n invariants: sortedInvariants,\n });\n await writeFile(join(dir, 'refs', 'head.json'), `${headJson}\\n`);\n}\n","import { errorDuplicateSpaceId } from './errors';\n\n/**\n * Per-space input for {@link planAllSpaces}. One entry per loaded\n * contract space (the application's `'app'` plus each extension that\n * exposes a `contractSpace`).\n *\n * - `priorContract` is `null` for a space that has never been emitted\n * (no `migrations/<space-id>/contract.json` on disk yet); otherwise it\n * is the canonical contract value pinned for that space.\n * - `newContract` is the canonical contract value the planner is about\n * to emit for that space — for app-space, the just-emitted root\n * `contract.json`; for an extension space, the descriptor's\n * `contractSpace.contractJson`.\n *\n * @see specs/framework-mechanism.spec.md § 3.\n */\nexport interface SpacePlanInput<TContract> {\n readonly spaceId: string;\n readonly priorContract: TContract | null;\n readonly newContract: TContract;\n}\n\nexport interface SpacePlanOutput<TPackage> {\n readonly spaceId: string;\n readonly migrationPackages: readonly TPackage[];\n}\n\n/**\n * Iterate the per-space planner across a set of loaded contract spaces\n * and return a deterministic shape regardless of declaration order.\n *\n * Behaviour:\n *\n * - The output is sorted alphabetically by `spaceId` (AM3). Two callers\n * passing the same set of inputs in different orders observe\n * byte-identical outputs.\n * - The per-space planner (`planSpace`) is called exactly once per\n * input, in alphabetical-by-spaceId order. Its return value is\n * attached to the corresponding output entry verbatim.\n * - Duplicate `spaceId`s in the input array throw\n * `MIGRATION.DUPLICATE_SPACE_ID` before any `planSpace` call runs,\n * keeping the planner pure when the input is malformed.\n *\n * The signature is generic over `TContract` and `TPackage` because the\n * shape is framework-neutral (SQL family today, Mongo family\n * eventually). Callers wire in whatever contract value and migration\n * package shape their family already speaks.\n *\n * Synchronous: the underlying per-space planner (target's\n * `MigrationPlanner.plan(...)`) is synchronous; callers that need to\n * resolve async I/O (e.g. 
reading pinned `contract.json` from disk)\n * resolve it before calling `planAllSpaces` and pass the materialised\n * inputs through.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Per-space planner (T1.3).\n */\nexport function planAllSpaces<TContract, TPackage>(\n inputs: readonly SpacePlanInput<TContract>[],\n planSpace: (input: SpacePlanInput<TContract>) => readonly TPackage[],\n): readonly SpacePlanOutput<TPackage>[] {\n const seen = new Set<string>();\n for (const input of inputs) {\n if (seen.has(input.spaceId)) {\n throw errorDuplicateSpaceId(input.spaceId);\n }\n seen.add(input.spaceId);\n }\n\n const sorted = [...inputs].sort((a, b) => {\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return sorted.map((input) => ({\n spaceId: input.spaceId,\n migrationPackages: planSpace(input),\n }));\n}\n","import { readFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { errorInvalidJson, errorInvalidRefFile, errorPinnedArtefactsAppSpace } from './errors';\nimport { APP_SPACE_ID, assertValidSpaceId } from './space-layout';\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * Read the pinned head hash for an extension space.\n *\n * Returns the `hash` field of `<projectMigrationsDir>/<spaceId>/refs/head.json`\n * — i.e. the canonical contract hash the framework wrote on the last\n * `migrate` for this space. Returns `null` when the file does not exist\n * (or the migrations directory is missing entirely), which is the\n * \"first emit\" signal {@link import('./detect-space-contract-drift').detectSpaceContractDrift}\n * uses to distinguish a brand-new extension from drift.\n *\n * Pure I/O (read + parse). The \"comparison hash\" is stored on disk by\n * {@link import('./emit-pinned-space-artefacts').emitPinnedSpaceArtefacts}\n * via the descriptor's `headRef.hash`, so reading it back here matches\n * the descriptor's hashing pipeline by construction — neither side\n * recomputes anything.\n *\n * Validation:\n *\n * - Rejects the app space — pinned head refs are an extension-space\n * concept; the app space's contract-of-record lives at the project\n * root, not under `migrations/`.\n * - Validates the space id against the same `[a-z][a-z0-9_-]{0,63}`\n * pattern as the rest of the per-space helpers.\n * - Surfaces `MIGRATION.INVALID_JSON` / `MIGRATION.INVALID_REF_FILE`\n * on a corrupt `refs/head.json` so callers can distinguish \"no\n * pinned file\" (returns `null`) from \"pinned file but unreadable\"\n * (throws).\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n */\nexport async function readPinnedContractHash(\n projectMigrationsDir: string,\n spaceId: string,\n): Promise<string | null> {\n if (spaceId === APP_SPACE_ID) {\n throw errorPinnedArtefactsAppSpace();\n }\n assertValidSpaceId(spaceId);\n\n const filePath = join(projectMigrationsDir, spaceId, 'refs', 'head.json');\n\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return null;\n }\n throw error;\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch (e) {\n throw errorInvalidJson(filePath, e instanceof Error ? 
e.message : String(e));\n }\n\n if (\n typeof parsed !== 'object' ||\n parsed === null ||\n typeof (parsed as { hash?: unknown }).hash !== 'string'\n ) {\n throw errorInvalidRefFile(filePath, 'expected an object with a string `hash` field');\n }\n\n return (parsed as { hash: string }).hash;\n}\n","import { readdir, stat } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { MANIFEST_FILE } from './io';\nimport { APP_SPACE_ID } from './space-layout';\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * List the per-space pinned subdirectories under\n * `<projectRoot>/migrations/`. Returns space-id directory names (sorted\n * alphabetically) — i.e. any non-dot-prefixed subdirectory whose root\n * does **not** contain a `migration.json` manifest. The manifest is the\n * structural marker of a user-authored migration directory (see\n * `readMigrationsDir` in `./io`); directory names themselves belong to\n * the user and are not part of the contract.\n *\n * Returns `[]` if the migrations directory does not exist (greenfield\n * project).\n *\n * Reads only the user's repo. **No descriptor import.** The caller\n * (verifier) feeds the result into {@link verifyContractSpaces} alongside\n * the loaded-space set and the marker rows.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Verifier (steps 5–6).\n */\nexport async function listPinnedSpaceDirectories(\n projectMigrationsDir: string,\n): Promise<readonly string[]> {\n let entries: { readonly name: string; readonly isDirectory: boolean }[];\n try {\n const dirents = await readdir(projectMigrationsDir, { withFileTypes: true });\n entries = dirents.map((d) => ({ name: d.name, isDirectory: d.isDirectory() }));\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return [];\n }\n throw error;\n }\n\n const namedCandidates = entries\n .filter((e) => e.isDirectory)\n .map((e) => e.name)\n .filter((name) => !name.startsWith('.'))\n .sort();\n\n const manifestChecks = await Promise.all(\n namedCandidates.map(async (name) => {\n try {\n await stat(join(projectMigrationsDir, name, MANIFEST_FILE));\n return { name, isMigrationDir: true };\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return { name, isMigrationDir: false };\n }\n throw error;\n }\n }),\n );\n\n return manifestChecks.filter((c) => !c.isMigrationDir).map((c) => c.name);\n}\n\n/**\n * Pinned head value (`(hash, invariants)`) for one contract space.\n * The verifier compares this against the marker row for the same space\n * to detect drift between the user-emitted artefacts and the live DB\n * marker.\n */\nexport interface SpacePinnedHashRecord {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\n/**\n * Marker row read from `prisma_contract.marker` (one per `space`).\n * Caller resolves these via the family runtime's marker reader (T1.1)\n * before invoking {@link verifyContractSpaces}.\n */\nexport interface SpaceMarkerRecord {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\nexport interface VerifyContractSpacesInputs {\n /**\n * Set of contract spaces the project declares: `'app'` plus each\n * extension space in `extensionPacks`. 
The caller's discovery path\n * never reads the extension descriptor module — it walks the\n * `extensionPacks` configuration in `prisma-next.config.ts` for the\n * space ids.\n */\n readonly loadedSpaces: ReadonlySet<string>;\n\n /**\n * Pinned per-space subdirectories observed under\n * `<projectRoot>/migrations/`. Resolved via\n * {@link listPinnedSpaceDirectories}.\n */\n readonly pinnedDirsOnDisk: readonly string[];\n\n /**\n * Pinned head ref per space, keyed by space id. Caller reads\n * `<projectRoot>/migrations/<space-id>/contract.json` and\n * `refs/head.json` (or, for app-space if its pinned shape ever moves\n * under `migrations/`, the equivalent files) to construct this map.\n * Spaces with no pinned dir on disk simply omit a map entry.\n */\n readonly pinnedHashesBySpace: ReadonlyMap<string, SpacePinnedHashRecord>;\n\n /**\n * Marker rows keyed by `space`. Caller reads them from the\n * `prisma_contract.marker` table.\n */\n readonly markerRowsBySpace: ReadonlyMap<string, SpaceMarkerRecord>;\n}\n\nexport type SpaceVerifierViolation =\n | {\n readonly kind: 'declaredButUnmigrated';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'orphanMarker';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'orphanPinnedDir';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'hashMismatch';\n readonly spaceId: string;\n readonly pinnedHash: string;\n readonly markerHash: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'invariantsMismatch';\n readonly spaceId: string;\n readonly pinnedInvariants: readonly string[];\n readonly markerInvariants: readonly string[];\n readonly remediation: string;\n };\n\nexport type VerifyContractSpacesResult =\n | { readonly ok: true }\n | { readonly ok: false; readonly violations: readonly SpaceVerifierViolation[] };\n\n/**\n * Pure structural verifier for the per-space mechanism. Aggregates the\n * three orphan / missing checks (FR6 cases a–c) plus per-space hash and\n * invariant comparison.\n *\n * Algorithm (sub-spec § 4):\n *\n * - For every extension space declared in `loadedSpaces` (`'app'`\n * excluded — its pinned `contract.json` lives at the project root):\n * - If no pinned dir on disk → `declaredButUnmigrated`.\n * - Else if `markerRowsBySpace` lacks an entry → no violation here;\n * the live-DB compare in step 8 (out of scope of this helper) is\n * where the absence shows up.\n * - Else compare marker hash / invariants vs. pinned hash /\n * invariants → `hashMismatch` / `invariantsMismatch` on drift.\n * - For every pinned dir on disk that is not in `loadedSpaces` →\n * `orphanPinnedDir`.\n * - For every marker row whose `space` is not in `loadedSpaces` →\n * `orphanMarker`. The app-space marker is always loaded (`'app'` is\n * in `loadedSpaces` by definition).\n *\n * Output is deterministic (NFR6): violations are sorted first by `kind`\n * (`declaredButUnmigrated` → `orphanMarker` → `orphanPinnedDir` →\n * `hashMismatch` → `invariantsMismatch`) then by `spaceId`. Two callers\n * passing equivalent inputs see byte-identical violation lists.\n *\n * Synchronous, pure, no I/O. **Does not import the extension descriptor**\n * (the inputs are pre-resolved by the caller). 
This is the property\n * AC-15 / AC-26 (\"verifier reads only the user repo, not\n * `node_modules`\") locks in.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Verifier (T1.5).\n */\nexport function verifyContractSpaces(\n inputs: VerifyContractSpacesInputs,\n): VerifyContractSpacesResult {\n const violations: SpaceVerifierViolation[] = [];\n\n for (const spaceId of [...inputs.loadedSpaces].sort()) {\n if (spaceId === APP_SPACE_ID) continue;\n\n if (!inputs.pinnedDirsOnDisk.includes(spaceId)) {\n violations.push({\n kind: 'declaredButUnmigrated',\n spaceId,\n remediation: `Extension '${spaceId}' is declared in extensionPacks but has not been emitted; run \\`prisma-next migrate\\`.`,\n });\n continue;\n }\n\n const pinned = inputs.pinnedHashesBySpace.get(spaceId);\n const marker = inputs.markerRowsBySpace.get(spaceId);\n if (!pinned || !marker) {\n continue;\n }\n\n if (pinned.hash !== marker.hash) {\n violations.push({\n kind: 'hashMismatch',\n spaceId,\n pinnedHash: pinned.hash,\n markerHash: marker.hash,\n remediation: `Marker row for space '${spaceId}' is keyed at ${marker.hash}, but the pinned ${join('migrations', spaceId, 'contract.json')} resolves to ${pinned.hash}. Run \\`prisma-next db update\\` to advance the database, or \\`prisma-next migrate\\` if the descriptor was bumped without re-emitting.`,\n });\n continue;\n }\n\n const pinnedInvariants = [...pinned.invariants].sort();\n const markerInvariants = new Set(marker.invariants);\n const missing = pinnedInvariants.filter((id) => !markerInvariants.has(id));\n if (missing.length > 0) {\n violations.push({\n kind: 'invariantsMismatch',\n spaceId,\n pinnedInvariants,\n markerInvariants: [...marker.invariants].sort(),\n remediation: `Marker row for space '${spaceId}' is missing invariants [${missing.map((s) => JSON.stringify(s)).join(', ')}]. 
Run \\`prisma-next db update\\` to apply the corresponding data-transform migrations.`,\n });\n }\n }\n\n for (const dir of [...inputs.pinnedDirsOnDisk].sort()) {\n if (!inputs.loadedSpaces.has(dir)) {\n violations.push({\n kind: 'orphanPinnedDir',\n spaceId: dir,\n remediation: `Orphan pinned directory \\`${join('migrations', dir)}/\\` for an extension not in extensionPacks; remove the directory or re-add the extension.`,\n });\n }\n }\n\n for (const space of [...inputs.markerRowsBySpace.keys()].sort()) {\n if (!inputs.loadedSpaces.has(space)) {\n violations.push({\n kind: 'orphanMarker',\n spaceId: space,\n remediation: `Orphan marker row for space '${space}' (no longer in extensionPacks); remediation: manually delete the row from \\`prisma_contract.marker\\`.`,\n });\n }\n }\n\n if (violations.length === 0) {\n return { ok: true };\n }\n\n const kindOrder: Record<SpaceVerifierViolation['kind'], number> = {\n declaredButUnmigrated: 0,\n orphanMarker: 1,\n orphanPinnedDir: 2,\n hashMismatch: 3,\n invariantsMismatch: 4,\n };\n\n violations.sort((a, b) => {\n const k = kindOrder[a.kind] - kindOrder[b.kind];\n if (k !== 0) return k;\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return { ok: false, violations };\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAsBA,MAAM,mBAAmB;AAEzB,SAAgB,eAAe,SAA0C;AACvE,QAAO,iBAAiB,KAAK,QAAQ;;AAGvC,SAAgB,mBAAmB,SAAkD;AACnF,KAAI,CAAC,eAAe,QAAQ,CAC1B,OAAM,oBAAoB,QAAQ;;;;;;;;;;;;;;;;AAkBtC,SAAgB,wBAAwB,sBAA8B,SAAyB;AAC7F,KAAI,YAAY,aACd,QAAO;AAET,oBAAmB,QAAQ;AAC3B,QAAO,KAAK,sBAAsB,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACQ5C,SAAgB,4BACd,QACiC;CACjC,MAAM,uBAAO,IAAI,KAAa;AAC9B,MAAK,MAAM,SAAS,QAAQ;AAC1B,MAAI,KAAK,IAAI,MAAM,QAAQ,CACzB,OAAM,sBAAsB,MAAM,QAAQ;AAE5C,OAAK,IAAI,MAAM,QAAQ;;CAGzB,MAAMA,aAAqC,EAAE;CAC7C,IAAIC;AACJ,MAAK,MAAM,SAAS,OAClB,KAAI,MAAM,YAAY,aACpB,YAAW;KAEX,YAAW,KAAK,MAAM;AAI1B,YAAW,MAAM,GAAG,MAAM;AACxB,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,SAAO;GACP;AAEF,QAAO,WAAW,CAAC,GAAG,YAAY,SAAS,GAAG;;;;;;;;;;;;;;;;;;;;;;;;ACpBhD,SAAgB,yBACd,SACA,QAC0B;AAC1B,KAAI,OAAO,eAAe,KACxB,QAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY;EACb;AAEH,KAAI,OAAO,mBAAmB,OAAO,WACnC,QAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY,OAAO;EACpB;AAEH,QAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY,OAAO;EACpB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC3BH,eAAsB,yBACpB,sBACA,SACA,QACe;AACf,KAAI,YAAY,aACd,OAAM,8BAA8B;AAEtC,oBAAmB,QAAQ;CAE3B,MAAM,MAAM,KAAK,sBAAsB,QAAQ;AAC/C,OAAM,MAAM,KAAK,KAAK,OAAO,EAAE,EAAE,WAAW,MAAM,CAAC;AAEnD,OAAM,UAAU,KAAK,KAAK,gBAAgB,EAAE,GAAG,iBAAiB,OAAO,SAAS,CAAC,IAAI;AACrF,OAAM,UAAU,KAAK,KAAK,gBAAgB,EAAE,OAAO,YAAY;CAE/D,MAAM,mBAAmB,CAAC,GAAG,OAAO,QAAQ,WAAW,CAAC,MAAM;CAC9D,MAAM,WAAW,iBAAiB;EAChC,MAAM,OAAO,QAAQ;EACrB,YAAY;EACb,CAAC;AACF,OAAM,UAAU,KAAK,KAAK,QAAQ,YAAY,EAAE,GAAG,SAAS,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC9BlE,SAAgB,cACd,QACA,WACsC;CACtC,MAAM,uBAAO,IAAI,KAAa;AAC9B,MAAK,MAAM,SAAS,QAAQ;AAC1B,MAAI,KAAK,IAAI,MAAM,QAAQ,CACzB,OAAM,sBAAsB,MAAM,QAAQ;AAE5C,OAAK,IAAI,MAAM,QAAQ;;AASzB,QANe,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM;AACxC,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,SAAO;GACP,CAEY,KAAK,WAAW;EAC5B,SAAS,MAAM;EACf,mBAAmB,UAAU,MAAM;EACpC,EAAE;;;;;ACzEL,SAASC,eAAa,OAAgB,MAAuB;AAC3D,QAAO,iBAAiB,SAAU,MAA4B,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCzE,eAAsB,uBACpB,sBACA,SACwB;AACxB,KAAI,YAAY,aACd,OAAM,8BAA8B;AAEtC,oBAAmB,QAAQ;CAE3B,MAAM,WAAW,KAAK,sBAAsB,SAAS,QAAQ,YAAY;CAEzE,IAAIC;AACJ,KAAI;AACF,QAAM,MAAM,SAAS,UAAU,QAAQ;UAChC,OAAO;AACd,MAAID,eAAa,OAAO,SAAS,CAC/B,QAAO;AAET,QAAM;
;CAGR,IAAIE;AACJ,KAAI;AACF,WAAS,KAAK,MAAM,IAAI;UACjB,GAAG;AACV,QAAM,iBAAiB,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE,CAAC;;AAG9E,KACE,OAAO,WAAW,YAClB,WAAW,QACX,OAAQ,OAA8B,SAAS,SAE/C,OAAM,oBAAoB,UAAU,gDAAgD;AAGtF,QAAQ,OAA4B;;;;;ACtEtC,SAAS,aAAa,OAAgB,MAAuB;AAC3D,QAAO,iBAAiB,SAAU,MAA4B,SAAS;;;;;;;;;;;;;;;;;;;;AAqBzE,eAAsB,2BACpB,sBAC4B;CAC5B,IAAIC;AACJ,KAAI;AAEF,aADgB,MAAM,QAAQ,sBAAsB,EAAE,eAAe,MAAM,CAAC,EAC1D,KAAK,OAAO;GAAE,MAAM,EAAE;GAAM,aAAa,EAAE,aAAa;GAAE,EAAE;UACvE,OAAO;AACd,MAAI,aAAa,OAAO,SAAS,CAC/B,QAAO,EAAE;AAEX,QAAM;;CAGR,MAAM,kBAAkB,QACrB,QAAQ,MAAM,EAAE,YAAY,CAC5B,KAAK,MAAM,EAAE,KAAK,CAClB,QAAQ,SAAS,CAAC,KAAK,WAAW,IAAI,CAAC,CACvC,MAAM;AAgBT,SAduB,MAAM,QAAQ,IACnC,gBAAgB,IAAI,OAAO,SAAS;AAClC,MAAI;AACF,SAAM,KAAK,KAAK,sBAAsB,MAAM,cAAc,CAAC;AAC3D,UAAO;IAAE;IAAM,gBAAgB;IAAM;WAC9B,OAAO;AACd,OAAI,aAAa,OAAO,SAAS,CAC/B,QAAO;IAAE;IAAM,gBAAgB;IAAO;AAExC,SAAM;;GAER,CACH,EAEqB,QAAQ,MAAM,CAAC,EAAE,eAAe,CAAC,KAAK,MAAM,EAAE,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6H3E,SAAgB,qBACd,QAC4B;CAC5B,MAAMC,aAAuC,EAAE;AAE/C,MAAK,MAAM,WAAW,CAAC,GAAG,OAAO,aAAa,CAAC,MAAM,EAAE;AACrD,MAAI,YAAY,aAAc;AAE9B,MAAI,CAAC,OAAO,iBAAiB,SAAS,QAAQ,EAAE;AAC9C,cAAW,KAAK;IACd,MAAM;IACN;IACA,aAAa,cAAc,QAAQ;IACpC,CAAC;AACF;;EAGF,MAAM,SAAS,OAAO,oBAAoB,IAAI,QAAQ;EACtD,MAAM,SAAS,OAAO,kBAAkB,IAAI,QAAQ;AACpD,MAAI,CAAC,UAAU,CAAC,OACd;AAGF,MAAI,OAAO,SAAS,OAAO,MAAM;AAC/B,cAAW,KAAK;IACd,MAAM;IACN;IACA,YAAY,OAAO;IACnB,YAAY,OAAO;IACnB,aAAa,yBAAyB,QAAQ,gBAAgB,OAAO,KAAK,mBAAmB,KAAK,cAAc,SAAS,gBAAgB,CAAC,eAAe,OAAO,KAAK;IACtK,CAAC;AACF;;EAGF,MAAM,mBAAmB,CAAC,GAAG,OAAO,WAAW,CAAC,MAAM;EACtD,MAAM,mBAAmB,IAAI,IAAI,OAAO,WAAW;EACnD,MAAM,UAAU,iBAAiB,QAAQ,OAAO,CAAC,iBAAiB,IAAI,GAAG,CAAC;AAC1E,MAAI,QAAQ,SAAS,EACnB,YAAW,KAAK;GACd,MAAM;GACN;GACA;GACA,kBAAkB,CAAC,GAAG,OAAO,WAAW,CAAC,MAAM;GAC/C,aAAa,yBAAyB,QAAQ,2BAA2B,QAAQ,KAAK,MAAM,KAAK,UAAU,EAAE,CAAC,CAAC,KAAK,KAAK,CAAC;GAC3H,CAAC;;AAIN,MAAK,MAAM,OAAO,CAAC,GAAG,OAAO,iBAAiB,CAAC,MAAM,CACnD,KAAI,CAAC,OAAO,aAAa,IAAI,IAAI,CAC/B,YAAW,KAAK;EACd,MAAM;EACN,SAAS;EACT,aAAa,6BAA6B,KAAK,cAAc,IAAI,CAAC;EACnE,CAAC;AAIN,MAAK,MAAM,SAAS,CAAC,GAAG,OAAO,kBAAkB,MAAM,CAAC,CAAC,MAAM,CAC7D,KAAI,CAAC,OAAO,aAAa,IAAI,MAAM,CACjC,YAAW,KAAK;EACd,MAAM;EACN,SAAS;EACT,aAAa,gCAAgC,MAAM;EACpD,CAAC;AAIN,KAAI,WAAW,WAAW,EACxB,QAAO,EAAE,IAAI,MAAM;CAGrB,MAAMC,YAA4D;EAChE,uBAAuB;EACvB,cAAc;EACd,iBAAiB;EACjB,cAAc;EACd,oBAAoB;EACrB;AAED,YAAW,MAAM,GAAG,MAAM;EACxB,MAAM,IAAI,UAAU,EAAE,QAAQ,UAAU,EAAE;AAC1C,MAAI,MAAM,EAAG,QAAO;AACpB,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,SAAO;GACP;AAEF,QAAO;EAAE,IAAI;EAAO;EAAY"}
+ {"version":3,"file":"spaces.mjs","names":["hasErrnoCode"],"sources":["../../src/space-layout.ts","../../src/concatenate-space-apply-inputs.ts","../../src/detect-space-contract-drift.ts","../../src/emit-pinned-space-artefacts.ts","../../src/plan-all-spaces.ts","../../src/read-pinned-contract-hash.ts","../../src/verify-contract-spaces.ts"],"sourcesContent":["import { APP_SPACE_ID } from '@prisma-next/framework-components/control';\nimport { join } from 'pathe';\nimport { errorInvalidSpaceId } from './errors';\n\nexport { APP_SPACE_ID };\n\n/**\n * Branded string carrying a compile-time guarantee that the value has\n * been validated by {@link assertValidSpaceId}. Downstream filesystem\n * helpers (e.g. {@link spaceMigrationDirectory}) accept this type to\n * make \"validated\" tracking visible at the type level rather than\n * relying purely on a runtime check.\n */\nexport type ValidSpaceId = string & { readonly __brand: 'ValidSpaceId' };\n\n/**\n * Pattern a contract-space identifier must match. The constraint is\n * filesystem-friendly: lowercase letters / digits / hyphen / underscore,\n * starts with a letter, max 64 characters.\n *\n * @see specs/framework-mechanism.spec.md § 3.\n */\nconst SPACE_ID_PATTERN = /^[a-z][a-z0-9_-]{0,63}$/;\n\nexport function isValidSpaceId(spaceId: string): spaceId is ValidSpaceId {\n return SPACE_ID_PATTERN.test(spaceId);\n}\n\nexport function assertValidSpaceId(spaceId: string): asserts spaceId is ValidSpaceId {\n if (!isValidSpaceId(spaceId)) {\n throw errorInvalidSpaceId(spaceId);\n }\n}\n\n/**\n * Resolve the migrations subdirectory for a given contract space.\n *\n * - **App space** (`spaceId === APP_SPACE_ID`) keeps today's layout: the\n * project's `migrations/` directory is the migrations directory, no\n * subdirectory.\n * - **Extension space** lands under `<projectMigrationsDir>/<spaceId>/`.\n * The space id is validated against {@link SPACE_ID_PATTERN} because\n * it becomes a filesystem directory name verbatim.\n *\n * `projectMigrationsDir` is the project's top-level `migrations/`\n * directory; the helper does not assume anything about its absolute /\n * relative shape and is symmetric with `pathe.join`.\n */\nexport function spaceMigrationDirectory(projectMigrationsDir: string, spaceId: string): string {\n if (spaceId === APP_SPACE_ID) {\n return projectMigrationsDir;\n }\n assertValidSpaceId(spaceId);\n return join(projectMigrationsDir, spaceId);\n}\n","import { errorDuplicateSpaceId } from './errors';\nimport { APP_SPACE_ID } from './space-layout';\n\n/**\n * Per-space input the runner consumes when applying a migration.\n *\n * The shape is target-agnostic: callers (today the SQL family; later\n * any other family) bind `TOp` to their own per-target operation type\n * (e.g. `SqlMigrationPlanOperation<TTargetDetails>` for the SQL family)\n * and the helper preserves it through the concatenation.\n *\n * - `migrationDirectory` is the on-disk migration directory for the\n * space — `<projectRoot>/migrations` for `'app'` and\n * `<projectRoot>/migrations/<space-id>` for an extension space.\n * - `currentMarkerHash` and `currentMarkerInvariants` are the values\n * read from the `prisma_contract.marker` row keyed by `space = <space-id>`\n * (T1.1). 
`null` hash = no marker row yet.\n * - `path` is the per-space operation list resolved from\n * `findPathWithDecision(currentMarker, ref.hash, effectiveRequired)`\n * per ADR 208, materialised against the on-disk migration packages.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Runner.\n */\nexport interface SpaceApplyInput<TOp> {\n readonly spaceId: string;\n readonly migrationDirectory: string;\n readonly currentMarkerHash: string | null;\n readonly currentMarkerInvariants: readonly string[];\n readonly path: readonly TOp[];\n}\n\n/**\n * Order a set of per-space apply inputs into the canonical cross-space\n * sequence the runner applies under a single transaction.\n *\n * Cross-space ordering convention (sub-spec § 4):\n *\n * 1. **Extension spaces first**, alphabetically by `spaceId`.\n * 2. **App space last** — only one `'app'` entry expected, at most.\n *\n * Rationale: extensions install their own structural objects (types,\n * functions, helper tables) before the app's structural ops reference\n * them. Putting app-space last lets app-space ops freely depend on any\n * extension-space declaration in the same transaction.\n *\n * Determinism (NFR6): the output order is independent of the input\n * order, so two callers with the same set of `extensionPacks` produce\n * identical apply sequences.\n *\n * Atomicity: rejects duplicate `spaceId`s with\n * `MIGRATION.DUPLICATE_SPACE_ID` before producing any output. This\n * mirrors {@link import('./plan-all-spaces').planAllSpaces} so the\n * planner-side and runner-side helpers reject malformed inputs the same\n * way (callers don't need a separate dedup pass).\n *\n * Synchronous, pure, no I/O: callers resolve marker rows and `path`\n * before invoking this helper. The actual DB application — driving the\n * transaction, committing marker writes, recording the per-space marker\n * rows — happens at the SQL-family consumption site (per the\n * helper-location convention from R3).\n */\nexport function concatenateSpaceApplyInputs<TOp>(\n inputs: readonly SpaceApplyInput<TOp>[],\n): readonly SpaceApplyInput<TOp>[] {\n const seen = new Set<string>();\n for (const input of inputs) {\n if (seen.has(input.spaceId)) {\n throw errorDuplicateSpaceId(input.spaceId);\n }\n seen.add(input.spaceId);\n }\n\n const extensions: SpaceApplyInput<TOp>[] = [];\n let appSpace: SpaceApplyInput<TOp> | undefined;\n for (const input of inputs) {\n if (input.spaceId === APP_SPACE_ID) {\n appSpace = input;\n } else {\n extensions.push(input);\n }\n }\n\n extensions.sort((a, b) => {\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return appSpace ? [...extensions, appSpace] : extensions;\n}\n","/**\n * Inputs for {@link detectSpaceContractDrift}.\n *\n * Both hashes are produced by the caller (the SQL-family wiring at the\n * consumption site) using the canonical contract hashing pipeline.\n * Keeping the helper pure lets `migration-tools` stay framework-neutral\n * — the SQL family already speaks `Contract<SqlStorage>`, the Mongo\n * family speaks its own contract type, and both reduce to a hash string\n * before drift detection runs.\n *\n * `pinnedHash` is `null` when no pinned `contract.json` exists yet for\n * the space (the descriptor declares an extension that has never been\n * emitted into the user's repo). 
That's the \"first emit\" case — no\n * drift to surface; the migrate emit will create the pinned files.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n */\nexport interface DetectSpaceContractDriftInputs {\n readonly descriptorHash: string;\n readonly pinnedHash: string | null;\n}\n\n/**\n * Result discriminant for {@link detectSpaceContractDrift}.\n *\n * - `noDrift`: descriptor hash and pinned hash agree byte-for-byte.\n * The migrate emit can proceed with no warning.\n * - `firstEmit`: no pinned `contract.json` on disk yet. The extension\n * was just added to `extensionPacks`; this run will create the\n * pinned files. No warning either — the user's intent is to install\n * the extension, not to \"drift\" from a state they haven't pinned.\n * - `drift`: descriptor hash differs from pinned hash. The caller\n * surfaces a non-fatal warning naming the extension and the\n * diff direction (descriptor → pinned). The migrate emit proceeds\n * normally so the bump is materialised this run; the warning just\n * confirms the bump is being captured.\n *\n * `spaceId`, `descriptorHash`, and `pinnedHash` are threaded through\n * verbatim so the caller (logger / TerminalUI / strict-mode envelope)\n * has everything it needs to format the warning message without\n * re-reading the descriptor or the pinned file.\n */\nexport type SpaceContractDriftResult = {\n readonly kind: 'noDrift' | 'firstEmit' | 'drift';\n readonly spaceId: string;\n readonly descriptorHash: string;\n readonly pinnedHash: string | null;\n};\n\n/**\n * Pure drift-detection primitive for a single contract space.\n *\n * Runs once per loaded extension space, just before computing the\n * `priorContract` that feeds {@link import('./plan-all-spaces').planAllSpaces}.\n * Hash equality is byte-for-byte (no normalisation) — both sides are\n * already canonical hashes produced by the same pipeline, so any\n * difference is meaningful drift.\n *\n * Synchronous, pure, no I/O. The caller (SQL family in M2 R1) reads\n * the pinned `contract.json` and computes its hash, then invokes this\n * helper alongside the descriptor's `headRef.hash`. 
Composes naturally\n * with {@link import('./read-pinned-contract-hash').readPinnedContractHash}\n * which provides the read-side primitive.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n * @see specs/framework-mechanism.spec.md AM7 — drift warning surfaces\n * the extension name and the diff direction.\n */\nexport function detectSpaceContractDrift(\n spaceId: string,\n inputs: DetectSpaceContractDriftInputs,\n): SpaceContractDriftResult {\n if (inputs.pinnedHash === null) {\n return {\n kind: 'firstEmit',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: null,\n };\n }\n if (inputs.descriptorHash === inputs.pinnedHash) {\n return {\n kind: 'noDrift',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: inputs.pinnedHash,\n };\n }\n return {\n kind: 'drift',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: inputs.pinnedHash,\n };\n}\n","import { mkdir, writeFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { canonicalizeJson } from './canonicalize-json';\nimport { errorPinnedArtefactsAppSpace } from './errors';\nimport { APP_SPACE_ID, assertValidSpaceId } from './space-layout';\n\n/**\n * Pinned head reference for a contract space — `(hash, invariants)`.\n * Mirrors {@link import('./refs').RefEntry} but is redeclared locally so\n * callers can construct the input without depending on the refs module.\n */\nexport interface PinnedSpaceHeadRef {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\n/**\n * Inputs for {@link emitPinnedSpaceArtefacts}.\n *\n * - `contract` is the canonical contract value the framework just emitted\n * for the space; it is serialised through {@link canonicalizeJson}, so\n * it must be a JSON-compatible value (objects / arrays / primitives).\n * Typed as `unknown` rather than the SQL-family `Contract<SqlStorage>`\n * to keep `migration-tools` framework-neutral; SQL-family callers pass\n * their typed value through unchanged.\n *\n * - `contractDts` is the pre-rendered `.d.ts` text. Rendering happens in\n * the SQL family (which owns the codec / typemap input the renderer\n * needs), so this helper accepts the text verbatim and writes it out\n * without further transformation.\n *\n * - `headRef` is the pinned head reference for the space.\n * `invariants` are sorted alphabetically before serialisation so two\n * callers passing the same set in different orders produce\n * byte-identical `refs/head.json`.\n */\nexport interface PinnedSpaceArtefactInputs {\n readonly contract: unknown;\n readonly contractDts: string;\n readonly headRef: PinnedSpaceHeadRef;\n}\n\n/**\n * Emit the pinned per-space artefacts (`contract.json`, `contract.d.ts`,\n * `refs/head.json`) under `<projectMigrationsDir>/<spaceId>/`.\n *\n * Always-overwrite: the framework owns these files; running `migrate`\n * twice with the same inputs is a no-op observably (idempotent), but the\n * helper does not check pre-existing contents — re-emit always wins.\n *\n * Path layout matches the convention in\n * [`spaceMigrationDirectory`](./space-layout.ts), with two restrictions\n * specific to pinned artefacts:\n *\n * - Rejects the app space (`spaceId === APP_SPACE_ID`): the app space's\n * canonical `contract.json` lives at the project root, not under\n * `migrations/`. 
Callers that want to emit it use the app-space\n * contract emit pipeline.\n * - Validates `spaceId` against `[a-z][a-z0-9_-]{0,63}` via\n * {@link assertValidSpaceId} for the same filesystem-safety reasons.\n *\n * The migrations directory and space subdirectory are created if they\n * do not yet exist (`mkdir { recursive: true }`).\n *\n * @see specs/framework-mechanism.spec.md § 3 — Pinned artefact emission (T1.8).\n */\nexport async function emitPinnedSpaceArtefacts(\n projectMigrationsDir: string,\n spaceId: string,\n inputs: PinnedSpaceArtefactInputs,\n): Promise<void> {\n if (spaceId === APP_SPACE_ID) {\n throw errorPinnedArtefactsAppSpace();\n }\n assertValidSpaceId(spaceId);\n\n const dir = join(projectMigrationsDir, spaceId);\n await mkdir(join(dir, 'refs'), { recursive: true });\n\n await writeFile(join(dir, 'contract.json'), `${canonicalizeJson(inputs.contract)}\\n`);\n await writeFile(join(dir, 'contract.d.ts'), inputs.contractDts);\n\n const sortedInvariants = [...inputs.headRef.invariants].sort();\n const headJson = canonicalizeJson({\n hash: inputs.headRef.hash,\n invariants: sortedInvariants,\n });\n await writeFile(join(dir, 'refs', 'head.json'), `${headJson}\\n`);\n}\n","import { errorDuplicateSpaceId } from './errors';\n\n/**\n * Per-space input for {@link planAllSpaces}. One entry per loaded\n * contract space (the application's `'app'` plus each extension that\n * exposes a `contractSpace`).\n *\n * - `priorContract` is `null` for a space that has never been emitted\n * (no `migrations/<space-id>/contract.json` on disk yet); otherwise it\n * is the canonical contract value pinned for that space.\n * - `newContract` is the canonical contract value the planner is about\n * to emit for that space — for app-space, the just-emitted root\n * `contract.json`; for an extension space, the descriptor's\n * `contractSpace.contractJson`.\n *\n * @see specs/framework-mechanism.spec.md § 3.\n */\nexport interface SpacePlanInput<TContract> {\n readonly spaceId: string;\n readonly priorContract: TContract | null;\n readonly newContract: TContract;\n}\n\nexport interface SpacePlanOutput<TPackage> {\n readonly spaceId: string;\n readonly migrationPackages: readonly TPackage[];\n}\n\n/**\n * Iterate the per-space planner across a set of loaded contract spaces\n * and return a deterministic shape regardless of declaration order.\n *\n * Behaviour:\n *\n * - The output is sorted alphabetically by `spaceId` (AM3). Two callers\n * passing the same set of inputs in different orders observe\n * byte-identical outputs.\n * - The per-space planner (`planSpace`) is called exactly once per\n * input, in alphabetical-by-spaceId order. Its return value is\n * attached to the corresponding output entry verbatim.\n * - Duplicate `spaceId`s in the input array throw\n * `MIGRATION.DUPLICATE_SPACE_ID` before any `planSpace` call runs,\n * keeping the planner pure when the input is malformed.\n *\n * The signature is generic over `TContract` and `TPackage` because the\n * shape is framework-neutral (SQL family today, Mongo family\n * eventually). Callers wire in whatever contract value and migration\n * package shape their family already speaks.\n *\n * Synchronous: the underlying per-space planner (target's\n * `MigrationPlanner.plan(...)`) is synchronous; callers that need to\n * resolve async I/O (e.g. 
reading pinned `contract.json` from disk)\n * resolve it before calling `planAllSpaces` and pass the materialised\n * inputs through.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Per-space planner (T1.3).\n */\nexport function planAllSpaces<TContract, TPackage>(\n inputs: readonly SpacePlanInput<TContract>[],\n planSpace: (input: SpacePlanInput<TContract>) => readonly TPackage[],\n): readonly SpacePlanOutput<TPackage>[] {\n const seen = new Set<string>();\n for (const input of inputs) {\n if (seen.has(input.spaceId)) {\n throw errorDuplicateSpaceId(input.spaceId);\n }\n seen.add(input.spaceId);\n }\n\n const sorted = [...inputs].sort((a, b) => {\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return sorted.map((input) => ({\n spaceId: input.spaceId,\n migrationPackages: planSpace(input),\n }));\n}\n","import { readFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { errorInvalidJson, errorInvalidRefFile, errorPinnedArtefactsAppSpace } from './errors';\nimport { APP_SPACE_ID, assertValidSpaceId } from './space-layout';\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * Read the pinned head hash for an extension space.\n *\n * Returns the `hash` field of `<projectMigrationsDir>/<spaceId>/refs/head.json`\n * — i.e. the canonical contract hash the framework wrote on the last\n * `migrate` for this space. Returns `null` when the file does not exist\n * (or the migrations directory is missing entirely), which is the\n * \"first emit\" signal {@link import('./detect-space-contract-drift').detectSpaceContractDrift}\n * uses to distinguish a brand-new extension from drift.\n *\n * Pure I/O (read + parse). The \"comparison hash\" is stored on disk by\n * {@link import('./emit-pinned-space-artefacts').emitPinnedSpaceArtefacts}\n * via the descriptor's `headRef.hash`, so reading it back here matches\n * the descriptor's hashing pipeline by construction — neither side\n * recomputes anything.\n *\n * Validation:\n *\n * - Rejects the app space — pinned head refs are an extension-space\n * concept; the app space's contract-of-record lives at the project\n * root, not under `migrations/`.\n * - Validates the space id against the same `[a-z][a-z0-9_-]{0,63}`\n * pattern as the rest of the per-space helpers.\n * - Surfaces `MIGRATION.INVALID_JSON` / `MIGRATION.INVALID_REF_FILE`\n * on a corrupt `refs/head.json` so callers can distinguish \"no\n * pinned file\" (returns `null`) from \"pinned file but unreadable\"\n * (throws).\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n */\nexport async function readPinnedContractHash(\n projectMigrationsDir: string,\n spaceId: string,\n): Promise<string | null> {\n if (spaceId === APP_SPACE_ID) {\n throw errorPinnedArtefactsAppSpace();\n }\n assertValidSpaceId(spaceId);\n\n const filePath = join(projectMigrationsDir, spaceId, 'refs', 'head.json');\n\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return null;\n }\n throw error;\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch (e) {\n throw errorInvalidJson(filePath, e instanceof Error ? 
e.message : String(e));\n }\n\n if (\n typeof parsed !== 'object' ||\n parsed === null ||\n typeof (parsed as { hash?: unknown }).hash !== 'string'\n ) {\n throw errorInvalidRefFile(filePath, 'expected an object with a string `hash` field');\n }\n\n return (parsed as { hash: string }).hash;\n}\n","import { readdir, stat } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { MANIFEST_FILE } from './io';\nimport { APP_SPACE_ID } from './space-layout';\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * List the per-space pinned subdirectories under\n * `<projectRoot>/migrations/`. Returns space-id directory names (sorted\n * alphabetically) — i.e. any non-dot-prefixed subdirectory whose root\n * does **not** contain a `migration.json` manifest. The manifest is the\n * structural marker of a user-authored migration directory (see\n * `readMigrationsDir` in `./io`); directory names themselves belong to\n * the user and are not part of the contract.\n *\n * Returns `[]` if the migrations directory does not exist (greenfield\n * project).\n *\n * Reads only the user's repo. **No descriptor import.** The caller\n * (verifier) feeds the result into {@link verifyContractSpaces} alongside\n * the loaded-space set and the marker rows.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Verifier (steps 5–6).\n */\nexport async function listPinnedSpaceDirectories(\n projectMigrationsDir: string,\n): Promise<readonly string[]> {\n let entries: { readonly name: string; readonly isDirectory: boolean }[];\n try {\n const dirents = await readdir(projectMigrationsDir, { withFileTypes: true });\n entries = dirents.map((d) => ({ name: d.name, isDirectory: d.isDirectory() }));\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return [];\n }\n throw error;\n }\n\n const namedCandidates = entries\n .filter((e) => e.isDirectory)\n .map((e) => e.name)\n .filter((name) => !name.startsWith('.'))\n .sort();\n\n const manifestChecks = await Promise.all(\n namedCandidates.map(async (name) => {\n try {\n await stat(join(projectMigrationsDir, name, MANIFEST_FILE));\n return { name, isMigrationDir: true };\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return { name, isMigrationDir: false };\n }\n throw error;\n }\n }),\n );\n\n return manifestChecks.filter((c) => !c.isMigrationDir).map((c) => c.name);\n}\n\n/**\n * Pinned head value (`(hash, invariants)`) for one contract space.\n * The verifier compares this against the marker row for the same space\n * to detect drift between the user-emitted artefacts and the live DB\n * marker.\n */\nexport interface SpacePinnedHashRecord {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\n/**\n * Marker row read from `prisma_contract.marker` (one per `space`).\n * Caller resolves these via the family runtime's marker reader (T1.1)\n * before invoking {@link verifyContractSpaces}.\n */\nexport interface SpaceMarkerRecord {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\nexport interface VerifyContractSpacesInputs {\n /**\n * Set of contract spaces the project declares: `'app'` plus each\n * extension space in `extensionPacks`. 
The caller's discovery path\n * never reads the extension descriptor module — it walks the\n * `extensionPacks` configuration in `prisma-next.config.ts` for the\n * space ids.\n */\n readonly loadedSpaces: ReadonlySet<string>;\n\n /**\n * Pinned per-space subdirectories observed under\n * `<projectRoot>/migrations/`. Resolved via\n * {@link listPinnedSpaceDirectories}.\n */\n readonly pinnedDirsOnDisk: readonly string[];\n\n /**\n * Pinned head ref per space, keyed by space id. Caller reads\n * `<projectRoot>/migrations/<space-id>/contract.json` and\n * `refs/head.json` (or, for app-space if its pinned shape ever moves\n * under `migrations/`, the equivalent files) to construct this map.\n * Spaces with no pinned dir on disk simply omit a map entry.\n */\n readonly pinnedHashesBySpace: ReadonlyMap<string, SpacePinnedHashRecord>;\n\n /**\n * Marker rows keyed by `space`. Caller reads them from the\n * `prisma_contract.marker` table.\n */\n readonly markerRowsBySpace: ReadonlyMap<string, SpaceMarkerRecord>;\n}\n\nexport type SpaceVerifierViolation =\n | {\n readonly kind: 'declaredButUnmigrated';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'orphanMarker';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'orphanPinnedDir';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'hashMismatch';\n readonly spaceId: string;\n readonly pinnedHash: string;\n readonly markerHash: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'invariantsMismatch';\n readonly spaceId: string;\n readonly pinnedInvariants: readonly string[];\n readonly markerInvariants: readonly string[];\n readonly remediation: string;\n };\n\nexport type VerifyContractSpacesResult =\n | { readonly ok: true }\n | { readonly ok: false; readonly violations: readonly SpaceVerifierViolation[] };\n\n/**\n * Pure structural verifier for the per-space mechanism. Aggregates the\n * three orphan / missing checks (FR6 cases a–c) plus per-space hash and\n * invariant comparison.\n *\n * Algorithm (sub-spec § 4):\n *\n * - For every extension space declared in `loadedSpaces` (`'app'`\n * excluded — its pinned `contract.json` lives at the project root):\n * - If no pinned dir on disk → `declaredButUnmigrated`.\n * - Else if `markerRowsBySpace` lacks an entry → no violation here;\n * the live-DB compare in step 8 (out of scope of this helper) is\n * where the absence shows up.\n * - Else compare marker hash / invariants vs. pinned hash /\n * invariants → `hashMismatch` / `invariantsMismatch` on drift.\n * - For every pinned dir on disk that is not in `loadedSpaces` →\n * `orphanPinnedDir`.\n * - For every marker row whose `space` is not in `loadedSpaces` →\n * `orphanMarker`. The app-space marker is always loaded (`'app'` is\n * in `loadedSpaces` by definition).\n *\n * Output is deterministic (NFR6): violations are sorted first by `kind`\n * (`declaredButUnmigrated` → `orphanMarker` → `orphanPinnedDir` →\n * `hashMismatch` → `invariantsMismatch`) then by `spaceId`. Two callers\n * passing equivalent inputs see byte-identical violation lists.\n *\n * Synchronous, pure, no I/O. **Does not import the extension descriptor**\n * (the inputs are pre-resolved by the caller). 
This is the property\n * AC-15 / AC-26 (\"verifier reads only the user repo, not\n * `node_modules`\") locks in.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Verifier (T1.5).\n */\nexport function verifyContractSpaces(\n inputs: VerifyContractSpacesInputs,\n): VerifyContractSpacesResult {\n const violations: SpaceVerifierViolation[] = [];\n\n for (const spaceId of [...inputs.loadedSpaces].sort()) {\n if (spaceId === APP_SPACE_ID) continue;\n\n if (!inputs.pinnedDirsOnDisk.includes(spaceId)) {\n violations.push({\n kind: 'declaredButUnmigrated',\n spaceId,\n remediation: `Extension '${spaceId}' is declared in extensionPacks but has not been emitted; run \\`prisma-next migrate\\`.`,\n });\n continue;\n }\n\n const pinned = inputs.pinnedHashesBySpace.get(spaceId);\n const marker = inputs.markerRowsBySpace.get(spaceId);\n if (!pinned || !marker) {\n continue;\n }\n\n if (pinned.hash !== marker.hash) {\n violations.push({\n kind: 'hashMismatch',\n spaceId,\n pinnedHash: pinned.hash,\n markerHash: marker.hash,\n remediation: `Marker row for space '${spaceId}' is keyed at ${marker.hash}, but the pinned ${join('migrations', spaceId, 'contract.json')} resolves to ${pinned.hash}. Run \\`prisma-next db update\\` to advance the database, or \\`prisma-next migrate\\` if the descriptor was bumped without re-emitting.`,\n });\n continue;\n }\n\n const pinnedInvariants = [...pinned.invariants].sort();\n const markerInvariants = new Set(marker.invariants);\n const missing = pinnedInvariants.filter((id) => !markerInvariants.has(id));\n if (missing.length > 0) {\n violations.push({\n kind: 'invariantsMismatch',\n spaceId,\n pinnedInvariants,\n markerInvariants: [...marker.invariants].sort(),\n remediation: `Marker row for space '${spaceId}' is missing invariants [${missing.map((s) => JSON.stringify(s)).join(', ')}]. 
Run \\`prisma-next db update\\` to apply the corresponding data-transform migrations.`,\n });\n }\n }\n\n for (const dir of [...inputs.pinnedDirsOnDisk].sort()) {\n if (!inputs.loadedSpaces.has(dir)) {\n violations.push({\n kind: 'orphanPinnedDir',\n spaceId: dir,\n remediation: `Orphan pinned directory \\`${join('migrations', dir)}/\\` for an extension not in extensionPacks; remove the directory or re-add the extension.`,\n });\n }\n }\n\n for (const space of [...inputs.markerRowsBySpace.keys()].sort()) {\n if (!inputs.loadedSpaces.has(space)) {\n violations.push({\n kind: 'orphanMarker',\n spaceId: space,\n remediation: `Orphan marker row for space '${space}' (no longer in extensionPacks); remediation: manually delete the row from \\`prisma_contract.marker\\`.`,\n });\n }\n }\n\n if (violations.length === 0) {\n return { ok: true };\n }\n\n const kindOrder: Record<SpaceVerifierViolation['kind'], number> = {\n declaredButUnmigrated: 0,\n orphanMarker: 1,\n orphanPinnedDir: 2,\n hashMismatch: 3,\n invariantsMismatch: 4,\n };\n\n violations.sort((a, b) => {\n const k = kindOrder[a.kind] - kindOrder[b.kind];\n if (k !== 0) return k;\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return { ok: false, violations };\n}\n"],"mappings":";;;;;;;;;;;;;;AAsBA,MAAM,mBAAmB;AAEzB,SAAgB,eAAe,SAA0C;CACvE,OAAO,iBAAiB,KAAK,QAAQ;;AAGvC,SAAgB,mBAAmB,SAAkD;CACnF,IAAI,CAAC,eAAe,QAAQ,EAC1B,MAAM,oBAAoB,QAAQ;;;;;;;;;;;;;;;;AAkBtC,SAAgB,wBAAwB,sBAA8B,SAAyB;CAC7F,IAAI,YAAY,cACd,OAAO;CAET,mBAAmB,QAAQ;CAC3B,OAAO,KAAK,sBAAsB,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACQ5C,SAAgB,4BACd,QACiC;CACjC,MAAM,uBAAO,IAAI,KAAa;CAC9B,KAAK,MAAM,SAAS,QAAQ;EAC1B,IAAI,KAAK,IAAI,MAAM,QAAQ,EACzB,MAAM,sBAAsB,MAAM,QAAQ;EAE5C,KAAK,IAAI,MAAM,QAAQ;;CAGzB,MAAM,aAAqC,EAAE;CAC7C,IAAI;CACJ,KAAK,MAAM,SAAS,QAClB,IAAI,MAAM,YAAY,cACpB,WAAW;MAEX,WAAW,KAAK,MAAM;CAI1B,WAAW,MAAM,GAAG,MAAM;EACxB,IAAI,EAAE,UAAU,EAAE,SAAS,OAAO;EAClC,IAAI,EAAE,UAAU,EAAE,SAAS,OAAO;EAClC,OAAO;GACP;CAEF,OAAO,WAAW,CAAC,GAAG,YAAY,SAAS,GAAG;;;;;;;;;;;;;;;;;;;;;;;ACpBhD,SAAgB,yBACd,SACA,QAC0B;CAC1B,IAAI,OAAO,eAAe,MACxB,OAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY;EACb;CAEH,IAAI,OAAO,mBAAmB,OAAO,YACnC,OAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY,OAAO;EACpB;CAEH,OAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY,OAAO;EACpB;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC3BH,eAAsB,yBACpB,sBACA,SACA,QACe;CACf,IAAI,YAAY,cACd,MAAM,8BAA8B;CAEtC,mBAAmB,QAAQ;CAE3B,MAAM,MAAM,KAAK,sBAAsB,QAAQ;CAC/C,MAAM,MAAM,KAAK,KAAK,OAAO,EAAE,EAAE,WAAW,MAAM,CAAC;CAEnD,MAAM,UAAU,KAAK,KAAK,gBAAgB,EAAE,GAAG,iBAAiB,OAAO,SAAS,CAAC,IAAI;CACrF,MAAM,UAAU,KAAK,KAAK,gBAAgB,EAAE,OAAO,YAAY;CAE/D,MAAM,mBAAmB,CAAC,GAAG,OAAO,QAAQ,WAAW,CAAC,MAAM;CAC9D,MAAM,WAAW,iBAAiB;EAChC,MAAM,OAAO,QAAQ;EACrB,YAAY;EACb,CAAC;CACF,MAAM,UAAU,KAAK,KAAK,QAAQ,YAAY,EAAE,GAAG,SAAS,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC9BlE,SAAgB,cACd,QACA,WACsC;CACtC,MAAM,uBAAO,IAAI,KAAa;CAC9B,KAAK,MAAM,SAAS,QAAQ;EAC1B,IAAI,KAAK,IAAI,MAAM,QAAQ,EACzB,MAAM,sBAAsB,MAAM,QAAQ;EAE5C,KAAK,IAAI,MAAM,QAAQ;;CASzB,OANe,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM;EACxC,IAAI,EAAE,UAAU,EAAE,SAAS,OAAO;EAClC,IAAI,EAAE,UAAU,EAAE,SAAS,OAAO;EAClC,OAAO;GAGI,CAAC,KAAK,WAAW;EAC5B,SAAS,MAAM;EACf,mBAAmB,UAAU,MAAM;EACpC,EAAE;;;;ACzEL,SAASA,eAAa,OAAgB,MAAuB;CAC3D,OAAO,iBAAiB,SAAU,MAA4B,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCzE,eAAsB,uBACpB,sBACA,SACwB;CACxB,IAAI,YAAY,cACd,MAAM,8BAA8B;CAEtC,mBAAmB,QAAQ;CAE3B,MAAM,WAAW,KAAK,sBAAsB,SAAS,QAAQ,YAAY;CAEzE,IAAI;CACJ,IAAI;EACF,MAAM,MAAM,SAAS,UAAU,QAAQ;UAChC,OAAO;EACd,IAAIA,eAAa,OAAO,SAAS,EAC/B,OAAO;EAET,MAAM;;CAGR,IAAI
;CACJ,IAAI;EACF,SAAS,KAAK,MAAM,IAAI;UACjB,GAAG;EACV,MAAM,iBAAiB,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE,CAAC;;CAG9E,IACE,OAAO,WAAW,YAClB,WAAW,QACX,OAAQ,OAA8B,SAAS,UAE/C,MAAM,oBAAoB,UAAU,gDAAgD;CAGtF,OAAQ,OAA4B;;;;ACtEtC,SAAS,aAAa,OAAgB,MAAuB;CAC3D,OAAO,iBAAiB,SAAU,MAA4B,SAAS;;;;;;;;;;;;;;;;;;;;AAqBzE,eAAsB,2BACpB,sBAC4B;CAC5B,IAAI;CACJ,IAAI;EAEF,WAAU,MADY,QAAQ,sBAAsB,EAAE,eAAe,MAAM,CAAC,EAC1D,KAAK,OAAO;GAAE,MAAM,EAAE;GAAM,aAAa,EAAE,aAAa;GAAE,EAAE;UACvE,OAAO;EACd,IAAI,aAAa,OAAO,SAAS,EAC/B,OAAO,EAAE;EAEX,MAAM;;CAGR,MAAM,kBAAkB,QACrB,QAAQ,MAAM,EAAE,YAAY,CAC5B,KAAK,MAAM,EAAE,KAAK,CAClB,QAAQ,SAAS,CAAC,KAAK,WAAW,IAAI,CAAC,CACvC,MAAM;CAgBT,QAAO,MAdsB,QAAQ,IACnC,gBAAgB,IAAI,OAAO,SAAS;EAClC,IAAI;GACF,MAAM,KAAK,KAAK,sBAAsB,MAAM,cAAc,CAAC;GAC3D,OAAO;IAAE;IAAM,gBAAgB;IAAM;WAC9B,OAAO;GACd,IAAI,aAAa,OAAO,SAAS,EAC/B,OAAO;IAAE;IAAM,gBAAgB;IAAO;GAExC,MAAM;;GAER,CACH,EAEqB,QAAQ,MAAM,CAAC,EAAE,eAAe,CAAC,KAAK,MAAM,EAAE,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6H3E,SAAgB,qBACd,QAC4B;CAC5B,MAAM,aAAuC,EAAE;CAE/C,KAAK,MAAM,WAAW,CAAC,GAAG,OAAO,aAAa,CAAC,MAAM,EAAE;EACrD,IAAI,YAAY,cAAc;EAE9B,IAAI,CAAC,OAAO,iBAAiB,SAAS,QAAQ,EAAE;GAC9C,WAAW,KAAK;IACd,MAAM;IACN;IACA,aAAa,cAAc,QAAQ;IACpC,CAAC;GACF;;EAGF,MAAM,SAAS,OAAO,oBAAoB,IAAI,QAAQ;EACtD,MAAM,SAAS,OAAO,kBAAkB,IAAI,QAAQ;EACpD,IAAI,CAAC,UAAU,CAAC,QACd;EAGF,IAAI,OAAO,SAAS,OAAO,MAAM;GAC/B,WAAW,KAAK;IACd,MAAM;IACN;IACA,YAAY,OAAO;IACnB,YAAY,OAAO;IACnB,aAAa,yBAAyB,QAAQ,gBAAgB,OAAO,KAAK,mBAAmB,KAAK,cAAc,SAAS,gBAAgB,CAAC,eAAe,OAAO,KAAK;IACtK,CAAC;GACF;;EAGF,MAAM,mBAAmB,CAAC,GAAG,OAAO,WAAW,CAAC,MAAM;EACtD,MAAM,mBAAmB,IAAI,IAAI,OAAO,WAAW;EACnD,MAAM,UAAU,iBAAiB,QAAQ,OAAO,CAAC,iBAAiB,IAAI,GAAG,CAAC;EAC1E,IAAI,QAAQ,SAAS,GACnB,WAAW,KAAK;GACd,MAAM;GACN;GACA;GACA,kBAAkB,CAAC,GAAG,OAAO,WAAW,CAAC,MAAM;GAC/C,aAAa,yBAAyB,QAAQ,2BAA2B,QAAQ,KAAK,MAAM,KAAK,UAAU,EAAE,CAAC,CAAC,KAAK,KAAK,CAAC;GAC3H,CAAC;;CAIN,KAAK,MAAM,OAAO,CAAC,GAAG,OAAO,iBAAiB,CAAC,MAAM,EACnD,IAAI,CAAC,OAAO,aAAa,IAAI,IAAI,EAC/B,WAAW,KAAK;EACd,MAAM;EACN,SAAS;EACT,aAAa,6BAA6B,KAAK,cAAc,IAAI,CAAC;EACnE,CAAC;CAIN,KAAK,MAAM,SAAS,CAAC,GAAG,OAAO,kBAAkB,MAAM,CAAC,CAAC,MAAM,EAC7D,IAAI,CAAC,OAAO,aAAa,IAAI,MAAM,EACjC,WAAW,KAAK;EACd,MAAM;EACN,SAAS;EACT,aAAa,gCAAgC,MAAM;EACpD,CAAC;CAIN,IAAI,WAAW,WAAW,GACxB,OAAO,EAAE,IAAI,MAAM;CAGrB,MAAM,YAA4D;EAChE,uBAAuB;EACvB,cAAc;EACd,iBAAiB;EACjB,cAAc;EACd,oBAAoB;EACrB;CAED,WAAW,MAAM,GAAG,MAAM;EACxB,MAAM,IAAI,UAAU,EAAE,QAAQ,UAAU,EAAE;EAC1C,IAAI,MAAM,GAAG,OAAO;EACpB,IAAI,EAAE,UAAU,EAAE,SAAS,OAAO;EAClC,IAAI,EAAE,UAAU,EAAE,SAAS,OAAO;EAClC,OAAO;GACP;CAEF,OAAO;EAAE,IAAI;EAAO;EAAY"}
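The spaces helpers embedded above compose into a small drift-check pipeline: read the pinned head hash from `refs/head.json`, compare it against the descriptor's hash, and warn only when the two disagree. A minimal sketch of that wiring in TypeScript, assuming the functions are re-exported under these names from the package's spaces entry point (the import specifier is illustrative):

import { detectSpaceContractDrift, readPinnedContractHash } from '@prisma-next/migration-tools/spaces';

async function checkSpaceDrift(
  projectMigrationsDir: string,
  spaceId: string,
  descriptorHash: string,
): Promise<void> {
  // null signals the "first emit" case: no refs/head.json pinned yet.
  const pinnedHash = await readPinnedContractHash(projectMigrationsDir, spaceId);
  const result = detectSpaceContractDrift(spaceId, { descriptorHash, pinnedHash });
  if (result.kind === 'drift') {
    // Non-fatal by design: the migrate emit still proceeds and materialises the bump.
    console.warn(
      `contract drift in space '${result.spaceId}': descriptor ${result.descriptorHash} vs pinned ${result.pinnedHash}`,
    );
  }
}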
@@ -25,4 +25,4 @@ interface MigrationGraph {
  }
  //#endregion
  export { MigrationGraph as n, MigrationEdge as t };
- //# sourceMappingURL=graph-Czaj8O2q.d.mts.map
+ //# sourceMappingURL=graph-4dIUm90i.d.mts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"graph-4dIUm90i.d.mts","names":[],"sources":["../src/graph.ts"],"mappings":";;AAIA;;;UAAiB,aAAA;EAAA,SACN,IAAA;EAAA,SACA,EAAA;EAAA,SACA,aAAA;EAAA,SACA,OAAA;EAAA,SACA,SAAA;EAAA,SACA,MAAA;EAMA;;;AAGX;;EAHW,SAAA,UAAA;AAAA;AAAA,UAGM,cAAA;EAAA,SACN,KAAA,EAAO,WAAA;EAAA,SACP,YAAA,EAAc,WAAA,kBAA6B,aAAA;EAAA,SAC3C,YAAA,EAAc,WAAA,kBAA6B,aAAA;EAAA,SAC3C,eAAA,EAAiB,WAAA,SAAoB,aAAA;AAAA"}
@@ -1,5 +1,4 @@
  import { createHash } from "node:crypto";
-
  //#region src/canonicalize-json.ts
  function sortKeys(value) {
  if (value === null || typeof value !== "object") return value;
@@ -11,7 +10,6 @@ function sortKeys(value) {
  function canonicalizeJson(value) {
  return JSON.stringify(sortKeys(value));
  }
-
  //#endregion
  //#region src/hash.ts
  function sha256Hex(input) {
@@ -70,7 +68,7 @@ function verifyMigrationHash(pkg) {
  computedHash: computed
  };
  }
-
  //#endregion
  export { verifyMigrationHash as n, canonicalizeJson as r, computeMigrationHash as t };
- //# sourceMappingURL=hash-G0bAfIGh.mjs.map
+
+ //# sourceMappingURL=hash-By50zM_E.mjs.map
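The `sortKeys` / `canonicalizeJson` pair shown in this chunk is what makes write-time and read-time hashing comparable: key order never affects the canonical bytes. A small illustration, assuming `canonicalizeJson` is re-exported under its public name from the package's hash entry point:

import { canonicalizeJson } from '@prisma-next/migration-tools/hash';

// Both calls produce the identical string '{"a":{"c":3,"d":2},"b":1}',
// so hashes computed over them agree regardless of source-side key order.
const first = canonicalizeJson({ b: 1, a: { d: 2, c: 3 } });
const second = canonicalizeJson({ a: { c: 3, d: 2 }, b: 1 });
console.log(first === second); // true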
@@ -0,0 +1 @@
+ {"version":3,"file":"hash-By50zM_E.mjs","names":[],"sources":["../src/canonicalize-json.ts","../src/hash.ts"],"sourcesContent":["function sortKeys(value: unknown): unknown {\n if (value === null || typeof value !== 'object') {\n return value;\n }\n if (Array.isArray(value)) {\n return value.map(sortKeys);\n }\n const sorted: Record<string, unknown> = {};\n for (const key of Object.keys(value).sort()) {\n sorted[key] = sortKeys((value as Record<string, unknown>)[key]);\n }\n return sorted;\n}\n\nexport function canonicalizeJson(value: unknown): string {\n return JSON.stringify(sortKeys(value));\n}\n","import { createHash } from 'node:crypto';\nimport { canonicalizeJson } from './canonicalize-json';\nimport type { MigrationMetadata } from './metadata';\nimport type { MigrationOps, OnDiskMigrationPackage } from './package';\n\nexport interface VerifyResult {\n readonly ok: boolean;\n readonly reason?: 'mismatch';\n readonly storedHash: string;\n readonly computedHash: string;\n}\n\nfunction sha256Hex(input: string): string {\n return createHash('sha256').update(input).digest('hex');\n}\n\n/**\n * Content-addressed migration hash over (metadata envelope sans\n * contracts/hints/signature, ops). See ADR 199 — Storage-only migration\n * identity for the rationale: contracts are anchored separately by the\n * storage-hash bookends inside the envelope; planner hints are advisory\n * and must not affect identity.\n *\n * The integrity check is purely structural, not semantic. The function\n * canonicalizes its inputs via `sortKeys` (recursive) + `JSON.stringify`\n * and hashes the result. Target-specific operation payloads (`step.sql`,\n * Mongo's pipeline AST, …) are hashed verbatim — no per-target\n * normalization is required, because what's being verified is \"do the\n * on-disk bytes still produce their recorded hash\", not \"do two\n * semantically-equivalent migrations hash the same\". The latter is an\n * emit-drift concern (ADR 192 step 2).\n *\n * The symmetry across write and read holds because `JSON.parse(\n * JSON.stringify(x))` round-trips JSON-safe values losslessly and\n * `sortKeys` is idempotent and deterministic — write-time and read-time\n * canonicalization produce the same canonical bytes regardless of\n * source-side key ordering or whitespace.\n *\n * The `migrationHash` field on the metadata is stripped before hashing\n * so the function can be used both at write time (when no hash exists\n * yet) and at verify time (rehashing an already-attested record).\n */\nexport function computeMigrationHash(\n metadata: Omit<MigrationMetadata, 'migrationHash'> & { readonly migrationHash?: string },\n ops: MigrationOps,\n): string {\n const {\n migrationHash: _migrationHash,\n signature: _signature,\n fromContract: _fromContract,\n toContract: _toContract,\n hints: _hints,\n ...strippedMeta\n } = metadata;\n\n const canonicalMetadata = canonicalizeJson(strippedMeta);\n const canonicalOps = canonicalizeJson(ops);\n\n const partHashes = [canonicalMetadata, canonicalOps].map(sha256Hex);\n const hash = sha256Hex(canonicalizeJson(partHashes));\n\n return `sha256:${hash}`;\n}\n\n/**\n * Re-hash an in-memory migration package and compare against the stored\n * `migrationHash`. 
See `computeMigrationHash` for the canonicalization rules.\n *\n * Returns `{ ok: true }` when the package is internally consistent, or\n * `{ ok: false, reason: 'mismatch', storedHash, computedHash }` when it is\n * not — typically a sign of FS corruption, partial writes, or a post-emit\n * hand edit.\n */\nexport function verifyMigrationHash(pkg: OnDiskMigrationPackage): VerifyResult {\n const computed = computeMigrationHash(pkg.metadata, pkg.ops);\n\n if (pkg.metadata.migrationHash === computed) {\n return {\n ok: true,\n storedHash: pkg.metadata.migrationHash,\n computedHash: computed,\n };\n }\n\n return {\n ok: false,\n reason: 'mismatch',\n storedHash: pkg.metadata.migrationHash,\n computedHash: computed,\n };\n}\n"],"mappings":";;AAAA,SAAS,SAAS,OAAyB;CACzC,IAAI,UAAU,QAAQ,OAAO,UAAU,UACrC,OAAO;CAET,IAAI,MAAM,QAAQ,MAAM,EACtB,OAAO,MAAM,IAAI,SAAS;CAE5B,MAAM,SAAkC,EAAE;CAC1C,KAAK,MAAM,OAAO,OAAO,KAAK,MAAM,CAAC,MAAM,EACzC,OAAO,OAAO,SAAU,MAAkC,KAAK;CAEjE,OAAO;;AAGT,SAAgB,iBAAiB,OAAwB;CACvD,OAAO,KAAK,UAAU,SAAS,MAAM,CAAC;;;;ACHxC,SAAS,UAAU,OAAuB;CACxC,OAAO,WAAW,SAAS,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6BzD,SAAgB,qBACd,UACA,KACQ;CACR,MAAM,EACJ,eAAe,gBACf,WAAW,YACX,cAAc,eACd,YAAY,aACZ,OAAO,QACP,GAAG,iBACD;CAQJ,OAAO,UAFM,UAAU,iBADJ,CAHO,iBAAiB,aAGN,EAFhB,iBAAiB,IAEa,CAAC,CAAC,IAAI,UACP,CAAC,CAE9B;;;;;;;;;;;AAYvB,SAAgB,oBAAoB,KAA2C;CAC7E,MAAM,WAAW,qBAAqB,IAAI,UAAU,IAAI,IAAI;CAE5D,IAAI,IAAI,SAAS,kBAAkB,UACjC,OAAO;EACL,IAAI;EACJ,YAAY,IAAI,SAAS;EACzB,cAAc;EACf;CAGH,OAAO;EACL,IAAI;EACJ,QAAQ;EACR,YAAY,IAAI,SAAS;EACzB,cAAc;EACf"}
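Callers typically use `verifyMigrationHash` as a load-time integrity gate; it returns a structured result rather than throwing. A sketch under the assumption that the function and the `OnDiskMigrationPackage` type are exposed from the package's hash and package entry points:

import { verifyMigrationHash } from '@prisma-next/migration-tools/hash';
import type { OnDiskMigrationPackage } from '@prisma-next/migration-tools/package';

function assertMigrationIntact(pkg: OnDiskMigrationPackage): void {
  const result = verifyMigrationHash(pkg);
  if (!result.ok) {
    // A mismatch usually means FS corruption, a partial write, or a post-emit hand edit.
    throw new Error(
      `migration hash mismatch: stored ${result.storedHash}, computed ${result.computedHash}`,
    );
  }
}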
@@ -1,5 +1,4 @@
- import { c as errorInvalidInvariantId, i as errorDuplicateInvariantInEdge } from "./errors-DQsXvidG.mjs";
-
+ import { c as errorInvalidInvariantId, i as errorDuplicateInvariantInEdge } from "./errors-5KVuWV_5.mjs";
  //#region src/invariants.ts
  /**
  * Hygiene check for `invariantId`. Rejects empty values plus any
@@ -36,7 +35,7 @@ function readInvariantId(op) {
  const candidate = op.invariantId;
  return typeof candidate === "string" ? candidate : void 0;
  }
-
  //#endregion
  export { validateInvariantId as n, deriveProvidedInvariants as t };
- //# sourceMappingURL=invariants-4Avb_Yhy.mjs.map
+
+ //# sourceMappingURL=invariants-CkLSBcMu.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"invariants-4Avb_Yhy.mjs","names":[],"sources":["../src/invariants.ts"],"sourcesContent":["import type { MigrationPlanOperation } from '@prisma-next/framework-components/control';\nimport { errorDuplicateInvariantInEdge, errorInvalidInvariantId } from './errors';\nimport type { MigrationOps } from './package';\n\n/**\n * Hygiene check for `invariantId`. Rejects empty values plus any\n * whitespace or control character (including Unicode whitespace like\n * NBSP and em space, which are visually identical to ASCII space and\n * routinely sneak in via paste).\n */\nexport function validateInvariantId(invariantId: string): boolean {\n if (invariantId.length === 0) return false;\n return !/[\\p{Cc}\\p{White_Space}]/u.test(invariantId);\n}\n\n/**\n * Walk a migration's operations and produce its `providedInvariants`\n * aggregate: the sorted, deduplicated list of `invariantId`s declared\n * by data-transform ops. Ops without `operationClass === 'data'` are\n * skipped; data ops without an `invariantId` are skipped.\n *\n * Throws `MIGRATION.INVALID_INVARIANT_ID` on a malformed id and\n * `MIGRATION.DUPLICATE_INVARIANT_IN_EDGE` on duplicates.\n */\nexport function deriveProvidedInvariants(ops: MigrationOps): readonly string[] {\n const seen = new Set<string>();\n for (const op of ops) {\n const invariantId = readInvariantId(op);\n if (invariantId === undefined) continue;\n if (!validateInvariantId(invariantId)) {\n throw errorInvalidInvariantId(invariantId);\n }\n if (seen.has(invariantId)) {\n throw errorDuplicateInvariantInEdge(invariantId);\n }\n seen.add(invariantId);\n }\n return [...seen].sort();\n}\n\nfunction readInvariantId(op: MigrationPlanOperation): string | undefined {\n if (op.operationClass !== 'data') return undefined;\n const candidate = (op as { invariantId?: unknown }).invariantId;\n return typeof candidate === 'string' ? candidate : undefined;\n}\n"],"mappings":";;;;;;;;;AAUA,SAAgB,oBAAoB,aAA8B;AAChE,KAAI,YAAY,WAAW,EAAG,QAAO;AACrC,QAAO,CAAC,2BAA2B,KAAK,YAAY;;;;;;;;;;;AAYtD,SAAgB,yBAAyB,KAAsC;CAC7E,MAAM,uBAAO,IAAI,KAAa;AAC9B,MAAK,MAAM,MAAM,KAAK;EACpB,MAAM,cAAc,gBAAgB,GAAG;AACvC,MAAI,gBAAgB,OAAW;AAC/B,MAAI,CAAC,oBAAoB,YAAY,CACnC,OAAM,wBAAwB,YAAY;AAE5C,MAAI,KAAK,IAAI,YAAY,CACvB,OAAM,8BAA8B,YAAY;AAElD,OAAK,IAAI,YAAY;;AAEvB,QAAO,CAAC,GAAG,KAAK,CAAC,MAAM;;AAGzB,SAAS,gBAAgB,IAAgD;AACvE,KAAI,GAAG,mBAAmB,OAAQ,QAAO;CACzC,MAAM,YAAa,GAAiC;AACpD,QAAO,OAAO,cAAc,WAAW,YAAY"}
+ {"version":3,"file":"invariants-CkLSBcMu.mjs","names":[],"sources":["../src/invariants.ts"],"sourcesContent":["import type { MigrationPlanOperation } from '@prisma-next/framework-components/control';\nimport { errorDuplicateInvariantInEdge, errorInvalidInvariantId } from './errors';\nimport type { MigrationOps } from './package';\n\n/**\n * Hygiene check for `invariantId`. Rejects empty values plus any\n * whitespace or control character (including Unicode whitespace like\n * NBSP and em space, which are visually identical to ASCII space and\n * routinely sneak in via paste).\n */\nexport function validateInvariantId(invariantId: string): boolean {\n if (invariantId.length === 0) return false;\n return !/[\\p{Cc}\\p{White_Space}]/u.test(invariantId);\n}\n\n/**\n * Walk a migration's operations and produce its `providedInvariants`\n * aggregate: the sorted, deduplicated list of `invariantId`s declared\n * by data-transform ops. Ops without `operationClass === 'data'` are\n * skipped; data ops without an `invariantId` are skipped.\n *\n * Throws `MIGRATION.INVALID_INVARIANT_ID` on a malformed id and\n * `MIGRATION.DUPLICATE_INVARIANT_IN_EDGE` on duplicates.\n */\nexport function deriveProvidedInvariants(ops: MigrationOps): readonly string[] {\n const seen = new Set<string>();\n for (const op of ops) {\n const invariantId = readInvariantId(op);\n if (invariantId === undefined) continue;\n if (!validateInvariantId(invariantId)) {\n throw errorInvalidInvariantId(invariantId);\n }\n if (seen.has(invariantId)) {\n throw errorDuplicateInvariantInEdge(invariantId);\n }\n seen.add(invariantId);\n }\n return [...seen].sort();\n}\n\nfunction readInvariantId(op: MigrationPlanOperation): string | undefined {\n if (op.operationClass !== 'data') return undefined;\n const candidate = (op as { invariantId?: unknown }).invariantId;\n return typeof candidate === 'string' ? candidate : undefined;\n}\n"],"mappings":";;;;;;;;AAUA,SAAgB,oBAAoB,aAA8B;CAChE,IAAI,YAAY,WAAW,GAAG,OAAO;CACrC,OAAO,CAAC,2BAA2B,KAAK,YAAY;;;;;;;;;;;AAYtD,SAAgB,yBAAyB,KAAsC;CAC7E,MAAM,uBAAO,IAAI,KAAa;CAC9B,KAAK,MAAM,MAAM,KAAK;EACpB,MAAM,cAAc,gBAAgB,GAAG;EACvC,IAAI,gBAAgB,KAAA,GAAW;EAC/B,IAAI,CAAC,oBAAoB,YAAY,EACnC,MAAM,wBAAwB,YAAY;EAE5C,IAAI,KAAK,IAAI,YAAY,EACvB,MAAM,8BAA8B,YAAY;EAElD,KAAK,IAAI,YAAY;;CAEvB,OAAO,CAAC,GAAG,KAAK,CAAC,MAAM;;AAGzB,SAAS,gBAAgB,IAAgD;CACvE,IAAI,GAAG,mBAAmB,QAAQ,OAAO,KAAA;CACzC,MAAM,YAAa,GAAiC;CACpD,OAAO,OAAO,cAAc,WAAW,YAAY,KAAA"}