@prisma-next/migration-tools 0.5.0-dev.67 → 0.5.0-dev.68

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70) hide show
  1. package/dist/{errors-5KVuWV_5.mjs → errors-EPL_9p9f.mjs} +12 -6
  2. package/dist/errors-EPL_9p9f.mjs.map +1 -0
  3. package/dist/exports/aggregate.d.mts +534 -0
  4. package/dist/exports/aggregate.d.mts.map +1 -0
  5. package/dist/exports/aggregate.mjs +598 -0
  6. package/dist/exports/aggregate.mjs.map +1 -0
  7. package/dist/exports/errors.d.mts +6 -1
  8. package/dist/exports/errors.d.mts.map +1 -1
  9. package/dist/exports/errors.mjs +2 -2
  10. package/dist/exports/graph.d.mts +1 -1
  11. package/dist/exports/hash.d.mts +1 -1
  12. package/dist/exports/invariants.d.mts +13 -2
  13. package/dist/exports/invariants.d.mts.map +1 -1
  14. package/dist/exports/invariants.mjs +1 -1
  15. package/dist/exports/io.d.mts +25 -1
  16. package/dist/exports/io.d.mts.map +1 -1
  17. package/dist/exports/io.mjs +2 -2
  18. package/dist/exports/metadata.d.mts +1 -1
  19. package/dist/exports/migration-graph.d.mts +1 -1
  20. package/dist/exports/migration-graph.mjs +1 -522
  21. package/dist/exports/migration.d.mts +1 -1
  22. package/dist/exports/migration.mjs +2 -2
  23. package/dist/exports/refs.mjs +1 -1
  24. package/dist/exports/spaces.d.mts +341 -237
  25. package/dist/exports/spaces.d.mts.map +1 -1
  26. package/dist/exports/spaces.mjs +137 -339
  27. package/dist/exports/spaces.mjs.map +1 -1
  28. package/dist/{graph-4dIUm90i.d.mts → graph-HMWAldoR.d.mts} +1 -1
  29. package/dist/{graph-4dIUm90i.d.mts.map → graph-HMWAldoR.d.mts.map} +1 -1
  30. package/dist/{invariants-CkLSBcMu.mjs → invariants-Duc8f9NM.mjs} +16 -5
  31. package/dist/invariants-Duc8f9NM.mjs.map +1 -0
  32. package/dist/{io-TX8RPDeh.mjs → io-D13dLvUh.mjs} +38 -4
  33. package/dist/io-D13dLvUh.mjs.map +1 -0
  34. package/dist/migration-graph-DGNnKDY5.mjs +523 -0
  35. package/dist/{exports/migration-graph.mjs.map → migration-graph-DGNnKDY5.mjs.map} +1 -1
  36. package/dist/read-contract-space-contract-C3-1eyaI.mjs +298 -0
  37. package/dist/read-contract-space-contract-C3-1eyaI.mjs.map +1 -0
  38. package/package.json +10 -6
  39. package/src/aggregate/loader.ts +409 -0
  40. package/src/aggregate/marker-types.ts +16 -0
  41. package/src/aggregate/planner-types.ts +137 -0
  42. package/src/aggregate/planner.ts +158 -0
  43. package/src/aggregate/project-schema-to-space.ts +64 -0
  44. package/src/aggregate/strategies/graph-walk.ts +92 -0
  45. package/src/aggregate/strategies/synth.ts +122 -0
  46. package/src/aggregate/types.ts +89 -0
  47. package/src/aggregate/verifier.ts +230 -0
  48. package/src/assert-descriptor-self-consistency.ts +70 -0
  49. package/src/compute-extension-space-apply-path.ts +152 -0
  50. package/src/concatenate-space-apply-inputs.ts +2 -2
  51. package/src/detect-space-contract-drift.ts +22 -26
  52. package/src/{emit-pinned-space-artefacts.ts → emit-contract-space-artefacts.ts} +14 -33
  53. package/src/errors.ts +11 -5
  54. package/src/exports/aggregate.ts +37 -0
  55. package/src/exports/errors.ts +1 -0
  56. package/src/exports/io.ts +1 -0
  57. package/src/exports/spaces.ts +23 -10
  58. package/src/gather-disk-contract-space-state.ts +62 -0
  59. package/src/invariants.ts +14 -3
  60. package/src/io.ts +42 -0
  61. package/src/plan-all-spaces.ts +3 -7
  62. package/src/read-contract-space-contract.ts +44 -0
  63. package/src/read-contract-space-head-ref.ts +63 -0
  64. package/src/space-layout.ts +4 -11
  65. package/src/verify-contract-spaces.ts +45 -49
  66. package/dist/errors-5KVuWV_5.mjs.map +0 -1
  67. package/dist/invariants-CkLSBcMu.mjs.map +0 -1
  68. package/dist/io-TX8RPDeh.mjs.map +0 -1
  69. package/src/read-pinned-contract-hash.ts +0 -77
  70. /package/dist/{metadata-th_MvOTT.d.mts → metadata-BnLFiI6B.d.mts} +0 -0
@@ -1 +1 @@
1
- {"version":3,"file":"migration-graph.mjs","names":[],"sources":["../../src/queue.ts","../../src/graph-ops.ts","../../src/migration-graph.ts"],"sourcesContent":["/**\n * FIFO queue with amortised O(1) push and shift.\n *\n * Uses a head-index cursor over a backing array rather than\n * `Array.prototype.shift()`, which is O(n) on V8. Intended for BFS-shaped\n * traversals where the queue is drained in a single pass — it does not\n * reclaim memory for already-shifted items, so it is not suitable for\n * long-lived queues with many push/shift cycles.\n */\nexport class Queue<T> {\n private readonly items: T[];\n private head = 0;\n\n constructor(initial: Iterable<T> = []) {\n this.items = [...initial];\n }\n\n push(item: T): void {\n this.items.push(item);\n }\n\n /**\n * Remove and return the next item. Caller must check `isEmpty` first —\n * shifting an empty queue throws.\n */\n shift(): T {\n if (this.head >= this.items.length) {\n throw new Error('Queue.shift called on empty queue');\n }\n // biome-ignore lint/style/noNonNullAssertion: bounds-checked on the line above\n return this.items[this.head++]!;\n }\n\n get isEmpty(): boolean {\n return this.head >= this.items.length;\n }\n}\n","import { Queue } from './queue';\n\n/**\n * One step of a BFS traversal.\n *\n * `parent` and `incomingEdge` are `null` for start states — they were not\n * reached via any edge. For every other state they record the predecessor\n * state and the edge by which this state was first reached.\n *\n * `state` is the BFS state, most often a string (graph node identifier) but\n * can be a composite object. 
The string overload keeps the common case\n * ergonomic; the generic overload accepts a caller-supplied `key` function\n * that produces a stable equality key for dedup.\n */\nexport interface BfsStep<S, E> {\n readonly state: S;\n readonly parent: S | null;\n readonly incomingEdge: E | null;\n}\n\n/**\n * Generic breadth-first traversal.\n *\n * Direction (forward/reverse) is expressed by the caller's `neighbours`\n * closure: return `{ next, edge }` pairs where `next` is the state to visit\n * next and `edge` is the edge that connects them. Callers that don't need\n * path reconstruction can ignore the `parent`/`incomingEdge` fields of each\n * yielded step.\n *\n * Ordering — when the result needs to be deterministic (path-finding) the\n * caller is responsible for sorting inside `neighbours`; this generator\n * does not impose an ordering hook of its own. State-dependent orderings\n * have full access to the source state inside the closure.\n *\n * Stops are intrinsic — callers `break` out of the `for..of` loop when\n * they've found what they're looking for.\n */\nexport function bfs<E>(\n starts: Iterable<string>,\n neighbours: (state: string) => Iterable<{ next: string; edge: E }>,\n): Generator<BfsStep<string, E>>;\nexport function bfs<S, E>(\n starts: Iterable<S>,\n neighbours: (state: S) => Iterable<{ next: S; edge: E }>,\n key: (state: S) => string,\n): Generator<BfsStep<S, E>>;\nexport function* bfs<S, E>(\n starts: Iterable<S>,\n neighbours: (state: S) => Iterable<{ next: S; edge: E }>,\n // Identity default for the string overload. 
TypeScript can't express\n // \"default applies only when S = string\", so this cast bridges the\n // generic implementation signature to the public overloads — which\n // guarantee `key` is omitted only when S = string at the call site.\n key: (state: S) => string = (state) => state as unknown as string,\n): Generator<BfsStep<S, E>> {\n // Queue entries carry the state alongside its key so we don't recompute\n // key() twice per visit (once on dedup, once on parent lookup). Composite\n // keys can be non-trivial to compute; string-overload callers pay nothing\n // since key() is identity there.\n interface Entry {\n readonly state: S;\n readonly key: string;\n }\n const visited = new Set<string>();\n const parentMap = new Map<string, { parent: S; edge: E }>();\n const queue = new Queue<Entry>();\n for (const start of starts) {\n const k = key(start);\n if (!visited.has(k)) {\n visited.add(k);\n queue.push({ state: start, key: k });\n }\n }\n while (!queue.isEmpty) {\n const { state: current, key: curKey } = queue.shift();\n const parentInfo = parentMap.get(curKey);\n yield {\n state: current,\n parent: parentInfo?.parent ?? null,\n incomingEdge: parentInfo?.edge ?? null,\n };\n\n for (const { next, edge } of neighbours(current)) {\n const k = key(next);\n if (!visited.has(k)) {\n visited.add(k);\n parentMap.set(k, { parent: current, edge });\n queue.push({ state: next, key: k });\n }\n }\n }\n}\n","import { ifDefined } from '@prisma-next/utils/defined';\nimport { EMPTY_CONTRACT_HASH } from './constants';\nimport {\n errorAmbiguousTarget,\n errorDuplicateMigrationHash,\n errorNoInitialMigration,\n errorNoTarget,\n errorSameSourceAndTarget,\n} from './errors';\nimport type { MigrationEdge, MigrationGraph } from './graph';\nimport { bfs } from './graph-ops';\nimport type { OnDiskMigrationPackage } from './package';\n\n/** Forward-edge neighbours: edge `e` from `n` visits `e.to` next. 
*/\nfunction forwardNeighbours(graph: MigrationGraph, node: string) {\n return (graph.forwardChain.get(node) ?? []).map((edge) => ({ next: edge.to, edge }));\n}\n\n/**\n * Forward-edge neighbours, sorted by the deterministic tie-break.\n * Used by path-finding so the resulting shortest path is stable across runs.\n */\nfunction sortedForwardNeighbours(graph: MigrationGraph, node: string) {\n const edges = graph.forwardChain.get(node) ?? [];\n return [...edges].sort(compareTieBreak).map((edge) => ({ next: edge.to, edge }));\n}\n\n/** Reverse-edge neighbours: edge `e` from `n` visits `e.from` next. */\nfunction reverseNeighbours(graph: MigrationGraph, node: string) {\n return (graph.reverseChain.get(node) ?? []).map((edge) => ({ next: edge.from, edge }));\n}\n\nfunction appendEdge(map: Map<string, MigrationEdge[]>, key: string, entry: MigrationEdge): void {\n const bucket = map.get(key);\n if (bucket) bucket.push(entry);\n else map.set(key, [entry]);\n}\n\nexport function reconstructGraph(packages: readonly OnDiskMigrationPackage[]): MigrationGraph {\n const nodes = new Set<string>();\n const forwardChain = new Map<string, MigrationEdge[]>();\n const reverseChain = new Map<string, MigrationEdge[]>();\n const migrationByHash = new Map<string, MigrationEdge>();\n\n for (const pkg of packages) {\n // Manifest `from` is `string | null` (null = baseline). The graph layer\n // is the marker/path layer where \"no prior state\" is encoded as the\n // EMPTY_CONTRACT_HASH sentinel; bridge here so pathfinding stays string-\n // keyed.\n const from = pkg.metadata.from ?? 
EMPTY_CONTRACT_HASH;\n const { to } = pkg.metadata;\n\n if (from === to) {\n const hasDataOp = pkg.ops.some((op) => op.operationClass === 'data');\n if (!hasDataOp) {\n throw errorSameSourceAndTarget(pkg.dirPath, from);\n }\n }\n\n nodes.add(from);\n nodes.add(to);\n\n const migration: MigrationEdge = {\n from,\n to,\n migrationHash: pkg.metadata.migrationHash,\n dirName: pkg.dirName,\n createdAt: pkg.metadata.createdAt,\n labels: pkg.metadata.labels,\n invariants: pkg.metadata.providedInvariants,\n };\n\n if (migrationByHash.has(migration.migrationHash)) {\n throw errorDuplicateMigrationHash(migration.migrationHash);\n }\n migrationByHash.set(migration.migrationHash, migration);\n\n appendEdge(forwardChain, from, migration);\n appendEdge(reverseChain, to, migration);\n }\n\n return { nodes, forwardChain, reverseChain, migrationByHash };\n}\n\n// ---------------------------------------------------------------------------\n// Deterministic tie-breaking for BFS neighbour order.\n// Used by path-finders only; not a general-purpose utility.\n// Ordering: label priority → createdAt → to → migrationHash.\n// ---------------------------------------------------------------------------\n\nconst LABEL_PRIORITY: Record<string, number> = { main: 0, default: 1, feature: 2 };\n\nfunction labelPriority(labels: readonly string[]): number {\n let best = 3;\n for (const l of labels) {\n const p = LABEL_PRIORITY[l];\n if (p !== undefined && p < best) best = p;\n }\n return best;\n}\n\nfunction compareTieBreak(a: MigrationEdge, b: MigrationEdge): number {\n const lp = labelPriority(a.labels) - labelPriority(b.labels);\n if (lp !== 0) return lp;\n const ca = a.createdAt.localeCompare(b.createdAt);\n if (ca !== 0) return ca;\n const tc = a.to.localeCompare(b.to);\n if (tc !== 0) return tc;\n return a.migrationHash.localeCompare(b.migrationHash);\n}\n\nfunction sortedNeighbors(edges: readonly MigrationEdge[]): readonly MigrationEdge[] {\n return 
[...edges].sort(compareTieBreak);\n}\n\n/**\n * Find the shortest path from `fromHash` to `toHash` using BFS over the\n * contract-hash graph. Returns the ordered list of edges, or null if no path\n * exists. Returns an empty array when `fromHash === toHash` (no-op).\n *\n * Neighbor ordering is deterministic via the tie-break sort key:\n * label priority → createdAt → to → migrationHash.\n */\nexport function findPath(\n graph: MigrationGraph,\n fromHash: string,\n toHash: string,\n): readonly MigrationEdge[] | null {\n if (fromHash === toHash) return [];\n\n const parents = new Map<string, { parent: string; edge: MigrationEdge }>();\n for (const step of bfs([fromHash], (n) => sortedForwardNeighbours(graph, n))) {\n if (step.parent !== null && step.incomingEdge !== null) {\n parents.set(step.state, { parent: step.parent, edge: step.incomingEdge });\n }\n if (step.state === toHash) {\n const path: MigrationEdge[] = [];\n let cur = toHash;\n let p = parents.get(cur);\n while (p) {\n path.push(p.edge);\n cur = p.parent;\n p = parents.get(cur);\n }\n path.reverse();\n return path;\n }\n }\n\n return null;\n}\n\n/**\n * Find the shortest path from `fromHash` to `toHash` whose edges collectively\n * cover every invariant in `required`. Returns `null` when no such path exists\n * (either `fromHash`→`toHash` is structurally unreachable, or every reachable\n * path leaves at least one required invariant uncovered). When `required` is\n * empty, delegates to `findPath` so the result is byte-identical for that case.\n *\n * Algorithm: BFS over `(node, coveredSubset)` states with state-level dedup.\n * The covered subset is a `Set<string>` of invariant ids; the state's dedup\n * key is `${node}\\0${[...covered].sort().join('\\0')}`. 
State keys distinguish\n * distinct `(node, covered)` tuples regardless of node-name length because\n * `\\0` cannot appear in any invariant id (validation rejects whitespace and\n * control chars at authoring time).\n *\n * Neighbour ordering when `required ≠ ∅`: edges covering ≥1 still-needed\n * invariant come first, with `labelPriority → createdAt → to → migrationHash`\n * as the secondary key. The heuristic steers BFS toward the satisfying path;\n * correctness (shortest, deterministic) does not depend on it.\n */\nexport function findPathWithInvariants(\n graph: MigrationGraph,\n fromHash: string,\n toHash: string,\n required: ReadonlySet<string>,\n): readonly MigrationEdge[] | null {\n if (required.size === 0) {\n return findPath(graph, fromHash, toHash);\n }\n\n interface InvState {\n readonly node: string;\n readonly covered: ReadonlySet<string>;\n }\n // `\\0` is a safe segment separator: `validateInvariantId` rejects any id\n // containing whitespace or control characters (NUL is U+0000), and node\n // hashes are hex strings. Distinct `(node, covered)` tuples therefore\n // map to distinct strings. If `validateInvariantId` is ever relaxed,\n // re-confirm dedup correctness here.\n const stateKey = (s: InvState): string => {\n if (s.covered.size === 0) return `${s.node}\\0`;\n return `${s.node}\\0${[...s.covered].sort().join('\\0')}`;\n };\n\n const neighbours = (s: InvState): Iterable<{ next: InvState; edge: MigrationEdge }> => {\n const outgoing = graph.forwardChain.get(s.node) ?? [];\n if (outgoing.length === 0) return [];\n return [...outgoing]\n .map((edge) => {\n let useful = false;\n let next: Set<string> | null = null;\n for (const inv of edge.invariants) {\n if (required.has(inv) && !s.covered.has(inv)) {\n if (next === null) next = new Set(s.covered);\n next.add(inv);\n useful = true;\n }\n }\n return { edge, useful, nextCovered: next ?? s.covered };\n })\n .sort((a, b) => {\n if (a.useful !== b.useful) return a.useful ? 
-1 : 1;\n return compareTieBreak(a.edge, b.edge);\n })\n .map(({ edge, nextCovered }) => ({\n next: { node: edge.to, covered: nextCovered },\n edge,\n }));\n };\n\n // Path reconstruction is consumer-side, keyed on stateKey, same shape as\n // findPath's parents map.\n const parents = new Map<string, { parentKey: string; edge: MigrationEdge }>();\n for (const step of bfs<InvState, MigrationEdge>(\n [{ node: fromHash, covered: new Set() }],\n neighbours,\n stateKey,\n )) {\n const curKey = stateKey(step.state);\n if (step.parent !== null && step.incomingEdge !== null) {\n parents.set(curKey, { parentKey: stateKey(step.parent), edge: step.incomingEdge });\n }\n if (step.state.node === toHash && step.state.covered.size === required.size) {\n const path: MigrationEdge[] = [];\n let cur: string | undefined = curKey;\n while (cur !== undefined) {\n const p = parents.get(cur);\n if (!p) break;\n path.push(p.edge);\n cur = p.parentKey;\n }\n path.reverse();\n return path;\n }\n }\n\n return null;\n}\n\n/**\n * Reverse-BFS from `toHash` over `reverseChain` to collect every node from\n * which `toHash` is reachable (inclusive of `toHash` itself).\n */\nfunction collectNodesReachingTarget(graph: MigrationGraph, toHash: string): Set<string> {\n const reached = new Set<string>();\n for (const step of bfs([toHash], (n) => reverseNeighbours(graph, n))) {\n reached.add(step.state);\n }\n return reached;\n}\n\nexport interface PathDecision {\n readonly selectedPath: readonly MigrationEdge[];\n readonly fromHash: string;\n readonly toHash: string;\n readonly alternativeCount: number;\n readonly tieBreakReasons: readonly string[];\n readonly refName?: string;\n /** The caller-supplied required invariant set, sorted ascending. */\n readonly requiredInvariants: readonly string[];\n /**\n * The subset of `requiredInvariants` actually covered by edges on\n * `selectedPath`. 
Always a subset of `requiredInvariants` (when the path\n * is satisfying, equal to it); always derived from `selectedPath`.\n */\n readonly satisfiedInvariants: readonly string[];\n}\n\n/**\n * Outcome of {@link findPathWithDecision}. The pathfinder distinguishes\n * three cases up front so callers don't re-derive structural reachability:\n *\n * - `ok` — a path covering `required` exists; `decision` carries the\n * selection metadata and per-edge invariants.\n * - `unreachable` — `from`→`to` has no structural path. Mapped by callers\n * to the existing no-path / `NO_TARGET` diagnostic.\n * - `unsatisfiable` — `from`→`to` is structurally reachable but no path\n * covers every required invariant. `structuralPath` is the\n * `findPath(graph, from, to)` result, included so callers don't have to\n * recompute it when raising `MIGRATION.NO_INVARIANT_PATH`. `missing` is\n * the subset of `required` that the structural path does *not* cover —\n * correctly accounts for partial coverage when some required invariants\n * are met by the fallback path. Only emitted when `required` is\n * non-empty.\n */\nexport type FindPathOutcome =\n | { readonly kind: 'ok'; readonly decision: PathDecision }\n | { readonly kind: 'unreachable' }\n | {\n readonly kind: 'unsatisfiable';\n readonly structuralPath: readonly MigrationEdge[];\n readonly missing: readonly string[];\n };\n\n/**\n * Routing context for {@link findPathWithDecision}. Both fields are optional;\n * `refName` is only used to decorate the resulting `PathDecision` for the\n * JSON envelope, and `required` defaults to an empty set (purely structural\n * routing). 
They are passed via a single options object so the call sites\n * cannot silently swap two adjacent string parameters.\n */\nexport interface FindPathWithDecisionOptions {\n readonly refName?: string;\n readonly required?: ReadonlySet<string>;\n}\n\n/**\n * Find the shortest path from `fromHash` to `toHash` and return structured\n * path-decision metadata for machine-readable output. When `required` is\n * non-empty, the returned path is the shortest one whose edges collectively\n * cover every required invariant.\n *\n * The discriminated return type tells the caller *why* a path could not be\n * found, so the CLI can pick the right structured error without re-running\n * a structural BFS.\n */\nexport function findPathWithDecision(\n graph: MigrationGraph,\n fromHash: string,\n toHash: string,\n options: FindPathWithDecisionOptions = {},\n): FindPathOutcome {\n const { refName, required = new Set<string>() } = options;\n const requiredInvariants = [...required].sort();\n\n if (fromHash === toHash && required.size === 0) {\n return {\n kind: 'ok',\n decision: {\n selectedPath: [],\n fromHash,\n toHash,\n alternativeCount: 0,\n tieBreakReasons: [],\n requiredInvariants,\n satisfiedInvariants: [],\n ...ifDefined('refName', refName),\n },\n };\n }\n\n const path = findPathWithInvariants(graph, fromHash, toHash, required);\n if (!path) {\n if (required.size === 0) {\n return { kind: 'unreachable' };\n }\n const structural = findPath(graph, fromHash, toHash);\n if (structural === null) {\n return { kind: 'unreachable' };\n }\n const coveredByStructural = new Set<string>();\n for (const edge of structural) {\n for (const inv of edge.invariants) {\n if (required.has(inv)) coveredByStructural.add(inv);\n }\n }\n const missing = requiredInvariants.filter((id) => !coveredByStructural.has(id));\n return { kind: 'unsatisfiable', structuralPath: structural, missing };\n }\n\n const satisfiedInvariants = computeSatisfiedInvariants(required, path);\n\n // Single reverse BFS 
marks every node from which `toHash` is reachable.\n // Replaces a per-edge `findPath(e.to, toHash)` call inside the loop below,\n // which made the whole function O(|path| · (V + E)) instead of O(V + E).\n const reachesTarget = collectNodesReachingTarget(graph, toHash);\n const coveragePrefixes = requiredCoveragePrefixes(required, path);\n\n const tieBreakReasons: string[] = [];\n let alternativeCount = 0;\n\n for (const [i, edge] of path.entries()) {\n const outgoing = graph.forwardChain.get(edge.from);\n if (!outgoing || outgoing.length <= 1) continue;\n const reachable = outgoing.filter((e) => reachesTarget.has(e.to));\n if (reachable.length <= 1) continue;\n\n let comparisonPool: readonly MigrationEdge[] = reachable;\n if (required.size > 0) {\n // coveragePrefixes is built one-per-edge from path, so the index is\n // always in range here; the explicit guard keeps the type narrowed\n // without a non-null assertion.\n const prefixSet = coveragePrefixes[i];\n if (prefixSet === undefined) continue;\n comparisonPool = invariantViableAlternativesAtStep(required, prefixSet, reachable);\n }\n\n alternativeCount += reachable.length - 1;\n const sorted = sortedNeighbors(reachable);\n if (sorted[0]?.migrationHash !== edge.migrationHash) continue;\n if (!reachable.some((e) => e.migrationHash !== edge.migrationHash)) continue;\n\n const sortedViable = sortedNeighbors(comparisonPool);\n if (\n sortedViable.length > 1 &&\n sortedViable[0]?.migrationHash === edge.migrationHash &&\n sortedViable.some((e) => e.migrationHash !== edge.migrationHash)\n ) {\n tieBreakReasons.push(\n `at ${edge.from}: ${comparisonPool.length} candidates, selected by tie-break`,\n );\n }\n }\n\n return {\n kind: 'ok',\n decision: {\n selectedPath: path,\n fromHash,\n toHash,\n alternativeCount,\n tieBreakReasons,\n requiredInvariants,\n satisfiedInvariants,\n ...ifDefined('refName', refName),\n },\n };\n}\n\nfunction computeSatisfiedInvariants(\n required: ReadonlySet<string>,\n path: readonly 
MigrationEdge[],\n): readonly string[] {\n if (required.size === 0) return [];\n const covered = new Set<string>();\n for (const edge of path) {\n for (const inv of edge.invariants) {\n if (required.has(inv)) covered.add(inv);\n }\n }\n return [...covered].sort();\n}\n\n/**\n * For each edge on path, invariant coverage accumulated from earlier edges only —\n * `(required ∩ ∪_{j<i} path[j].invariants)` represented as cumulative set along `required`,\n * keyed as \"full set of required ids satisfied before taking path[i]\".\n */\nfunction requiredCoveragePrefixes(\n required: ReadonlySet<string>,\n path: readonly MigrationEdge[],\n): readonly ReadonlySet<string>[] {\n const prefixes: ReadonlySet<string>[] = [];\n const acc = new Set<string>();\n for (const edge of path) {\n prefixes.push(new Set(acc));\n for (const inv of edge.invariants) {\n if (required.has(inv)) acc.add(inv);\n }\n }\n return prefixes;\n}\n\nfunction invariantViableAlternativesAtStep(\n required: ReadonlySet<string>,\n coverageBeforeTakingEdge: ReadonlySet<string>,\n outgoing: readonly MigrationEdge[],\n): readonly MigrationEdge[] {\n if (required.size === 0) return [...outgoing];\n return outgoing.filter((e) =>\n [...required].every((id) => coverageBeforeTakingEdge.has(id) || e.invariants.includes(id)),\n );\n}\n\n/**\n * Walk ancestors of each branch tip back to find the last node\n * that appears on all paths. Returns `fromHash` if no shared ancestor is found.\n */\nfunction findDivergencePoint(\n graph: MigrationGraph,\n fromHash: string,\n leaves: readonly string[],\n): string {\n const ancestorSets = leaves.map((leaf) => {\n const ancestors = new Set<string>();\n for (const step of bfs([leaf], (n) => reverseNeighbours(graph, n))) {\n ancestors.add(step.state);\n }\n return ancestors;\n });\n\n const commonAncestors = [...(ancestorSets[0] ?? 
[])].filter((node) =>\n ancestorSets.every((s) => s.has(node)),\n );\n\n let deepest = fromHash;\n let deepestDepth = -1;\n for (const ancestor of commonAncestors) {\n const path = findPath(graph, fromHash, ancestor);\n const depth = path ? path.length : 0;\n if (depth > deepestDepth) {\n deepestDepth = depth;\n deepest = ancestor;\n }\n }\n return deepest;\n}\n\n/**\n * Find all branch tips (nodes with no outgoing edges) reachable from\n * `fromHash` via forward edges.\n */\nexport function findReachableLeaves(graph: MigrationGraph, fromHash: string): readonly string[] {\n const leaves: string[] = [];\n for (const step of bfs([fromHash], (n) => forwardNeighbours(graph, n))) {\n if (!graph.forwardChain.get(step.state)?.length) {\n leaves.push(step.state);\n }\n }\n return leaves;\n}\n\n/**\n * Find the target contract hash of the migration graph reachable from\n * EMPTY_CONTRACT_HASH. Returns `null` for a graph that has no target\n * state (either empty, or containing only the root with no outgoing\n * edges). Throws NO_INITIAL_MIGRATION if the graph has nodes but none\n * originate from the empty hash, and AMBIGUOUS_TARGET if multiple\n * branch tips exist.\n */\nexport function findLeaf(graph: MigrationGraph): string | null {\n if (graph.nodes.size === 0) {\n return null;\n }\n\n if (!graph.nodes.has(EMPTY_CONTRACT_HASH)) {\n throw errorNoInitialMigration([...graph.nodes]);\n }\n\n const leaves = findReachableLeaves(graph, EMPTY_CONTRACT_HASH);\n\n if (leaves.length === 0) {\n const reachable = [...graph.nodes].filter((n) => n !== EMPTY_CONTRACT_HASH);\n if (reachable.length > 0) {\n throw errorNoTarget(reachable);\n }\n return null;\n }\n\n if (leaves.length > 1) {\n const divergencePoint = findDivergencePoint(graph, EMPTY_CONTRACT_HASH, leaves);\n const branches = leaves.map((tip) => {\n const path = findPath(graph, divergencePoint, tip);\n return {\n tip,\n edges: (path ?? 
[]).map((e) => ({ dirName: e.dirName, from: e.from, to: e.to })),\n };\n });\n throw errorAmbiguousTarget(leaves, { divergencePoint, branches });\n }\n\n // biome-ignore lint/style/noNonNullAssertion: leaves.length is neither 0 nor >1 per the branches above, so exactly one leaf remains\n return leaves[0]!;\n}\n\n/**\n * Find the latest migration entry by traversing from EMPTY_CONTRACT_HASH\n * to the single target. Returns null for an empty graph.\n * Throws AMBIGUOUS_TARGET if the graph has multiple branch tips.\n */\nexport function findLatestMigration(graph: MigrationGraph): MigrationEdge | null {\n const leafHash = findLeaf(graph);\n if (leafHash === null) return null;\n\n const path = findPath(graph, EMPTY_CONTRACT_HASH, leafHash);\n return path?.at(-1) ?? null;\n}\n\nexport function detectCycles(graph: MigrationGraph): readonly string[][] {\n const WHITE = 0;\n const GRAY = 1;\n const BLACK = 2;\n\n const color = new Map<string, number>();\n const parentMap = new Map<string, string | null>();\n const cycles: string[][] = [];\n\n for (const node of graph.nodes) {\n color.set(node, WHITE);\n }\n\n // Iterative three-color DFS. A frame is (node, outgoing edges, next-index).\n interface Frame {\n node: string;\n outgoing: readonly MigrationEdge[];\n index: number;\n }\n const stack: Frame[] = [];\n\n function pushFrame(u: string): void {\n color.set(u, GRAY);\n stack.push({ node: u, outgoing: graph.forwardChain.get(u) ?? 
[], index: 0 });\n }\n\n for (const root of graph.nodes) {\n if (color.get(root) !== WHITE) continue;\n parentMap.set(root, null);\n pushFrame(root);\n\n while (stack.length > 0) {\n // biome-ignore lint/style/noNonNullAssertion: stack.length > 0 should guarantee that this cannot be undefined\n const frame = stack[stack.length - 1]!;\n if (frame.index >= frame.outgoing.length) {\n color.set(frame.node, BLACK);\n stack.pop();\n continue;\n }\n // biome-ignore lint/style/noNonNullAssertion: the early-continue above guarantees frame.index < frame.outgoing.length here, so this is defined\n const edge = frame.outgoing[frame.index++]!;\n const v = edge.to;\n const vColor = color.get(v);\n if (vColor === GRAY) {\n const cycle: string[] = [v];\n let cur = frame.node;\n while (cur !== v) {\n cycle.push(cur);\n cur = parentMap.get(cur) ?? v;\n }\n cycle.reverse();\n cycles.push(cycle);\n } else if (vColor === WHITE) {\n parentMap.set(v, frame.node);\n pushFrame(v);\n }\n }\n }\n\n return cycles;\n}\n\nexport function detectOrphans(graph: MigrationGraph): readonly MigrationEdge[] {\n if (graph.nodes.size === 0) return [];\n\n const reachable = new Set<string>();\n const startNodes: string[] = [];\n\n if (graph.forwardChain.has(EMPTY_CONTRACT_HASH)) {\n startNodes.push(EMPTY_CONTRACT_HASH);\n } else {\n const allTargets = new Set<string>();\n for (const edges of graph.forwardChain.values()) {\n for (const edge of edges) {\n allTargets.add(edge.to);\n }\n }\n for (const node of graph.nodes) {\n if (!allTargets.has(node)) {\n startNodes.push(node);\n }\n }\n }\n\n for (const step of bfs(startNodes, (n) => forwardNeighbours(graph, n))) {\n reachable.add(step.state);\n }\n\n const orphans: MigrationEdge[] = [];\n for (const [from, migrations] of graph.forwardChain) {\n if (!reachable.has(from)) {\n orphans.push(...migrations);\n }\n }\n\n return 
orphans;\n}\n"],"mappings":";;;;;;;;;;;;;AASA,IAAa,QAAb,MAAsB;CACpB;CACA,OAAe;CAEf,YAAY,UAAuB,EAAE,EAAE;EACrC,KAAK,QAAQ,CAAC,GAAG,QAAQ;;CAG3B,KAAK,MAAe;EAClB,KAAK,MAAM,KAAK,KAAK;;;;;;CAOvB,QAAW;EACT,IAAI,KAAK,QAAQ,KAAK,MAAM,QAC1B,MAAM,IAAI,MAAM,oCAAoC;EAGtD,OAAO,KAAK,MAAM,KAAK;;CAGzB,IAAI,UAAmB;EACrB,OAAO,KAAK,QAAQ,KAAK,MAAM;;;;;ACYnC,UAAiB,IACf,QACA,YAKA,OAA6B,UAAU,OACb;CAS1B,MAAM,0BAAU,IAAI,KAAa;CACjC,MAAM,4BAAY,IAAI,KAAqC;CAC3D,MAAM,QAAQ,IAAI,OAAc;CAChC,KAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,IAAI,IAAI,MAAM;EACpB,IAAI,CAAC,QAAQ,IAAI,EAAE,EAAE;GACnB,QAAQ,IAAI,EAAE;GACd,MAAM,KAAK;IAAE,OAAO;IAAO,KAAK;IAAG,CAAC;;;CAGxC,OAAO,CAAC,MAAM,SAAS;EACrB,MAAM,EAAE,OAAO,SAAS,KAAK,WAAW,MAAM,OAAO;EACrD,MAAM,aAAa,UAAU,IAAI,OAAO;EACxC,MAAM;GACJ,OAAO;GACP,QAAQ,YAAY,UAAU;GAC9B,cAAc,YAAY,QAAQ;GACnC;EAED,KAAK,MAAM,EAAE,MAAM,UAAU,WAAW,QAAQ,EAAE;GAChD,MAAM,IAAI,IAAI,KAAK;GACnB,IAAI,CAAC,QAAQ,IAAI,EAAE,EAAE;IACnB,QAAQ,IAAI,EAAE;IACd,UAAU,IAAI,GAAG;KAAE,QAAQ;KAAS;KAAM,CAAC;IAC3C,MAAM,KAAK;KAAE,OAAO;KAAM,KAAK;KAAG,CAAC;;;;;;;;ACzE3C,SAAS,kBAAkB,OAAuB,MAAc;CAC9D,QAAQ,MAAM,aAAa,IAAI,KAAK,IAAI,EAAE,EAAE,KAAK,UAAU;EAAE,MAAM,KAAK;EAAI;EAAM,EAAE;;;;;;AAOtF,SAAS,wBAAwB,OAAuB,MAAc;CAEpE,OAAO,CAAC,GADM,MAAM,aAAa,IAAI,KAAK,IAAI,EAAE,CAC/B,CAAC,KAAK,gBAAgB,CAAC,KAAK,UAAU;EAAE,MAAM,KAAK;EAAI;EAAM,EAAE;;;AAIlF,SAAS,kBAAkB,OAAuB,MAAc;CAC9D,QAAQ,MAAM,aAAa,IAAI,KAAK,IAAI,EAAE,EAAE,KAAK,UAAU;EAAE,MAAM,KAAK;EAAM;EAAM,EAAE;;AAGxF,SAAS,WAAW,KAAmC,KAAa,OAA4B;CAC9F,MAAM,SAAS,IAAI,IAAI,IAAI;CAC3B,IAAI,QAAQ,OAAO,KAAK,MAAM;MACzB,IAAI,IAAI,KAAK,CAAC,MAAM,CAAC;;AAG5B,SAAgB,iBAAiB,UAA6D;CAC5F,MAAM,wBAAQ,IAAI,KAAa;CAC/B,MAAM,+BAAe,IAAI,KAA8B;CACvD,MAAM,+BAAe,IAAI,KAA8B;CACvD,MAAM,kCAAkB,IAAI,KAA4B;CAExD,KAAK,MAAM,OAAO,UAAU;EAK1B,MAAM,OAAO,IAAI,SAAS,QAAA;EAC1B,MAAM,EAAE,OAAO,IAAI;EAEnB,IAAI,SAAS;OAEP,CADc,IAAI,IAAI,MAAM,OAAO,GAAG,mBAAmB,OAC/C,EACZ,MAAM,yBAAyB,IAAI,SAAS,KAAK;;EAIrD,MAAM,IAAI,KAAK;EACf,MAAM,IAAI,GAAG;EAEb,MAAM,YAA2B;GAC/B;GACA;GACA,eAAe,IAAI,SAAS;GAC5B,SAAS,IAAI;GACb,WAAW,IAAI,SAAS;GACxB,QAAQ,IAAI,SAAS;GACrB,YAAY,
IAAI,SAAS;GAC1B;EAED,IAAI,gBAAgB,IAAI,UAAU,cAAc,EAC9C,MAAM,4BAA4B,UAAU,cAAc;EAE5D,gBAAgB,IAAI,UAAU,eAAe,UAAU;EAEvD,WAAW,cAAc,MAAM,UAAU;EACzC,WAAW,cAAc,IAAI,UAAU;;CAGzC,OAAO;EAAE;EAAO;EAAc;EAAc;EAAiB;;AAS/D,MAAM,iBAAyC;CAAE,MAAM;CAAG,SAAS;CAAG,SAAS;CAAG;AAElF,SAAS,cAAc,QAAmC;CACxD,IAAI,OAAO;CACX,KAAK,MAAM,KAAK,QAAQ;EACtB,MAAM,IAAI,eAAe;EACzB,IAAI,MAAM,KAAA,KAAa,IAAI,MAAM,OAAO;;CAE1C,OAAO;;AAGT,SAAS,gBAAgB,GAAkB,GAA0B;CACnE,MAAM,KAAK,cAAc,EAAE,OAAO,GAAG,cAAc,EAAE,OAAO;CAC5D,IAAI,OAAO,GAAG,OAAO;CACrB,MAAM,KAAK,EAAE,UAAU,cAAc,EAAE,UAAU;CACjD,IAAI,OAAO,GAAG,OAAO;CACrB,MAAM,KAAK,EAAE,GAAG,cAAc,EAAE,GAAG;CACnC,IAAI,OAAO,GAAG,OAAO;CACrB,OAAO,EAAE,cAAc,cAAc,EAAE,cAAc;;AAGvD,SAAS,gBAAgB,OAA2D;CAClF,OAAO,CAAC,GAAG,MAAM,CAAC,KAAK,gBAAgB;;;;;;;;;;AAWzC,SAAgB,SACd,OACA,UACA,QACiC;CACjC,IAAI,aAAa,QAAQ,OAAO,EAAE;CAElC,MAAM,0BAAU,IAAI,KAAsD;CAC1E,KAAK,MAAM,QAAQ,IAAI,CAAC,SAAS,GAAG,MAAM,wBAAwB,OAAO,EAAE,CAAC,EAAE;EAC5E,IAAI,KAAK,WAAW,QAAQ,KAAK,iBAAiB,MAChD,QAAQ,IAAI,KAAK,OAAO;GAAE,QAAQ,KAAK;GAAQ,MAAM,KAAK;GAAc,CAAC;EAE3E,IAAI,KAAK,UAAU,QAAQ;GACzB,MAAM,OAAwB,EAAE;GAChC,IAAI,MAAM;GACV,IAAI,IAAI,QAAQ,IAAI,IAAI;GACxB,OAAO,GAAG;IACR,KAAK,KAAK,EAAE,KAAK;IACjB,MAAM,EAAE;IACR,IAAI,QAAQ,IAAI,IAAI;;GAEtB,KAAK,SAAS;GACd,OAAO;;;CAIX,OAAO;;;;;;;;;;;;;;;;;;;;;AAsBT,SAAgB,uBACd,OACA,UACA,QACA,UACiC;CACjC,IAAI,SAAS,SAAS,GACpB,OAAO,SAAS,OAAO,UAAU,OAAO;CAY1C,MAAM,YAAY,MAAwB;EACxC,IAAI,EAAE,QAAQ,SAAS,GAAG,OAAO,GAAG,EAAE,KAAK;EAC3C,OAAO,GAAG,EAAE,KAAK,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,MAAM,CAAC,KAAK,KAAK;;CAGvD,MAAM,cAAc,MAAmE;EACrF,MAAM,WAAW,MAAM,aAAa,IAAI,EAAE,KAAK,IAAI,EAAE;EACrD,IAAI,SAAS,WAAW,GAAG,OAAO,EAAE;EACpC,OAAO,CAAC,GAAG,SAAS,CACjB,KAAK,SAAS;GACb,IAAI,SAAS;GACb,IAAI,OAA2B;GAC/B,KAAK,MAAM,OAAO,KAAK,YACrB,IAAI,SAAS,IAAI,IAAI,IAAI,CAAC,EAAE,QAAQ,IAAI,IAAI,EAAE;IAC5C,IAAI,SAAS,MAAM,OAAO,IAAI,IAAI,EAAE,QAAQ;IAC5C,KAAK,IAAI,IAAI;IACb,SAAS;;GAGb,OAAO;IAAE;IAAM;IAAQ,aAAa,QAAQ,EAAE;IAAS;IACvD,CACD,MAAM,GAAG,MAAM;GACd,IAAI,EAAE,WAAW,EAAE,QAAQ,OAAO,EAAE,SAAS,KAAK;GAClD,OAAO,gBAAgB,EAAE,MAAM,EAAE,KAAK;IACtC
,CACD,KAAK,EAAE,MAAM,mBAAmB;GAC/B,MAAM;IAAE,MAAM,KAAK;IAAI,SAAS;IAAa;GAC7C;GACD,EAAE;;CAKP,MAAM,0BAAU,IAAI,KAAyD;CAC7E,KAAK,MAAM,QAAQ,IACjB,CAAC;EAAE,MAAM;EAAU,yBAAS,IAAI,KAAK;EAAE,CAAC,EACxC,YACA,SACD,EAAE;EACD,MAAM,SAAS,SAAS,KAAK,MAAM;EACnC,IAAI,KAAK,WAAW,QAAQ,KAAK,iBAAiB,MAChD,QAAQ,IAAI,QAAQ;GAAE,WAAW,SAAS,KAAK,OAAO;GAAE,MAAM,KAAK;GAAc,CAAC;EAEpF,IAAI,KAAK,MAAM,SAAS,UAAU,KAAK,MAAM,QAAQ,SAAS,SAAS,MAAM;GAC3E,MAAM,OAAwB,EAAE;GAChC,IAAI,MAA0B;GAC9B,OAAO,QAAQ,KAAA,GAAW;IACxB,MAAM,IAAI,QAAQ,IAAI,IAAI;IAC1B,IAAI,CAAC,GAAG;IACR,KAAK,KAAK,EAAE,KAAK;IACjB,MAAM,EAAE;;GAEV,KAAK,SAAS;GACd,OAAO;;;CAIX,OAAO;;;;;;AAOT,SAAS,2BAA2B,OAAuB,QAA6B;CACtF,MAAM,0BAAU,IAAI,KAAa;CACjC,KAAK,MAAM,QAAQ,IAAI,CAAC,OAAO,GAAG,MAAM,kBAAkB,OAAO,EAAE,CAAC,EAClE,QAAQ,IAAI,KAAK,MAAM;CAEzB,OAAO;;;;;;;;;;;;AAoET,SAAgB,qBACd,OACA,UACA,QACA,UAAuC,EAAE,EACxB;CACjB,MAAM,EAAE,SAAS,2BAAW,IAAI,KAAa,KAAK;CAClD,MAAM,qBAAqB,CAAC,GAAG,SAAS,CAAC,MAAM;CAE/C,IAAI,aAAa,UAAU,SAAS,SAAS,GAC3C,OAAO;EACL,MAAM;EACN,UAAU;GACR,cAAc,EAAE;GAChB;GACA;GACA,kBAAkB;GAClB,iBAAiB,EAAE;GACnB;GACA,qBAAqB,EAAE;GACvB,GAAG,UAAU,WAAW,QAAQ;GACjC;EACF;CAGH,MAAM,OAAO,uBAAuB,OAAO,UAAU,QAAQ,SAAS;CACtE,IAAI,CAAC,MAAM;EACT,IAAI,SAAS,SAAS,GACpB,OAAO,EAAE,MAAM,eAAe;EAEhC,MAAM,aAAa,SAAS,OAAO,UAAU,OAAO;EACpD,IAAI,eAAe,MACjB,OAAO,EAAE,MAAM,eAAe;EAEhC,MAAM,sCAAsB,IAAI,KAAa;EAC7C,KAAK,MAAM,QAAQ,YACjB,KAAK,MAAM,OAAO,KAAK,YACrB,IAAI,SAAS,IAAI,IAAI,EAAE,oBAAoB,IAAI,IAAI;EAIvD,OAAO;GAAE,MAAM;GAAiB,gBAAgB;GAAY,SAD5C,mBAAmB,QAAQ,OAAO,CAAC,oBAAoB,IAAI,GAAG,CACX;GAAE;;CAGvE,MAAM,sBAAsB,2BAA2B,UAAU,KAAK;CAKtE,MAAM,gBAAgB,2BAA2B,OAAO,OAAO;CAC/D,MAAM,mBAAmB,yBAAyB,UAAU,KAAK;CAEjE,MAAM,kBAA4B,EAAE;CACpC,IAAI,mBAAmB;CAEvB,KAAK,MAAM,CAAC,GAAG,SAAS,KAAK,SAAS,EAAE;EACtC,MAAM,WAAW,MAAM,aAAa,IAAI,KAAK,KAAK;EAClD,IAAI,CAAC,YAAY,SAAS,UAAU,GAAG;EACvC,MAAM,YAAY,SAAS,QAAQ,MAAM,cAAc,IAAI,EAAE,GAAG,CAAC;EACjE,IAAI,UAAU,UAAU,GAAG;EAE3B,IAAI,iBAA2C;EAC/C,IAAI,SAAS,OAAO,GAAG;GAIrB,MAAM,YAAY,iBAAiB;GACnC,IAAI,cAAc,KAAA,GAAW;GAC7B,iBAAiB,kCAAkC,UAAU,WAAW,UAAU;;EAGpF,oBAAoB,UAAU,SAAS
;EAEvC,IADe,gBAAgB,UACrB,CAAC,IAAI,kBAAkB,KAAK,eAAe;EACrD,IAAI,CAAC,UAAU,MAAM,MAAM,EAAE,kBAAkB,KAAK,cAAc,EAAE;EAEpE,MAAM,eAAe,gBAAgB,eAAe;EACpD,IACE,aAAa,SAAS,KACtB,aAAa,IAAI,kBAAkB,KAAK,iBACxC,aAAa,MAAM,MAAM,EAAE,kBAAkB,KAAK,cAAc,EAEhE,gBAAgB,KACd,MAAM,KAAK,KAAK,IAAI,eAAe,OAAO,oCAC3C;;CAIL,OAAO;EACL,MAAM;EACN,UAAU;GACR,cAAc;GACd;GACA;GACA;GACA;GACA;GACA;GACA,GAAG,UAAU,WAAW,QAAQ;GACjC;EACF;;AAGH,SAAS,2BACP,UACA,MACmB;CACnB,IAAI,SAAS,SAAS,GAAG,OAAO,EAAE;CAClC,MAAM,0BAAU,IAAI,KAAa;CACjC,KAAK,MAAM,QAAQ,MACjB,KAAK,MAAM,OAAO,KAAK,YACrB,IAAI,SAAS,IAAI,IAAI,EAAE,QAAQ,IAAI,IAAI;CAG3C,OAAO,CAAC,GAAG,QAAQ,CAAC,MAAM;;;;;;;AAQ5B,SAAS,yBACP,UACA,MACgC;CAChC,MAAM,WAAkC,EAAE;CAC1C,MAAM,sBAAM,IAAI,KAAa;CAC7B,KAAK,MAAM,QAAQ,MAAM;EACvB,SAAS,KAAK,IAAI,IAAI,IAAI,CAAC;EAC3B,KAAK,MAAM,OAAO,KAAK,YACrB,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,IAAI,IAAI;;CAGvC,OAAO;;AAGT,SAAS,kCACP,UACA,0BACA,UAC0B;CAC1B,IAAI,SAAS,SAAS,GAAG,OAAO,CAAC,GAAG,SAAS;CAC7C,OAAO,SAAS,QAAQ,MACtB,CAAC,GAAG,SAAS,CAAC,OAAO,OAAO,yBAAyB,IAAI,GAAG,IAAI,EAAE,WAAW,SAAS,GAAG,CAAC,CAC3F;;;;;;AAOH,SAAS,oBACP,OACA,UACA,QACQ;CACR,MAAM,eAAe,OAAO,KAAK,SAAS;EACxC,MAAM,4BAAY,IAAI,KAAa;EACnC,KAAK,MAAM,QAAQ,IAAI,CAAC,KAAK,GAAG,MAAM,kBAAkB,OAAO,EAAE,CAAC,EAChE,UAAU,IAAI,KAAK,MAAM;EAE3B,OAAO;GACP;CAEF,MAAM,kBAAkB,CAAC,GAAI,aAAa,MAAM,EAAE,CAAE,CAAC,QAAQ,SAC3D,aAAa,OAAO,MAAM,EAAE,IAAI,KAAK,CAAC,CACvC;CAED,IAAI,UAAU;CACd,IAAI,eAAe;CACnB,KAAK,MAAM,YAAY,iBAAiB;EACtC,MAAM,OAAO,SAAS,OAAO,UAAU,SAAS;EAChD,MAAM,QAAQ,OAAO,KAAK,SAAS;EACnC,IAAI,QAAQ,cAAc;GACxB,eAAe;GACf,UAAU;;;CAGd,OAAO;;;;;;AAOT,SAAgB,oBAAoB,OAAuB,UAAqC;CAC9F,MAAM,SAAmB,EAAE;CAC3B,KAAK,MAAM,QAAQ,IAAI,CAAC,SAAS,GAAG,MAAM,kBAAkB,OAAO,EAAE,CAAC,EACpE,IAAI,CAAC,MAAM,aAAa,IAAI,KAAK,MAAM,EAAE,QACvC,OAAO,KAAK,KAAK,MAAM;CAG3B,OAAO;;;;;;;;;;AAWT,SAAgB,SAAS,OAAsC;CAC7D,IAAI,MAAM,MAAM,SAAS,GACvB,OAAO;CAGT,IAAI,CAAC,MAAM,MAAM,IAAA,eAAwB,EACvC,MAAM,wBAAwB,CAAC,GAAG,MAAM,MAAM,CAAC;CAGjD,MAAM,SAAS,oBAAoB,OAAO,oBAAoB;CAE9D,IAAI,OAAO,WAAW,GAAG;EACvB,MAAM,YAAY,CAAC,GAAG,MAAM,MAAM,CAAC,QAAQ,MAAM,MAAM,oBAAoB;EAC
3E,IAAI,UAAU,SAAS,GACrB,MAAM,cAAc,UAAU;EAEhC,OAAO;;CAGT,IAAI,OAAO,SAAS,GAAG;EACrB,MAAM,kBAAkB,oBAAoB,OAAO,qBAAqB,OAAO;EAQ/E,MAAM,qBAAqB,QAAQ;GAAE;GAAiB,UAPrC,OAAO,KAAK,QAAQ;IAEnC,OAAO;KACL;KACA,QAHW,SAAS,OAAO,iBAAiB,IAGhC,IAAI,EAAE,EAAE,KAAK,OAAO;MAAE,SAAS,EAAE;MAAS,MAAM,EAAE;MAAM,IAAI,EAAE;MAAI,EAAE;KACjF;KAE2D;GAAE,CAAC;;CAInE,OAAO,OAAO;;;;;;;AAQhB,SAAgB,oBAAoB,OAA6C;CAC/E,MAAM,WAAW,SAAS,MAAM;CAChC,IAAI,aAAa,MAAM,OAAO;CAG9B,OADa,SAAS,OAAA,gBAA4B,SACvC,EAAE,GAAG,GAAG,IAAI;;AAGzB,SAAgB,aAAa,OAA4C;CACvE,MAAM,QAAQ;CACd,MAAM,OAAO;CACb,MAAM,QAAQ;CAEd,MAAM,wBAAQ,IAAI,KAAqB;CACvC,MAAM,4BAAY,IAAI,KAA4B;CAClD,MAAM,SAAqB,EAAE;CAE7B,KAAK,MAAM,QAAQ,MAAM,OACvB,MAAM,IAAI,MAAM,MAAM;CASxB,MAAM,QAAiB,EAAE;CAEzB,SAAS,UAAU,GAAiB;EAClC,MAAM,IAAI,GAAG,KAAK;EAClB,MAAM,KAAK;GAAE,MAAM;GAAG,UAAU,MAAM,aAAa,IAAI,EAAE,IAAI,EAAE;GAAE,OAAO;GAAG,CAAC;;CAG9E,KAAK,MAAM,QAAQ,MAAM,OAAO;EAC9B,IAAI,MAAM,IAAI,KAAK,KAAK,OAAO;EAC/B,UAAU,IAAI,MAAM,KAAK;EACzB,UAAU,KAAK;EAEf,OAAO,MAAM,SAAS,GAAG;GAEvB,MAAM,QAAQ,MAAM,MAAM,SAAS;GACnC,IAAI,MAAM,SAAS,MAAM,SAAS,QAAQ;IACxC,MAAM,IAAI,MAAM,MAAM,MAAM;IAC5B,MAAM,KAAK;IACX;;GAIF,MAAM,IADO,MAAM,SAAS,MAAM,SACnB;GACf,MAAM,SAAS,MAAM,IAAI,EAAE;GAC3B,IAAI,WAAW,MAAM;IACnB,MAAM,QAAkB,CAAC,EAAE;IAC3B,IAAI,MAAM,MAAM;IAChB,OAAO,QAAQ,GAAG;KAChB,MAAM,KAAK,IAAI;KACf,MAAM,UAAU,IAAI,IAAI,IAAI;;IAE9B,MAAM,SAAS;IACf,OAAO,KAAK,MAAM;UACb,IAAI,WAAW,OAAO;IAC3B,UAAU,IAAI,GAAG,MAAM,KAAK;IAC5B,UAAU,EAAE;;;;CAKlB,OAAO;;AAGT,SAAgB,cAAc,OAAiD;CAC7E,IAAI,MAAM,MAAM,SAAS,GAAG,OAAO,EAAE;CAErC,MAAM,4BAAY,IAAI,KAAa;CACnC,MAAM,aAAuB,EAAE;CAE/B,IAAI,MAAM,aAAa,IAAA,eAAwB,EAC7C,WAAW,KAAK,oBAAoB;MAC/B;EACL,MAAM,6BAAa,IAAI,KAAa;EACpC,KAAK,MAAM,SAAS,MAAM,aAAa,QAAQ,EAC7C,KAAK,MAAM,QAAQ,OACjB,WAAW,IAAI,KAAK,GAAG;EAG3B,KAAK,MAAM,QAAQ,MAAM,OACvB,IAAI,CAAC,WAAW,IAAI,KAAK,EACvB,WAAW,KAAK,KAAK;;CAK3B,KAAK,MAAM,QAAQ,IAAI,aAAa,MAAM,kBAAkB,OAAO,EAAE,CAAC,EACpE,UAAU,IAAI,KAAK,MAAM;CAG3B,MAAM,UAA2B,EAAE;CACnC,KAAK,MAAM,CAAC,MAAM,eAAe,MAAM,cACrC,IAAI,CAAC,UAAU,IAAI,KAAK,EACtB,QAAQ,KAAK,GAAG,WAAW;CAI/B,OAAO"}
1
+ {"version":3,"file":"migration-graph-DGNnKDY5.mjs","names":[],"sources":["../src/queue.ts","../src/graph-ops.ts","../src/migration-graph.ts"],"sourcesContent":["/**\n * FIFO queue with amortised O(1) push and shift.\n *\n * Uses a head-index cursor over a backing array rather than\n * `Array.prototype.shift()`, which is O(n) on V8. Intended for BFS-shaped\n * traversals where the queue is drained in a single pass — it does not\n * reclaim memory for already-shifted items, so it is not suitable for\n * long-lived queues with many push/shift cycles.\n */\nexport class Queue<T> {\n private readonly items: T[];\n private head = 0;\n\n constructor(initial: Iterable<T> = []) {\n this.items = [...initial];\n }\n\n push(item: T): void {\n this.items.push(item);\n }\n\n /**\n * Remove and return the next item. Caller must check `isEmpty` first —\n * shifting an empty queue throws.\n */\n shift(): T {\n if (this.head >= this.items.length) {\n throw new Error('Queue.shift called on empty queue');\n }\n // biome-ignore lint/style/noNonNullAssertion: bounds-checked on the line above\n return this.items[this.head++]!;\n }\n\n get isEmpty(): boolean {\n return this.head >= this.items.length;\n }\n}\n","import { Queue } from './queue';\n\n/**\n * One step of a BFS traversal.\n *\n * `parent` and `incomingEdge` are `null` for start states — they were not\n * reached via any edge. For every other state they record the predecessor\n * state and the edge by which this state was first reached.\n *\n * `state` is the BFS state, most often a string (graph node identifier) but\n * can be a composite object. 
The string overload keeps the common case\n * ergonomic; the generic overload accepts a caller-supplied `key` function\n * that produces a stable equality key for dedup.\n */\nexport interface BfsStep<S, E> {\n readonly state: S;\n readonly parent: S | null;\n readonly incomingEdge: E | null;\n}\n\n/**\n * Generic breadth-first traversal.\n *\n * Direction (forward/reverse) is expressed by the caller's `neighbours`\n * closure: return `{ next, edge }` pairs where `next` is the state to visit\n * next and `edge` is the edge that connects them. Callers that don't need\n * path reconstruction can ignore the `parent`/`incomingEdge` fields of each\n * yielded step.\n *\n * Ordering — when the result needs to be deterministic (path-finding) the\n * caller is responsible for sorting inside `neighbours`; this generator\n * does not impose an ordering hook of its own. State-dependent orderings\n * have full access to the source state inside the closure.\n *\n * Stops are intrinsic — callers `break` out of the `for..of` loop when\n * they've found what they're looking for.\n */\nexport function bfs<E>(\n starts: Iterable<string>,\n neighbours: (state: string) => Iterable<{ next: string; edge: E }>,\n): Generator<BfsStep<string, E>>;\nexport function bfs<S, E>(\n starts: Iterable<S>,\n neighbours: (state: S) => Iterable<{ next: S; edge: E }>,\n key: (state: S) => string,\n): Generator<BfsStep<S, E>>;\nexport function* bfs<S, E>(\n starts: Iterable<S>,\n neighbours: (state: S) => Iterable<{ next: S; edge: E }>,\n // Identity default for the string overload. 
TypeScript can't express\n // \"default applies only when S = string\", so this cast bridges the\n // generic implementation signature to the public overloads — which\n // guarantee `key` is omitted only when S = string at the call site.\n key: (state: S) => string = (state) => state as unknown as string,\n): Generator<BfsStep<S, E>> {\n // Queue entries carry the state alongside its key so we don't recompute\n // key() twice per visit (once on dedup, once on parent lookup). Composite\n // keys can be non-trivial to compute; string-overload callers pay nothing\n // since key() is identity there.\n interface Entry {\n readonly state: S;\n readonly key: string;\n }\n const visited = new Set<string>();\n const parentMap = new Map<string, { parent: S; edge: E }>();\n const queue = new Queue<Entry>();\n for (const start of starts) {\n const k = key(start);\n if (!visited.has(k)) {\n visited.add(k);\n queue.push({ state: start, key: k });\n }\n }\n while (!queue.isEmpty) {\n const { state: current, key: curKey } = queue.shift();\n const parentInfo = parentMap.get(curKey);\n yield {\n state: current,\n parent: parentInfo?.parent ?? null,\n incomingEdge: parentInfo?.edge ?? null,\n };\n\n for (const { next, edge } of neighbours(current)) {\n const k = key(next);\n if (!visited.has(k)) {\n visited.add(k);\n parentMap.set(k, { parent: current, edge });\n queue.push({ state: next, key: k });\n }\n }\n }\n}\n","import { ifDefined } from '@prisma-next/utils/defined';\nimport { EMPTY_CONTRACT_HASH } from './constants';\nimport {\n errorAmbiguousTarget,\n errorDuplicateMigrationHash,\n errorNoInitialMigration,\n errorNoTarget,\n errorSameSourceAndTarget,\n} from './errors';\nimport type { MigrationEdge, MigrationGraph } from './graph';\nimport { bfs } from './graph-ops';\nimport type { OnDiskMigrationPackage } from './package';\n\n/** Forward-edge neighbours: edge `e` from `n` visits `e.to` next. 
*/\nfunction forwardNeighbours(graph: MigrationGraph, node: string) {\n return (graph.forwardChain.get(node) ?? []).map((edge) => ({ next: edge.to, edge }));\n}\n\n/**\n * Forward-edge neighbours, sorted by the deterministic tie-break.\n * Used by path-finding so the resulting shortest path is stable across runs.\n */\nfunction sortedForwardNeighbours(graph: MigrationGraph, node: string) {\n const edges = graph.forwardChain.get(node) ?? [];\n return [...edges].sort(compareTieBreak).map((edge) => ({ next: edge.to, edge }));\n}\n\n/** Reverse-edge neighbours: edge `e` from `n` visits `e.from` next. */\nfunction reverseNeighbours(graph: MigrationGraph, node: string) {\n return (graph.reverseChain.get(node) ?? []).map((edge) => ({ next: edge.from, edge }));\n}\n\nfunction appendEdge(map: Map<string, MigrationEdge[]>, key: string, entry: MigrationEdge): void {\n const bucket = map.get(key);\n if (bucket) bucket.push(entry);\n else map.set(key, [entry]);\n}\n\nexport function reconstructGraph(packages: readonly OnDiskMigrationPackage[]): MigrationGraph {\n const nodes = new Set<string>();\n const forwardChain = new Map<string, MigrationEdge[]>();\n const reverseChain = new Map<string, MigrationEdge[]>();\n const migrationByHash = new Map<string, MigrationEdge>();\n\n for (const pkg of packages) {\n // Manifest `from` is `string | null` (null = baseline). The graph layer\n // is the marker/path layer where \"no prior state\" is encoded as the\n // EMPTY_CONTRACT_HASH sentinel; bridge here so pathfinding stays string-\n // keyed.\n const from = pkg.metadata.from ?? 
EMPTY_CONTRACT_HASH;\n const { to } = pkg.metadata;\n\n if (from === to) {\n const hasDataOp = pkg.ops.some((op) => op.operationClass === 'data');\n if (!hasDataOp) {\n throw errorSameSourceAndTarget(pkg.dirPath, from);\n }\n }\n\n nodes.add(from);\n nodes.add(to);\n\n const migration: MigrationEdge = {\n from,\n to,\n migrationHash: pkg.metadata.migrationHash,\n dirName: pkg.dirName,\n createdAt: pkg.metadata.createdAt,\n labels: pkg.metadata.labels,\n invariants: pkg.metadata.providedInvariants,\n };\n\n if (migrationByHash.has(migration.migrationHash)) {\n throw errorDuplicateMigrationHash(migration.migrationHash);\n }\n migrationByHash.set(migration.migrationHash, migration);\n\n appendEdge(forwardChain, from, migration);\n appendEdge(reverseChain, to, migration);\n }\n\n return { nodes, forwardChain, reverseChain, migrationByHash };\n}\n\n// ---------------------------------------------------------------------------\n// Deterministic tie-breaking for BFS neighbour order.\n// Used by path-finders only; not a general-purpose utility.\n// Ordering: label priority → createdAt → to → migrationHash.\n// ---------------------------------------------------------------------------\n\nconst LABEL_PRIORITY: Record<string, number> = { main: 0, default: 1, feature: 2 };\n\nfunction labelPriority(labels: readonly string[]): number {\n let best = 3;\n for (const l of labels) {\n const p = LABEL_PRIORITY[l];\n if (p !== undefined && p < best) best = p;\n }\n return best;\n}\n\nfunction compareTieBreak(a: MigrationEdge, b: MigrationEdge): number {\n const lp = labelPriority(a.labels) - labelPriority(b.labels);\n if (lp !== 0) return lp;\n const ca = a.createdAt.localeCompare(b.createdAt);\n if (ca !== 0) return ca;\n const tc = a.to.localeCompare(b.to);\n if (tc !== 0) return tc;\n return a.migrationHash.localeCompare(b.migrationHash);\n}\n\nfunction sortedNeighbors(edges: readonly MigrationEdge[]): readonly MigrationEdge[] {\n return 
[...edges].sort(compareTieBreak);\n}\n\n/**\n * Find the shortest path from `fromHash` to `toHash` using BFS over the\n * contract-hash graph. Returns the ordered list of edges, or null if no path\n * exists. Returns an empty array when `fromHash === toHash` (no-op).\n *\n * Neighbor ordering is deterministic via the tie-break sort key:\n * label priority → createdAt → to → migrationHash.\n */\nexport function findPath(\n graph: MigrationGraph,\n fromHash: string,\n toHash: string,\n): readonly MigrationEdge[] | null {\n if (fromHash === toHash) return [];\n\n const parents = new Map<string, { parent: string; edge: MigrationEdge }>();\n for (const step of bfs([fromHash], (n) => sortedForwardNeighbours(graph, n))) {\n if (step.parent !== null && step.incomingEdge !== null) {\n parents.set(step.state, { parent: step.parent, edge: step.incomingEdge });\n }\n if (step.state === toHash) {\n const path: MigrationEdge[] = [];\n let cur = toHash;\n let p = parents.get(cur);\n while (p) {\n path.push(p.edge);\n cur = p.parent;\n p = parents.get(cur);\n }\n path.reverse();\n return path;\n }\n }\n\n return null;\n}\n\n/**\n * Find the shortest path from `fromHash` to `toHash` whose edges collectively\n * cover every invariant in `required`. Returns `null` when no such path exists\n * (either `fromHash`→`toHash` is structurally unreachable, or every reachable\n * path leaves at least one required invariant uncovered). When `required` is\n * empty, delegates to `findPath` so the result is byte-identical for that case.\n *\n * Algorithm: BFS over `(node, coveredSubset)` states with state-level dedup.\n * The covered subset is a `Set<string>` of invariant ids; the state's dedup\n * key is `${node}\\0${[...covered].sort().join('\\0')}`. 
State keys distinguish\n * distinct `(node, covered)` tuples regardless of node-name length because\n * `\\0` cannot appear in any invariant id (validation rejects whitespace and\n * control chars at authoring time).\n *\n * Neighbour ordering when `required ≠ ∅`: edges covering ≥1 still-needed\n * invariant come first, with `labelPriority → createdAt → to → migrationHash`\n * as the secondary key. The heuristic steers BFS toward the satisfying path;\n * correctness (shortest, deterministic) does not depend on it.\n */\nexport function findPathWithInvariants(\n graph: MigrationGraph,\n fromHash: string,\n toHash: string,\n required: ReadonlySet<string>,\n): readonly MigrationEdge[] | null {\n if (required.size === 0) {\n return findPath(graph, fromHash, toHash);\n }\n\n interface InvState {\n readonly node: string;\n readonly covered: ReadonlySet<string>;\n }\n // `\\0` is a safe segment separator: `validateInvariantId` rejects any id\n // containing whitespace or control characters (NUL is U+0000), and node\n // hashes are hex strings. Distinct `(node, covered)` tuples therefore\n // map to distinct strings. If `validateInvariantId` is ever relaxed,\n // re-confirm dedup correctness here.\n const stateKey = (s: InvState): string => {\n if (s.covered.size === 0) return `${s.node}\\0`;\n return `${s.node}\\0${[...s.covered].sort().join('\\0')}`;\n };\n\n const neighbours = (s: InvState): Iterable<{ next: InvState; edge: MigrationEdge }> => {\n const outgoing = graph.forwardChain.get(s.node) ?? [];\n if (outgoing.length === 0) return [];\n return [...outgoing]\n .map((edge) => {\n let useful = false;\n let next: Set<string> | null = null;\n for (const inv of edge.invariants) {\n if (required.has(inv) && !s.covered.has(inv)) {\n if (next === null) next = new Set(s.covered);\n next.add(inv);\n useful = true;\n }\n }\n return { edge, useful, nextCovered: next ?? s.covered };\n })\n .sort((a, b) => {\n if (a.useful !== b.useful) return a.useful ? 
-1 : 1;\n return compareTieBreak(a.edge, b.edge);\n })\n .map(({ edge, nextCovered }) => ({\n next: { node: edge.to, covered: nextCovered },\n edge,\n }));\n };\n\n // Path reconstruction is consumer-side, keyed on stateKey, same shape as\n // findPath's parents map.\n const parents = new Map<string, { parentKey: string; edge: MigrationEdge }>();\n for (const step of bfs<InvState, MigrationEdge>(\n [{ node: fromHash, covered: new Set() }],\n neighbours,\n stateKey,\n )) {\n const curKey = stateKey(step.state);\n if (step.parent !== null && step.incomingEdge !== null) {\n parents.set(curKey, { parentKey: stateKey(step.parent), edge: step.incomingEdge });\n }\n if (step.state.node === toHash && step.state.covered.size === required.size) {\n const path: MigrationEdge[] = [];\n let cur: string | undefined = curKey;\n while (cur !== undefined) {\n const p = parents.get(cur);\n if (!p) break;\n path.push(p.edge);\n cur = p.parentKey;\n }\n path.reverse();\n return path;\n }\n }\n\n return null;\n}\n\n/**\n * Reverse-BFS from `toHash` over `reverseChain` to collect every node from\n * which `toHash` is reachable (inclusive of `toHash` itself).\n */\nfunction collectNodesReachingTarget(graph: MigrationGraph, toHash: string): Set<string> {\n const reached = new Set<string>();\n for (const step of bfs([toHash], (n) => reverseNeighbours(graph, n))) {\n reached.add(step.state);\n }\n return reached;\n}\n\nexport interface PathDecision {\n readonly selectedPath: readonly MigrationEdge[];\n readonly fromHash: string;\n readonly toHash: string;\n readonly alternativeCount: number;\n readonly tieBreakReasons: readonly string[];\n readonly refName?: string;\n /** The caller-supplied required invariant set, sorted ascending. */\n readonly requiredInvariants: readonly string[];\n /**\n * The subset of `requiredInvariants` actually covered by edges on\n * `selectedPath`. 
Always a subset of `requiredInvariants` (when the path\n * is satisfying, equal to it); always derived from `selectedPath`.\n */\n readonly satisfiedInvariants: readonly string[];\n}\n\n/**\n * Outcome of {@link findPathWithDecision}. The pathfinder distinguishes\n * three cases up front so callers don't re-derive structural reachability:\n *\n * - `ok` — a path covering `required` exists; `decision` carries the\n * selection metadata and per-edge invariants.\n * - `unreachable` — `from`→`to` has no structural path. Mapped by callers\n * to the existing no-path / `NO_TARGET` diagnostic.\n * - `unsatisfiable` — `from`→`to` is structurally reachable but no path\n * covers every required invariant. `structuralPath` is the\n * `findPath(graph, from, to)` result, included so callers don't have to\n * recompute it when raising `MIGRATION.NO_INVARIANT_PATH`. `missing` is\n * the subset of `required` that the structural path does *not* cover —\n * correctly accounts for partial coverage when some required invariants\n * are met by the fallback path. Only emitted when `required` is\n * non-empty.\n */\nexport type FindPathOutcome =\n | { readonly kind: 'ok'; readonly decision: PathDecision }\n | { readonly kind: 'unreachable' }\n | {\n readonly kind: 'unsatisfiable';\n readonly structuralPath: readonly MigrationEdge[];\n readonly missing: readonly string[];\n };\n\n/**\n * Routing context for {@link findPathWithDecision}. Both fields are optional;\n * `refName` is only used to decorate the resulting `PathDecision` for the\n * JSON envelope, and `required` defaults to an empty set (purely structural\n * routing). 
They are passed via a single options object so the call sites\n * cannot silently swap two adjacent string parameters.\n */\nexport interface FindPathWithDecisionOptions {\n readonly refName?: string;\n readonly required?: ReadonlySet<string>;\n}\n\n/**\n * Find the shortest path from `fromHash` to `toHash` and return structured\n * path-decision metadata for machine-readable output. When `required` is\n * non-empty, the returned path is the shortest one whose edges collectively\n * cover every required invariant.\n *\n * The discriminated return type tells the caller *why* a path could not be\n * found, so the CLI can pick the right structured error without re-running\n * a structural BFS.\n */\nexport function findPathWithDecision(\n graph: MigrationGraph,\n fromHash: string,\n toHash: string,\n options: FindPathWithDecisionOptions = {},\n): FindPathOutcome {\n const { refName, required = new Set<string>() } = options;\n const requiredInvariants = [...required].sort();\n\n if (fromHash === toHash && required.size === 0) {\n return {\n kind: 'ok',\n decision: {\n selectedPath: [],\n fromHash,\n toHash,\n alternativeCount: 0,\n tieBreakReasons: [],\n requiredInvariants,\n satisfiedInvariants: [],\n ...ifDefined('refName', refName),\n },\n };\n }\n\n const path = findPathWithInvariants(graph, fromHash, toHash, required);\n if (!path) {\n if (required.size === 0) {\n return { kind: 'unreachable' };\n }\n const structural = findPath(graph, fromHash, toHash);\n if (structural === null) {\n return { kind: 'unreachable' };\n }\n const coveredByStructural = new Set<string>();\n for (const edge of structural) {\n for (const inv of edge.invariants) {\n if (required.has(inv)) coveredByStructural.add(inv);\n }\n }\n const missing = requiredInvariants.filter((id) => !coveredByStructural.has(id));\n return { kind: 'unsatisfiable', structuralPath: structural, missing };\n }\n\n const satisfiedInvariants = computeSatisfiedInvariants(required, path);\n\n // Single reverse BFS 
marks every node from which `toHash` is reachable.\n // Replaces a per-edge `findPath(e.to, toHash)` call inside the loop below,\n // which made the whole function O(|path| · (V + E)) instead of O(V + E).\n const reachesTarget = collectNodesReachingTarget(graph, toHash);\n const coveragePrefixes = requiredCoveragePrefixes(required, path);\n\n const tieBreakReasons: string[] = [];\n let alternativeCount = 0;\n\n for (const [i, edge] of path.entries()) {\n const outgoing = graph.forwardChain.get(edge.from);\n if (!outgoing || outgoing.length <= 1) continue;\n const reachable = outgoing.filter((e) => reachesTarget.has(e.to));\n if (reachable.length <= 1) continue;\n\n let comparisonPool: readonly MigrationEdge[] = reachable;\n if (required.size > 0) {\n // coveragePrefixes is built one-per-edge from path, so the index is\n // always in range here; the explicit guard keeps the type narrowed\n // without a non-null assertion.\n const prefixSet = coveragePrefixes[i];\n if (prefixSet === undefined) continue;\n comparisonPool = invariantViableAlternativesAtStep(required, prefixSet, reachable);\n }\n\n alternativeCount += reachable.length - 1;\n const sorted = sortedNeighbors(reachable);\n if (sorted[0]?.migrationHash !== edge.migrationHash) continue;\n if (!reachable.some((e) => e.migrationHash !== edge.migrationHash)) continue;\n\n const sortedViable = sortedNeighbors(comparisonPool);\n if (\n sortedViable.length > 1 &&\n sortedViable[0]?.migrationHash === edge.migrationHash &&\n sortedViable.some((e) => e.migrationHash !== edge.migrationHash)\n ) {\n tieBreakReasons.push(\n `at ${edge.from}: ${comparisonPool.length} candidates, selected by tie-break`,\n );\n }\n }\n\n return {\n kind: 'ok',\n decision: {\n selectedPath: path,\n fromHash,\n toHash,\n alternativeCount,\n tieBreakReasons,\n requiredInvariants,\n satisfiedInvariants,\n ...ifDefined('refName', refName),\n },\n };\n}\n\nfunction computeSatisfiedInvariants(\n required: ReadonlySet<string>,\n path: readonly 
MigrationEdge[],\n): readonly string[] {\n if (required.size === 0) return [];\n const covered = new Set<string>();\n for (const edge of path) {\n for (const inv of edge.invariants) {\n if (required.has(inv)) covered.add(inv);\n }\n }\n return [...covered].sort();\n}\n\n/**\n * For each edge on path, invariant coverage accumulated from earlier edges only —\n * `(required ∩ ∪_{j<i} path[j].invariants)` represented as cumulative set along `required`,\n * keyed as \"full set of required ids satisfied before taking path[i]\".\n */\nfunction requiredCoveragePrefixes(\n required: ReadonlySet<string>,\n path: readonly MigrationEdge[],\n): readonly ReadonlySet<string>[] {\n const prefixes: ReadonlySet<string>[] = [];\n const acc = new Set<string>();\n for (const edge of path) {\n prefixes.push(new Set(acc));\n for (const inv of edge.invariants) {\n if (required.has(inv)) acc.add(inv);\n }\n }\n return prefixes;\n}\n\nfunction invariantViableAlternativesAtStep(\n required: ReadonlySet<string>,\n coverageBeforeTakingEdge: ReadonlySet<string>,\n outgoing: readonly MigrationEdge[],\n): readonly MigrationEdge[] {\n if (required.size === 0) return [...outgoing];\n return outgoing.filter((e) =>\n [...required].every((id) => coverageBeforeTakingEdge.has(id) || e.invariants.includes(id)),\n );\n}\n\n/**\n * Walk ancestors of each branch tip back to find the last node\n * that appears on all paths. Returns `fromHash` if no shared ancestor is found.\n */\nfunction findDivergencePoint(\n graph: MigrationGraph,\n fromHash: string,\n leaves: readonly string[],\n): string {\n const ancestorSets = leaves.map((leaf) => {\n const ancestors = new Set<string>();\n for (const step of bfs([leaf], (n) => reverseNeighbours(graph, n))) {\n ancestors.add(step.state);\n }\n return ancestors;\n });\n\n const commonAncestors = [...(ancestorSets[0] ?? 
[])].filter((node) =>\n ancestorSets.every((s) => s.has(node)),\n );\n\n let deepest = fromHash;\n let deepestDepth = -1;\n for (const ancestor of commonAncestors) {\n const path = findPath(graph, fromHash, ancestor);\n const depth = path ? path.length : 0;\n if (depth > deepestDepth) {\n deepestDepth = depth;\n deepest = ancestor;\n }\n }\n return deepest;\n}\n\n/**\n * Find all branch tips (nodes with no outgoing edges) reachable from\n * `fromHash` via forward edges.\n */\nexport function findReachableLeaves(graph: MigrationGraph, fromHash: string): readonly string[] {\n const leaves: string[] = [];\n for (const step of bfs([fromHash], (n) => forwardNeighbours(graph, n))) {\n if (!graph.forwardChain.get(step.state)?.length) {\n leaves.push(step.state);\n }\n }\n return leaves;\n}\n\n/**\n * Find the target contract hash of the migration graph reachable from\n * EMPTY_CONTRACT_HASH. Returns `null` for a graph that has no target\n * state (either empty, or containing only the root with no outgoing\n * edges). Throws NO_INITIAL_MIGRATION if the graph has nodes but none\n * originate from the empty hash, and AMBIGUOUS_TARGET if multiple\n * branch tips exist.\n */\nexport function findLeaf(graph: MigrationGraph): string | null {\n if (graph.nodes.size === 0) {\n return null;\n }\n\n if (!graph.nodes.has(EMPTY_CONTRACT_HASH)) {\n throw errorNoInitialMigration([...graph.nodes]);\n }\n\n const leaves = findReachableLeaves(graph, EMPTY_CONTRACT_HASH);\n\n if (leaves.length === 0) {\n const reachable = [...graph.nodes].filter((n) => n !== EMPTY_CONTRACT_HASH);\n if (reachable.length > 0) {\n throw errorNoTarget(reachable);\n }\n return null;\n }\n\n if (leaves.length > 1) {\n const divergencePoint = findDivergencePoint(graph, EMPTY_CONTRACT_HASH, leaves);\n const branches = leaves.map((tip) => {\n const path = findPath(graph, divergencePoint, tip);\n return {\n tip,\n edges: (path ?? 
[]).map((e) => ({ dirName: e.dirName, from: e.from, to: e.to })),\n };\n });\n throw errorAmbiguousTarget(leaves, { divergencePoint, branches });\n }\n\n // biome-ignore lint/style/noNonNullAssertion: leaves.length is neither 0 nor >1 per the branches above, so exactly one leaf remains\n return leaves[0]!;\n}\n\n/**\n * Find the latest migration entry by traversing from EMPTY_CONTRACT_HASH\n * to the single target. Returns null for an empty graph.\n * Throws AMBIGUOUS_TARGET if the graph has multiple branch tips.\n */\nexport function findLatestMigration(graph: MigrationGraph): MigrationEdge | null {\n const leafHash = findLeaf(graph);\n if (leafHash === null) return null;\n\n const path = findPath(graph, EMPTY_CONTRACT_HASH, leafHash);\n return path?.at(-1) ?? null;\n}\n\nexport function detectCycles(graph: MigrationGraph): readonly string[][] {\n const WHITE = 0;\n const GRAY = 1;\n const BLACK = 2;\n\n const color = new Map<string, number>();\n const parentMap = new Map<string, string | null>();\n const cycles: string[][] = [];\n\n for (const node of graph.nodes) {\n color.set(node, WHITE);\n }\n\n // Iterative three-color DFS. A frame is (node, outgoing edges, next-index).\n interface Frame {\n node: string;\n outgoing: readonly MigrationEdge[];\n index: number;\n }\n const stack: Frame[] = [];\n\n function pushFrame(u: string): void {\n color.set(u, GRAY);\n stack.push({ node: u, outgoing: graph.forwardChain.get(u) ?? 
[], index: 0 });\n }\n\n for (const root of graph.nodes) {\n if (color.get(root) !== WHITE) continue;\n parentMap.set(root, null);\n pushFrame(root);\n\n while (stack.length > 0) {\n // biome-ignore lint/style/noNonNullAssertion: stack.length > 0 should guarantee that this cannot be undefined\n const frame = stack[stack.length - 1]!;\n if (frame.index >= frame.outgoing.length) {\n color.set(frame.node, BLACK);\n stack.pop();\n continue;\n }\n // biome-ignore lint/style/noNonNullAssertion: the early-continue above guarantees frame.index < frame.outgoing.length here, so this is defined\n const edge = frame.outgoing[frame.index++]!;\n const v = edge.to;\n const vColor = color.get(v);\n if (vColor === GRAY) {\n const cycle: string[] = [v];\n let cur = frame.node;\n while (cur !== v) {\n cycle.push(cur);\n cur = parentMap.get(cur) ?? v;\n }\n cycle.reverse();\n cycles.push(cycle);\n } else if (vColor === WHITE) {\n parentMap.set(v, frame.node);\n pushFrame(v);\n }\n }\n }\n\n return cycles;\n}\n\nexport function detectOrphans(graph: MigrationGraph): readonly MigrationEdge[] {\n if (graph.nodes.size === 0) return [];\n\n const reachable = new Set<string>();\n const startNodes: string[] = [];\n\n if (graph.forwardChain.has(EMPTY_CONTRACT_HASH)) {\n startNodes.push(EMPTY_CONTRACT_HASH);\n } else {\n const allTargets = new Set<string>();\n for (const edges of graph.forwardChain.values()) {\n for (const edge of edges) {\n allTargets.add(edge.to);\n }\n }\n for (const node of graph.nodes) {\n if (!allTargets.has(node)) {\n startNodes.push(node);\n }\n }\n }\n\n for (const step of bfs(startNodes, (n) => forwardNeighbours(graph, n))) {\n reachable.add(step.state);\n }\n\n const orphans: MigrationEdge[] = [];\n for (const [from, migrations] of graph.forwardChain) {\n if (!reachable.has(from)) {\n orphans.push(...migrations);\n }\n }\n\n return 
orphans;\n}\n"],"mappings":";;;;;;;;;;;;;AASA,IAAa,QAAb,MAAsB;CACpB;CACA,OAAe;CAEf,YAAY,UAAuB,EAAE,EAAE;EACrC,KAAK,QAAQ,CAAC,GAAG,QAAQ;;CAG3B,KAAK,MAAe;EAClB,KAAK,MAAM,KAAK,KAAK;;;;;;CAOvB,QAAW;EACT,IAAI,KAAK,QAAQ,KAAK,MAAM,QAC1B,MAAM,IAAI,MAAM,oCAAoC;EAGtD,OAAO,KAAK,MAAM,KAAK;;CAGzB,IAAI,UAAmB;EACrB,OAAO,KAAK,QAAQ,KAAK,MAAM;;;;;ACYnC,UAAiB,IACf,QACA,YAKA,OAA6B,UAAU,OACb;CAS1B,MAAM,0BAAU,IAAI,KAAa;CACjC,MAAM,4BAAY,IAAI,KAAqC;CAC3D,MAAM,QAAQ,IAAI,OAAc;CAChC,KAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,IAAI,IAAI,MAAM;EACpB,IAAI,CAAC,QAAQ,IAAI,EAAE,EAAE;GACnB,QAAQ,IAAI,EAAE;GACd,MAAM,KAAK;IAAE,OAAO;IAAO,KAAK;IAAG,CAAC;;;CAGxC,OAAO,CAAC,MAAM,SAAS;EACrB,MAAM,EAAE,OAAO,SAAS,KAAK,WAAW,MAAM,OAAO;EACrD,MAAM,aAAa,UAAU,IAAI,OAAO;EACxC,MAAM;GACJ,OAAO;GACP,QAAQ,YAAY,UAAU;GAC9B,cAAc,YAAY,QAAQ;GACnC;EAED,KAAK,MAAM,EAAE,MAAM,UAAU,WAAW,QAAQ,EAAE;GAChD,MAAM,IAAI,IAAI,KAAK;GACnB,IAAI,CAAC,QAAQ,IAAI,EAAE,EAAE;IACnB,QAAQ,IAAI,EAAE;IACd,UAAU,IAAI,GAAG;KAAE,QAAQ;KAAS;KAAM,CAAC;IAC3C,MAAM,KAAK;KAAE,OAAO;KAAM,KAAK;KAAG,CAAC;;;;;;;;ACzE3C,SAAS,kBAAkB,OAAuB,MAAc;CAC9D,QAAQ,MAAM,aAAa,IAAI,KAAK,IAAI,EAAE,EAAE,KAAK,UAAU;EAAE,MAAM,KAAK;EAAI;EAAM,EAAE;;;;;;AAOtF,SAAS,wBAAwB,OAAuB,MAAc;CAEpE,OAAO,CAAC,GADM,MAAM,aAAa,IAAI,KAAK,IAAI,EAAE,CAC/B,CAAC,KAAK,gBAAgB,CAAC,KAAK,UAAU;EAAE,MAAM,KAAK;EAAI;EAAM,EAAE;;;AAIlF,SAAS,kBAAkB,OAAuB,MAAc;CAC9D,QAAQ,MAAM,aAAa,IAAI,KAAK,IAAI,EAAE,EAAE,KAAK,UAAU;EAAE,MAAM,KAAK;EAAM;EAAM,EAAE;;AAGxF,SAAS,WAAW,KAAmC,KAAa,OAA4B;CAC9F,MAAM,SAAS,IAAI,IAAI,IAAI;CAC3B,IAAI,QAAQ,OAAO,KAAK,MAAM;MACzB,IAAI,IAAI,KAAK,CAAC,MAAM,CAAC;;AAG5B,SAAgB,iBAAiB,UAA6D;CAC5F,MAAM,wBAAQ,IAAI,KAAa;CAC/B,MAAM,+BAAe,IAAI,KAA8B;CACvD,MAAM,+BAAe,IAAI,KAA8B;CACvD,MAAM,kCAAkB,IAAI,KAA4B;CAExD,KAAK,MAAM,OAAO,UAAU;EAK1B,MAAM,OAAO,IAAI,SAAS,QAAA;EAC1B,MAAM,EAAE,OAAO,IAAI;EAEnB,IAAI,SAAS;OAEP,CADc,IAAI,IAAI,MAAM,OAAO,GAAG,mBAAmB,OAC/C,EACZ,MAAM,yBAAyB,IAAI,SAAS,KAAK;;EAIrD,MAAM,IAAI,KAAK;EACf,MAAM,IAAI,GAAG;EAEb,MAAM,YAA2B;GAC/B;GACA;GACA,eAAe,IAAI,SAAS;GAC5B,SAAS,IAAI;GACb,WAAW,IAAI,SAAS;GACxB,QAAQ,IAAI,SAAS;GACrB,YAAY,
IAAI,SAAS;GAC1B;EAED,IAAI,gBAAgB,IAAI,UAAU,cAAc,EAC9C,MAAM,4BAA4B,UAAU,cAAc;EAE5D,gBAAgB,IAAI,UAAU,eAAe,UAAU;EAEvD,WAAW,cAAc,MAAM,UAAU;EACzC,WAAW,cAAc,IAAI,UAAU;;CAGzC,OAAO;EAAE;EAAO;EAAc;EAAc;EAAiB;;AAS/D,MAAM,iBAAyC;CAAE,MAAM;CAAG,SAAS;CAAG,SAAS;CAAG;AAElF,SAAS,cAAc,QAAmC;CACxD,IAAI,OAAO;CACX,KAAK,MAAM,KAAK,QAAQ;EACtB,MAAM,IAAI,eAAe;EACzB,IAAI,MAAM,KAAA,KAAa,IAAI,MAAM,OAAO;;CAE1C,OAAO;;AAGT,SAAS,gBAAgB,GAAkB,GAA0B;CACnE,MAAM,KAAK,cAAc,EAAE,OAAO,GAAG,cAAc,EAAE,OAAO;CAC5D,IAAI,OAAO,GAAG,OAAO;CACrB,MAAM,KAAK,EAAE,UAAU,cAAc,EAAE,UAAU;CACjD,IAAI,OAAO,GAAG,OAAO;CACrB,MAAM,KAAK,EAAE,GAAG,cAAc,EAAE,GAAG;CACnC,IAAI,OAAO,GAAG,OAAO;CACrB,OAAO,EAAE,cAAc,cAAc,EAAE,cAAc;;AAGvD,SAAS,gBAAgB,OAA2D;CAClF,OAAO,CAAC,GAAG,MAAM,CAAC,KAAK,gBAAgB;;;;;;;;;;AAWzC,SAAgB,SACd,OACA,UACA,QACiC;CACjC,IAAI,aAAa,QAAQ,OAAO,EAAE;CAElC,MAAM,0BAAU,IAAI,KAAsD;CAC1E,KAAK,MAAM,QAAQ,IAAI,CAAC,SAAS,GAAG,MAAM,wBAAwB,OAAO,EAAE,CAAC,EAAE;EAC5E,IAAI,KAAK,WAAW,QAAQ,KAAK,iBAAiB,MAChD,QAAQ,IAAI,KAAK,OAAO;GAAE,QAAQ,KAAK;GAAQ,MAAM,KAAK;GAAc,CAAC;EAE3E,IAAI,KAAK,UAAU,QAAQ;GACzB,MAAM,OAAwB,EAAE;GAChC,IAAI,MAAM;GACV,IAAI,IAAI,QAAQ,IAAI,IAAI;GACxB,OAAO,GAAG;IACR,KAAK,KAAK,EAAE,KAAK;IACjB,MAAM,EAAE;IACR,IAAI,QAAQ,IAAI,IAAI;;GAEtB,KAAK,SAAS;GACd,OAAO;;;CAIX,OAAO;;;;;;;;;;;;;;;;;;;;;AAsBT,SAAgB,uBACd,OACA,UACA,QACA,UACiC;CACjC,IAAI,SAAS,SAAS,GACpB,OAAO,SAAS,OAAO,UAAU,OAAO;CAY1C,MAAM,YAAY,MAAwB;EACxC,IAAI,EAAE,QAAQ,SAAS,GAAG,OAAO,GAAG,EAAE,KAAK;EAC3C,OAAO,GAAG,EAAE,KAAK,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,MAAM,CAAC,KAAK,KAAK;;CAGvD,MAAM,cAAc,MAAmE;EACrF,MAAM,WAAW,MAAM,aAAa,IAAI,EAAE,KAAK,IAAI,EAAE;EACrD,IAAI,SAAS,WAAW,GAAG,OAAO,EAAE;EACpC,OAAO,CAAC,GAAG,SAAS,CACjB,KAAK,SAAS;GACb,IAAI,SAAS;GACb,IAAI,OAA2B;GAC/B,KAAK,MAAM,OAAO,KAAK,YACrB,IAAI,SAAS,IAAI,IAAI,IAAI,CAAC,EAAE,QAAQ,IAAI,IAAI,EAAE;IAC5C,IAAI,SAAS,MAAM,OAAO,IAAI,IAAI,EAAE,QAAQ;IAC5C,KAAK,IAAI,IAAI;IACb,SAAS;;GAGb,OAAO;IAAE;IAAM;IAAQ,aAAa,QAAQ,EAAE;IAAS;IACvD,CACD,MAAM,GAAG,MAAM;GACd,IAAI,EAAE,WAAW,EAAE,QAAQ,OAAO,EAAE,SAAS,KAAK;GAClD,OAAO,gBAAgB,EAAE,MAAM,EAAE,KAAK;IACtC
,CACD,KAAK,EAAE,MAAM,mBAAmB;GAC/B,MAAM;IAAE,MAAM,KAAK;IAAI,SAAS;IAAa;GAC7C;GACD,EAAE;;CAKP,MAAM,0BAAU,IAAI,KAAyD;CAC7E,KAAK,MAAM,QAAQ,IACjB,CAAC;EAAE,MAAM;EAAU,yBAAS,IAAI,KAAK;EAAE,CAAC,EACxC,YACA,SACD,EAAE;EACD,MAAM,SAAS,SAAS,KAAK,MAAM;EACnC,IAAI,KAAK,WAAW,QAAQ,KAAK,iBAAiB,MAChD,QAAQ,IAAI,QAAQ;GAAE,WAAW,SAAS,KAAK,OAAO;GAAE,MAAM,KAAK;GAAc,CAAC;EAEpF,IAAI,KAAK,MAAM,SAAS,UAAU,KAAK,MAAM,QAAQ,SAAS,SAAS,MAAM;GAC3E,MAAM,OAAwB,EAAE;GAChC,IAAI,MAA0B;GAC9B,OAAO,QAAQ,KAAA,GAAW;IACxB,MAAM,IAAI,QAAQ,IAAI,IAAI;IAC1B,IAAI,CAAC,GAAG;IACR,KAAK,KAAK,EAAE,KAAK;IACjB,MAAM,EAAE;;GAEV,KAAK,SAAS;GACd,OAAO;;;CAIX,OAAO;;;;;;AAOT,SAAS,2BAA2B,OAAuB,QAA6B;CACtF,MAAM,0BAAU,IAAI,KAAa;CACjC,KAAK,MAAM,QAAQ,IAAI,CAAC,OAAO,GAAG,MAAM,kBAAkB,OAAO,EAAE,CAAC,EAClE,QAAQ,IAAI,KAAK,MAAM;CAEzB,OAAO;;;;;;;;;;;;AAoET,SAAgB,qBACd,OACA,UACA,QACA,UAAuC,EAAE,EACxB;CACjB,MAAM,EAAE,SAAS,2BAAW,IAAI,KAAa,KAAK;CAClD,MAAM,qBAAqB,CAAC,GAAG,SAAS,CAAC,MAAM;CAE/C,IAAI,aAAa,UAAU,SAAS,SAAS,GAC3C,OAAO;EACL,MAAM;EACN,UAAU;GACR,cAAc,EAAE;GAChB;GACA;GACA,kBAAkB;GAClB,iBAAiB,EAAE;GACnB;GACA,qBAAqB,EAAE;GACvB,GAAG,UAAU,WAAW,QAAQ;GACjC;EACF;CAGH,MAAM,OAAO,uBAAuB,OAAO,UAAU,QAAQ,SAAS;CACtE,IAAI,CAAC,MAAM;EACT,IAAI,SAAS,SAAS,GACpB,OAAO,EAAE,MAAM,eAAe;EAEhC,MAAM,aAAa,SAAS,OAAO,UAAU,OAAO;EACpD,IAAI,eAAe,MACjB,OAAO,EAAE,MAAM,eAAe;EAEhC,MAAM,sCAAsB,IAAI,KAAa;EAC7C,KAAK,MAAM,QAAQ,YACjB,KAAK,MAAM,OAAO,KAAK,YACrB,IAAI,SAAS,IAAI,IAAI,EAAE,oBAAoB,IAAI,IAAI;EAIvD,OAAO;GAAE,MAAM;GAAiB,gBAAgB;GAAY,SAD5C,mBAAmB,QAAQ,OAAO,CAAC,oBAAoB,IAAI,GAAG,CACX;GAAE;;CAGvE,MAAM,sBAAsB,2BAA2B,UAAU,KAAK;CAKtE,MAAM,gBAAgB,2BAA2B,OAAO,OAAO;CAC/D,MAAM,mBAAmB,yBAAyB,UAAU,KAAK;CAEjE,MAAM,kBAA4B,EAAE;CACpC,IAAI,mBAAmB;CAEvB,KAAK,MAAM,CAAC,GAAG,SAAS,KAAK,SAAS,EAAE;EACtC,MAAM,WAAW,MAAM,aAAa,IAAI,KAAK,KAAK;EAClD,IAAI,CAAC,YAAY,SAAS,UAAU,GAAG;EACvC,MAAM,YAAY,SAAS,QAAQ,MAAM,cAAc,IAAI,EAAE,GAAG,CAAC;EACjE,IAAI,UAAU,UAAU,GAAG;EAE3B,IAAI,iBAA2C;EAC/C,IAAI,SAAS,OAAO,GAAG;GAIrB,MAAM,YAAY,iBAAiB;GACnC,IAAI,cAAc,KAAA,GAAW;GAC7B,iBAAiB,kCAAkC,UAAU,WAAW,UAAU;;EAGpF,oBAAoB,UAAU,SAAS
;EAEvC,IADe,gBAAgB,UACrB,CAAC,IAAI,kBAAkB,KAAK,eAAe;EACrD,IAAI,CAAC,UAAU,MAAM,MAAM,EAAE,kBAAkB,KAAK,cAAc,EAAE;EAEpE,MAAM,eAAe,gBAAgB,eAAe;EACpD,IACE,aAAa,SAAS,KACtB,aAAa,IAAI,kBAAkB,KAAK,iBACxC,aAAa,MAAM,MAAM,EAAE,kBAAkB,KAAK,cAAc,EAEhE,gBAAgB,KACd,MAAM,KAAK,KAAK,IAAI,eAAe,OAAO,oCAC3C;;CAIL,OAAO;EACL,MAAM;EACN,UAAU;GACR,cAAc;GACd;GACA;GACA;GACA;GACA;GACA;GACA,GAAG,UAAU,WAAW,QAAQ;GACjC;EACF;;AAGH,SAAS,2BACP,UACA,MACmB;CACnB,IAAI,SAAS,SAAS,GAAG,OAAO,EAAE;CAClC,MAAM,0BAAU,IAAI,KAAa;CACjC,KAAK,MAAM,QAAQ,MACjB,KAAK,MAAM,OAAO,KAAK,YACrB,IAAI,SAAS,IAAI,IAAI,EAAE,QAAQ,IAAI,IAAI;CAG3C,OAAO,CAAC,GAAG,QAAQ,CAAC,MAAM;;;;;;;AAQ5B,SAAS,yBACP,UACA,MACgC;CAChC,MAAM,WAAkC,EAAE;CAC1C,MAAM,sBAAM,IAAI,KAAa;CAC7B,KAAK,MAAM,QAAQ,MAAM;EACvB,SAAS,KAAK,IAAI,IAAI,IAAI,CAAC;EAC3B,KAAK,MAAM,OAAO,KAAK,YACrB,IAAI,SAAS,IAAI,IAAI,EAAE,IAAI,IAAI,IAAI;;CAGvC,OAAO;;AAGT,SAAS,kCACP,UACA,0BACA,UAC0B;CAC1B,IAAI,SAAS,SAAS,GAAG,OAAO,CAAC,GAAG,SAAS;CAC7C,OAAO,SAAS,QAAQ,MACtB,CAAC,GAAG,SAAS,CAAC,OAAO,OAAO,yBAAyB,IAAI,GAAG,IAAI,EAAE,WAAW,SAAS,GAAG,CAAC,CAC3F;;;;;;AAOH,SAAS,oBACP,OACA,UACA,QACQ;CACR,MAAM,eAAe,OAAO,KAAK,SAAS;EACxC,MAAM,4BAAY,IAAI,KAAa;EACnC,KAAK,MAAM,QAAQ,IAAI,CAAC,KAAK,GAAG,MAAM,kBAAkB,OAAO,EAAE,CAAC,EAChE,UAAU,IAAI,KAAK,MAAM;EAE3B,OAAO;GACP;CAEF,MAAM,kBAAkB,CAAC,GAAI,aAAa,MAAM,EAAE,CAAE,CAAC,QAAQ,SAC3D,aAAa,OAAO,MAAM,EAAE,IAAI,KAAK,CAAC,CACvC;CAED,IAAI,UAAU;CACd,IAAI,eAAe;CACnB,KAAK,MAAM,YAAY,iBAAiB;EACtC,MAAM,OAAO,SAAS,OAAO,UAAU,SAAS;EAChD,MAAM,QAAQ,OAAO,KAAK,SAAS;EACnC,IAAI,QAAQ,cAAc;GACxB,eAAe;GACf,UAAU;;;CAGd,OAAO;;;;;;AAOT,SAAgB,oBAAoB,OAAuB,UAAqC;CAC9F,MAAM,SAAmB,EAAE;CAC3B,KAAK,MAAM,QAAQ,IAAI,CAAC,SAAS,GAAG,MAAM,kBAAkB,OAAO,EAAE,CAAC,EACpE,IAAI,CAAC,MAAM,aAAa,IAAI,KAAK,MAAM,EAAE,QACvC,OAAO,KAAK,KAAK,MAAM;CAG3B,OAAO;;;;;;;;;;AAWT,SAAgB,SAAS,OAAsC;CAC7D,IAAI,MAAM,MAAM,SAAS,GACvB,OAAO;CAGT,IAAI,CAAC,MAAM,MAAM,IAAA,eAAwB,EACvC,MAAM,wBAAwB,CAAC,GAAG,MAAM,MAAM,CAAC;CAGjD,MAAM,SAAS,oBAAoB,OAAO,oBAAoB;CAE9D,IAAI,OAAO,WAAW,GAAG;EACvB,MAAM,YAAY,CAAC,GAAG,MAAM,MAAM,CAAC,QAAQ,MAAM,MAAM,oBAAoB;EAC
3E,IAAI,UAAU,SAAS,GACrB,MAAM,cAAc,UAAU;EAEhC,OAAO;;CAGT,IAAI,OAAO,SAAS,GAAG;EACrB,MAAM,kBAAkB,oBAAoB,OAAO,qBAAqB,OAAO;EAQ/E,MAAM,qBAAqB,QAAQ;GAAE;GAAiB,UAPrC,OAAO,KAAK,QAAQ;IAEnC,OAAO;KACL;KACA,QAHW,SAAS,OAAO,iBAAiB,IAGhC,IAAI,EAAE,EAAE,KAAK,OAAO;MAAE,SAAS,EAAE;MAAS,MAAM,EAAE;MAAM,IAAI,EAAE;MAAI,EAAE;KACjF;KAE2D;GAAE,CAAC;;CAInE,OAAO,OAAO;;;;;;;AAQhB,SAAgB,oBAAoB,OAA6C;CAC/E,MAAM,WAAW,SAAS,MAAM;CAChC,IAAI,aAAa,MAAM,OAAO;CAG9B,OADa,SAAS,OAAA,gBAA4B,SACvC,EAAE,GAAG,GAAG,IAAI;;AAGzB,SAAgB,aAAa,OAA4C;CACvE,MAAM,QAAQ;CACd,MAAM,OAAO;CACb,MAAM,QAAQ;CAEd,MAAM,wBAAQ,IAAI,KAAqB;CACvC,MAAM,4BAAY,IAAI,KAA4B;CAClD,MAAM,SAAqB,EAAE;CAE7B,KAAK,MAAM,QAAQ,MAAM,OACvB,MAAM,IAAI,MAAM,MAAM;CASxB,MAAM,QAAiB,EAAE;CAEzB,SAAS,UAAU,GAAiB;EAClC,MAAM,IAAI,GAAG,KAAK;EAClB,MAAM,KAAK;GAAE,MAAM;GAAG,UAAU,MAAM,aAAa,IAAI,EAAE,IAAI,EAAE;GAAE,OAAO;GAAG,CAAC;;CAG9E,KAAK,MAAM,QAAQ,MAAM,OAAO;EAC9B,IAAI,MAAM,IAAI,KAAK,KAAK,OAAO;EAC/B,UAAU,IAAI,MAAM,KAAK;EACzB,UAAU,KAAK;EAEf,OAAO,MAAM,SAAS,GAAG;GAEvB,MAAM,QAAQ,MAAM,MAAM,SAAS;GACnC,IAAI,MAAM,SAAS,MAAM,SAAS,QAAQ;IACxC,MAAM,IAAI,MAAM,MAAM,MAAM;IAC5B,MAAM,KAAK;IACX;;GAIF,MAAM,IADO,MAAM,SAAS,MAAM,SACnB;GACf,MAAM,SAAS,MAAM,IAAI,EAAE;GAC3B,IAAI,WAAW,MAAM;IACnB,MAAM,QAAkB,CAAC,EAAE;IAC3B,IAAI,MAAM,MAAM;IAChB,OAAO,QAAQ,GAAG;KAChB,MAAM,KAAK,IAAI;KACf,MAAM,UAAU,IAAI,IAAI,IAAI;;IAE9B,MAAM,SAAS;IACf,OAAO,KAAK,MAAM;UACb,IAAI,WAAW,OAAO;IAC3B,UAAU,IAAI,GAAG,MAAM,KAAK;IAC5B,UAAU,EAAE;;;;CAKlB,OAAO;;AAGT,SAAgB,cAAc,OAAiD;CAC7E,IAAI,MAAM,MAAM,SAAS,GAAG,OAAO,EAAE;CAErC,MAAM,4BAAY,IAAI,KAAa;CACnC,MAAM,aAAuB,EAAE;CAE/B,IAAI,MAAM,aAAa,IAAA,eAAwB,EAC7C,WAAW,KAAK,oBAAoB;MAC/B;EACL,MAAM,6BAAa,IAAI,KAAa;EACpC,KAAK,MAAM,SAAS,MAAM,aAAa,QAAQ,EAC7C,KAAK,MAAM,QAAQ,OACjB,WAAW,IAAI,KAAK,GAAG;EAG3B,KAAK,MAAM,QAAQ,MAAM,OACvB,IAAI,CAAC,WAAW,IAAI,KAAK,EACvB,WAAW,KAAK,KAAK;;CAK3B,KAAK,MAAM,QAAQ,IAAI,aAAa,MAAM,kBAAkB,OAAO,EAAE,CAAC,EACpE,UAAU,IAAI,KAAK,MAAM;CAG3B,MAAM,UAA2B,EAAE;CACnC,KAAK,MAAM,CAAC,MAAM,eAAe,MAAM,cACrC,IAAI,CAAC,UAAU,IAAI,KAAK,EACtB,QAAQ,KAAK,GAAG,WAAW;CAI/B,OAAO"}
@@ -0,0 +1,298 @@
1
+ import { _ as errorInvalidSpaceId, p as errorInvalidRefFile, u as errorInvalidJson, y as errorMissingFile } from "./errors-EPL_9p9f.mjs";
2
+ import { t as MANIFEST_FILE } from "./io-D13dLvUh.mjs";
3
+ import { join } from "pathe";
4
+ import { readFile, readdir, stat } from "node:fs/promises";
5
+ import { APP_SPACE_ID } from "@prisma-next/framework-components/control";
6
+ //#region src/space-layout.ts
7
+ /**
8
+ * Pattern a contract-space identifier must match. The constraint is
9
+ * filesystem-friendly: lowercase letters / digits / hyphen / underscore,
10
+ * starts with a letter, max 64 characters.
11
+ */
12
// Filesystem-safe shape for a contract-space id: a lowercase letter first,
// then lowercase letters / digits / hyphen / underscore, 64 chars total max.
const SPACE_ID_PATTERN = /^[a-z][a-z0-9_-]{0,63}$/;
/**
 * Report whether `spaceId` matches SPACE_ID_PATTERN and is therefore safe
 * to use verbatim as a migrations subdirectory name.
 */
function isValidSpaceId(spaceId) {
	const candidate = spaceId;
	return SPACE_ID_PATTERN.test(candidate);
}
16
/**
 * Guard form of {@link isValidSpaceId}: a no-op for valid ids, otherwise
 * throws the invalid-space-id error so callers fail before touching the
 * filesystem with an unsafe directory name.
 */
function assertValidSpaceId(spaceId) {
	if (isValidSpaceId(spaceId)) return;
	throw errorInvalidSpaceId(spaceId);
}
19
+ /**
20
+ * Resolve the migrations subdirectory for a given contract space.
21
+ *
22
+ * Every contract space — including the app space (default `'app'`) —
23
+ * lands under `<projectMigrationsDir>/<spaceId>/`. The space id is
24
+ * validated against {@link SPACE_ID_PATTERN} because it becomes a
25
+ * filesystem directory name verbatim.
26
+ *
27
+ * `projectMigrationsDir` is the project's top-level `migrations/`
28
+ * directory; the helper does not assume anything about its absolute /
29
+ * relative shape and is symmetric with `pathe.join`.
30
+ */
31
/**
 * Resolve `<projectMigrationsDir>/<spaceId>` for a contract space.
 * The id is validated first because it becomes a directory name verbatim;
 * path shape (absolute vs. relative) is inherited from the input.
 */
function spaceMigrationDirectory(projectMigrationsDir, spaceId) {
	assertValidSpaceId(spaceId);
	const spaceDir = join(projectMigrationsDir, spaceId);
	return spaceDir;
}
35
+ //#endregion
36
+ //#region src/read-contract-space-head-ref.ts
37
/** True when `error` is an Error instance carrying the given errno `code`. */
function hasErrnoCode$2(error, code) {
	if (!(error instanceof Error)) return false;
	return error.code === code;
}
40
+ /**
41
+ * Read the head ref (`hash` + `invariants`) for a contract space from
42
+ * `<projectMigrationsDir>/<spaceId>/refs/head.json`.
43
+ *
44
+ * Returns `null` when the file does not exist (first emit). Surfaces
45
+ * `MIGRATION.INVALID_JSON` / `MIGRATION.INVALID_REF_FILE` on a corrupt
46
+ * `refs/head.json` so callers can distinguish "no head ref on disk"
47
+ * (returns `null`) from "head ref present but unreadable" (throws).
48
+ *
49
+ * Validates the space id against `[a-z][a-z0-9_-]{0,63}` for the same
50
+ * filesystem-safety reasons as the rest of the per-space helpers. The
51
+ * helper is uniform across the app and extension spaces.
52
+ */
53
/**
 * Load `<projectMigrationsDir>/<spaceId>/refs/head.json`.
 *
 * Returns `null` when the file is absent (first emit, ENOENT); throws
 * errorInvalidJson for unparsable content and errorInvalidRefFile for a
 * parsed value that is not an object with a string `hash` and an
 * `invariants` array of strings. The returned `hash` / `invariants` are
 * passed through exactly as stored on disk.
 */
async function readContractSpaceHeadRef(projectMigrationsDir, spaceId) {
	assertValidSpaceId(spaceId);
	const filePath = join(projectMigrationsDir, spaceId, "refs", "head.json");

	let raw;
	try {
		raw = await readFile(filePath, "utf-8");
	} catch (error) {
		// A missing ref file means "no head ref yet", not corruption.
		if (hasErrnoCode$2(error, "ENOENT")) return null;
		throw error;
	}

	let parsed;
	try {
		parsed = JSON.parse(raw);
	} catch (e) {
		throw errorInvalidJson(filePath, e instanceof Error ? e.message : String(e));
	}

	const isObjectLike = typeof parsed === "object" && parsed !== null;
	if (!isObjectLike) throw errorInvalidRefFile(filePath, "expected an object");
	const obj = parsed;
	if (typeof obj.hash !== "string")
		throw errorInvalidRefFile(filePath, "expected an object with a string `hash` field");
	const invariantsOk =
		Array.isArray(obj.invariants) && obj.invariants.every((value) => typeof value === "string");
	if (!invariantsOk)
		throw errorInvalidRefFile(filePath, "expected an object with an `invariants` array of strings");
	return { hash: obj.hash, invariants: obj.invariants };
}
78
+ //#endregion
79
+ //#region src/detect-space-contract-drift.ts
80
+ /**
81
+ * Pure drift-detection primitive for a single contract space.
82
+ *
83
+ * Runs once per loaded extension space, just before computing the
84
+ * `priorContract` that feeds {@link import('./plan-all-spaces').planAllSpaces}.
85
+ * Hash equality is byte-for-byte (no normalisation) — both sides are
86
+ * already canonical hashes produced by the same pipeline, so any
87
+ * difference is meaningful drift.
88
+ *
89
+ * Synchronous, pure, no I/O. The caller (SQL family) reads the on-disk
90
+ * `contract.json` and computes its hash, then invokes this helper
91
+ * alongside the descriptor's `headRef.hash`. Composes naturally with
92
+ * {@link import('./read-contract-space-head-ref').readContractSpaceHeadRef}
93
+ * which provides the read-side primitive.
94
+ *
95
+ * The drift warning surfaces the extension name and the diff direction.
96
+ */
97
/**
 * Classify a single contract space's hash state. Pure, no I/O:
 * - `firstEmit` — no prior head hash recorded on disk;
 * - `noDrift`  — descriptor hash equals the prior head hash byte-for-byte;
 * - `drift`    — any other difference (both hashes are canonical, so any
 *   mismatch is meaningful).
 */
function detectSpaceContractDrift(spaceId, inputs) {
	const { descriptorHash, priorHeadHash } = inputs;
	let kind;
	if (priorHeadHash === null) kind = "firstEmit";
	else if (descriptorHash === priorHeadHash) kind = "noDrift";
	else kind = "drift";
	return {
		kind,
		spaceId,
		descriptorHash,
		priorHeadHash
	};
}
117
+ //#endregion
118
+ //#region src/verify-contract-spaces.ts
119
/** Narrow an unknown catch value: Error instance with errno `code`? */
function hasErrnoCode$1(error, code) {
	return error instanceof Error ? error.code === code : false;
}
122
+ /**
123
+ * List the per-space subdirectories under
124
+ * `<projectRoot>/migrations/`. Returns space-id directory names (sorted
125
+ * alphabetically) — i.e. any non-dot-prefixed subdirectory whose root
126
+ * does **not** contain a `migration.json` manifest. The manifest is the
127
+ * structural marker of a user-authored migration directory (see
128
+ * `readMigrationsDir` in `./io`); directory names themselves belong to
129
+ * the user and are not part of the contract.
130
+ *
131
+ * Returns `[]` if the migrations directory does not exist (greenfield
132
+ * project).
133
+ *
134
+ * Reads only the user's repo. **No descriptor import.** The caller
135
+ * (verifier) feeds the result into {@link verifyContractSpaces} alongside
136
+ * the loaded-space set and the marker rows.
137
+ */
138
/**
 * List candidate contract-space directory names under
 * `projectMigrationsDir`: non-dot-prefixed subdirectories whose root does
 * NOT contain the migration manifest file (the manifest is the structural
 * marker of a user-authored migration directory). Result is sorted
 * alphabetically; `[]` when the migrations directory itself is missing
 * (greenfield project). Reads only the user's repo — no descriptor import.
 */
async function listContractSpaceDirectories(projectMigrationsDir) {
	let dirents;
	try {
		dirents = await readdir(projectMigrationsDir, { withFileTypes: true });
	} catch (error) {
		// No migrations directory at all — nothing to list.
		if (hasErrnoCode$1(error, "ENOENT")) return [];
		throw error;
	}
	const candidates = dirents
		.filter((d) => d.isDirectory())
		.map((d) => d.name)
		.filter((name) => !name.startsWith("."))
		.sort();
	// Probe each candidate's manifest in parallel; a present manifest marks
	// a user-authored migration directory rather than a contract space.
	const flagged = await Promise.all(
		candidates.map(async (name) => {
			try {
				await stat(join(projectMigrationsDir, name, MANIFEST_FILE));
				return null;
			} catch (error) {
				if (hasErrnoCode$1(error, "ENOENT")) return name;
				throw error;
			}
		})
	);
	return flagged.filter((name) => name !== null);
}
166
+ /**
167
+ * Pure structural verifier for the per-space mechanism. Aggregates the
168
+ * three orphan / missing checks plus per-space hash and invariant
169
+ * comparison.
170
+ *
171
+ * Algorithm:
172
+ *
173
+ * - For every extension space declared in `loadedSpaces` (`'app'`
174
+ * excluded — the per-space verifier is scoped to extension members;
175
+ * the app is verified through the aggregate path):
176
+ * - If no contract-space dir on disk → `declaredButUnmigrated`.
177
+ * - Else if `markerRowsBySpace` lacks an entry → no violation here;
178
+ * the live-DB compare done outside this helper is where the
179
+ * absence shows up.
180
+ * - Else compare marker hash / invariants vs. on-disk head hash /
181
+ * invariants → `hashMismatch` / `invariantsMismatch` on drift.
182
+ * - For every contract-space dir on disk that is not in `loadedSpaces` →
183
+ * `orphanSpaceDir`.
184
+ * - For every marker row whose `space` is not in `loadedSpaces` →
185
+ * `orphanMarker`. The app-space marker is always loaded (`'app'` is
186
+ * in `loadedSpaces` by definition).
187
+ *
188
+ * Output is deterministic: violations are sorted first by `kind`
189
+ * (`declaredButUnmigrated` → `orphanMarker` → `orphanSpaceDir` →
190
+ * `hashMismatch` → `invariantsMismatch`) then by `spaceId`. Two callers
191
+ * passing equivalent inputs see byte-identical violation lists.
192
+ *
193
+ * Synchronous, pure, no I/O. **Does not import the extension descriptor**
194
+ * (the inputs are pre-resolved by the caller); the verifier reads only
195
+ * the user repo, not `node_modules`.
196
+ */
197
/**
 * Pure structural verifier for extension contract spaces (the app space
 * is skipped here — it is verified through the aggregate path).
 *
 * Cross-checks three pre-resolved inputs — the loaded-space set, the
 * space dirs found on disk, and head-ref / marker rows per space — and
 * reports `declaredButUnmigrated`, `hashMismatch`, `invariantsMismatch`,
 * `orphanSpaceDir`, and `orphanMarker` violations. Output is
 * deterministic: sorted by kind rank, then spaceId. No I/O.
 */
function verifyContractSpaces(inputs) {
	const found = [];

	for (const spaceId of [...inputs.loadedSpaces].sort()) {
		if (spaceId === APP_SPACE_ID) continue;
		if (!inputs.spaceDirsOnDisk.includes(spaceId)) {
			found.push({
				kind: "declaredButUnmigrated",
				spaceId,
				remediation: `Extension '${spaceId}' is declared in extensionPacks but has not been emitted; run \`prisma-next migrate\`.`
			});
			continue;
		}
		const head = inputs.headRefsBySpace.get(spaceId);
		const marker = inputs.markerRowsBySpace.get(spaceId);
		// Missing head ref or marker row is surfaced by the live-DB compare
		// outside this helper, not reported here.
		if (!head || !marker) continue;
		if (head.hash !== marker.hash) {
			found.push({
				kind: "hashMismatch",
				spaceId,
				priorHeadHash: head.hash,
				markerHash: marker.hash,
				remediation: `Marker row for space '${spaceId}' is keyed at ${marker.hash}, but the on-disk ${join("migrations", spaceId, "contract.json")} resolves to ${head.hash}. Run \`prisma-next db update\` to advance the database, or \`prisma-next migrate\` if the descriptor was bumped without re-emitting.`
			});
			continue;
		}
		// Hashes agree — check that every on-disk invariant is present in
		// the marker row (extras in the marker are not flagged here).
		const onDiskInvariants = [...head.invariants].sort();
		const markerSet = new Set(marker.invariants);
		const missing = onDiskInvariants.filter((id) => !markerSet.has(id));
		if (missing.length > 0) {
			found.push({
				kind: "invariantsMismatch",
				spaceId,
				onDiskInvariants,
				markerInvariants: [...marker.invariants].sort(),
				remediation: `Marker row for space '${spaceId}' is missing invariants [${missing.map((s) => JSON.stringify(s)).join(", ")}]. Run \`prisma-next db update\` to apply the corresponding data-transform migrations.`
			});
		}
	}

	for (const dir of [...inputs.spaceDirsOnDisk].sort()) {
		if (inputs.loadedSpaces.has(dir)) continue;
		found.push({
			kind: "orphanSpaceDir",
			spaceId: dir,
			remediation: `Orphan contract-space directory \`${join("migrations", dir)}/\` for an extension not in extensionPacks; remove the directory or re-add the extension.`
		});
	}

	for (const space of [...inputs.markerRowsBySpace.keys()].sort()) {
		if (inputs.loadedSpaces.has(space)) continue;
		found.push({
			kind: "orphanMarker",
			spaceId: space,
			remediation: `Orphan marker row for space '${space}' (no longer in extensionPacks); remediation: manually delete the row from \`prisma_contract.marker\`.`
		});
	}

	if (found.length === 0) return { ok: true };

	// Deterministic ordering: kind rank first, then spaceId.
	const KIND_RANK = {
		declaredButUnmigrated: 0,
		orphanMarker: 1,
		orphanSpaceDir: 2,
		hashMismatch: 3,
		invariantsMismatch: 4
	};
	found.sort((a, b) => {
		const byKind = KIND_RANK[a.kind] - KIND_RANK[b.kind];
		if (byKind !== 0) return byKind;
		if (a.spaceId < b.spaceId) return -1;
		return a.spaceId > b.spaceId ? 1 : 0;
	});
	return {
		ok: false,
		violations: found
	};
}
263
+ //#endregion
264
+ //#region src/read-contract-space-contract.ts
265
/** True when `error` is an Error whose errno `code` matches. */
function hasErrnoCode(error, code) {
	if (error instanceof Error) return error.code === code;
	return false;
}
268
+ /**
269
+ * Read the on-disk contract value for a contract space
270
+ * (`<projectMigrationsDir>/<spaceId>/contract.json`). Returns the parsed
271
+ * JSON value as `unknown` — callers that need a typed contract validate
272
+ * via their family's `validateContract` to surface schema issues.
273
+ *
274
+ * Companion to {@link import('./read-contract-space-head-ref').readContractSpaceHeadRef}
275
+ * — same ENOENT-throws / corrupt-file-error semantics. Returns the
276
+ * canonical-JSON value the framework wrote during emit, so re-running
277
+ * this helper across machines / runs yields a byte-identical value.
278
+ */
279
/**
 * Read and parse `<projectMigrationsDir>/<spaceId>/contract.json`,
 * returning the parsed JSON value as-is (callers validate the shape via
 * their family's contract validator). Unlike the head-ref reader, a
 * missing file is an error here (errorMissingFile); unparsable content
 * throws errorInvalidJson.
 */
async function readContractSpaceContract(projectMigrationsDir, spaceId) {
	assertValidSpaceId(spaceId);
	const spaceDir = join(projectMigrationsDir, spaceId);
	const filePath = join(spaceDir, "contract.json");

	let raw;
	try {
		raw = await readFile(filePath, "utf-8");
	} catch (error) {
		if (hasErrnoCode(error, "ENOENT")) throw errorMissingFile("contract.json", spaceDir);
		throw error;
	}

	try {
		return JSON.parse(raw);
	} catch (e) {
		throw errorInvalidJson(filePath, e instanceof Error ? e.message : String(e));
	}
}
295
+ //#endregion
296
+ export { readContractSpaceHeadRef as a, isValidSpaceId as c, detectSpaceContractDrift as i, spaceMigrationDirectory as l, listContractSpaceDirectories as n, APP_SPACE_ID as o, verifyContractSpaces as r, assertValidSpaceId as s, readContractSpaceContract as t };
297
+
298
+ //# sourceMappingURL=read-contract-space-contract-C3-1eyaI.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"read-contract-space-contract-C3-1eyaI.mjs","names":["hasErrnoCode","hasErrnoCode"],"sources":["../src/space-layout.ts","../src/read-contract-space-head-ref.ts","../src/detect-space-contract-drift.ts","../src/verify-contract-spaces.ts","../src/read-contract-space-contract.ts"],"sourcesContent":["import { APP_SPACE_ID } from '@prisma-next/framework-components/control';\nimport { join } from 'pathe';\nimport { errorInvalidSpaceId } from './errors';\n\nexport { APP_SPACE_ID };\n\n/**\n * Branded string carrying a compile-time guarantee that the value has\n * been validated by {@link assertValidSpaceId}. Downstream filesystem\n * helpers (e.g. {@link spaceMigrationDirectory}) accept this type to\n * make \"validated\" tracking visible at the type level rather than\n * relying purely on a runtime check.\n */\nexport type ValidSpaceId = string & { readonly __brand: 'ValidSpaceId' };\n\n/**\n * Pattern a contract-space identifier must match. The constraint is\n * filesystem-friendly: lowercase letters / digits / hyphen / underscore,\n * starts with a letter, max 64 characters.\n */\nconst SPACE_ID_PATTERN = /^[a-z][a-z0-9_-]{0,63}$/;\n\nexport function isValidSpaceId(spaceId: string): spaceId is ValidSpaceId {\n return SPACE_ID_PATTERN.test(spaceId);\n}\n\nexport function assertValidSpaceId(spaceId: string): asserts spaceId is ValidSpaceId {\n if (!isValidSpaceId(spaceId)) {\n throw errorInvalidSpaceId(spaceId);\n }\n}\n\n/**\n * Resolve the migrations subdirectory for a given contract space.\n *\n * Every contract space — including the app space (default `'app'`) —\n * lands under `<projectMigrationsDir>/<spaceId>/`. 
The space id is\n * validated against {@link SPACE_ID_PATTERN} because it becomes a\n * filesystem directory name verbatim.\n *\n * `projectMigrationsDir` is the project's top-level `migrations/`\n * directory; the helper does not assume anything about its absolute /\n * relative shape and is symmetric with `pathe.join`.\n */\nexport function spaceMigrationDirectory(projectMigrationsDir: string, spaceId: string): string {\n assertValidSpaceId(spaceId);\n return join(projectMigrationsDir, spaceId);\n}\n","import { readFile } from 'node:fs/promises';\nimport type { ContractSpaceHeadRef } from '@prisma-next/framework-components/control';\nimport { join } from 'pathe';\nimport { errorInvalidJson, errorInvalidRefFile } from './errors';\nimport { assertValidSpaceId } from './space-layout';\n\nexport type { ContractSpaceHeadRef };\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * Read the head ref (`hash` + `invariants`) for a contract space from\n * `<projectMigrationsDir>/<spaceId>/refs/head.json`.\n *\n * Returns `null` when the file does not exist (first emit). Surfaces\n * `MIGRATION.INVALID_JSON` / `MIGRATION.INVALID_REF_FILE` on a corrupt\n * `refs/head.json` so callers can distinguish \"no head ref on disk\"\n * (returns `null`) from \"head ref present but unreadable\" (throws).\n *\n * Validates the space id against `[a-z][a-z0-9_-]{0,63}` for the same\n * filesystem-safety reasons as the rest of the per-space helpers. 
The\n * helper is uniform across the app and extension spaces.\n */\nexport async function readContractSpaceHeadRef(\n projectMigrationsDir: string,\n spaceId: string,\n): Promise<ContractSpaceHeadRef | null> {\n assertValidSpaceId(spaceId);\n\n const filePath = join(projectMigrationsDir, spaceId, 'refs', 'head.json');\n\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return null;\n }\n throw error;\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch (e) {\n throw errorInvalidJson(filePath, e instanceof Error ? e.message : String(e));\n }\n\n if (typeof parsed !== 'object' || parsed === null) {\n throw errorInvalidRefFile(filePath, 'expected an object');\n }\n const obj = parsed as { hash?: unknown; invariants?: unknown };\n if (typeof obj.hash !== 'string') {\n throw errorInvalidRefFile(filePath, 'expected an object with a string `hash` field');\n }\n if (!Array.isArray(obj.invariants) || obj.invariants.some((value) => typeof value !== 'string')) {\n throw errorInvalidRefFile(filePath, 'expected an object with an `invariants` array of strings');\n }\n\n return { hash: obj.hash, invariants: obj.invariants as readonly string[] };\n}\n","/**\n * Inputs for {@link detectSpaceContractDrift}.\n *\n * Both hashes are produced by the caller (the SQL-family wiring at the\n * consumption site) using the canonical contract hashing pipeline.\n * Keeping the helper pure lets `migration-tools` stay framework-neutral\n * — the SQL family already speaks `Contract<SqlStorage>`, the Mongo\n * family speaks its own contract type, and both reduce to a hash string\n * before drift detection runs.\n *\n * `priorHeadHash` is `null` when no `contract.json` exists yet on disk for\n * the space (the descriptor declares an extension that has never been\n * emitted into the user's repo). 
That's the \"first emit\" case — no\n * drift to surface; the migrate emit will create the on-disk artefacts.\n */\nexport interface DetectSpaceContractDriftInputs {\n readonly descriptorHash: string;\n readonly priorHeadHash: string | null;\n}\n\n/**\n * Result discriminant for {@link detectSpaceContractDrift}.\n *\n * - `noDrift`: descriptor hash and on-disk head hash agree byte-for-byte.\n * The migrate emit can proceed with no warning.\n * - `firstEmit`: no on-disk `contract.json` on disk yet. The extension\n * was just added to `extensionPacks`; this run will create the\n * on-disk artefacts. No warning either — the user's intent is to install\n * the extension, not to \"drift\" from a state they haven't recorded.\n * - `drift`: descriptor hash differs from on-disk head hash. The caller\n * surfaces a non-fatal warning naming the extension and the\n * diff direction (descriptor → on-disk head). The migrate emit proceeds\n * normally so the bump is materialised this run; the warning just\n * confirms the bump is being captured.\n *\n * `spaceId`, `descriptorHash`, and `priorHeadHash` are threaded through\n * verbatim so the caller (logger / TerminalUI / strict-mode envelope)\n * has everything it needs to format the warning message without\n * re-reading the descriptor or the on-disk artefact.\n */\nexport type SpaceContractDriftResult = {\n readonly kind: 'noDrift' | 'firstEmit' | 'drift';\n readonly spaceId: string;\n readonly descriptorHash: string;\n readonly priorHeadHash: string | null;\n};\n\n/**\n * Pure drift-detection primitive for a single contract space.\n *\n * Runs once per loaded extension space, just before computing the\n * `priorContract` that feeds {@link import('./plan-all-spaces').planAllSpaces}.\n * Hash equality is byte-for-byte (no normalisation) — both sides are\n * already canonical hashes produced by the same pipeline, so any\n * difference is meaningful drift.\n *\n * Synchronous, pure, no I/O. 
The caller (SQL family) reads the on-disk\n * `contract.json` and computes its hash, then invokes this helper\n * alongside the descriptor's `headRef.hash`. Composes naturally with\n * {@link import('./read-contract-space-head-ref').readContractSpaceHeadRef}\n * which provides the read-side primitive.\n *\n * The drift warning surfaces the extension name and the diff direction.\n */\nexport function detectSpaceContractDrift(\n spaceId: string,\n inputs: DetectSpaceContractDriftInputs,\n): SpaceContractDriftResult {\n if (inputs.priorHeadHash === null) {\n return {\n kind: 'firstEmit',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n priorHeadHash: null,\n };\n }\n if (inputs.descriptorHash === inputs.priorHeadHash) {\n return {\n kind: 'noDrift',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n priorHeadHash: inputs.priorHeadHash,\n };\n }\n return {\n kind: 'drift',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n priorHeadHash: inputs.priorHeadHash,\n };\n}\n","import { readdir, stat } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { MANIFEST_FILE } from './io';\nimport { APP_SPACE_ID } from './space-layout';\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * List the per-space subdirectories under\n * `<projectRoot>/migrations/`. Returns space-id directory names (sorted\n * alphabetically) — i.e. any non-dot-prefixed subdirectory whose root\n * does **not** contain a `migration.json` manifest. The manifest is the\n * structural marker of a user-authored migration directory (see\n * `readMigrationsDir` in `./io`); directory names themselves belong to\n * the user and are not part of the contract.\n *\n * Returns `[]` if the migrations directory does not exist (greenfield\n * project).\n *\n * Reads only the user's repo. 
**No descriptor import.** The caller\n * (verifier) feeds the result into {@link verifyContractSpaces} alongside\n * the loaded-space set and the marker rows.\n */\nexport async function listContractSpaceDirectories(\n projectMigrationsDir: string,\n): Promise<readonly string[]> {\n let entries: { readonly name: string; readonly isDirectory: boolean }[];\n try {\n const dirents = await readdir(projectMigrationsDir, { withFileTypes: true });\n entries = dirents.map((d) => ({ name: d.name, isDirectory: d.isDirectory() }));\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return [];\n }\n throw error;\n }\n\n const namedCandidates = entries\n .filter((e) => e.isDirectory)\n .map((e) => e.name)\n .filter((name) => !name.startsWith('.'))\n .sort();\n\n const manifestChecks = await Promise.all(\n namedCandidates.map(async (name) => {\n try {\n await stat(join(projectMigrationsDir, name, MANIFEST_FILE));\n return { name, isMigrationDir: true };\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return { name, isMigrationDir: false };\n }\n throw error;\n }\n }),\n );\n\n return manifestChecks.filter((c) => !c.isMigrationDir).map((c) => c.name);\n}\n\n/**\n * On-disk head value (`(hash, invariants)`) for one contract space.\n * The verifier compares this against the marker row for the same space\n * to detect drift between the user-emitted artefacts and the live DB\n * marker.\n */\nexport interface ContractSpaceHeadRecord {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\n/**\n * Marker row read from `prisma_contract.marker` (one per `space`).\n * Caller resolves these via the family runtime's marker reader before\n * invoking {@link verifyContractSpaces}.\n */\nexport interface SpaceMarkerRecord {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\nexport interface VerifyContractSpacesInputs {\n /**\n * Set of contract spaces the project declares: `'app'` plus each\n * extension space in `extensionPacks`. 
The caller's discovery path\n * never reads the extension descriptor module — it walks the\n * `extensionPacks` configuration in `prisma-next.config.ts` for the\n * space ids.\n */\n readonly loadedSpaces: ReadonlySet<string>;\n\n /**\n * Per-space subdirectories observed under\n * `<projectRoot>/migrations/`. Resolved via\n * {@link listContractSpaceDirectories}.\n */\n readonly spaceDirsOnDisk: readonly string[];\n\n /**\n * Head ref per space, keyed by space id. Caller reads\n * `<projectRoot>/migrations/<space-id>/contract.json` and\n * `<projectRoot>/migrations/<space-id>/refs/head.json` to construct\n * this map. Spaces with no contract-space dir on disk simply omit a\n * map entry.\n */\n readonly headRefsBySpace: ReadonlyMap<string, ContractSpaceHeadRecord>;\n\n /**\n * Marker rows keyed by `space`. Caller reads them from the\n * `prisma_contract.marker` table.\n */\n readonly markerRowsBySpace: ReadonlyMap<string, SpaceMarkerRecord>;\n}\n\nexport type SpaceVerifierViolation =\n | {\n readonly kind: 'declaredButUnmigrated';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'orphanMarker';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'orphanSpaceDir';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'hashMismatch';\n readonly spaceId: string;\n readonly priorHeadHash: string;\n readonly markerHash: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'invariantsMismatch';\n readonly spaceId: string;\n readonly onDiskInvariants: readonly string[];\n readonly markerInvariants: readonly string[];\n readonly remediation: string;\n };\n\nexport type VerifyContractSpacesResult =\n | { readonly ok: true }\n | { readonly ok: false; readonly violations: readonly SpaceVerifierViolation[] };\n\n/**\n * Pure structural verifier for the per-space mechanism. 
Aggregates the\n * three orphan / missing checks plus per-space hash and invariant\n * comparison.\n *\n * Algorithm:\n *\n * - For every extension space declared in `loadedSpaces` (`'app'`\n * excluded — the per-space verifier is scoped to extension members;\n * the app is verified through the aggregate path):\n * - If no contract-space dir on disk → `declaredButUnmigrated`.\n * - Else if `markerRowsBySpace` lacks an entry → no violation here;\n * the live-DB compare done outside this helper is where the\n * absence shows up.\n * - Else compare marker hash / invariants vs. on-disk head hash /\n * invariants → `hashMismatch` / `invariantsMismatch` on drift.\n * - For every contract-space dir on disk that is not in `loadedSpaces` →\n * `orphanSpaceDir`.\n * - For every marker row whose `space` is not in `loadedSpaces` →\n * `orphanMarker`. The app-space marker is always loaded (`'app'` is\n * in `loadedSpaces` by definition).\n *\n * Output is deterministic: violations are sorted first by `kind`\n * (`declaredButUnmigrated` → `orphanMarker` → `orphanSpaceDir` →\n * `hashMismatch` → `invariantsMismatch`) then by `spaceId`. Two callers\n * passing equivalent inputs see byte-identical violation lists.\n *\n * Synchronous, pure, no I/O. 
**Does not import the extension descriptor**\n * (the inputs are pre-resolved by the caller); the verifier reads only\n * the user repo, not `node_modules`.\n */\nexport function verifyContractSpaces(\n inputs: VerifyContractSpacesInputs,\n): VerifyContractSpacesResult {\n const violations: SpaceVerifierViolation[] = [];\n\n for (const spaceId of [...inputs.loadedSpaces].sort()) {\n if (spaceId === APP_SPACE_ID) continue;\n\n if (!inputs.spaceDirsOnDisk.includes(spaceId)) {\n violations.push({\n kind: 'declaredButUnmigrated',\n spaceId,\n remediation: `Extension '${spaceId}' is declared in extensionPacks but has not been emitted; run \\`prisma-next migrate\\`.`,\n });\n continue;\n }\n\n const head = inputs.headRefsBySpace.get(spaceId);\n const marker = inputs.markerRowsBySpace.get(spaceId);\n if (!head || !marker) {\n continue;\n }\n\n if (head.hash !== marker.hash) {\n violations.push({\n kind: 'hashMismatch',\n spaceId,\n priorHeadHash: head.hash,\n markerHash: marker.hash,\n remediation: `Marker row for space '${spaceId}' is keyed at ${marker.hash}, but the on-disk ${join('migrations', spaceId, 'contract.json')} resolves to ${head.hash}. Run \\`prisma-next db update\\` to advance the database, or \\`prisma-next migrate\\` if the descriptor was bumped without re-emitting.`,\n });\n continue;\n }\n\n const onDiskInvariants = [...head.invariants].sort();\n const markerInvariants = new Set(marker.invariants);\n const missing = onDiskInvariants.filter((id) => !markerInvariants.has(id));\n if (missing.length > 0) {\n violations.push({\n kind: 'invariantsMismatch',\n spaceId,\n onDiskInvariants,\n markerInvariants: [...marker.invariants].sort(),\n remediation: `Marker row for space '${spaceId}' is missing invariants [${missing.map((s) => JSON.stringify(s)).join(', ')}]. 
Run \\`prisma-next db update\\` to apply the corresponding data-transform migrations.`,\n });\n }\n }\n\n for (const dir of [...inputs.spaceDirsOnDisk].sort()) {\n if (!inputs.loadedSpaces.has(dir)) {\n violations.push({\n kind: 'orphanSpaceDir',\n spaceId: dir,\n remediation: `Orphan contract-space directory \\`${join('migrations', dir)}/\\` for an extension not in extensionPacks; remove the directory or re-add the extension.`,\n });\n }\n }\n\n for (const space of [...inputs.markerRowsBySpace.keys()].sort()) {\n if (!inputs.loadedSpaces.has(space)) {\n violations.push({\n kind: 'orphanMarker',\n spaceId: space,\n remediation: `Orphan marker row for space '${space}' (no longer in extensionPacks); remediation: manually delete the row from \\`prisma_contract.marker\\`.`,\n });\n }\n }\n\n if (violations.length === 0) {\n return { ok: true };\n }\n\n const kindOrder: Record<SpaceVerifierViolation['kind'], number> = {\n declaredButUnmigrated: 0,\n orphanMarker: 1,\n orphanSpaceDir: 2,\n hashMismatch: 3,\n invariantsMismatch: 4,\n };\n\n violations.sort((a, b) => {\n const k = kindOrder[a.kind] - kindOrder[b.kind];\n if (k !== 0) return k;\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return { ok: false, violations };\n}\n","import { readFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { errorInvalidJson, errorMissingFile } from './errors';\nimport { assertValidSpaceId } from './space-layout';\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * Read the on-disk contract value for a contract space\n * (`<projectMigrationsDir>/<spaceId>/contract.json`). 
Returns the parsed\n * JSON value as `unknown` — callers that need a typed contract validate\n * via their family's `validateContract` to surface schema issues.\n *\n * Companion to {@link import('./read-contract-space-head-ref').readContractSpaceHeadRef}\n * — same ENOENT-throws / corrupt-file-error semantics. Returns the\n * canonical-JSON value the framework wrote during emit, so re-running\n * this helper across machines / runs yields a byte-identical value.\n */\nexport async function readContractSpaceContract(\n projectMigrationsDir: string,\n spaceId: string,\n): Promise<unknown> {\n assertValidSpaceId(spaceId);\n\n const filePath = join(projectMigrationsDir, spaceId, 'contract.json');\n\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n throw errorMissingFile('contract.json', join(projectMigrationsDir, spaceId));\n }\n throw error;\n }\n\n try {\n return JSON.parse(raw);\n } catch (e) {\n throw errorInvalidJson(filePath, e instanceof Error ? 
e.message : String(e));\n }\n}\n"],"mappings":";;;;;;;;;;;AAoBA,MAAM,mBAAmB;AAEzB,SAAgB,eAAe,SAA0C;CACvE,OAAO,iBAAiB,KAAK,QAAQ;;AAGvC,SAAgB,mBAAmB,SAAkD;CACnF,IAAI,CAAC,eAAe,QAAQ,EAC1B,MAAM,oBAAoB,QAAQ;;;;;;;;;;;;;;AAgBtC,SAAgB,wBAAwB,sBAA8B,SAAyB;CAC7F,mBAAmB,QAAQ;CAC3B,OAAO,KAAK,sBAAsB,QAAQ;;;;ACtC5C,SAASA,eAAa,OAAgB,MAAuB;CAC3D,OAAO,iBAAiB,SAAU,MAA4B,SAAS;;;;;;;;;;;;;;;AAgBzE,eAAsB,yBACpB,sBACA,SACsC;CACtC,mBAAmB,QAAQ;CAE3B,MAAM,WAAW,KAAK,sBAAsB,SAAS,QAAQ,YAAY;CAEzE,IAAI;CACJ,IAAI;EACF,MAAM,MAAM,SAAS,UAAU,QAAQ;UAChC,OAAO;EACd,IAAIA,eAAa,OAAO,SAAS,EAC/B,OAAO;EAET,MAAM;;CAGR,IAAI;CACJ,IAAI;EACF,SAAS,KAAK,MAAM,IAAI;UACjB,GAAG;EACV,MAAM,iBAAiB,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE,CAAC;;CAG9E,IAAI,OAAO,WAAW,YAAY,WAAW,MAC3C,MAAM,oBAAoB,UAAU,qBAAqB;CAE3D,MAAM,MAAM;CACZ,IAAI,OAAO,IAAI,SAAS,UACtB,MAAM,oBAAoB,UAAU,gDAAgD;CAEtF,IAAI,CAAC,MAAM,QAAQ,IAAI,WAAW,IAAI,IAAI,WAAW,MAAM,UAAU,OAAO,UAAU,SAAS,EAC7F,MAAM,oBAAoB,UAAU,2DAA2D;CAGjG,OAAO;EAAE,MAAM,IAAI;EAAM,YAAY,IAAI;EAAiC;;;;;;;;;;;;;;;;;;;;;ACG5E,SAAgB,yBACd,SACA,QAC0B;CAC1B,IAAI,OAAO,kBAAkB,MAC3B,OAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,eAAe;EAChB;CAEH,IAAI,OAAO,mBAAmB,OAAO,eACnC,OAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,eAAe,OAAO;EACvB;CAEH,OAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,eAAe,OAAO;EACvB;;;;ACpFH,SAASC,eAAa,OAAgB,MAAuB;CAC3D,OAAO,iBAAiB,SAAU,MAA4B,SAAS;;;;;;;;;;;;;;;;;;AAmBzE,eAAsB,6BACpB,sBAC4B;CAC5B,IAAI;CACJ,IAAI;EAEF,WAAU,MADY,QAAQ,sBAAsB,EAAE,eAAe,MAAM,CAAC,EAC1D,KAAK,OAAO;GAAE,MAAM,EAAE;GAAM,aAAa,EAAE,aAAa;GAAE,EAAE;UACvE,OAAO;EACd,IAAIA,eAAa,OAAO,SAAS,EAC/B,OAAO,EAAE;EAEX,MAAM;;CAGR,MAAM,kBAAkB,QACrB,QAAQ,MAAM,EAAE,YAAY,CAC5B,KAAK,MAAM,EAAE,KAAK,CAClB,QAAQ,SAAS,CAAC,KAAK,WAAW,IAAI,CAAC,CACvC,MAAM;CAgBT,QAAO,MAdsB,QAAQ,IACnC,gBAAgB,IAAI,OAAO,SAAS;EAClC,IAAI;GACF,MAAM,KAAK,KAAK,sBAAsB,MAAM,cAAc,CAAC;GAC3D,OAAO;IAAE;IAAM,gBAAgB;IAAM;WAC9B,OAAO;GACd,IAAIA,eAAa,OAAO,SAAS,EAC/B,OAAO;IAAE;IAAM,gBAAgB;IAAO;GAExC,MAAM;;GAER,CACH,EAEqB,QAAQ,MAAM,CAAC,EAAE,eAAe,CAAC,KAAK,MAAM,EAAE,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;AA2H3E,SAAgB,qBACd,QAC4B;CAC5B,MAAM,aAAuC,EAAE;CAE/C,KAAK,MAAM,WAAW,CAAC,GAAG,OAAO,aAAa,CAAC,MAAM,EAAE;EACrD,IAAI,YAAY,cAAc;EAE9B,IAAI,CAAC,OAAO,gBAAgB,SAAS,QAAQ,EAAE;GAC7C,WAAW,KAAK;IACd,MAAM;IACN;IACA,aAAa,cAAc,QAAQ;IACpC,CAAC;GACF;;EAGF,MAAM,OAAO,OAAO,gBAAgB,IAAI,QAAQ;EAChD,MAAM,SAAS,OAAO,kBAAkB,IAAI,QAAQ;EACpD,IAAI,CAAC,QAAQ,CAAC,QACZ;EAGF,IAAI,KAAK,SAAS,OAAO,MAAM;GAC7B,WAAW,KAAK;IACd,MAAM;IACN;IACA,eAAe,KAAK;IACpB,YAAY,OAAO;IACnB,aAAa,yBAAyB,QAAQ,gBAAgB,OAAO,KAAK,oBAAoB,KAAK,cAAc,SAAS,gBAAgB,CAAC,eAAe,KAAK,KAAK;IACrK,CAAC;GACF;;EAGF,MAAM,mBAAmB,CAAC,GAAG,KAAK,WAAW,CAAC,MAAM;EACpD,MAAM,mBAAmB,IAAI,IAAI,OAAO,WAAW;EACnD,MAAM,UAAU,iBAAiB,QAAQ,OAAO,CAAC,iBAAiB,IAAI,GAAG,CAAC;EAC1E,IAAI,QAAQ,SAAS,GACnB,WAAW,KAAK;GACd,MAAM;GACN;GACA;GACA,kBAAkB,CAAC,GAAG,OAAO,WAAW,CAAC,MAAM;GAC/C,aAAa,yBAAyB,QAAQ,2BAA2B,QAAQ,KAAK,MAAM,KAAK,UAAU,EAAE,CAAC,CAAC,KAAK,KAAK,CAAC;GAC3H,CAAC;;CAIN,KAAK,MAAM,OAAO,CAAC,GAAG,OAAO,gBAAgB,CAAC,MAAM,EAClD,IAAI,CAAC,OAAO,aAAa,IAAI,IAAI,EAC/B,WAAW,KAAK;EACd,MAAM;EACN,SAAS;EACT,aAAa,qCAAqC,KAAK,cAAc,IAAI,CAAC;EAC3E,CAAC;CAIN,KAAK,MAAM,SAAS,CAAC,GAAG,OAAO,kBAAkB,MAAM,CAAC,CAAC,MAAM,EAC7D,IAAI,CAAC,OAAO,aAAa,IAAI,MAAM,EACjC,WAAW,KAAK;EACd,MAAM;EACN,SAAS;EACT,aAAa,gCAAgC,MAAM;EACpD,CAAC;CAIN,IAAI,WAAW,WAAW,GACxB,OAAO,EAAE,IAAI,MAAM;CAGrB,MAAM,YAA4D;EAChE,uBAAuB;EACvB,cAAc;EACd,gBAAgB;EAChB,cAAc;EACd,oBAAoB;EACrB;CAED,WAAW,MAAM,GAAG,MAAM;EACxB,MAAM,IAAI,UAAU,EAAE,QAAQ,UAAU,EAAE;EAC1C,IAAI,MAAM,GAAG,OAAO;EACpB,IAAI,EAAE,UAAU,EAAE,SAAS,OAAO;EAClC,IAAI,EAAE,UAAU,EAAE,SAAS,OAAO;EAClC,OAAO;GACP;CAEF,OAAO;EAAE,IAAI;EAAO;EAAY;;;;ACzQlC,SAAS,aAAa,OAAgB,MAAuB;CAC3D,OAAO,iBAAiB,SAAU,MAA4B,SAAS;;;;;;;;;;;;;AAczE,eAAsB,0BACpB,sBACA,SACkB;CAClB,mBAAmB,QAAQ;CAE3B,MAAM,WAAW,KAAK,sBAAsB,SAAS,gBAAgB;CAErE,IAAI;CACJ,IAAI;EACF,MAAM,MAAM,SAAS,UAAU,QAAQ;UAChC,OAAO;EACd,IAAI,aAAa,OAAO,SAAS,EAC/B,MAAM,iBAAiB,iBAAiB,KAAK,sBAAsB,QAAQ,CAAC;EAE9E,MAAM;;CAGR,IAAI;EACF,OAAO,KAAK,MAAM,IAAI;UACf,GAAG;EACV,MAAM,iBAAiB,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@prisma-next/migration-tools",
3
- "version": "0.5.0-dev.67",
3
+ "version": "0.5.0-dev.68",
4
4
  "license": "Apache-2.0",
5
5
  "type": "module",
6
6
  "sideEffects": false,
@@ -9,16 +9,16 @@
9
9
  "arktype": "^2.1.29",
10
10
  "pathe": "^2.0.3",
11
11
  "prettier": "^3.8.3",
12
- "@prisma-next/contract": "0.5.0-dev.67",
13
- "@prisma-next/framework-components": "0.5.0-dev.67",
14
- "@prisma-next/utils": "0.5.0-dev.67"
12
+ "@prisma-next/utils": "0.5.0-dev.68",
13
+ "@prisma-next/contract": "0.5.0-dev.68",
14
+ "@prisma-next/framework-components": "0.5.0-dev.68"
15
15
  },
16
16
  "devDependencies": {
17
17
  "tsdown": "0.22.0",
18
18
  "typescript": "5.9.3",
19
19
  "vitest": "4.1.5",
20
- "@prisma-next/tsdown": "0.0.0",
21
- "@prisma-next/tsconfig": "0.0.0"
20
+ "@prisma-next/tsconfig": "0.0.0",
21
+ "@prisma-next/tsdown": "0.0.0"
22
22
  },
23
23
  "files": [
24
24
  "dist",
@@ -80,6 +80,10 @@
80
80
  "types": "./dist/exports/spaces.d.mts",
81
81
  "import": "./dist/exports/spaces.mjs"
82
82
  },
83
+ "./aggregate": {
84
+ "types": "./dist/exports/aggregate.d.mts",
85
+ "import": "./dist/exports/aggregate.mjs"
86
+ },
83
87
  "./package.json": "./package.json"
84
88
  },
85
89
  "repository": {