@prisma-next/migration-tools 0.5.0-dev.21 → 0.5.0-dev.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/dist/{constants-DOzBI2EP.mjs → constants-BQEHsaEx.mjs} +1 -1
  2. package/dist/{constants-DOzBI2EP.mjs.map → constants-BQEHsaEx.mjs.map} +1 -1
  3. package/dist/{errors-BS_Kq8GF.mjs → errors-CRiMISRK.mjs} +24 -2
  4. package/dist/errors-CRiMISRK.mjs.map +1 -0
  5. package/dist/exports/constants.mjs +1 -1
  6. package/dist/exports/errors.d.mts +2 -1
  7. package/dist/exports/errors.d.mts.map +1 -1
  8. package/dist/exports/errors.mjs +2 -2
  9. package/dist/exports/graph.d.mts +1 -1
  10. package/dist/exports/hash.d.mts +2 -2
  11. package/dist/exports/invariants.d.mts +1 -1
  12. package/dist/exports/invariants.mjs +2 -2
  13. package/dist/exports/io.d.mts +2 -2
  14. package/dist/exports/io.d.mts.map +1 -1
  15. package/dist/exports/io.mjs +3 -8
  16. package/dist/exports/io.mjs.map +1 -1
  17. package/dist/exports/metadata.d.mts +1 -1
  18. package/dist/exports/migration-graph.d.mts +23 -3
  19. package/dist/exports/migration-graph.d.mts.map +1 -1
  20. package/dist/exports/migration-graph.mjs +126 -48
  21. package/dist/exports/migration-graph.mjs.map +1 -1
  22. package/dist/exports/migration.d.mts +1 -1
  23. package/dist/exports/migration.d.mts.map +1 -1
  24. package/dist/exports/migration.mjs +43 -2
  25. package/dist/exports/migration.mjs.map +1 -1
  26. package/dist/exports/package.d.mts +1 -1
  27. package/dist/exports/refs.mjs +1 -1
  28. package/dist/{graph-coc0V7k2.d.mts → graph-BHPv-9Gl.d.mts} +1 -1
  29. package/dist/{graph-coc0V7k2.d.mts.map → graph-BHPv-9Gl.d.mts.map} +1 -1
  30. package/dist/{invariants-jlMTqh_Q.mjs → invariants-B915SwZg.mjs} +2 -2
  31. package/dist/{invariants-jlMTqh_Q.mjs.map → invariants-B915SwZg.mjs.map} +1 -1
  32. package/dist/{metadata-CdSwaQ2k.d.mts → metadata-DuGfxBC3.d.mts} +1 -1
  33. package/dist/{metadata-CdSwaQ2k.d.mts.map → metadata-DuGfxBC3.d.mts.map} +1 -1
  34. package/dist/op-schema-DZKFua46.mjs +14 -0
  35. package/dist/op-schema-DZKFua46.mjs.map +1 -0
  36. package/dist/{package-DFjGigEm.d.mts → package-DcpcRqpb.d.mts} +2 -2
  37. package/dist/package-DcpcRqpb.d.mts.map +1 -0
  38. package/package.json +6 -6
  39. package/src/errors.ts +29 -0
  40. package/src/exports/errors.ts +1 -1
  41. package/src/exports/migration-graph.ts +1 -0
  42. package/src/graph-ops.ts +57 -30
  43. package/src/io.ts +1 -10
  44. package/src/migration-base.ts +54 -0
  45. package/src/migration-graph.ts +118 -19
  46. package/src/op-schema.ts +11 -0
  47. package/dist/errors-BS_Kq8GF.mjs.map +0 -1
  48. package/dist/package-DFjGigEm.d.mts.map +0 -1
@@ -8,9 +8,11 @@ import type {
8
8
  } from '@prisma-next/framework-components/control';
9
9
  import { ifDefined } from '@prisma-next/utils/defined';
10
10
  import { type } from 'arktype';
11
+ import { errorInvalidOperationEntry, errorStaleContractBookends } from './errors';
11
12
  import { computeMigrationHash } from './hash';
12
13
  import { deriveProvidedInvariants } from './invariants';
13
14
  import type { MigrationHints, MigrationMetadata } from './metadata';
15
+ import { MigrationOpSchema } from './op-schema';
14
16
  import type { MigrationOps } from './package';
15
17
 
16
18
  export interface MigrationMeta {
@@ -162,6 +164,8 @@ function buildAttestedMetadata(
162
164
  ops: MigrationOps,
163
165
  existing: Partial<MigrationMetadata> | null,
164
166
  ): MigrationMetadata {
167
+ assertBookendsMatchMeta(meta, existing);
168
+
165
169
  const baseMetadata: Omit<MigrationMetadata, 'migrationHash'> = {
166
170
  from: meta.from,
167
171
  to: meta.to,
@@ -184,6 +188,49 @@ function buildAttestedMetadata(
184
188
  return { ...baseMetadata, migrationHash };
185
189
  }
186
190
 
191
+ /**
192
+ * Verify each preserved contract bookend in `existing` agrees with the
193
+ * corresponding side of `describe()`'s output. A mismatch indicates the
194
+ * migration's `describe()` was edited after `migration plan` scaffolded
195
+ * the package, leaving a self-inconsistent manifest. Failing fast at
196
+ * write-time turns a silent foot-gun into an actionable diagnostic.
197
+ *
198
+ * Skipped when a side's `existing.<side>Contract` is null/absent (the
199
+ * synthesis path stays open for origin-less initial migrations and for
200
+ * bare `migration.ts` runs from scratch). When a bookend is *present*
201
+ * but its `storage.storageHash` is missing, that's treated as a
202
+ * mismatch — a malformed bookend is not equivalent to "no bookend".
203
+ *
204
+ * This check is paired with TML-2274, which removes `fromContract` /
205
+ * `toContract` from the manifest entirely; once that lands, this
206
+ * function and its error code are deleted.
207
+ */
208
+ function assertBookendsMatchMeta(
209
+ meta: MigrationMeta,
210
+ existing: Partial<MigrationMetadata> | null,
211
+ ): void {
212
+ if (existing?.fromContract != null) {
213
+ const contractHash = existing.fromContract.storage?.storageHash ?? '';
214
+ if (contractHash !== meta.from) {
215
+ throw errorStaleContractBookends({
216
+ side: 'from',
217
+ metaHash: meta.from,
218
+ contractHash,
219
+ });
220
+ }
221
+ }
222
+ if (existing?.toContract != null) {
223
+ const contractHash = existing.toContract.storage?.storageHash ?? '';
224
+ if (contractHash !== meta.to) {
225
+ throw errorStaleContractBookends({
226
+ side: 'to',
227
+ metaHash: meta.to,
228
+ contractHash,
229
+ });
230
+ }
231
+ }
232
+ }
233
+
187
234
  /**
188
235
  * Project `existing.hints` down to the known `MigrationHints` shape, dropping
189
236
  * any legacy keys that may linger in metadata scaffolded by older CLI
@@ -217,6 +264,13 @@ export function buildMigrationArtifacts(
217
264
  throw new Error('operations must be an array');
218
265
  }
219
266
 
267
+ for (let index = 0; index < ops.length; index++) {
268
+ const result = MigrationOpSchema(ops[index]);
269
+ if (result instanceof type.errors) {
270
+ throw errorInvalidOperationEntry(index, result.summary);
271
+ }
272
+ }
273
+
220
274
  const rawMeta: unknown = instance.describe();
221
275
  const parsed = MigrationMetaSchema(rawMeta);
222
276
  if (parsed instanceof type.errors) {
@@ -11,12 +11,21 @@ import type { MigrationEdge, MigrationGraph } from './graph';
11
11
  import { bfs } from './graph-ops';
12
12
  import type { MigrationPackage } from './package';
13
13
 
14
- /** Forward-edge neighbours for BFS: edge `e` from `n` visits `e.to` next. */
14
+ /** Forward-edge neighbours: edge `e` from `n` visits `e.to` next. */
15
15
  function forwardNeighbours(graph: MigrationGraph, node: string) {
16
16
  return (graph.forwardChain.get(node) ?? []).map((edge) => ({ next: edge.to, edge }));
17
17
  }
18
18
 
19
- /** Reverse-edge neighbours for BFS: edge `e` from `n` visits `e.from` next. */
19
+ /**
20
+ * Forward-edge neighbours, sorted by the deterministic tie-break.
21
+ * Used by path-finding so the resulting shortest path is stable across runs.
22
+ */
23
+ function sortedForwardNeighbours(graph: MigrationGraph, node: string) {
24
+ const edges = graph.forwardChain.get(node) ?? [];
25
+ return [...edges].sort(compareTieBreak).map((edge) => ({ next: edge.to, edge }));
26
+ }
27
+
28
+ /** Reverse-edge neighbours: edge `e` from `n` visits `e.from` next. */
20
29
  function reverseNeighbours(graph: MigrationGraph, node: string) {
21
30
  return (graph.reverseChain.get(node) ?? []).map((edge) => ({ next: edge.from, edge }));
22
31
  }
@@ -67,8 +76,8 @@ export function reconstructGraph(packages: readonly MigrationPackage[]): Migrati
67
76
 
68
77
  // ---------------------------------------------------------------------------
69
78
  // Deterministic tie-breaking for BFS neighbour order.
70
- // Used by `findPath` and `findPathWithDecision` only; not a general-purpose
71
- // utility. Ordering: label priority → createdAt → to → migrationHash.
79
+ // Used by path-finders only; not a general-purpose utility.
80
+ // Ordering: label priority → createdAt → to → migrationHash.
72
81
  // ---------------------------------------------------------------------------
73
82
 
74
83
  const LABEL_PRIORITY: Record<string, number> = { main: 0, default: 1, feature: 2 };
@@ -96,13 +105,6 @@ function sortedNeighbors(edges: readonly MigrationEdge[]): readonly MigrationEdg
96
105
  return [...edges].sort(compareTieBreak);
97
106
  }
98
107
 
99
- /** Ordering adapter for `bfs` — sorts `{next, edge}` pairs by tie-break. */
100
- function bfsOrdering(
101
- items: readonly { next: string; edge: MigrationEdge }[],
102
- ): readonly { next: string; edge: MigrationEdge }[] {
103
- return items.slice().sort((a, b) => compareTieBreak(a.edge, b.edge));
104
- }
105
-
106
108
  /**
107
109
  * Find the shortest path from `fromHash` to `toHash` using BFS over the
108
110
  * contract-hash graph. Returns the ordered list of edges, or null if no path
@@ -119,11 +121,11 @@ export function findPath(
119
121
  if (fromHash === toHash) return [];
120
122
 
121
123
  const parents = new Map<string, { parent: string; edge: MigrationEdge }>();
122
- for (const step of bfs([fromHash], (n) => forwardNeighbours(graph, n), bfsOrdering)) {
124
+ for (const step of bfs([fromHash], (n) => sortedForwardNeighbours(graph, n))) {
123
125
  if (step.parent !== null && step.incomingEdge !== null) {
124
- parents.set(step.node, { parent: step.parent, edge: step.incomingEdge });
126
+ parents.set(step.state, { parent: step.parent, edge: step.incomingEdge });
125
127
  }
126
- if (step.node === toHash) {
128
+ if (step.state === toHash) {
127
129
  const path: MigrationEdge[] = [];
128
130
  let cur = toHash;
129
131
  let p = parents.get(cur);
@@ -140,6 +142,103 @@ export function findPath(
140
142
  return null;
141
143
  }
142
144
 
145
+ /**
146
+ * Find the shortest path from `fromHash` to `toHash` whose edges collectively
147
+ * cover every invariant in `required`. Returns `null` when no such path exists
148
+ * (either `fromHash`→`toHash` is structurally unreachable, or every reachable
149
+ * path leaves at least one required invariant uncovered). When `required` is
150
+ * empty, delegates to `findPath` so the result is byte-identical for that case.
151
+ *
152
+ * Algorithm: BFS over `(node, coveredSubset)` states with state-level dedup.
153
+ * The covered subset is a `Set<string>` of invariant ids; the state's dedup
154
+ * key is `${node}\0${[...covered].sort().join('\0')}`. State keys distinguish
155
+ * distinct `(node, covered)` tuples regardless of node-name length because
156
+ * `\0` cannot appear in any invariant id (validation rejects whitespace and
157
+ * control chars at authoring time).
158
+ *
159
+ * Neighbour ordering when `required ≠ ∅`: edges covering ≥1 still-needed
160
+ * invariant come first, with `labelPriority → createdAt → to → migrationHash`
161
+ * as the secondary key. The heuristic steers BFS toward the satisfying path;
162
+ * correctness (shortest, deterministic) does not depend on it.
163
+ */
164
+ export function findPathWithInvariants(
165
+ graph: MigrationGraph,
166
+ fromHash: string,
167
+ toHash: string,
168
+ required: ReadonlySet<string>,
169
+ ): readonly MigrationEdge[] | null {
170
+ if (required.size === 0) {
171
+ return findPath(graph, fromHash, toHash);
172
+ }
173
+ if (fromHash === toHash) {
174
+ // Empty path covers no invariants; required is non-empty ⇒ unsatisfiable.
175
+ return null;
176
+ }
177
+
178
+ interface InvState {
179
+ readonly node: string;
180
+ readonly covered: ReadonlySet<string>;
181
+ }
182
+ const stateKey = (s: InvState): string => {
183
+ if (s.covered.size === 0) return `${s.node}\0`;
184
+ return `${s.node}\0${[...s.covered].sort().join('\0')}`;
185
+ };
186
+
187
+ const neighbours = (s: InvState): Iterable<{ next: InvState; edge: MigrationEdge }> => {
188
+ const outgoing = graph.forwardChain.get(s.node) ?? [];
189
+ if (outgoing.length === 0) return [];
190
+ return [...outgoing]
191
+ .map((edge) => {
192
+ let useful = false;
193
+ let next: Set<string> | null = null;
194
+ for (const inv of edge.invariants) {
195
+ if (required.has(inv) && !s.covered.has(inv)) {
196
+ if (next === null) next = new Set(s.covered);
197
+ next.add(inv);
198
+ useful = true;
199
+ }
200
+ }
201
+ return { edge, useful, nextCovered: next ?? s.covered };
202
+ })
203
+ .sort((a, b) => {
204
+ if (a.useful !== b.useful) return a.useful ? -1 : 1;
205
+ return compareTieBreak(a.edge, b.edge);
206
+ })
207
+ .map(({ edge, nextCovered }) => ({
208
+ next: { node: edge.to, covered: nextCovered },
209
+ edge,
210
+ }));
211
+ };
212
+
213
+ // Path reconstruction is consumer-side, keyed on stateKey, same shape as
214
+ // findPath's parents map.
215
+ const parents = new Map<string, { parentKey: string; edge: MigrationEdge }>();
216
+ for (const step of bfs<InvState, MigrationEdge>(
217
+ [{ node: fromHash, covered: new Set() }],
218
+ neighbours,
219
+ stateKey,
220
+ )) {
221
+ const curKey = stateKey(step.state);
222
+ if (step.parent !== null && step.incomingEdge !== null) {
223
+ parents.set(curKey, { parentKey: stateKey(step.parent), edge: step.incomingEdge });
224
+ }
225
+ if (step.state.node === toHash && step.state.covered.size === required.size) {
226
+ const path: MigrationEdge[] = [];
227
+ let cur: string | undefined = curKey;
228
+ while (cur !== undefined) {
229
+ const p = parents.get(cur);
230
+ if (!p) break;
231
+ path.push(p.edge);
232
+ cur = p.parentKey;
233
+ }
234
+ path.reverse();
235
+ return path;
236
+ }
237
+ }
238
+
239
+ return null;
240
+ }
241
+
143
242
  /**
144
243
  * Reverse-BFS from `toHash` over `reverseChain` to collect every node from
145
244
  * which `toHash` is reachable (inclusive of `toHash` itself).
@@ -147,7 +246,7 @@ export function findPath(
147
246
  function collectNodesReachingTarget(graph: MigrationGraph, toHash: string): Set<string> {
148
247
  const reached = new Set<string>();
149
248
  for (const step of bfs([toHash], (n) => reverseNeighbours(graph, n))) {
150
- reached.add(step.node);
249
+ reached.add(step.state);
151
250
  }
152
251
  return reached;
153
252
  }
@@ -233,7 +332,7 @@ function findDivergencePoint(
233
332
  const ancestorSets = leaves.map((leaf) => {
234
333
  const ancestors = new Set<string>();
235
334
  for (const step of bfs([leaf], (n) => reverseNeighbours(graph, n))) {
236
- ancestors.add(step.node);
335
+ ancestors.add(step.state);
237
336
  }
238
337
  return ancestors;
239
338
  });
@@ -262,8 +361,8 @@ function findDivergencePoint(
262
361
  export function findReachableLeaves(graph: MigrationGraph, fromHash: string): readonly string[] {
263
362
  const leaves: string[] = [];
264
363
  for (const step of bfs([fromHash], (n) => forwardNeighbours(graph, n))) {
265
- if (!graph.forwardChain.get(step.node)?.length) {
266
- leaves.push(step.node);
364
+ if (!graph.forwardChain.get(step.state)?.length) {
365
+ leaves.push(step.state);
267
366
  }
268
367
  }
269
368
  return leaves;
@@ -410,7 +509,7 @@ export function detectOrphans(graph: MigrationGraph): readonly MigrationEdge[] {
410
509
  }
411
510
 
412
511
  for (const step of bfs(startNodes, (n) => forwardNeighbours(graph, n))) {
413
- reachable.add(step.node);
512
+ reachable.add(step.state);
414
513
  }
415
514
 
416
515
  const orphans: MigrationEdge[] = [];
@@ -0,0 +1,11 @@
1
+ import { type } from 'arktype';
2
+
3
+ export const MigrationOpSchema = type({
4
+ id: 'string',
5
+ label: 'string',
6
+ operationClass: "'additive' | 'widening' | 'destructive' | 'data'",
7
+ 'invariantId?': 'string',
8
+ });
9
+
10
+ // Intentionally shallow: operation-specific payload validation is owned by planner/runner layers.
11
+ export const MigrationOpsSchema = MigrationOpSchema.array();
@@ -1 +0,0 @@
1
- {"version":3,"file":"errors-BS_Kq8GF.mjs","names":[],"sources":["../src/errors.ts"],"sourcesContent":["import { basename, dirname, relative } from 'pathe';\n\n/**\n * Build the canonical \"re-emit this package\" remediation hint.\n *\n * Every on-disk migration package ships its own `migration.ts` author-time\n * file. Running it regenerates `migration.json` and `ops.json` with the\n * correct hash + metadata, so it is the right primitive whenever a single\n * package's on-disk artifacts are missing, malformed, or otherwise corrupt.\n * Pointing users at `migration plan` would emit a *new* package rather than\n * heal the broken one.\n */\nfunction reemitHint(dir: string, fallback?: string): string {\n const relativeDir = relative(process.cwd(), dir);\n const reemit = `Re-emit the package by running \\`node \"${relativeDir}/migration.ts\"\\``;\n return fallback ? `${reemit}, ${fallback}` : `${reemit}.`;\n}\n\n/**\n * Structured error for migration tooling operations.\n *\n * Follows the NAMESPACE.SUBCODE convention from ADR 027. All codes live under\n * the MIGRATION namespace. 
These are tooling-time errors (file I/O, hash\n * verification, migration history reconstruction), distinct from the runtime\n * MIGRATION.* codes for apply-time failures (PRECHECK_FAILED, POSTCHECK_FAILED,\n * etc.).\n *\n * Fields:\n * - code: Stable machine-readable code (MIGRATION.SUBCODE)\n * - category: Always 'MIGRATION'\n * - why: Explains the cause in plain language\n * - fix: Actionable remediation step\n * - details: Machine-readable structured data for agents\n */\nexport class MigrationToolsError extends Error {\n readonly code: string;\n readonly category = 'MIGRATION' as const;\n readonly why: string;\n readonly fix: string;\n readonly details: Record<string, unknown> | undefined;\n\n constructor(\n code: string,\n summary: string,\n options: {\n readonly why: string;\n readonly fix: string;\n readonly details?: Record<string, unknown>;\n },\n ) {\n super(summary);\n this.name = 'MigrationToolsError';\n this.code = code;\n this.why = options.why;\n this.fix = options.fix;\n this.details = options.details;\n }\n\n static is(error: unknown): error is MigrationToolsError {\n if (!(error instanceof Error)) return false;\n const candidate = error as MigrationToolsError;\n return candidate.name === 'MigrationToolsError' && typeof candidate.code === 'string';\n }\n}\n\nexport function errorDirectoryExists(dir: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.DIR_EXISTS', 'Migration directory already exists', {\n why: `The directory \"${dir}\" already exists. 
Each migration must have a unique directory.`,\n fix: 'Use --name to pick a different name, or delete the existing directory and re-run.',\n details: { dir },\n });\n}\n\nexport function errorMissingFile(file: string, dir: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.FILE_MISSING', `Missing ${file}`, {\n why: `Expected \"${file}\" in \"${dir}\" but the file does not exist.`,\n fix: reemitHint(\n dir,\n 'or delete the directory if the migration is unwanted and the source TypeScript is gone.',\n ),\n details: { file, dir },\n });\n}\n\nexport function errorInvalidJson(filePath: string, parseError: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_JSON', 'Invalid JSON in migration file', {\n why: `Failed to parse \"${filePath}\": ${parseError}`,\n fix: reemitHint(dirname(filePath), 'or restore the directory from version control.'),\n details: { filePath, parseError },\n });\n}\n\nexport function errorInvalidManifest(filePath: string, reason: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_MANIFEST', 'Invalid migration manifest', {\n why: `Migration manifest at \"${filePath}\" is invalid: ${reason}`,\n fix: reemitHint(dirname(filePath), 'or restore the directory from version control.'),\n details: { filePath, reason },\n });\n}\n\nexport function errorInvalidSlug(slug: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_NAME', 'Invalid migration name', {\n why: `The slug \"${slug}\" contains no valid characters after sanitization (only a-z, 0-9 are kept).`,\n fix: 'Provide a name with at least one alphanumeric character, e.g. 
--name add_users.',\n details: { slug },\n });\n}\n\nexport function errorInvalidDestName(destName: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_DEST_NAME', 'Invalid copy destination name', {\n why: `The destination name \"${destName}\" must be a single path segment (no \"..\" or directory separators).`,\n fix: 'Use a simple file name such as \"contract.json\" for each destination in the copy list.',\n details: { destName },\n });\n}\n\nexport function errorSameSourceAndTarget(dir: string, hash: string): MigrationToolsError {\n const dirName = basename(dir);\n return new MigrationToolsError(\n 'MIGRATION.SAME_SOURCE_AND_TARGET',\n 'Migration has same source and target',\n {\n why: `Migration \"${dirName}\" has from === to === \"${hash}\". A migration must transition between two different contract states.`,\n fix: reemitHint(\n dir,\n 'or delete the directory if the migration is unwanted and the source TypeScript is gone.',\n ),\n details: { dirName, hash },\n },\n );\n}\n\nexport function errorAmbiguousTarget(\n branchTips: readonly string[],\n context?: {\n divergencePoint: string;\n branches: readonly {\n tip: string;\n edges: readonly { dirName: string; from: string; to: string }[];\n }[];\n },\n): MigrationToolsError {\n const divergenceInfo = context\n ? `\\nDivergence point: ${context.divergencePoint}\\nBranches:\\n${context.branches.map((b) => ` → ${b.tip} (${b.edges.length} edge(s): ${b.edges.map((e) => e.dirName).join(' → ') || 'direct'})`).join('\\n')}`\n : '';\n return new MigrationToolsError('MIGRATION.AMBIGUOUS_TARGET', 'Ambiguous migration target', {\n why: `The migration history has diverged into multiple branches: ${branchTips.join(', ')}. 
This typically happens when two developers plan migrations from the same starting point.${divergenceInfo}`,\n fix: 'Use `migration ref set <name> <hash>` to target a specific branch, delete one of the conflicting migration directories and re-run `migration plan`, or use --from <hash> to explicitly select a starting point.',\n details: {\n branchTips,\n ...(context ? { divergencePoint: context.divergencePoint, branches: context.branches } : {}),\n },\n });\n}\n\nexport function errorNoInitialMigration(nodes: readonly string[]): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.NO_INITIAL_MIGRATION', 'No initial migration found', {\n why: `No migration starts from the empty contract state (known hashes: ${nodes.join(', ')}). At least one migration must originate from the empty state.`,\n fix: 'Inspect the migrations directory for corrupted migration.json files. At least one migration must start from the empty contract hash.',\n details: { nodes },\n });\n}\n\nexport function errorInvalidRefs(refsPath: string, reason: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_REFS', 'Invalid refs.json', {\n why: `refs.json at \"${refsPath}\" is invalid: ${reason}`,\n fix: 'Ensure refs.json is a flat object mapping valid ref names to contract hash strings.',\n details: { path: refsPath, reason },\n });\n}\n\nexport function errorInvalidRefFile(filePath: string, reason: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_REF_FILE', 'Invalid ref file', {\n why: `Ref file at \"${filePath}\" is invalid: ${reason}`,\n fix: 'Ensure the ref file contains valid JSON with { \"hash\": \"sha256:<64 hex chars>\", \"invariants\": [\"...\"] }.',\n details: { path: filePath, reason },\n });\n}\n\nexport function errorInvalidRefName(refName: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_REF_NAME', 'Invalid ref name', {\n why: `Ref name \"${refName}\" is invalid. 
Names must be lowercase alphanumeric with hyphens or forward slashes (no \".\" or \"..\" segments).`,\n fix: `Use a valid ref name (e.g., \"staging\", \"envs/production\").`,\n details: { refName },\n });\n}\n\nexport function errorNoTarget(reachableHashes: readonly string[]): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.NO_TARGET', 'No migration target could be resolved', {\n why: `The migration history contains cycles and no target can be resolved automatically (reachable hashes: ${reachableHashes.join(', ')}). This typically happens after rollback migrations (e.g., C1→C2→C1).`,\n fix: 'Use --from <hash> to specify the planning origin explicitly.',\n details: { reachableHashes },\n });\n}\n\nexport function errorInvalidRefValue(value: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_REF_VALUE', 'Invalid ref value', {\n why: `Ref value \"${value}\" is not a valid contract hash. Values must be in the format \"sha256:<64 hex chars>\" or \"sha256:empty\".`,\n fix: 'Use a valid storage hash from `prisma-next contract emit` output or an existing migration.',\n details: { value },\n });\n}\n\nexport function errorDuplicateMigrationHash(migrationHash: string): MigrationToolsError {\n return new MigrationToolsError(\n 'MIGRATION.DUPLICATE_MIGRATION_HASH',\n 'Duplicate migrationHash in migration graph',\n {\n why: `Multiple migrations share migrationHash \"${migrationHash}\". Each migration must have a unique content-addressed identity.`,\n fix: 'Regenerate one of the conflicting migrations so each migrationHash is unique, then re-run migration commands.',\n details: { migrationHash },\n },\n );\n}\n\nexport function errorInvalidInvariantId(invariantId: string): MigrationToolsError {\n return new MigrationToolsError('MIGRATION.INVALID_INVARIANT_ID', 'Invalid invariantId', {\n why: `invariantId ${JSON.stringify(invariantId)} is invalid. 
Ids must be non-empty and contain no whitespace or control characters (including Unicode whitespace like NBSP); other content (kebab-case, camelCase, namespaced, Unicode letters) is allowed.`,\n fix: 'Pick an invariantId without spaces, tabs, newlines, or control characters — e.g. \"backfill-user-phone\", \"users/backfill-phone\", or \"BackfillUserPhone\".',\n details: { invariantId },\n });\n}\n\nexport function errorDuplicateInvariantInEdge(invariantId: string): MigrationToolsError {\n return new MigrationToolsError(\n 'MIGRATION.DUPLICATE_INVARIANT_IN_EDGE',\n 'Duplicate invariantId on a single migration',\n {\n why: `invariantId \"${invariantId}\" is declared by more than one dataTransform on the same migration. The marker stores invariants as a set and the routing layer treats them as edge-level, so two ops cannot share a routing identity.`,\n fix: 'Rename one of the conflicting dataTransform invariantIds, or drop invariantId on the op that does not need to be routing-visible.',\n details: { invariantId },\n },\n );\n}\n\nexport function errorProvidedInvariantsMismatch(\n filePath: string,\n stored: readonly string[],\n derived: readonly string[],\n): MigrationToolsError {\n const storedSet = new Set(stored);\n const derivedSet = new Set(derived);\n const missing = [...derivedSet].filter((id) => !storedSet.has(id));\n const extra = [...storedSet].filter((id) => !derivedSet.has(id));\n // When sets agree but arrays don't, the only difference is ordering — call\n // it out so the reader doesn't stare at two visually-identical arrays.\n // Canonical providedInvariants is sorted ascending; a manifest with the\n // same ids in a different order is still a mismatch (the hash check would\n // also fail), but the human-readable diagnostic is otherwise unhelpful.\n const orderingOnly = missing.length === 0 && extra.length === 0;\n const why = orderingOnly\n ? 
`migration.json at \"${filePath}\" stores providedInvariants ${JSON.stringify(stored)}, but the canonical value derived from ops.json is ${JSON.stringify(derived)} — same ids, different order. Canonical providedInvariants is sorted ascending.`\n : `migration.json at \"${filePath}\" stores providedInvariants ${JSON.stringify(stored)}, but the value derived from ops.json is ${JSON.stringify(derived)}. The manifest copy was likely hand-edited without re-emitting.`;\n return new MigrationToolsError(\n 'MIGRATION.PROVIDED_INVARIANTS_MISMATCH',\n 'providedInvariants on migration.json disagrees with ops.json',\n {\n why,\n fix: reemitHint(dirname(filePath), 'or restore the directory from version control.'),\n details: { filePath, stored, derived, difference: { missing, extra } },\n },\n );\n}\n\nexport function errorMigrationHashMismatch(\n dir: string,\n storedHash: string,\n computedHash: string,\n): MigrationToolsError {\n // Render a cwd-relative path in the human-readable diagnostic so users\n // running CLI commands from the project root see a familiar short path.\n // Keep the absolute path in `details.dir` for machine consumers.\n const relativeDir = relative(process.cwd(), dir);\n return new MigrationToolsError('MIGRATION.HASH_MISMATCH', 'Migration package is corrupt', {\n why: `Stored migrationHash \"${storedHash}\" does not match the recomputed hash \"${computedHash}\" for \"${relativeDir}\". 
The migration.json or ops.json has been edited or partially written since emit.`,\n fix: reemitHint(dir, 'or restore the directory from version control.'),\n details: { dir, storedHash, computedHash },\n });\n}\n"],"mappings":";;;;;;;;;;;;;AAYA,SAAS,WAAW,KAAa,UAA2B;CAE1D,MAAM,SAAS,0CADK,SAAS,QAAQ,KAAK,EAAE,IAAI,CACqB;AACrE,QAAO,WAAW,GAAG,OAAO,IAAI,aAAa,GAAG,OAAO;;;;;;;;;;;;;;;;;;AAmBzD,IAAa,sBAAb,cAAyC,MAAM;CAC7C,AAAS;CACT,AAAS,WAAW;CACpB,AAAS;CACT,AAAS;CACT,AAAS;CAET,YACE,MACA,SACA,SAKA;AACA,QAAM,QAAQ;AACd,OAAK,OAAO;AACZ,OAAK,OAAO;AACZ,OAAK,MAAM,QAAQ;AACnB,OAAK,MAAM,QAAQ;AACnB,OAAK,UAAU,QAAQ;;CAGzB,OAAO,GAAG,OAA8C;AACtD,MAAI,EAAE,iBAAiB,OAAQ,QAAO;EACtC,MAAM,YAAY;AAClB,SAAO,UAAU,SAAS,yBAAyB,OAAO,UAAU,SAAS;;;AAIjF,SAAgB,qBAAqB,KAAkC;AACrE,QAAO,IAAI,oBAAoB,wBAAwB,sCAAsC;EAC3F,KAAK,kBAAkB,IAAI;EAC3B,KAAK;EACL,SAAS,EAAE,KAAK;EACjB,CAAC;;AAGJ,SAAgB,iBAAiB,MAAc,KAAkC;AAC/E,QAAO,IAAI,oBAAoB,0BAA0B,WAAW,QAAQ;EAC1E,KAAK,aAAa,KAAK,QAAQ,IAAI;EACnC,KAAK,WACH,KACA,0FACD;EACD,SAAS;GAAE;GAAM;GAAK;EACvB,CAAC;;AAGJ,SAAgB,iBAAiB,UAAkB,YAAyC;AAC1F,QAAO,IAAI,oBAAoB,0BAA0B,kCAAkC;EACzF,KAAK,oBAAoB,SAAS,KAAK;EACvC,KAAK,WAAW,QAAQ,SAAS,EAAE,iDAAiD;EACpF,SAAS;GAAE;GAAU;GAAY;EAClC,CAAC;;AAGJ,SAAgB,qBAAqB,UAAkB,QAAqC;AAC1F,QAAO,IAAI,oBAAoB,8BAA8B,8BAA8B;EACzF,KAAK,0BAA0B,SAAS,gBAAgB;EACxD,KAAK,WAAW,QAAQ,SAAS,EAAE,iDAAiD;EACpF,SAAS;GAAE;GAAU;GAAQ;EAC9B,CAAC;;AAGJ,SAAgB,iBAAiB,MAAmC;AAClE,QAAO,IAAI,oBAAoB,0BAA0B,0BAA0B;EACjF,KAAK,aAAa,KAAK;EACvB,KAAK;EACL,SAAS,EAAE,MAAM;EAClB,CAAC;;AAGJ,SAAgB,qBAAqB,UAAuC;AAC1E,QAAO,IAAI,oBAAoB,+BAA+B,iCAAiC;EAC7F,KAAK,yBAAyB,SAAS;EACvC,KAAK;EACL,SAAS,EAAE,UAAU;EACtB,CAAC;;AAGJ,SAAgB,yBAAyB,KAAa,MAAmC;CACvF,MAAM,UAAU,SAAS,IAAI;AAC7B,QAAO,IAAI,oBACT,oCACA,wCACA;EACE,KAAK,cAAc,QAAQ,yBAAyB,KAAK;EACzD,KAAK,WACH,KACA,0FACD;EACD,SAAS;GAAE;GAAS;GAAM;EAC3B,CACF;;AAGH,SAAgB,qBACd,YACA,SAOqB;CACrB,MAAM,iBAAiB,UACnB,uBAAuB,QAAQ,gBAAgB,eAAe,QAAQ,SAAS,KAAK,MAAM,OAAO,EAAE,IAAI,IAAI,EAAE,MAAM,OAAO,YAAY,EAAE,MAAM,KAAK,MAAM,EAAE,QAAQ,CAAC,KAAK,MAAM,IAAI,SAAS,GAAG,CAAC,KAAK,KAAK
,KAC1M;AACJ,QAAO,IAAI,oBAAoB,8BAA8B,8BAA8B;EACzF,KAAK,8DAA8D,WAAW,KAAK,KAAK,CAAC,4FAA4F;EACrL,KAAK;EACL,SAAS;GACP;GACA,GAAI,UAAU;IAAE,iBAAiB,QAAQ;IAAiB,UAAU,QAAQ;IAAU,GAAG,EAAE;GAC5F;EACF,CAAC;;AAGJ,SAAgB,wBAAwB,OAA+C;AACrF,QAAO,IAAI,oBAAoB,kCAAkC,8BAA8B;EAC7F,KAAK,oEAAoE,MAAM,KAAK,KAAK,CAAC;EAC1F,KAAK;EACL,SAAS,EAAE,OAAO;EACnB,CAAC;;AAWJ,SAAgB,oBAAoB,UAAkB,QAAqC;AACzF,QAAO,IAAI,oBAAoB,8BAA8B,oBAAoB;EAC/E,KAAK,gBAAgB,SAAS,gBAAgB;EAC9C,KAAK;EACL,SAAS;GAAE,MAAM;GAAU;GAAQ;EACpC,CAAC;;AAGJ,SAAgB,oBAAoB,SAAsC;AACxE,QAAO,IAAI,oBAAoB,8BAA8B,oBAAoB;EAC/E,KAAK,aAAa,QAAQ;EAC1B,KAAK;EACL,SAAS,EAAE,SAAS;EACrB,CAAC;;AAGJ,SAAgB,cAAc,iBAAyD;AACrF,QAAO,IAAI,oBAAoB,uBAAuB,yCAAyC;EAC7F,KAAK,wGAAwG,gBAAgB,KAAK,KAAK,CAAC;EACxI,KAAK;EACL,SAAS,EAAE,iBAAiB;EAC7B,CAAC;;AAGJ,SAAgB,qBAAqB,OAAoC;AACvE,QAAO,IAAI,oBAAoB,+BAA+B,qBAAqB;EACjF,KAAK,cAAc,MAAM;EACzB,KAAK;EACL,SAAS,EAAE,OAAO;EACnB,CAAC;;AAGJ,SAAgB,4BAA4B,eAA4C;AACtF,QAAO,IAAI,oBACT,sCACA,8CACA;EACE,KAAK,4CAA4C,cAAc;EAC/D,KAAK;EACL,SAAS,EAAE,eAAe;EAC3B,CACF;;AAGH,SAAgB,wBAAwB,aAA0C;AAChF,QAAO,IAAI,oBAAoB,kCAAkC,uBAAuB;EACtF,KAAK,eAAe,KAAK,UAAU,YAAY,CAAC;EAChD,KAAK;EACL,SAAS,EAAE,aAAa;EACzB,CAAC;;AAGJ,SAAgB,8BAA8B,aAA0C;AACtF,QAAO,IAAI,oBACT,yCACA,+CACA;EACE,KAAK,gBAAgB,YAAY;EACjC,KAAK;EACL,SAAS,EAAE,aAAa;EACzB,CACF;;AAGH,SAAgB,gCACd,UACA,QACA,SACqB;CACrB,MAAM,YAAY,IAAI,IAAI,OAAO;CACjC,MAAM,aAAa,IAAI,IAAI,QAAQ;CACnC,MAAM,UAAU,CAAC,GAAG,WAAW,CAAC,QAAQ,OAAO,CAAC,UAAU,IAAI,GAAG,CAAC;CAClE,MAAM,QAAQ,CAAC,GAAG,UAAU,CAAC,QAAQ,OAAO,CAAC,WAAW,IAAI,GAAG,CAAC;AAUhE,QAAO,IAAI,oBACT,0CACA,gEACA;EACE,KARiB,QAAQ,WAAW,KAAK,MAAM,WAAW,IAE1D,sBAAsB,SAAS,8BAA8B,KAAK,UAAU,OAAO,CAAC,qDAAqD,KAAK,UAAU,QAAQ,CAAC,mFACjK,sBAAsB,SAAS,8BAA8B,KAAK,UAAU,OAAO,CAAC,2CAA2C,KAAK,UAAU,QAAQ,CAAC;EAMvJ,KAAK,WAAW,QAAQ,SAAS,EAAE,iDAAiD;EACpF,SAAS;GAAE;GAAU;GAAQ;GAAS,YAAY;IAAE;IAAS;IAAO;GAAE;EACvE,CACF;;AAGH,SAAgB,2BACd,KACA,YACA,cACqB;AAKrB,QAAO,IAAI,oBAAoB,2BAA2B,gCAAgC;EACxF,KAAK,yBAAyB,WAAW,wCAAwC,aAAa,SAF5E,SAAS,QAAQ,KAAK,EAAE,IAAI,CAEqE;EACnH,KAAK,WAAW,KAAK,iDAAiD;EACtE
,SAAS;GAAE;GAAK;GAAY;GAAc;EAC3C,CAAC"}
@@ -1 +0,0 @@
1
- {"version":3,"file":"package-DFjGigEm.d.mts","names":[],"sources":["../src/package.ts"],"sourcesContent":[],"mappings":";;;;KAGY,YAAA,YAAwB;;AAApC;AASA;;;;;UAAiB,gBAAA;;;qBAGI;gBACL"}