@prisma-next/migration-tools 0.5.0-dev.61 → 0.5.0-dev.63
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{constants-BQEHsaEx.mjs → constants-B87kJAGj.mjs} +1 -1
- package/dist/{constants-BQEHsaEx.mjs.map → constants-B87kJAGj.mjs.map} +1 -1
- package/dist/{errors-CfmjBeK0.mjs → errors-DQsXvidG.mjs} +22 -2
- package/dist/errors-DQsXvidG.mjs.map +1 -0
- package/dist/exports/constants.mjs +1 -1
- package/dist/exports/errors.d.mts.map +1 -1
- package/dist/exports/errors.mjs +1 -1
- package/dist/exports/graph.d.mts +1 -1
- package/dist/exports/hash.d.mts +3 -3
- package/dist/exports/hash.d.mts.map +1 -1
- package/dist/exports/hash.mjs +1 -1
- package/dist/exports/invariants.d.mts +1 -1
- package/dist/exports/invariants.mjs +2 -2
- package/dist/exports/io.d.mts +40 -5
- package/dist/exports/io.d.mts.map +1 -1
- package/dist/exports/io.mjs +4 -162
- package/dist/exports/metadata.d.mts +1 -1
- package/dist/exports/migration-graph.d.mts +3 -3
- package/dist/exports/migration-graph.d.mts.map +1 -1
- package/dist/exports/migration-graph.mjs +2 -2
- package/dist/exports/migration-graph.mjs.map +1 -1
- package/dist/exports/migration.d.mts +3 -3
- package/dist/exports/migration.d.mts.map +1 -1
- package/dist/exports/migration.mjs +4 -4
- package/dist/exports/package.d.mts +3 -2
- package/dist/exports/refs.mjs +1 -1
- package/dist/exports/spaces.d.mts +447 -0
- package/dist/exports/spaces.d.mts.map +1 -0
- package/dist/exports/spaces.mjs +433 -0
- package/dist/exports/spaces.mjs.map +1 -0
- package/dist/{graph-BHPv-9Gl.d.mts → graph-Czaj8O2q.d.mts} +1 -1
- package/dist/{graph-BHPv-9Gl.d.mts.map → graph-Czaj8O2q.d.mts.map} +1 -1
- package/dist/{hash-BARZdVgW.mjs → hash-G0bAfIGh.mjs} +2 -2
- package/dist/hash-G0bAfIGh.mjs.map +1 -0
- package/dist/{invariants-30VA65sB.mjs → invariants-4Avb_Yhy.mjs} +2 -2
- package/dist/{invariants-30VA65sB.mjs.map → invariants-4Avb_Yhy.mjs.map} +1 -1
- package/dist/io-CDJaWGbt.mjs +207 -0
- package/dist/io-CDJaWGbt.mjs.map +1 -0
- package/dist/metadata-CSjwljJx.d.mts +2 -0
- package/dist/{op-schema-DZKFua46.mjs → op-schema-BiF1ZYqH.mjs} +1 -1
- package/dist/{op-schema-DZKFua46.mjs.map → op-schema-BiF1ZYqH.mjs.map} +1 -1
- package/dist/package-B3Yl6DTr.d.mts +21 -0
- package/dist/package-B3Yl6DTr.d.mts.map +1 -0
- package/package.json +8 -4
- package/src/concatenate-space-apply-inputs.ts +90 -0
- package/src/detect-space-contract-drift.ts +95 -0
- package/src/emit-pinned-space-artefacts.ts +89 -0
- package/src/errors.ts +35 -0
- package/src/exports/io.ts +1 -0
- package/src/exports/package.ts +2 -1
- package/src/exports/spaces.ts +36 -0
- package/src/hash.ts +2 -2
- package/src/io.ts +71 -16
- package/src/metadata.ts +1 -41
- package/src/migration-graph.ts +2 -2
- package/src/package.ts +14 -11
- package/src/plan-all-spaces.ts +80 -0
- package/src/read-pinned-contract-hash.ts +77 -0
- package/src/space-layout.ts +55 -0
- package/src/verify-contract-spaces.ts +276 -0
- package/dist/errors-CfmjBeK0.mjs.map +0 -1
- package/dist/exports/io.mjs.map +0 -1
- package/dist/hash-BARZdVgW.mjs.map +0 -1
- package/dist/metadata-BP1cmU7Z.d.mts +0 -50
- package/dist/metadata-BP1cmU7Z.d.mts.map +0 -1
- package/dist/package-5HCCg0z-.d.mts +0 -21
- package/dist/package-5HCCg0z-.d.mts.map +0 -1
|
@@ -0,0 +1,433 @@
|
|
|
1
|
+
import { S as errorPinnedArtefactsAppSpace, f as errorInvalidRefFile, g as errorInvalidSpaceId, l as errorInvalidJson, o as errorDuplicateSpaceId } from "../errors-DQsXvidG.mjs";
|
|
2
|
+
import { r as canonicalizeJson } from "../hash-G0bAfIGh.mjs";
|
|
3
|
+
import "../invariants-4Avb_Yhy.mjs";
|
|
4
|
+
import { t as MANIFEST_FILE } from "../io-CDJaWGbt.mjs";
|
|
5
|
+
import { join } from "pathe";
|
|
6
|
+
import { mkdir, readFile, readdir, stat, writeFile } from "node:fs/promises";
|
|
7
|
+
import { APP_SPACE_ID } from "@prisma-next/framework-components/control";
|
|
8
|
+
|
|
9
|
+
//#region src/space-layout.ts
/**
 * Pattern a contract-space identifier must match. The constraint is
 * filesystem-friendly: lowercase letters / digits / hyphen / underscore,
 * starts with a letter, max 64 characters.
 *
 * The id is used verbatim as a directory name under the project's
 * `migrations/` directory, which is why the alphabet is restricted to
 * characters that are safe on every mainstream filesystem.
 *
 * @see specs/framework-mechanism.spec.md § 3.
 */
const SPACE_ID_PATTERN = /^[a-z][a-z0-9_-]{0,63}$/;
|
|
18
|
+
/**
 * Check whether `spaceId` is a well-formed contract-space identifier
 * (see {@link SPACE_ID_PATTERN}). Pure predicate; never throws.
 */
function isValidSpaceId(spaceId) {
  const matchesPattern = SPACE_ID_PATTERN.test(spaceId);
  return matchesPattern;
}
|
|
21
|
+
/**
 * Assert that `spaceId` is a well-formed contract-space identifier.
 * Throws the `MIGRATION.INVALID_SPACE_ID` error (via
 * `errorInvalidSpaceId`) when the id does not match the pattern;
 * returns normally otherwise.
 */
function assertValidSpaceId(spaceId) {
  if (isValidSpaceId(spaceId)) return;
  throw errorInvalidSpaceId(spaceId);
}
|
|
24
|
+
/**
 * Resolve the migrations subdirectory for a given contract space.
 *
 * - **App space** (`spaceId === APP_SPACE_ID`): the project's
 *   `migrations/` directory itself — no subdirectory, preserving
 *   today's layout.
 * - **Extension space**: `<projectMigrationsDir>/<spaceId>/`. The id is
 *   validated via {@link assertValidSpaceId} first because it becomes a
 *   filesystem directory name verbatim.
 *
 * `projectMigrationsDir` may be absolute or relative; the helper makes
 * no assumption beyond what `pathe.join` accepts.
 */
function spaceMigrationDirectory(projectMigrationsDir, spaceId) {
  const isAppSpace = spaceId === APP_SPACE_ID;
  if (isAppSpace) return projectMigrationsDir;
  assertValidSpaceId(spaceId);
  return join(projectMigrationsDir, spaceId);
}
|
|
43
|
+
|
|
44
|
+
//#endregion
|
|
45
|
+
//#region src/concatenate-space-apply-inputs.ts
/**
 * Order a set of per-space apply inputs into the canonical cross-space
 * sequence the runner applies under a single transaction.
 *
 * Cross-space ordering convention (sub-spec § 4):
 *
 * 1. **Extension spaces first**, alphabetically by `spaceId`.
 * 2. **App space last** — at most one `APP_SPACE_ID` entry expected.
 *
 * Extensions install their own structural objects (types, functions,
 * helper tables) before the app's structural ops reference them, so
 * app-space ops may freely depend on any extension-space declaration in
 * the same transaction.
 *
 * Determinism (NFR6): the output order is independent of the input
 * order. Duplicate `spaceId`s throw `MIGRATION.DUPLICATE_SPACE_ID`
 * before any output is produced, mirroring `planAllSpaces`.
 *
 * Synchronous, pure, no I/O — the actual DB application happens at the
 * SQL-family consumption site.
 */
function concatenateSpaceApplyInputs(inputs) {
  const seenIds = new Set();
  const extensionInputs = [];
  let appInput;
  // Single pass: reject duplicates and partition app vs. extensions.
  for (const input of inputs) {
    const { spaceId } = input;
    if (seenIds.has(spaceId)) throw errorDuplicateSpaceId(spaceId);
    seenIds.add(spaceId);
    if (spaceId === APP_SPACE_ID) {
      appInput = input;
    } else {
      extensionInputs.push(input);
    }
  }
  // Plain code-unit comparison keeps the order locale-independent.
  const byId = (a, b) => (a.spaceId < b.spaceId ? -1 : a.spaceId > b.spaceId ? 1 : 0);
  extensionInputs.sort(byId);
  if (appInput !== undefined) extensionInputs.push(appInput);
  return extensionInputs;
}
|
|
93
|
+
|
|
94
|
+
//#endregion
|
|
95
|
+
//#region src/detect-space-contract-drift.ts
/**
 * Pure drift-detection primitive for a single contract space.
 *
 * Compares the descriptor's contract hash against the hash pinned on
 * disk from the last `migrate` run:
 *
 * - `pinnedHash === null` → `"firstEmit"` (brand-new extension; nothing
 *   pinned yet).
 * - hashes equal (byte-for-byte, no normalisation — both sides come
 *   from the same canonical hashing pipeline) → `"noDrift"`.
 * - otherwise → `"drift"`.
 *
 * Synchronous, pure, no I/O. Composes with `readPinnedContractHash`,
 * which supplies the read-side primitive.
 *
 * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).
 * @see specs/framework-mechanism.spec.md AM7.
 */
function detectSpaceContractDrift(spaceId, inputs) {
  const { descriptorHash, pinnedHash } = inputs;
  if (pinnedHash === null) {
    return {
      kind: "firstEmit",
      spaceId,
      descriptorHash,
      pinnedHash: null
    };
  }
  const kind = descriptorHash === pinnedHash ? "noDrift" : "drift";
  return {
    kind,
    spaceId,
    descriptorHash,
    pinnedHash
  };
}
|
|
135
|
+
|
|
136
|
+
//#endregion
|
|
137
|
+
//#region src/emit-pinned-space-artefacts.ts
/**
 * Emit the pinned per-space artefacts (`contract.json`, `contract.d.ts`,
 * `refs/head.json`) under `<projectMigrationsDir>/<spaceId>/`.
 *
 * Always-overwrite: the framework owns these files; running `migrate`
 * twice with the same inputs is observably a no-op (idempotent), but
 * pre-existing contents are never inspected — re-emit always wins.
 *
 * Restrictions specific to pinned artefacts:
 *
 * - Rejects the app space (`spaceId === APP_SPACE_ID`): the app space's
 *   canonical `contract.json` lives at the project root, not under
 *   `migrations/`.
 * - Validates `spaceId` against `[a-z][a-z0-9_-]{0,63}` via
 *   {@link assertValidSpaceId} for filesystem safety.
 *
 * The migrations directory and space subdirectory are created if they
 * do not yet exist (`mkdir { recursive: true }`).
 *
 * @see specs/framework-mechanism.spec.md § 3 — Pinned artefact emission (T1.8).
 */
async function emitPinnedSpaceArtefacts(projectMigrationsDir, spaceId, inputs) {
  if (spaceId === APP_SPACE_ID) throw errorPinnedArtefactsAppSpace();
  assertValidSpaceId(spaceId);
  const dir = join(projectMigrationsDir, spaceId);
  // `refs/` is the deepest path; one recursive mkdir covers the space dir too.
  await mkdir(join(dir, "refs"), { recursive: true });
  // Invariants are sorted so head.json is byte-stable across emits (NFR6).
  const sortedInvariants = [...inputs.headRef.invariants].sort();
  const headJson = canonicalizeJson({
    hash: inputs.headRef.hash,
    invariants: sortedInvariants
  });
  // The three artefacts are independent files with no ordering contract
  // between them; write them concurrently instead of awaiting serially.
  await Promise.all([
    writeFile(join(dir, "contract.json"), `${canonicalizeJson(inputs.contract)}\n`),
    writeFile(join(dir, "contract.d.ts"), inputs.contractDts),
    writeFile(join(dir, "refs", "head.json"), `${headJson}\n`)
  ]);
}
|
|
176
|
+
|
|
177
|
+
//#endregion
|
|
178
|
+
//#region src/plan-all-spaces.ts
/**
 * Iterate the per-space planner across a set of loaded contract spaces
 * and return a deterministic shape regardless of declaration order.
 *
 * - Output is sorted alphabetically by `spaceId` (AM3): two callers
 *   passing the same inputs in different orders observe byte-identical
 *   outputs.
 * - `planSpace` is invoked exactly once per input, in
 *   alphabetical-by-spaceId order; its return value is attached to the
 *   corresponding output entry verbatim as `migrationPackages`.
 * - Duplicate `spaceId`s throw `MIGRATION.DUPLICATE_SPACE_ID` before
 *   any `planSpace` call runs.
 *
 * Generic over the contract / package shapes, so any family (SQL today,
 * others later) can wire in its own types. Synchronous: callers resolve
 * any async I/O before invoking this helper.
 *
 * @see specs/framework-mechanism.spec.md § 3 — Per-space planner (T1.3).
 */
function planAllSpaces(inputs, planSpace) {
  const seenIds = new Set();
  for (const { spaceId } of inputs) {
    if (seenIds.has(spaceId)) throw errorDuplicateSpaceId(spaceId);
    seenIds.add(spaceId);
  }
  // Copy before sorting so the caller's array is never mutated.
  const byId = (a, b) => (a.spaceId < b.spaceId ? -1 : a.spaceId > b.spaceId ? 1 : 0);
  const ordered = [...inputs].sort(byId);
  return ordered.map((input) => ({
    spaceId: input.spaceId,
    migrationPackages: planSpace(input)
  }));
}
|
|
223
|
+
|
|
224
|
+
//#endregion
|
|
225
|
+
//#region src/read-pinned-contract-hash.ts
/**
 * Narrow an unknown thrown value to a Node errno-style `Error` carrying
 * the given `code` (e.g. `"ENOENT"`). Non-`Error` values never match.
 */
function hasErrnoCode$1(error, code) {
  if (!(error instanceof Error)) return false;
  return error.code === code;
}
|
|
229
|
+
/**
 * Read the pinned head hash for an extension space.
 *
 * Returns the `hash` field of
 * `<projectMigrationsDir>/<spaceId>/refs/head.json` — the canonical
 * contract hash written on the last `migrate` for this space — or
 * `null` when the file (or the whole migrations directory) does not
 * exist, which is the "first emit" signal `detectSpaceContractDrift`
 * uses to distinguish a brand-new extension from drift.
 *
 * Validation:
 *
 * - Rejects the app space: pinned head refs are an extension-space
 *   concept; the app's contract-of-record lives at the project root.
 * - Validates the space id against `[a-z][a-z0-9_-]{0,63}`.
 * - Surfaces `MIGRATION.INVALID_JSON` / `MIGRATION.INVALID_REF_FILE` on
 *   a corrupt `refs/head.json`, so "no pinned file" (→ `null`) stays
 *   distinguishable from "pinned file but unreadable" (→ throw).
 *
 * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).
 */
async function readPinnedContractHash(projectMigrationsDir, spaceId) {
  if (spaceId === APP_SPACE_ID) throw errorPinnedArtefactsAppSpace();
  assertValidSpaceId(spaceId);
  const filePath = join(projectMigrationsDir, spaceId, "refs", "head.json");
  let raw;
  try {
    raw = await readFile(filePath, "utf-8");
  } catch (error) {
    // Missing file is the legitimate "never emitted" case; anything
    // else (EACCES, EISDIR, ...) is a real failure.
    if (!hasErrnoCode$1(error, "ENOENT")) throw error;
    return null;
  }
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (cause) {
    const detail = cause instanceof Error ? cause.message : String(cause);
    throw errorInvalidJson(filePath, detail);
  }
  const hasStringHash = typeof parsed === "object" && parsed !== null && typeof parsed.hash === "string";
  if (!hasStringHash) throw errorInvalidRefFile(filePath, "expected an object with a string `hash` field");
  return parsed.hash;
}
|
|
279
|
+
|
|
280
|
+
//#endregion
|
|
281
|
+
//#region src/verify-contract-spaces.ts
/**
 * Narrow an unknown thrown value to a Node errno-style `Error` with the
 * given `code`. Duplicated per source module by the bundler.
 */
function hasErrnoCode(error, code) {
  const looksLikeError = error instanceof Error;
  return looksLikeError && error.code === code;
}
|
|
285
|
+
/**
 * List the per-space pinned subdirectories under
 * `<projectRoot>/migrations/`.
 *
 * Returns space-id directory names (sorted alphabetically): every
 * non-dot-prefixed subdirectory whose root does **not** contain a
 * `migration.json` manifest. The manifest marks a user-authored
 * migration directory (see `readMigrationsDir` in `./io`); such
 * directory names belong to the user and are not part of the contract.
 *
 * Returns `[]` when the migrations directory does not exist
 * (greenfield project). Reads only the user's repo — **no descriptor
 * import**; the verifier feeds the result into
 * {@link verifyContractSpaces}.
 *
 * @see specs/framework-mechanism.spec.md § 4 — Verifier (steps 5–6).
 */
async function listPinnedSpaceDirectories(projectMigrationsDir) {
  let listing;
  try {
    const dirents = await readdir(projectMigrationsDir, { withFileTypes: true });
    listing = dirents.map((d) => ({
      name: d.name,
      isDirectory: d.isDirectory()
    }));
  } catch (error) {
    if (hasErrnoCode(error, "ENOENT")) return [];
    throw error;
  }
  // Candidate set: visible subdirectories, pre-sorted for determinism.
  const candidateNames = listing
    .filter((entry) => entry.isDirectory)
    .map((entry) => entry.name)
    .filter((name) => !name.startsWith("."))
    .sort();
  // Probe each candidate for the manifest concurrently; ENOENT means
  // "no manifest" → pinned space dir, anything else is a real failure.
  const probed = await Promise.all(candidateNames.map(async (name) => {
    try {
      await stat(join(projectMigrationsDir, name, MANIFEST_FILE));
      return { name, isMigrationDir: true };
    } catch (error) {
      if (hasErrnoCode(error, "ENOENT")) return { name, isMigrationDir: false };
      throw error;
    }
  }));
  return probed.filter((candidate) => !candidate.isMigrationDir).map((candidate) => candidate.name);
}
|
|
331
|
+
/**
 * Pure structural verifier for the per-space mechanism. Aggregates the
 * three orphan / missing checks (FR6 cases a–c) plus per-space hash and
 * invariant comparison.
 *
 * Algorithm (sub-spec § 4):
 *
 * - For every extension space declared in `loadedSpaces` (`'app'`
 *   excluded — its pinned `contract.json` lives at the project root):
 *   - If no pinned dir on disk → `declaredButUnmigrated`.
 *   - Else if `markerRowsBySpace` lacks an entry → no violation here;
 *     the live-DB compare in step 8 (out of scope of this helper) is
 *     where the absence shows up.
 *   - Else compare marker hash / invariants vs. pinned hash /
 *     invariants → `hashMismatch` / `invariantsMismatch` on drift.
 * - For every pinned dir on disk that is not in `loadedSpaces` →
 *   `orphanPinnedDir`.
 * - For every marker row whose `space` is not in `loadedSpaces` →
 *   `orphanMarker`. The app-space marker is always loaded (`'app'` is
 *   in `loadedSpaces` by definition).
 *
 * Output is deterministic (NFR6): violations are sorted first by `kind`
 * (`declaredButUnmigrated` → `orphanMarker` → `orphanPinnedDir` →
 * `hashMismatch` → `invariantsMismatch`) then by `spaceId`. Two callers
 * passing equivalent inputs see byte-identical violation lists.
 *
 * Synchronous, pure, no I/O. **Does not import the extension descriptor**
 * (the inputs are pre-resolved by the caller). This is the property
 * AC-15 / AC-26 ("verifier reads only the user repo, not
 * `node_modules`") locks in.
 *
 * @see specs/framework-mechanism.spec.md § 4 — Verifier (T1.5).
 */
function verifyContractSpaces(inputs) {
	const violations = [];
	// Sorted iteration keeps violation discovery order deterministic even
	// before the final sort below.
	for (const spaceId of [...inputs.loadedSpaces].sort()) {
		// App-space artefacts live at the project root; not checked here.
		if (spaceId === APP_SPACE_ID) continue;
		// FR6 case: declared in extensionPacks but never emitted to disk.
		if (!inputs.pinnedDirsOnDisk.includes(spaceId)) {
			violations.push({
				kind: "declaredButUnmigrated",
				spaceId,
				remediation: `Extension '${spaceId}' is declared in extensionPacks but has not been emitted; run \`prisma-next migrate\`.`
			});
			continue;
		}
		const pinned = inputs.pinnedHashesBySpace.get(spaceId);
		const marker = inputs.markerRowsBySpace.get(spaceId);
		// Missing marker (or unreadable pinned entry) is not a violation at
		// this layer — the live-DB compare in step 8 surfaces it.
		if (!pinned || !marker) continue;
		// Hash drift between the pinned contract and the DB marker row.
		if (pinned.hash !== marker.hash) {
			violations.push({
				kind: "hashMismatch",
				spaceId,
				pinnedHash: pinned.hash,
				markerHash: marker.hash,
				remediation: `Marker row for space '${spaceId}' is keyed at ${marker.hash}, but the pinned ${join("migrations", spaceId, "contract.json")} resolves to ${pinned.hash}. Run \`prisma-next db update\` to advance the database, or \`prisma-next migrate\` if the descriptor was bumped without re-emitting.`
			});
			continue;
		}
		// Hashes match: check that every pinned invariant is present in the
		// marker row (extra marker invariants are not flagged here).
		const pinnedInvariants = [...pinned.invariants].sort();
		const markerInvariants = new Set(marker.invariants);
		const missing = pinnedInvariants.filter((id) => !markerInvariants.has(id));
		if (missing.length > 0) violations.push({
			kind: "invariantsMismatch",
			spaceId,
			pinnedInvariants,
			markerInvariants: [...marker.invariants].sort(),
			remediation: `Marker row for space '${spaceId}' is missing invariants [${missing.map((s) => JSON.stringify(s)).join(", ")}]. Run \`prisma-next db update\` to apply the corresponding data-transform migrations.`
		});
	}
	// Pinned directory on disk with no corresponding loaded space.
	for (const dir of [...inputs.pinnedDirsOnDisk].sort()) if (!inputs.loadedSpaces.has(dir)) violations.push({
		kind: "orphanPinnedDir",
		spaceId: dir,
		remediation: `Orphan pinned directory \`${join("migrations", dir)}/\` for an extension not in extensionPacks; remove the directory or re-add the extension.`
	});
	// Marker row in the DB with no corresponding loaded space.
	for (const space of [...inputs.markerRowsBySpace.keys()].sort()) if (!inputs.loadedSpaces.has(space)) violations.push({
		kind: "orphanMarker",
		spaceId: space,
		remediation: `Orphan marker row for space '${space}' (no longer in extensionPacks); remediation: manually delete the row from \`prisma_contract.marker\`.`
	});
	if (violations.length === 0) return { ok: true };
	// Canonical kind precedence for the final deterministic sort (NFR6).
	const kindOrder = {
		declaredButUnmigrated: 0,
		orphanMarker: 1,
		orphanPinnedDir: 2,
		hashMismatch: 3,
		invariantsMismatch: 4
	};
	violations.sort((a, b) => {
		const k = kindOrder[a.kind] - kindOrder[b.kind];
		if (k !== 0) return k;
		if (a.spaceId < b.spaceId) return -1;
		if (a.spaceId > b.spaceId) return 1;
		return 0;
	});
	return {
		ok: false,
		violations
	};
}
|
|
430
|
+
|
|
431
|
+
//#endregion
|
|
432
|
+
export { APP_SPACE_ID, assertValidSpaceId, concatenateSpaceApplyInputs, detectSpaceContractDrift, emitPinnedSpaceArtefacts, isValidSpaceId, listPinnedSpaceDirectories, planAllSpaces, readPinnedContractHash, spaceMigrationDirectory, verifyContractSpaces };
|
|
433
|
+
//# sourceMappingURL=spaces.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"spaces.mjs","names":["extensions: SpaceApplyInput<TOp>[]","appSpace: SpaceApplyInput<TOp> | undefined","hasErrnoCode","raw: string","parsed: unknown","entries: { readonly name: string; readonly isDirectory: boolean }[]","violations: SpaceVerifierViolation[]","kindOrder: Record<SpaceVerifierViolation['kind'], number>"],"sources":["../../src/space-layout.ts","../../src/concatenate-space-apply-inputs.ts","../../src/detect-space-contract-drift.ts","../../src/emit-pinned-space-artefacts.ts","../../src/plan-all-spaces.ts","../../src/read-pinned-contract-hash.ts","../../src/verify-contract-spaces.ts"],"sourcesContent":["import { APP_SPACE_ID } from '@prisma-next/framework-components/control';\nimport { join } from 'pathe';\nimport { errorInvalidSpaceId } from './errors';\n\nexport { APP_SPACE_ID };\n\n/**\n * Branded string carrying a compile-time guarantee that the value has\n * been validated by {@link assertValidSpaceId}. Downstream filesystem\n * helpers (e.g. {@link spaceMigrationDirectory}) accept this type to\n * make \"validated\" tracking visible at the type level rather than\n * relying purely on a runtime check.\n */\nexport type ValidSpaceId = string & { readonly __brand: 'ValidSpaceId' };\n\n/**\n * Pattern a contract-space identifier must match. 
The constraint is\n * filesystem-friendly: lowercase letters / digits / hyphen / underscore,\n * starts with a letter, max 64 characters.\n *\n * @see specs/framework-mechanism.spec.md § 3.\n */\nconst SPACE_ID_PATTERN = /^[a-z][a-z0-9_-]{0,63}$/;\n\nexport function isValidSpaceId(spaceId: string): spaceId is ValidSpaceId {\n return SPACE_ID_PATTERN.test(spaceId);\n}\n\nexport function assertValidSpaceId(spaceId: string): asserts spaceId is ValidSpaceId {\n if (!isValidSpaceId(spaceId)) {\n throw errorInvalidSpaceId(spaceId);\n }\n}\n\n/**\n * Resolve the migrations subdirectory for a given contract space.\n *\n * - **App space** (`spaceId === APP_SPACE_ID`) keeps today's layout: the\n * project's `migrations/` directory is the migrations directory, no\n * subdirectory.\n * - **Extension space** lands under `<projectMigrationsDir>/<spaceId>/`.\n * The space id is validated against {@link SPACE_ID_PATTERN} because\n * it becomes a filesystem directory name verbatim.\n *\n * `projectMigrationsDir` is the project's top-level `migrations/`\n * directory; the helper does not assume anything about its absolute /\n * relative shape and is symmetric with `pathe.join`.\n */\nexport function spaceMigrationDirectory(projectMigrationsDir: string, spaceId: string): string {\n if (spaceId === APP_SPACE_ID) {\n return projectMigrationsDir;\n }\n assertValidSpaceId(spaceId);\n return join(projectMigrationsDir, spaceId);\n}\n","import { errorDuplicateSpaceId } from './errors';\nimport { APP_SPACE_ID } from './space-layout';\n\n/**\n * Per-space input the runner consumes when applying a migration.\n *\n * The shape is target-agnostic: callers (today the SQL family; later\n * any other family) bind `TOp` to their own per-target operation type\n * (e.g. 
`SqlMigrationPlanOperation<TTargetDetails>` for the SQL family)\n * and the helper preserves it through the concatenation.\n *\n * - `migrationDirectory` is the on-disk migration directory for the\n * space — `<projectRoot>/migrations` for `'app'` and\n * `<projectRoot>/migrations/<space-id>` for an extension space.\n * - `currentMarkerHash` and `currentMarkerInvariants` are the values\n * read from the `prisma_contract.marker` row keyed by `space = <space-id>`\n * (T1.1). `null` hash = no marker row yet.\n * - `path` is the per-space operation list resolved from\n * `findPathWithDecision(currentMarker, ref.hash, effectiveRequired)`\n * per ADR 208, materialised against the on-disk migration packages.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Runner.\n */\nexport interface SpaceApplyInput<TOp> {\n readonly spaceId: string;\n readonly migrationDirectory: string;\n readonly currentMarkerHash: string | null;\n readonly currentMarkerInvariants: readonly string[];\n readonly path: readonly TOp[];\n}\n\n/**\n * Order a set of per-space apply inputs into the canonical cross-space\n * sequence the runner applies under a single transaction.\n *\n * Cross-space ordering convention (sub-spec § 4):\n *\n * 1. **Extension spaces first**, alphabetically by `spaceId`.\n * 2. **App space last** — only one `'app'` entry expected, at most.\n *\n * Rationale: extensions install their own structural objects (types,\n * functions, helper tables) before the app's structural ops reference\n * them. Putting app-space last lets app-space ops freely depend on any\n * extension-space declaration in the same transaction.\n *\n * Determinism (NFR6): the output order is independent of the input\n * order, so two callers with the same set of `extensionPacks` produce\n * identical apply sequences.\n *\n * Atomicity: rejects duplicate `spaceId`s with\n * `MIGRATION.DUPLICATE_SPACE_ID` before producing any output. 
This\n * mirrors {@link import('./plan-all-spaces').planAllSpaces} so the\n * planner-side and runner-side helpers reject malformed inputs the same\n * way (callers don't need a separate dedup pass).\n *\n * Synchronous, pure, no I/O: callers resolve marker rows and `path`\n * before invoking this helper. The actual DB application — driving the\n * transaction, committing marker writes, recording the per-space marker\n * rows — happens at the SQL-family consumption site (per the\n * helper-location convention from R3).\n */\nexport function concatenateSpaceApplyInputs<TOp>(\n inputs: readonly SpaceApplyInput<TOp>[],\n): readonly SpaceApplyInput<TOp>[] {\n const seen = new Set<string>();\n for (const input of inputs) {\n if (seen.has(input.spaceId)) {\n throw errorDuplicateSpaceId(input.spaceId);\n }\n seen.add(input.spaceId);\n }\n\n const extensions: SpaceApplyInput<TOp>[] = [];\n let appSpace: SpaceApplyInput<TOp> | undefined;\n for (const input of inputs) {\n if (input.spaceId === APP_SPACE_ID) {\n appSpace = input;\n } else {\n extensions.push(input);\n }\n }\n\n extensions.sort((a, b) => {\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return appSpace ? [...extensions, appSpace] : extensions;\n}\n","/**\n * Inputs for {@link detectSpaceContractDrift}.\n *\n * Both hashes are produced by the caller (the SQL-family wiring at the\n * consumption site) using the canonical contract hashing pipeline.\n * Keeping the helper pure lets `migration-tools` stay framework-neutral\n * — the SQL family already speaks `Contract<SqlStorage>`, the Mongo\n * family speaks its own contract type, and both reduce to a hash string\n * before drift detection runs.\n *\n * `pinnedHash` is `null` when no pinned `contract.json` exists yet for\n * the space (the descriptor declares an extension that has never been\n * emitted into the user's repo). 
That's the \"first emit\" case — no\n * drift to surface; the migrate emit will create the pinned files.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n */\nexport interface DetectSpaceContractDriftInputs {\n readonly descriptorHash: string;\n readonly pinnedHash: string | null;\n}\n\n/**\n * Result discriminant for {@link detectSpaceContractDrift}.\n *\n * - `noDrift`: descriptor hash and pinned hash agree byte-for-byte.\n * The migrate emit can proceed with no warning.\n * - `firstEmit`: no pinned `contract.json` on disk yet. The extension\n * was just added to `extensionPacks`; this run will create the\n * pinned files. No warning either — the user's intent is to install\n * the extension, not to \"drift\" from a state they haven't pinned.\n * - `drift`: descriptor hash differs from pinned hash. The caller\n * surfaces a non-fatal warning naming the extension and the\n * diff direction (descriptor → pinned). The migrate emit proceeds\n * normally so the bump is materialised this run; the warning just\n * confirms the bump is being captured.\n *\n * `spaceId`, `descriptorHash`, and `pinnedHash` are threaded through\n * verbatim so the caller (logger / TerminalUI / strict-mode envelope)\n * has everything it needs to format the warning message without\n * re-reading the descriptor or the pinned file.\n */\nexport type SpaceContractDriftResult = {\n readonly kind: 'noDrift' | 'firstEmit' | 'drift';\n readonly spaceId: string;\n readonly descriptorHash: string;\n readonly pinnedHash: string | null;\n};\n\n/**\n * Pure drift-detection primitive for a single contract space.\n *\n * Runs once per loaded extension space, just before computing the\n * `priorContract` that feeds {@link import('./plan-all-spaces').planAllSpaces}.\n * Hash equality is byte-for-byte (no normalisation) — both sides are\n * already canonical hashes produced by the same pipeline, so any\n * difference is meaningful drift.\n *\n * Synchronous, pure, no I/O. 
The caller (SQL family in M2 R1) reads\n * the pinned `contract.json` and computes its hash, then invokes this\n * helper alongside the descriptor's `headRef.hash`. Composes naturally\n * with {@link import('./read-pinned-contract-hash').readPinnedContractHash}\n * which provides the read-side primitive.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n * @see specs/framework-mechanism.spec.md AM7 — drift warning surfaces\n * the extension name and the diff direction.\n */\nexport function detectSpaceContractDrift(\n spaceId: string,\n inputs: DetectSpaceContractDriftInputs,\n): SpaceContractDriftResult {\n if (inputs.pinnedHash === null) {\n return {\n kind: 'firstEmit',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: null,\n };\n }\n if (inputs.descriptorHash === inputs.pinnedHash) {\n return {\n kind: 'noDrift',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: inputs.pinnedHash,\n };\n }\n return {\n kind: 'drift',\n spaceId,\n descriptorHash: inputs.descriptorHash,\n pinnedHash: inputs.pinnedHash,\n };\n}\n","import { mkdir, writeFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { canonicalizeJson } from './canonicalize-json';\nimport { errorPinnedArtefactsAppSpace } from './errors';\nimport { APP_SPACE_ID, assertValidSpaceId } from './space-layout';\n\n/**\n * Pinned head reference for a contract space — `(hash, invariants)`.\n * Mirrors {@link import('./refs').RefEntry} but is redeclared locally so\n * callers can construct the input without depending on the refs module.\n */\nexport interface PinnedSpaceHeadRef {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\n/**\n * Inputs for {@link emitPinnedSpaceArtefacts}.\n *\n * - `contract` is the canonical contract value the framework just emitted\n * for the space; it is serialised through {@link canonicalizeJson}, so\n * it must be a JSON-compatible value (objects / arrays / primitives).\n * Typed as 
`unknown` rather than the SQL-family `Contract<SqlStorage>`\n * to keep `migration-tools` framework-neutral; SQL-family callers pass\n * their typed value through unchanged.\n *\n * - `contractDts` is the pre-rendered `.d.ts` text. Rendering happens in\n * the SQL family (which owns the codec / typemap input the renderer\n * needs), so this helper accepts the text verbatim and writes it out\n * without further transformation.\n *\n * - `headRef` is the pinned head reference for the space.\n * `invariants` are sorted alphabetically before serialisation so two\n * callers passing the same set in different orders produce\n * byte-identical `refs/head.json`.\n */\nexport interface PinnedSpaceArtefactInputs {\n readonly contract: unknown;\n readonly contractDts: string;\n readonly headRef: PinnedSpaceHeadRef;\n}\n\n/**\n * Emit the pinned per-space artefacts (`contract.json`, `contract.d.ts`,\n * `refs/head.json`) under `<projectMigrationsDir>/<spaceId>/`.\n *\n * Always-overwrite: the framework owns these files; running `migrate`\n * twice with the same inputs is a no-op observably (idempotent), but the\n * helper does not check pre-existing contents — re-emit always wins.\n *\n * Path layout matches the convention in\n * [`spaceMigrationDirectory`](./space-layout.ts), with two restrictions\n * specific to pinned artefacts:\n *\n * - Rejects the app space (`spaceId === APP_SPACE_ID`): the app space's\n * canonical `contract.json` lives at the project root, not under\n * `migrations/`. 
Callers that want to emit it use the app-space\n * contract emit pipeline.\n * - Validates `spaceId` against `[a-z][a-z0-9_-]{0,63}` via\n * {@link assertValidSpaceId} for the same filesystem-safety reasons.\n *\n * The migrations directory and space subdirectory are created if they\n * do not yet exist (`mkdir { recursive: true }`).\n *\n * @see specs/framework-mechanism.spec.md § 3 — Pinned artefact emission (T1.8).\n */\nexport async function emitPinnedSpaceArtefacts(\n projectMigrationsDir: string,\n spaceId: string,\n inputs: PinnedSpaceArtefactInputs,\n): Promise<void> {\n if (spaceId === APP_SPACE_ID) {\n throw errorPinnedArtefactsAppSpace();\n }\n assertValidSpaceId(spaceId);\n\n const dir = join(projectMigrationsDir, spaceId);\n await mkdir(join(dir, 'refs'), { recursive: true });\n\n await writeFile(join(dir, 'contract.json'), `${canonicalizeJson(inputs.contract)}\\n`);\n await writeFile(join(dir, 'contract.d.ts'), inputs.contractDts);\n\n const sortedInvariants = [...inputs.headRef.invariants].sort();\n const headJson = canonicalizeJson({\n hash: inputs.headRef.hash,\n invariants: sortedInvariants,\n });\n await writeFile(join(dir, 'refs', 'head.json'), `${headJson}\\n`);\n}\n","import { errorDuplicateSpaceId } from './errors';\n\n/**\n * Per-space input for {@link planAllSpaces}. 
One entry per loaded\n * contract space (the application's `'app'` plus each extension that\n * exposes a `contractSpace`).\n *\n * - `priorContract` is `null` for a space that has never been emitted\n * (no `migrations/<space-id>/contract.json` on disk yet); otherwise it\n * is the canonical contract value pinned for that space.\n * - `newContract` is the canonical contract value the planner is about\n * to emit for that space — for app-space, the just-emitted root\n * `contract.json`; for an extension space, the descriptor's\n * `contractSpace.contractJson`.\n *\n * @see specs/framework-mechanism.spec.md § 3.\n */\nexport interface SpacePlanInput<TContract> {\n readonly spaceId: string;\n readonly priorContract: TContract | null;\n readonly newContract: TContract;\n}\n\nexport interface SpacePlanOutput<TPackage> {\n readonly spaceId: string;\n readonly migrationPackages: readonly TPackage[];\n}\n\n/**\n * Iterate the per-space planner across a set of loaded contract spaces\n * and return a deterministic shape regardless of declaration order.\n *\n * Behaviour:\n *\n * - The output is sorted alphabetically by `spaceId` (AM3). Two callers\n * passing the same set of inputs in different orders observe\n * byte-identical outputs.\n * - The per-space planner (`planSpace`) is called exactly once per\n * input, in alphabetical-by-spaceId order. Its return value is\n * attached to the corresponding output entry verbatim.\n * - Duplicate `spaceId`s in the input array throw\n * `MIGRATION.DUPLICATE_SPACE_ID` before any `planSpace` call runs,\n * keeping the planner pure when the input is malformed.\n *\n * The signature is generic over `TContract` and `TPackage` because the\n * shape is framework-neutral (SQL family today, Mongo family\n * eventually). 
Callers wire in whatever contract value and migration\n * package shape their family already speaks.\n *\n * Synchronous: the underlying per-space planner (target's\n * `MigrationPlanner.plan(...)`) is synchronous; callers that need to\n * resolve async I/O (e.g. reading pinned `contract.json` from disk)\n * resolve it before calling `planAllSpaces` and pass the materialised\n * inputs through.\n *\n * @see specs/framework-mechanism.spec.md § 3 — Per-space planner (T1.3).\n */\nexport function planAllSpaces<TContract, TPackage>(\n inputs: readonly SpacePlanInput<TContract>[],\n planSpace: (input: SpacePlanInput<TContract>) => readonly TPackage[],\n): readonly SpacePlanOutput<TPackage>[] {\n const seen = new Set<string>();\n for (const input of inputs) {\n if (seen.has(input.spaceId)) {\n throw errorDuplicateSpaceId(input.spaceId);\n }\n seen.add(input.spaceId);\n }\n\n const sorted = [...inputs].sort((a, b) => {\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return sorted.map((input) => ({\n spaceId: input.spaceId,\n migrationPackages: planSpace(input),\n }));\n}\n","import { readFile } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { errorInvalidJson, errorInvalidRefFile, errorPinnedArtefactsAppSpace } from './errors';\nimport { APP_SPACE_ID, assertValidSpaceId } from './space-layout';\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * Read the pinned head hash for an extension space.\n *\n * Returns the `hash` field of `<projectMigrationsDir>/<spaceId>/refs/head.json`\n * — i.e. the canonical contract hash the framework wrote on the last\n * `migrate` for this space. 
Returns `null` when the file does not exist\n * (or the migrations directory is missing entirely), which is the\n * \"first emit\" signal {@link import('./detect-space-contract-drift').detectSpaceContractDrift}\n * uses to distinguish a brand-new extension from drift.\n *\n * Pure I/O (read + parse). The \"comparison hash\" is stored on disk by\n * {@link import('./emit-pinned-space-artefacts').emitPinnedSpaceArtefacts}\n * via the descriptor's `headRef.hash`, so reading it back here matches\n * the descriptor's hashing pipeline by construction — neither side\n * recomputes anything.\n *\n * Validation:\n *\n * - Rejects the app space — pinned head refs are an extension-space\n * concept; the app space's contract-of-record lives at the project\n * root, not under `migrations/`.\n * - Validates the space id against the same `[a-z][a-z0-9_-]{0,63}`\n * pattern as the rest of the per-space helpers.\n * - Surfaces `MIGRATION.INVALID_JSON` / `MIGRATION.INVALID_REF_FILE`\n * on a corrupt `refs/head.json` so callers can distinguish \"no\n * pinned file\" (returns `null`) from \"pinned file but unreadable\"\n * (throws).\n *\n * @see specs/framework-mechanism.spec.md § 3 — Drift detection (T1.9).\n */\nexport async function readPinnedContractHash(\n projectMigrationsDir: string,\n spaceId: string,\n): Promise<string | null> {\n if (spaceId === APP_SPACE_ID) {\n throw errorPinnedArtefactsAppSpace();\n }\n assertValidSpaceId(spaceId);\n\n const filePath = join(projectMigrationsDir, spaceId, 'refs', 'head.json');\n\n let raw: string;\n try {\n raw = await readFile(filePath, 'utf-8');\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return null;\n }\n throw error;\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch (e) {\n throw errorInvalidJson(filePath, e instanceof Error ? 
e.message : String(e));\n }\n\n if (\n typeof parsed !== 'object' ||\n parsed === null ||\n typeof (parsed as { hash?: unknown }).hash !== 'string'\n ) {\n throw errorInvalidRefFile(filePath, 'expected an object with a string `hash` field');\n }\n\n return (parsed as { hash: string }).hash;\n}\n","import { readdir, stat } from 'node:fs/promises';\nimport { join } from 'pathe';\nimport { MANIFEST_FILE } from './io';\nimport { APP_SPACE_ID } from './space-layout';\n\nfunction hasErrnoCode(error: unknown, code: string): boolean {\n return error instanceof Error && (error as { code?: string }).code === code;\n}\n\n/**\n * List the per-space pinned subdirectories under\n * `<projectRoot>/migrations/`. Returns space-id directory names (sorted\n * alphabetically) — i.e. any non-dot-prefixed subdirectory whose root\n * does **not** contain a `migration.json` manifest. The manifest is the\n * structural marker of a user-authored migration directory (see\n * `readMigrationsDir` in `./io`); directory names themselves belong to\n * the user and are not part of the contract.\n *\n * Returns `[]` if the migrations directory does not exist (greenfield\n * project).\n *\n * Reads only the user's repo. 
**No descriptor import.** The caller\n * (verifier) feeds the result into {@link verifyContractSpaces} alongside\n * the loaded-space set and the marker rows.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Verifier (steps 5–6).\n */\nexport async function listPinnedSpaceDirectories(\n projectMigrationsDir: string,\n): Promise<readonly string[]> {\n let entries: { readonly name: string; readonly isDirectory: boolean }[];\n try {\n const dirents = await readdir(projectMigrationsDir, { withFileTypes: true });\n entries = dirents.map((d) => ({ name: d.name, isDirectory: d.isDirectory() }));\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return [];\n }\n throw error;\n }\n\n const namedCandidates = entries\n .filter((e) => e.isDirectory)\n .map((e) => e.name)\n .filter((name) => !name.startsWith('.'))\n .sort();\n\n const manifestChecks = await Promise.all(\n namedCandidates.map(async (name) => {\n try {\n await stat(join(projectMigrationsDir, name, MANIFEST_FILE));\n return { name, isMigrationDir: true };\n } catch (error) {\n if (hasErrnoCode(error, 'ENOENT')) {\n return { name, isMigrationDir: false };\n }\n throw error;\n }\n }),\n );\n\n return manifestChecks.filter((c) => !c.isMigrationDir).map((c) => c.name);\n}\n\n/**\n * Pinned head value (`(hash, invariants)`) for one contract space.\n * The verifier compares this against the marker row for the same space\n * to detect drift between the user-emitted artefacts and the live DB\n * marker.\n */\nexport interface SpacePinnedHashRecord {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\n/**\n * Marker row read from `prisma_contract.marker` (one per `space`).\n * Caller resolves these via the family runtime's marker reader (T1.1)\n * before invoking {@link verifyContractSpaces}.\n */\nexport interface SpaceMarkerRecord {\n readonly hash: string;\n readonly invariants: readonly string[];\n}\n\nexport interface VerifyContractSpacesInputs {\n /**\n * Set of contract spaces the 
project declares: `'app'` plus each\n * extension space in `extensionPacks`. The caller's discovery path\n * never reads the extension descriptor module — it walks the\n * `extensionPacks` configuration in `prisma-next.config.ts` for the\n * space ids.\n */\n readonly loadedSpaces: ReadonlySet<string>;\n\n /**\n * Pinned per-space subdirectories observed under\n * `<projectRoot>/migrations/`. Resolved via\n * {@link listPinnedSpaceDirectories}.\n */\n readonly pinnedDirsOnDisk: readonly string[];\n\n /**\n * Pinned head ref per space, keyed by space id. Caller reads\n * `<projectRoot>/migrations/<space-id>/contract.json` and\n * `refs/head.json` (or, for app-space if its pinned shape ever moves\n * under `migrations/`, the equivalent files) to construct this map.\n * Spaces with no pinned dir on disk simply omit a map entry.\n */\n readonly pinnedHashesBySpace: ReadonlyMap<string, SpacePinnedHashRecord>;\n\n /**\n * Marker rows keyed by `space`. Caller reads them from the\n * `prisma_contract.marker` table.\n */\n readonly markerRowsBySpace: ReadonlyMap<string, SpaceMarkerRecord>;\n}\n\nexport type SpaceVerifierViolation =\n | {\n readonly kind: 'declaredButUnmigrated';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'orphanMarker';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'orphanPinnedDir';\n readonly spaceId: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'hashMismatch';\n readonly spaceId: string;\n readonly pinnedHash: string;\n readonly markerHash: string;\n readonly remediation: string;\n }\n | {\n readonly kind: 'invariantsMismatch';\n readonly spaceId: string;\n readonly pinnedInvariants: readonly string[];\n readonly markerInvariants: readonly string[];\n readonly remediation: string;\n };\n\nexport type VerifyContractSpacesResult =\n | { readonly ok: true }\n | { readonly ok: false; readonly violations: readonly SpaceVerifierViolation[] };\n\n/**\n * 
Pure structural verifier for the per-space mechanism. Aggregates the\n * three orphan / missing checks (FR6 cases a–c) plus per-space hash and\n * invariant comparison.\n *\n * Algorithm (sub-spec § 4):\n *\n * - For every extension space declared in `loadedSpaces` (`'app'`\n * excluded — its pinned `contract.json` lives at the project root):\n * - If no pinned dir on disk → `declaredButUnmigrated`.\n * - Else if `markerRowsBySpace` lacks an entry → no violation here;\n * the live-DB compare in step 8 (out of scope of this helper) is\n * where the absence shows up.\n * - Else compare marker hash / invariants vs. pinned hash /\n * invariants → `hashMismatch` / `invariantsMismatch` on drift.\n * - For every pinned dir on disk that is not in `loadedSpaces` →\n * `orphanPinnedDir`.\n * - For every marker row whose `space` is not in `loadedSpaces` →\n * `orphanMarker`. The app-space marker is always loaded (`'app'` is\n * in `loadedSpaces` by definition).\n *\n * Output is deterministic (NFR6): violations are sorted first by `kind`\n * (`declaredButUnmigrated` → `orphanMarker` → `orphanPinnedDir` →\n * `hashMismatch` → `invariantsMismatch`) then by `spaceId`. Two callers\n * passing equivalent inputs see byte-identical violation lists.\n *\n * Synchronous, pure, no I/O. **Does not import the extension descriptor**\n * (the inputs are pre-resolved by the caller). 
This is the property\n * AC-15 / AC-26 (\"verifier reads only the user repo, not\n * `node_modules`\") locks in.\n *\n * @see specs/framework-mechanism.spec.md § 4 — Verifier (T1.5).\n */\nexport function verifyContractSpaces(\n inputs: VerifyContractSpacesInputs,\n): VerifyContractSpacesResult {\n const violations: SpaceVerifierViolation[] = [];\n\n for (const spaceId of [...inputs.loadedSpaces].sort()) {\n if (spaceId === APP_SPACE_ID) continue;\n\n if (!inputs.pinnedDirsOnDisk.includes(spaceId)) {\n violations.push({\n kind: 'declaredButUnmigrated',\n spaceId,\n remediation: `Extension '${spaceId}' is declared in extensionPacks but has not been emitted; run \\`prisma-next migrate\\`.`,\n });\n continue;\n }\n\n const pinned = inputs.pinnedHashesBySpace.get(spaceId);\n const marker = inputs.markerRowsBySpace.get(spaceId);\n if (!pinned || !marker) {\n continue;\n }\n\n if (pinned.hash !== marker.hash) {\n violations.push({\n kind: 'hashMismatch',\n spaceId,\n pinnedHash: pinned.hash,\n markerHash: marker.hash,\n remediation: `Marker row for space '${spaceId}' is keyed at ${marker.hash}, but the pinned ${join('migrations', spaceId, 'contract.json')} resolves to ${pinned.hash}. Run \\`prisma-next db update\\` to advance the database, or \\`prisma-next migrate\\` if the descriptor was bumped without re-emitting.`,\n });\n continue;\n }\n\n const pinnedInvariants = [...pinned.invariants].sort();\n const markerInvariants = new Set(marker.invariants);\n const missing = pinnedInvariants.filter((id) => !markerInvariants.has(id));\n if (missing.length > 0) {\n violations.push({\n kind: 'invariantsMismatch',\n spaceId,\n pinnedInvariants,\n markerInvariants: [...marker.invariants].sort(),\n remediation: `Marker row for space '${spaceId}' is missing invariants [${missing.map((s) => JSON.stringify(s)).join(', ')}]. 
Run \\`prisma-next db update\\` to apply the corresponding data-transform migrations.`,\n });\n }\n }\n\n for (const dir of [...inputs.pinnedDirsOnDisk].sort()) {\n if (!inputs.loadedSpaces.has(dir)) {\n violations.push({\n kind: 'orphanPinnedDir',\n spaceId: dir,\n remediation: `Orphan pinned directory \\`${join('migrations', dir)}/\\` for an extension not in extensionPacks; remove the directory or re-add the extension.`,\n });\n }\n }\n\n for (const space of [...inputs.markerRowsBySpace.keys()].sort()) {\n if (!inputs.loadedSpaces.has(space)) {\n violations.push({\n kind: 'orphanMarker',\n spaceId: space,\n remediation: `Orphan marker row for space '${space}' (no longer in extensionPacks); remediation: manually delete the row from \\`prisma_contract.marker\\`.`,\n });\n }\n }\n\n if (violations.length === 0) {\n return { ok: true };\n }\n\n const kindOrder: Record<SpaceVerifierViolation['kind'], number> = {\n declaredButUnmigrated: 0,\n orphanMarker: 1,\n orphanPinnedDir: 2,\n hashMismatch: 3,\n invariantsMismatch: 4,\n };\n\n violations.sort((a, b) => {\n const k = kindOrder[a.kind] - kindOrder[b.kind];\n if (k !== 0) return k;\n if (a.spaceId < b.spaceId) return -1;\n if (a.spaceId > b.spaceId) return 1;\n return 0;\n });\n\n return { ok: false, violations 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAsBA,MAAM,mBAAmB;AAEzB,SAAgB,eAAe,SAA0C;AACvE,QAAO,iBAAiB,KAAK,QAAQ;;AAGvC,SAAgB,mBAAmB,SAAkD;AACnF,KAAI,CAAC,eAAe,QAAQ,CAC1B,OAAM,oBAAoB,QAAQ;;;;;;;;;;;;;;;;AAkBtC,SAAgB,wBAAwB,sBAA8B,SAAyB;AAC7F,KAAI,YAAY,aACd,QAAO;AAET,oBAAmB,QAAQ;AAC3B,QAAO,KAAK,sBAAsB,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACQ5C,SAAgB,4BACd,QACiC;CACjC,MAAM,uBAAO,IAAI,KAAa;AAC9B,MAAK,MAAM,SAAS,QAAQ;AAC1B,MAAI,KAAK,IAAI,MAAM,QAAQ,CACzB,OAAM,sBAAsB,MAAM,QAAQ;AAE5C,OAAK,IAAI,MAAM,QAAQ;;CAGzB,MAAMA,aAAqC,EAAE;CAC7C,IAAIC;AACJ,MAAK,MAAM,SAAS,OAClB,KAAI,MAAM,YAAY,aACpB,YAAW;KAEX,YAAW,KAAK,MAAM;AAI1B,YAAW,MAAM,GAAG,MAAM;AACxB,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,SAAO;GACP;AAEF,QAAO,WAAW,CAAC,GAAG,YAAY,SAAS,GAAG;;;;;;;;;;;;;;;;;;;;;;;;ACpBhD,SAAgB,yBACd,SACA,QAC0B;AAC1B,KAAI,OAAO,eAAe,KACxB,QAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY;EACb;AAEH,KAAI,OAAO,mBAAmB,OAAO,WACnC,QAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY,OAAO;EACpB;AAEH,QAAO;EACL,MAAM;EACN;EACA,gBAAgB,OAAO;EACvB,YAAY,OAAO;EACpB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC3BH,eAAsB,yBACpB,sBACA,SACA,QACe;AACf,KAAI,YAAY,aACd,OAAM,8BAA8B;AAEtC,oBAAmB,QAAQ;CAE3B,MAAM,MAAM,KAAK,sBAAsB,QAAQ;AAC/C,OAAM,MAAM,KAAK,KAAK,OAAO,EAAE,EAAE,WAAW,MAAM,CAAC;AAEnD,OAAM,UAAU,KAAK,KAAK,gBAAgB,EAAE,GAAG,iBAAiB,OAAO,SAAS,CAAC,IAAI;AACrF,OAAM,UAAU,KAAK,KAAK,gBAAgB,EAAE,OAAO,YAAY;CAE/D,MAAM,mBAAmB,CAAC,GAAG,OAAO,QAAQ,WAAW,CAAC,MAAM;CAC9D,MAAM,WAAW,iBAAiB;EAChC,MAAM,OAAO,QAAQ;EACrB,YAAY;EACb,CAAC;AACF,OAAM,UAAU,KAAK,KAAK,QAAQ,YAAY,EAAE,GAAG,SAAS,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC9BlE,SAAgB,cACd,QACA,WACsC;CACtC,MAAM,uBAAO,IAAI,KAAa;AAC9B,MAAK,MAAM,SAAS,QAAQ;AAC1B,MAAI,KAAK,IAAI,MAAM,QAAQ,CACzB,OAAM,sBAAsB,MAAM,QAAQ;AAE5C,OAAK,IAAI,MAAM,QAAQ;;AASzB,QANe,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM;AACxC,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,SAAO;GACP,CAEY,KAAK,WAAW;EAC5B,SAAS,MAAM;EACf,mBAAmB,UAAU,MAAM;EACpC,EAAE;;;;;ACzEL,SAASC,eAAa,OAAgB,MAAuB;AAC3D,QAAO,iBAAiB,SAAU,MAA4
B,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCzE,eAAsB,uBACpB,sBACA,SACwB;AACxB,KAAI,YAAY,aACd,OAAM,8BAA8B;AAEtC,oBAAmB,QAAQ;CAE3B,MAAM,WAAW,KAAK,sBAAsB,SAAS,QAAQ,YAAY;CAEzE,IAAIC;AACJ,KAAI;AACF,QAAM,MAAM,SAAS,UAAU,QAAQ;UAChC,OAAO;AACd,MAAID,eAAa,OAAO,SAAS,CAC/B,QAAO;AAET,QAAM;;CAGR,IAAIE;AACJ,KAAI;AACF,WAAS,KAAK,MAAM,IAAI;UACjB,GAAG;AACV,QAAM,iBAAiB,UAAU,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE,CAAC;;AAG9E,KACE,OAAO,WAAW,YAClB,WAAW,QACX,OAAQ,OAA8B,SAAS,SAE/C,OAAM,oBAAoB,UAAU,gDAAgD;AAGtF,QAAQ,OAA4B;;;;;ACtEtC,SAAS,aAAa,OAAgB,MAAuB;AAC3D,QAAO,iBAAiB,SAAU,MAA4B,SAAS;;;;;;;;;;;;;;;;;;;;AAqBzE,eAAsB,2BACpB,sBAC4B;CAC5B,IAAIC;AACJ,KAAI;AAEF,aADgB,MAAM,QAAQ,sBAAsB,EAAE,eAAe,MAAM,CAAC,EAC1D,KAAK,OAAO;GAAE,MAAM,EAAE;GAAM,aAAa,EAAE,aAAa;GAAE,EAAE;UACvE,OAAO;AACd,MAAI,aAAa,OAAO,SAAS,CAC/B,QAAO,EAAE;AAEX,QAAM;;CAGR,MAAM,kBAAkB,QACrB,QAAQ,MAAM,EAAE,YAAY,CAC5B,KAAK,MAAM,EAAE,KAAK,CAClB,QAAQ,SAAS,CAAC,KAAK,WAAW,IAAI,CAAC,CACvC,MAAM;AAgBT,SAduB,MAAM,QAAQ,IACnC,gBAAgB,IAAI,OAAO,SAAS;AAClC,MAAI;AACF,SAAM,KAAK,KAAK,sBAAsB,MAAM,cAAc,CAAC;AAC3D,UAAO;IAAE;IAAM,gBAAgB;IAAM;WAC9B,OAAO;AACd,OAAI,aAAa,OAAO,SAAS,CAC/B,QAAO;IAAE;IAAM,gBAAgB;IAAO;AAExC,SAAM;;GAER,CACH,EAEqB,QAAQ,MAAM,CAAC,EAAE,eAAe,CAAC,KAAK,MAAM,EAAE,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6H3E,SAAgB,qBACd,QAC4B;CAC5B,MAAMC,aAAuC,EAAE;AAE/C,MAAK,MAAM,WAAW,CAAC,GAAG,OAAO,aAAa,CAAC,MAAM,EAAE;AACrD,MAAI,YAAY,aAAc;AAE9B,MAAI,CAAC,OAAO,iBAAiB,SAAS,QAAQ,EAAE;AAC9C,cAAW,KAAK;IACd,MAAM;IACN;IACA,aAAa,cAAc,QAAQ;IACpC,CAAC;AACF;;EAGF,MAAM,SAAS,OAAO,oBAAoB,IAAI,QAAQ;EACtD,MAAM,SAAS,OAAO,kBAAkB,IAAI,QAAQ;AACpD,MAAI,CAAC,UAAU,CAAC,OACd;AAGF,MAAI,OAAO,SAAS,OAAO,MAAM;AAC/B,cAAW,KAAK;IACd,MAAM;IACN;IACA,YAAY,OAAO;IACnB,YAAY,OAAO;IACnB,aAAa,yBAAyB,QAAQ,gBAAgB,OAAO,KAAK,mBAAmB,KAAK,cAAc,SAAS,gBAAgB,CAAC,eAAe,OAAO,KAAK;IACtK,CAAC;AACF;;EAGF,MAAM,mBAAmB,CAAC,GAAG,OAAO,WAAW,CAAC,MAAM;EACtD,MAAM,mBAAmB,IAAI,IAAI,OAAO,WAAW;EACnD,MAAM,UAAU,iBAAiB,QAAQ,OAAO,CAAC,iBAAiB,IAAI,GAAG,CAAC;AAC1E,MAAI,QAAQ,SAAS,EACnB,YAAW,KAAK;GACd,MAAM;GACN;GACA;GACA,kBAAkB,CAAC,GAAG,O
AAO,WAAW,CAAC,MAAM;GAC/C,aAAa,yBAAyB,QAAQ,2BAA2B,QAAQ,KAAK,MAAM,KAAK,UAAU,EAAE,CAAC,CAAC,KAAK,KAAK,CAAC;GAC3H,CAAC;;AAIN,MAAK,MAAM,OAAO,CAAC,GAAG,OAAO,iBAAiB,CAAC,MAAM,CACnD,KAAI,CAAC,OAAO,aAAa,IAAI,IAAI,CAC/B,YAAW,KAAK;EACd,MAAM;EACN,SAAS;EACT,aAAa,6BAA6B,KAAK,cAAc,IAAI,CAAC;EACnE,CAAC;AAIN,MAAK,MAAM,SAAS,CAAC,GAAG,OAAO,kBAAkB,MAAM,CAAC,CAAC,MAAM,CAC7D,KAAI,CAAC,OAAO,aAAa,IAAI,MAAM,CACjC,YAAW,KAAK;EACd,MAAM;EACN,SAAS;EACT,aAAa,gCAAgC,MAAM;EACpD,CAAC;AAIN,KAAI,WAAW,WAAW,EACxB,QAAO,EAAE,IAAI,MAAM;CAGrB,MAAMC,YAA4D;EAChE,uBAAuB;EACvB,cAAc;EACd,iBAAiB;EACjB,cAAc;EACd,oBAAoB;EACrB;AAED,YAAW,MAAM,GAAG,MAAM;EACxB,MAAM,IAAI,UAAU,EAAE,QAAQ,UAAU,EAAE;AAC1C,MAAI,MAAM,EAAG,QAAO;AACpB,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,MAAI,EAAE,UAAU,EAAE,QAAS,QAAO;AAClC,SAAO;GACP;AAEF,QAAO;EAAE,IAAI;EAAO;EAAY"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"graph-
|
|
1
|
+
{"version":3,"file":"graph-Czaj8O2q.d.mts","names":[],"sources":["../src/graph.ts"],"sourcesContent":[],"mappings":";;AAIA;AAeA;;AAEsD,UAjBrC,aAAA,CAiBqC;EAA7B,SAAA,IAAA,EAAA,MAAA;EAC6B,SAAA,EAAA,EAAA,MAAA;EAA7B,SAAA,aAAA,EAAA,MAAA;EACuB,SAAA,OAAA,EAAA,MAAA;EAApB,SAAA,SAAA,EAAA,MAAA;EAAW,SAAA,MAAA,EAAA,SAAA,MAAA,EAAA;;;;;;;;UAJtB,cAAA;kBACC;yBACO,6BAA6B;yBAC7B,6BAA6B;4BAC1B,oBAAoB"}
|
|
@@ -72,5 +72,5 @@ function verifyMigrationHash(pkg) {
|
|
|
72
72
|
}
|
|
73
73
|
|
|
74
74
|
//#endregion
|
|
75
|
-
export { verifyMigrationHash as n, computeMigrationHash as t };
|
|
76
|
-
//# sourceMappingURL=hash-
|
|
75
|
+
export { verifyMigrationHash as n, canonicalizeJson as r, computeMigrationHash as t };
|
|
76
|
+
//# sourceMappingURL=hash-G0bAfIGh.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"hash-G0bAfIGh.mjs","names":["sorted: Record<string, unknown>"],"sources":["../src/canonicalize-json.ts","../src/hash.ts"],"sourcesContent":["function sortKeys(value: unknown): unknown {\n if (value === null || typeof value !== 'object') {\n return value;\n }\n if (Array.isArray(value)) {\n return value.map(sortKeys);\n }\n const sorted: Record<string, unknown> = {};\n for (const key of Object.keys(value).sort()) {\n sorted[key] = sortKeys((value as Record<string, unknown>)[key]);\n }\n return sorted;\n}\n\nexport function canonicalizeJson(value: unknown): string {\n return JSON.stringify(sortKeys(value));\n}\n","import { createHash } from 'node:crypto';\nimport { canonicalizeJson } from './canonicalize-json';\nimport type { MigrationMetadata } from './metadata';\nimport type { MigrationOps, OnDiskMigrationPackage } from './package';\n\nexport interface VerifyResult {\n readonly ok: boolean;\n readonly reason?: 'mismatch';\n readonly storedHash: string;\n readonly computedHash: string;\n}\n\nfunction sha256Hex(input: string): string {\n return createHash('sha256').update(input).digest('hex');\n}\n\n/**\n * Content-addressed migration hash over (metadata envelope sans\n * contracts/hints/signature, ops). See ADR 199 — Storage-only migration\n * identity for the rationale: contracts are anchored separately by the\n * storage-hash bookends inside the envelope; planner hints are advisory\n * and must not affect identity.\n *\n * The integrity check is purely structural, not semantic. The function\n * canonicalizes its inputs via `sortKeys` (recursive) + `JSON.stringify`\n * and hashes the result. Target-specific operation payloads (`step.sql`,\n * Mongo's pipeline AST, …) are hashed verbatim — no per-target\n * normalization is required, because what's being verified is \"do the\n * on-disk bytes still produce their recorded hash\", not \"do two\n * semantically-equivalent migrations hash the same\". 
The latter is an\n * emit-drift concern (ADR 192 step 2).\n *\n * The symmetry across write and read holds because `JSON.parse(\n * JSON.stringify(x))` round-trips JSON-safe values losslessly and\n * `sortKeys` is idempotent and deterministic — write-time and read-time\n * canonicalization produce the same canonical bytes regardless of\n * source-side key ordering or whitespace.\n *\n * The `migrationHash` field on the metadata is stripped before hashing\n * so the function can be used both at write time (when no hash exists\n * yet) and at verify time (rehashing an already-attested record).\n */\nexport function computeMigrationHash(\n metadata: Omit<MigrationMetadata, 'migrationHash'> & { readonly migrationHash?: string },\n ops: MigrationOps,\n): string {\n const {\n migrationHash: _migrationHash,\n signature: _signature,\n fromContract: _fromContract,\n toContract: _toContract,\n hints: _hints,\n ...strippedMeta\n } = metadata;\n\n const canonicalMetadata = canonicalizeJson(strippedMeta);\n const canonicalOps = canonicalizeJson(ops);\n\n const partHashes = [canonicalMetadata, canonicalOps].map(sha256Hex);\n const hash = sha256Hex(canonicalizeJson(partHashes));\n\n return `sha256:${hash}`;\n}\n\n/**\n * Re-hash an in-memory migration package and compare against the stored\n * `migrationHash`. 
See `computeMigrationHash` for the canonicalization rules.\n *\n * Returns `{ ok: true }` when the package is internally consistent, or\n * `{ ok: false, reason: 'mismatch', storedHash, computedHash }` when it is\n * not — typically a sign of FS corruption, partial writes, or a post-emit\n * hand edit.\n */\nexport function verifyMigrationHash(pkg: OnDiskMigrationPackage): VerifyResult {\n const computed = computeMigrationHash(pkg.metadata, pkg.ops);\n\n if (pkg.metadata.migrationHash === computed) {\n return {\n ok: true,\n storedHash: pkg.metadata.migrationHash,\n computedHash: computed,\n };\n }\n\n return {\n ok: false,\n reason: 'mismatch',\n storedHash: pkg.metadata.migrationHash,\n computedHash: computed,\n };\n}\n"],"mappings":";;;AAAA,SAAS,SAAS,OAAyB;AACzC,KAAI,UAAU,QAAQ,OAAO,UAAU,SACrC,QAAO;AAET,KAAI,MAAM,QAAQ,MAAM,CACtB,QAAO,MAAM,IAAI,SAAS;CAE5B,MAAMA,SAAkC,EAAE;AAC1C,MAAK,MAAM,OAAO,OAAO,KAAK,MAAM,CAAC,MAAM,CACzC,QAAO,OAAO,SAAU,MAAkC,KAAK;AAEjE,QAAO;;AAGT,SAAgB,iBAAiB,OAAwB;AACvD,QAAO,KAAK,UAAU,SAAS,MAAM,CAAC;;;;;ACHxC,SAAS,UAAU,OAAuB;AACxC,QAAO,WAAW,SAAS,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6BzD,SAAgB,qBACd,UACA,KACQ;CACR,MAAM,EACJ,eAAe,gBACf,WAAW,YACX,cAAc,eACd,YAAY,aACZ,OAAO,QACP,GAAG,iBACD;AAQJ,QAAO,UAFM,UAAU,iBADJ,CAHO,iBAAiB,aAAa,EACnC,iBAAiB,IAAI,CAEU,CAAC,IAAI,UAAU,CAChB,CAAC;;;;;;;;;;;AActD,SAAgB,oBAAoB,KAA2C;CAC7E,MAAM,WAAW,qBAAqB,IAAI,UAAU,IAAI,IAAI;AAE5D,KAAI,IAAI,SAAS,kBAAkB,SACjC,QAAO;EACL,IAAI;EACJ,YAAY,IAAI,SAAS;EACzB,cAAc;EACf;AAGH,QAAO;EACL,IAAI;EACJ,QAAQ;EACR,YAAY,IAAI,SAAS;EACzB,cAAc;EACf"}
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { c as errorInvalidInvariantId, i as errorDuplicateInvariantInEdge } from "./errors-DQsXvidG.mjs";
|
|
2
2
|
|
|
3
3
|
//#region src/invariants.ts
|
|
4
4
|
/**
|
|
@@ -39,4 +39,4 @@ function readInvariantId(op) {
|
|
|
39
39
|
|
|
40
40
|
//#endregion
|
|
41
41
|
export { validateInvariantId as n, deriveProvidedInvariants as t };
|
|
42
|
-
//# sourceMappingURL=invariants-
|
|
42
|
+
//# sourceMappingURL=invariants-4Avb_Yhy.mjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"invariants-
|
|
1
|
+
{"version":3,"file":"invariants-4Avb_Yhy.mjs","names":[],"sources":["../src/invariants.ts"],"sourcesContent":["import type { MigrationPlanOperation } from '@prisma-next/framework-components/control';\nimport { errorDuplicateInvariantInEdge, errorInvalidInvariantId } from './errors';\nimport type { MigrationOps } from './package';\n\n/**\n * Hygiene check for `invariantId`. Rejects empty values plus any\n * whitespace or control character (including Unicode whitespace like\n * NBSP and em space, which are visually identical to ASCII space and\n * routinely sneak in via paste).\n */\nexport function validateInvariantId(invariantId: string): boolean {\n if (invariantId.length === 0) return false;\n return !/[\\p{Cc}\\p{White_Space}]/u.test(invariantId);\n}\n\n/**\n * Walk a migration's operations and produce its `providedInvariants`\n * aggregate: the sorted, deduplicated list of `invariantId`s declared\n * by data-transform ops. Ops without `operationClass === 'data'` are\n * skipped; data ops without an `invariantId` are skipped.\n *\n * Throws `MIGRATION.INVALID_INVARIANT_ID` on a malformed id and\n * `MIGRATION.DUPLICATE_INVARIANT_IN_EDGE` on duplicates.\n */\nexport function deriveProvidedInvariants(ops: MigrationOps): readonly string[] {\n const seen = new Set<string>();\n for (const op of ops) {\n const invariantId = readInvariantId(op);\n if (invariantId === undefined) continue;\n if (!validateInvariantId(invariantId)) {\n throw errorInvalidInvariantId(invariantId);\n }\n if (seen.has(invariantId)) {\n throw errorDuplicateInvariantInEdge(invariantId);\n }\n seen.add(invariantId);\n }\n return [...seen].sort();\n}\n\nfunction readInvariantId(op: MigrationPlanOperation): string | undefined {\n if (op.operationClass !== 'data') return undefined;\n const candidate = (op as { invariantId?: unknown }).invariantId;\n return typeof candidate === 'string' ? 
candidate : undefined;\n}\n"],"mappings":";;;;;;;;;AAUA,SAAgB,oBAAoB,aAA8B;AAChE,KAAI,YAAY,WAAW,EAAG,QAAO;AACrC,QAAO,CAAC,2BAA2B,KAAK,YAAY;;;;;;;;;;;AAYtD,SAAgB,yBAAyB,KAAsC;CAC7E,MAAM,uBAAO,IAAI,KAAa;AAC9B,MAAK,MAAM,MAAM,KAAK;EACpB,MAAM,cAAc,gBAAgB,GAAG;AACvC,MAAI,gBAAgB,OAAW;AAC/B,MAAI,CAAC,oBAAoB,YAAY,CACnC,OAAM,wBAAwB,YAAY;AAE5C,MAAI,KAAK,IAAI,YAAY,CACvB,OAAM,8BAA8B,YAAY;AAElD,OAAK,IAAI,YAAY;;AAEvB,QAAO,CAAC,GAAG,KAAK,CAAC,MAAM;;AAGzB,SAAS,gBAAgB,IAAgD;AACvE,KAAI,GAAG,mBAAmB,OAAQ,QAAO;CACzC,MAAM,YAAa,GAAiC;AACpD,QAAO,OAAO,cAAc,WAAW,YAAY"}
|