@unrdf/project-engine 5.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +53 -0
- package/package.json +58 -0
- package/src/api-contract-validator.mjs +711 -0
- package/src/auto-test-generator.mjs +444 -0
- package/src/autonomic-mapek.mjs +511 -0
- package/src/capabilities-manifest.mjs +125 -0
- package/src/code-complexity-js.mjs +368 -0
- package/src/dependency-graph.mjs +276 -0
- package/src/doc-drift-checker.mjs +172 -0
- package/src/doc-generator.mjs +229 -0
- package/src/domain-infer.mjs +966 -0
- package/src/drift-snapshot.mjs +775 -0
- package/src/file-roles.mjs +94 -0
- package/src/fs-scan.mjs +305 -0
- package/src/gap-finder.mjs +376 -0
- package/src/golden-structure.mjs +149 -0
- package/src/hotspot-analyzer.mjs +412 -0
- package/src/index.mjs +151 -0
- package/src/initialize.mjs +957 -0
- package/src/lens/project-structure.mjs +74 -0
- package/src/mapek-orchestration.mjs +665 -0
- package/src/materialize-apply.mjs +505 -0
- package/src/materialize-plan.mjs +422 -0
- package/src/materialize.mjs +137 -0
- package/src/policy-derivation.mjs +869 -0
- package/src/project-config.mjs +142 -0
- package/src/project-diff.mjs +28 -0
- package/src/project-engine/build-utils.mjs +237 -0
- package/src/project-engine/code-analyzer.mjs +248 -0
- package/src/project-engine/doc-generator.mjs +407 -0
- package/src/project-engine/infrastructure.mjs +213 -0
- package/src/project-engine/metrics.mjs +146 -0
- package/src/project-model.mjs +111 -0
- package/src/project-report.mjs +348 -0
- package/src/refactoring-guide.mjs +242 -0
- package/src/stack-detect.mjs +102 -0
- package/src/stack-linter.mjs +213 -0
- package/src/template-infer.mjs +674 -0
- package/src/type-auditor.mjs +609 -0
|
@@ -0,0 +1,505 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @file Materialization applier - execute file write plans with transactions
|
|
3
|
+
* @module project-engine/materialize-apply
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { z } from 'zod';
|
|
7
|
+
import { promises as fs } from 'fs';
|
|
8
|
+
import path from 'path';
|
|
9
|
+
import { createHash } from 'crypto';
|
|
10
|
+
import { scanFileSystemToStore } from './fs-scan.mjs';
|
|
11
|
+
import { diffProjectStructure } from './project-diff.mjs';
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* @typedef {import('./materialize-plan.mjs').MaterializationPlan} MaterializationPlan
|
|
15
|
+
* @typedef {import('./materialize-plan.mjs').WriteOperation} WriteOperation
|
|
16
|
+
* @typedef {import('./materialize-plan.mjs').UpdateOperation} UpdateOperation
|
|
17
|
+
* @typedef {import('./materialize-plan.mjs').DeleteOperation} DeleteOperation
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
/**
|
|
21
|
+
* @typedef {Object} ApplyResult
|
|
22
|
+
* @property {number} appliedCount - Number of operations applied
|
|
23
|
+
* @property {number} skippedCount - Number of operations skipped
|
|
24
|
+
* @property {string[]} writtenPaths - Paths that were written
|
|
25
|
+
* @property {string[]} updatedPaths - Paths that were updated
|
|
26
|
+
* @property {string[]} deletedPaths - Paths that were deleted
|
|
27
|
+
* @property {string[]} errors - Any errors encountered
|
|
28
|
+
*/
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* @typedef {Object} ApplyReceipt
|
|
32
|
+
* @property {string} planHash - Hash of the plan that was applied
|
|
33
|
+
* @property {string} beforeHash - Hash of FS state before
|
|
34
|
+
* @property {string} afterHash - Hash of FS state after
|
|
35
|
+
* @property {Object} fsDiff - Diff between before/after FS graphs
|
|
36
|
+
* @property {string} timestamp - ISO timestamp
|
|
37
|
+
* @property {boolean} success - Whether all operations succeeded
|
|
38
|
+
*/
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* @typedef {Object} ApplyOutput
|
|
42
|
+
* @property {ApplyResult} result
|
|
43
|
+
* @property {ApplyReceipt} receipt
|
|
44
|
+
*/
|
|
45
|
+
|
|
46
|
+
// Options controlling how a plan is applied. Zod supplies defaults, so
// callers may pass a partial (or empty) options object.
const ApplyOptionsSchema = z.object({
  dryRun: z.boolean().default(false), // report what would happen; write nothing
  createDirectories: z.boolean().default(true), // create missing parent dirs before writes
  validateHashes: z.boolean().default(true), // refuse stale updates/deletes on hash mismatch
  outputRoot: z.string().default('.'), // base dir every op.path resolves against
  snapshotBefore: z.boolean().default(true), // take RDF snapshot of the FS before applying
  snapshotAfter: z.boolean().default(true), // take RDF snapshot of the FS after applying
});

// A file-creation operation (fails if the target already exists).
const WriteOperationSchema = z.object({
  path: z.string(), // path relative to outputRoot
  content: z.string(), // full file content to write
  hash: z.string(), // hash of `content` — NOTE(review): presumably sha256 hex like hashContent(); confirm in materialize-plan.mjs
  templateIri: z.string(), // IRI of the template that produced the content
  entityIri: z.string(), // IRI of the entity being materialized
  entityType: z.string(),
});

// An in-place rewrite of an existing file.
const UpdateOperationSchema = z.object({
  path: z.string(),
  content: z.string(), // replacement content
  oldHash: z.string(), // expected hash of the CURRENT on-disk file (staleness check)
  newHash: z.string(), // hash of `content` after the update
  templateIri: z.string(),
  entityIri: z.string(),
});

// Removal of an existing file.
const DeleteOperationSchema = z.object({
  path: z.string(),
  hash: z.string(), // expected hash of the file being deleted (staleness check)
  reason: z.string(), // human-readable justification, surfaced by previewPlan
});

// Shape of the plan object accepted by every exported function in this module.
const MaterializationPlanSchema = z.object({
  writes: z.array(WriteOperationSchema),
  updates: z.array(UpdateOperationSchema),
  deletes: z.array(DeleteOperationSchema),
});
|
|
84
|
+
|
|
85
|
+
/**
 * Compute the SHA-256 digest of a string, as lowercase hex.
 *
 * @param {string} content - Text to hash
 * @returns {string} 64-character hex digest
 */
function hashContent(content) {
  const hasher = createHash('sha256');
  hasher.update(content);
  return hasher.digest('hex');
}
|
|
94
|
+
|
|
95
|
+
/**
 * Make sure the parent directory of a file path exists, creating it
 * (and any missing ancestors) if necessary.
 *
 * @param {string} filePath - File whose parent directory is needed
 * @returns {Promise<void>}
 */
async function ensureDirectoryExists(filePath) {
  const parent = path.dirname(filePath);
  // recursive:true makes this idempotent — no error when the dir already exists
  await fs.mkdir(parent, { recursive: true });
}
|
|
105
|
+
|
|
106
|
+
/**
 * Read a UTF-8 file and compute its content hash.
 *
 * @param {string} filePath - Absolute or relative file path
 * @returns {Promise<{content: string, hash: string} | null>} File content
 *   plus hash, or null when the file does not exist. Any error other than
 *   ENOENT is rethrown.
 */
async function readFileWithHash(filePath) {
  let content;
  try {
    content = await fs.readFile(filePath, 'utf-8');
  } catch (err) {
    // Missing file is an expected outcome, not an error.
    if (err.code === 'ENOENT') return null;
    throw err;
  }
  return { content, hash: hashContent(content) };
}
|
|
124
|
+
|
|
125
|
+
/**
 * Apply a single write operation (create a brand-new file).
 *
 * Refuses to overwrite: if anything already exists at the target path the
 * operation fails instead of clobbering it.
 *
 * @param {WriteOperation} op - Write to perform
 * @param {Object} options
 * @param {boolean} options.dryRun - Validate only; skip the actual write
 * @param {boolean} options.createDirectories - Create missing parent dirs
 * @param {string} options.outputRoot - Base directory op.path resolves against
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function applyWrite(op, options) {
  const target = path.resolve(options.outputRoot, op.path);

  // Creating a file that already exists is a conflict, not an update.
  const current = await readFileWithHash(target);
  if (current !== null) {
    return { success: false, error: `File already exists: ${op.path}` };
  }

  if (options.dryRun) {
    return { success: true };
  }

  if (options.createDirectories) {
    await ensureDirectoryExists(target);
  }

  await fs.writeFile(target, op.content, 'utf-8');
  return { success: true };
}
|
|
158
|
+
|
|
159
|
+
/**
 * Apply a single update operation (rewrite an existing file).
 *
 * Fails when the target is missing, or (if validateHashes is on) when the
 * on-disk content no longer matches the hash the plan was built against.
 *
 * @param {UpdateOperation} op - Update to perform
 * @param {Object} options
 * @param {boolean} options.dryRun - Validate only; skip the actual write
 * @param {boolean} options.validateHashes - Require current hash === op.oldHash
 * @param {string} options.outputRoot - Base directory op.path resolves against
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function applyUpdate(op, options) {
  const target = path.resolve(options.outputRoot, op.path);

  const current = await readFileWithHash(target);
  if (current === null) {
    return { success: false, error: `File not found for update: ${op.path}` };
  }

  // Staleness guard: the file changed since the plan was computed.
  if (options.validateHashes && current.hash !== op.oldHash) {
    return {
      success: false,
      error: `Hash mismatch for ${op.path}: expected ${op.oldHash}, got ${current.hash}`,
    };
  }

  if (!options.dryRun) {
    await fs.writeFile(target, op.content, 'utf-8');
  }
  return { success: true };
}
|
|
197
|
+
|
|
198
|
+
/**
 * Apply a single delete operation (remove an existing file).
 *
 * A missing target is treated as success (already gone). With validateHashes
 * on, refuses to delete a file whose content drifted from the planned hash.
 *
 * @param {DeleteOperation} op - Delete to perform
 * @param {Object} options
 * @param {boolean} options.dryRun - Validate only; skip the actual unlink
 * @param {boolean} options.validateHashes - Require current hash === op.hash
 * @param {string} options.outputRoot - Base directory op.path resolves against
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function applyDelete(op, options) {
  const target = path.resolve(options.outputRoot, op.path);

  const current = await readFileWithHash(target);
  if (current === null) {
    // Already absent — deletion is a no-op success.
    return { success: true };
  }

  // Staleness guard: don't delete content the plan never saw.
  if (options.validateHashes && current.hash !== op.hash) {
    return {
      success: false,
      error: `Hash mismatch for delete ${op.path}: expected ${op.hash}, got ${current.hash}`,
    };
  }

  if (!options.dryRun) {
    await fs.unlink(target);
  }
  return { success: true };
}
|
|
234
|
+
|
|
235
|
+
/**
 * Create FS snapshot using scanFileSystemToStore
 *
 * @param {string} root - Directory to scan
 * @returns {Promise<{store: Store, hash: string}>} RDF store of the tree plus
 *   a 16-hex-char digest; `{empty store, 'empty'}` when the scan fails
 */
async function snapshotFileSystem(root) {
  try {
    const { store, summary } = await scanFileSystemToStore({
      root,
      baseIri: 'http://example.org/unrdf/materialize#',
    });

    // NOTE(review): this digest covers only file/folder COUNTS, not paths or
    // contents — two different trees with equal counts hash identically, so
    // receipt before/after hashes can miss content-only changes. Confirm
    // whether a content-sensitive digest is wanted here.
    const hash = createHash('sha256')
      .update(`files:${summary.fileCount}|folders:${summary.folderCount}`)
      .digest('hex')
      .substring(0, 16);

    return { store, hash };
  } catch (err) {
    // If directory doesn't exist, return empty store
    // NOTE(review): the catch is broader than the comment — ANY scan failure
    // (not just a missing directory) is swallowed and reported as an empty
    // snapshot. Presumably deliberate best-effort for receipts; confirm.
    const { Store } = await import('n3');
    return {
      store: new Store(),
      hash: 'empty',
    };
  }
}
|
|
263
|
+
|
|
264
|
+
/**
 * Apply a materialization plan
 *
 * Execution flow:
 * 1. Validate plan and options (zod schemas)
 * 2. Snapshot before with scanFileSystemToStore
 * 3. For each write in plan:
 *    - Check file doesn't exist
 *    - Write file with template content
 * 4. For each update in plan:
 *    - Validate existing hash matches
 *    - Write new content
 * 5. For each delete in plan:
 *    - Validate hash matches
 *    - Delete file
 * 6. Snapshot after with scanFileSystemToStore
 * 7. Compute diff using diffProjectStructure
 * 8. Return result + receipt
 *
 * Failed operations are recorded in result.errors and skipped; the run
 * continues (no transactional rollback here — see rollbackMaterialization).
 *
 * @param {MaterializationPlan} plan - The plan to apply
 * @param {Object} [options] - Apply options
 * @param {boolean} [options.dryRun] - If true, don't actually write files
 * @param {boolean} [options.createDirectories] - Create parent directories if needed
 * @param {boolean} [options.validateHashes] - Validate file hashes before updates/deletes
 * @param {string} [options.outputRoot] - Base directory for file operations
 * @param {boolean} [options.snapshotBefore] - Take FS snapshot before applying
 * @param {boolean} [options.snapshotAfter] - Take FS snapshot after applying
 * @returns {Promise<ApplyOutput>}
 */
export async function applyMaterializationPlan(plan, options = {}) {
  const opts = ApplyOptionsSchema.parse(options);
  const validatedPlan = MaterializationPlanSchema.parse(plan);

  /** @type {ApplyResult} */
  const result = {
    appliedCount: 0,
    skippedCount: 0,
    writtenPaths: [],
    updatedPaths: [],
    deletedPaths: [],
    errors: [],
  };

  // Snapshot before
  let beforeSnapshot = null;
  if (opts.snapshotBefore) {
    beforeSnapshot = await snapshotFileSystem(opts.outputRoot);
  }

  // Run one category of operations sequentially, recording each outcome in
  // `result` and each applied path in `appliedPaths`.
  const runBatch = async (ops, applyFn, appliedPaths) => {
    for (const op of ops) {
      const opResult = await applyFn(op, opts);
      if (opResult.success) {
        result.appliedCount++;
        appliedPaths.push(op.path);
      } else {
        result.skippedCount++;
        result.errors.push(opResult.error);
      }
    }
  };

  await runBatch(validatedPlan.writes, applyWrite, result.writtenPaths);
  await runBatch(validatedPlan.updates, applyUpdate, result.updatedPaths);
  await runBatch(validatedPlan.deletes, applyDelete, result.deletedPaths);

  // Snapshot after (skipped on dry runs: nothing on disk changed)
  let afterSnapshot = null;
  let fsDiff = null;
  if (opts.snapshotAfter && !opts.dryRun) {
    afterSnapshot = await snapshotFileSystem(opts.outputRoot);

    // Compute diff if we have both snapshots
    if (beforeSnapshot && afterSnapshot) {
      try {
        fsDiff = diffProjectStructure({
          actualStore: afterSnapshot.store,
          goldenStore: beforeSnapshot.store,
        });
      } catch (err) {
        // Diff failed, but apply succeeded — record the failure in the receipt
        fsDiff = { error: err.message };
      }
    }
  }

  // Plan hash covers the identity of every operation (path + content hashes),
  // not just operation counts — previously any two plans with equal counts
  // produced identical receipt hashes.
  const planHash = createHash('sha256')
    .update(
      JSON.stringify({
        writes: validatedPlan.writes.map((op) => [op.path, op.hash]),
        updates: validatedPlan.updates.map((op) => [op.path, op.oldHash, op.newHash]),
        deletes: validatedPlan.deletes.map((op) => [op.path, op.hash]),
      })
    )
    .digest('hex')
    .substring(0, 16);

  /** @type {ApplyReceipt} */
  const receipt = {
    planHash,
    beforeHash: beforeSnapshot?.hash || 'none',
    afterHash: afterSnapshot?.hash || 'none',
    fsDiff: fsDiff || {},
    timestamp: new Date().toISOString(),
    success: result.errors.length === 0,
  };

  return { result, receipt };
}
|
|
393
|
+
|
|
394
|
+
/**
 * Rollback a materialization by deleting the files it wrote.
 *
 * Only writes are reversible: new files can simply be removed. Updates and
 * deletes would need the pre-apply content, which was never stored.
 *
 * @param {ApplyResult} result - Result from a previous apply
 * @param {Object} [options]
 * @param {string} [options.outputRoot] - Base directory
 * @returns {Promise<{rolledBack: string[], errors: string[]}>}
 */
export async function rollbackMaterialization(result, options = {}) {
  const root = options.outputRoot || '.';
  const rolledBack = [];
  const errors = [];

  for (const relPath of result.writtenPaths) {
    try {
      await fs.unlink(path.resolve(root, relPath));
      rolledBack.push(relPath);
    } catch (err) {
      // An already-missing file needs no rollback; anything else is reported.
      if (err.code !== 'ENOENT') {
        errors.push(`Failed to rollback ${relPath}: ${err.message}`);
      }
    }
  }

  return { rolledBack, errors };
}
|
|
425
|
+
|
|
426
|
+
/**
 * Preview a plan without applying it.
 *
 * @param {MaterializationPlan} plan - Plan to summarize
 * @returns {Object} Operation count plus a trimmed view of each operation
 */
export function previewPlan(plan) {
  const { writes, updates, deletes } = MaterializationPlanSchema.parse(plan);

  return {
    totalOperations: writes.length + updates.length + deletes.length,
    writes: writes.map(({ path, templateIri, entityIri }) => ({
      path,
      templateIri,
      entityIri,
    })),
    updates: updates.map(({ path, templateIri }) => ({ path, templateIri })),
    deletes: deletes.map(({ path, reason }) => ({ path, reason })),
  };
}
|
|
453
|
+
|
|
454
|
+
/**
 * Check if a plan can be safely applied without actually applying it.
 *
 * Validates:
 * - No files exist for write operations
 * - All files exist for update operations, with matching hashes
 * - Hash matches for delete operations (a missing delete target is fine)
 *
 * @param {MaterializationPlan} plan - Plan to validate
 * @param {Object} [options]
 * @param {string} [options.outputRoot] - Base directory
 * @returns {Promise<{canApply: boolean, issues: string[]}>}
 */
export async function checkPlanApplicability(plan, options = {}) {
  const root = options.outputRoot || '.';
  const { writes, updates, deletes } = MaterializationPlanSchema.parse(plan);
  const issues = [];

  // Resolve an op path against the root and read its current state.
  const inspect = (relPath) => readFileWithHash(path.resolve(root, relPath));

  // Writes must not collide with existing files.
  for (const op of writes) {
    if ((await inspect(op.path)) !== null) {
      issues.push(`Write conflict: ${op.path} already exists`);
    }
  }

  // Updates need an existing, unchanged target.
  for (const op of updates) {
    const current = await inspect(op.path);
    if (current === null) {
      issues.push(`Update target missing: ${op.path}`);
    } else if (current.hash !== op.oldHash) {
      issues.push(`Update hash mismatch: ${op.path}`);
    }
  }

  // Deletes only fail when present content drifted from the planned hash.
  for (const op of deletes) {
    const current = await inspect(op.path);
    if (current !== null && current.hash !== op.hash) {
      issues.push(`Delete hash mismatch: ${op.path}`);
    }
  }

  return {
    canApply: issues.length === 0,
    issues,
  };
}
|