@visulima/vis 1.0.0-alpha.11 → 1.0.0-alpha.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +101 -0
- package/LICENSE.md +559 -186
- package/README.md +18 -0
- package/dist/bin.js +1 -9
- package/dist/config/index.d.ts +477 -556
- package/dist/config/index.js +1 -2
- package/dist/generate/index.js +1 -3
- package/dist/packem_chunks/applyDefaults.js +2 -336
- package/dist/packem_chunks/bin.js +234 -9552
- package/dist/packem_chunks/doctor-probe.js +2 -112
- package/dist/packem_chunks/fix.js +11 -234
- package/dist/packem_chunks/handler.js +1 -99
- package/dist/packem_chunks/handler10.js +2 -53
- package/dist/packem_chunks/handler11.js +1 -32
- package/dist/packem_chunks/handler12.js +5 -100
- package/dist/packem_chunks/handler13.js +1 -25
- package/dist/packem_chunks/handler14.js +18 -916
- package/dist/packem_chunks/handler15.js +15 -201
- package/dist/packem_chunks/handler16.js +1 -124
- package/dist/packem_chunks/handler17.js +1 -13
- package/dist/packem_chunks/handler18.js +1 -106
- package/dist/packem_chunks/handler19.js +1 -19
- package/dist/packem_chunks/handler2.js +2 -75
- package/dist/packem_chunks/handler20.js +5 -29
- package/dist/packem_chunks/handler21.js +1 -222
- package/dist/packem_chunks/handler22.js +1 -237
- package/dist/packem_chunks/handler23.js +5 -101
- package/dist/packem_chunks/handler24.js +1 -110
- package/dist/packem_chunks/handler25.js +3 -402
- package/dist/packem_chunks/handler26.js +1 -13
- package/dist/packem_chunks/handler27.js +1 -63
- package/dist/packem_chunks/handler28.js +7 -34
- package/dist/packem_chunks/handler29.js +21 -456
- package/dist/packem_chunks/handler3.js +4 -95
- package/dist/packem_chunks/handler30.js +3 -170
- package/dist/packem_chunks/handler31.js +1 -530
- package/dist/packem_chunks/handler32.js +2 -214
- package/dist/packem_chunks/handler33.js +25 -119
- package/dist/packem_chunks/handler34.js +2 -630
- package/dist/packem_chunks/handler35.js +3 -283
- package/dist/packem_chunks/handler36.js +22 -542
- package/dist/packem_chunks/handler37.js +410 -744
- package/dist/packem_chunks/handler38.js +22 -989
- package/dist/packem_chunks/handler39.js +22 -574
- package/dist/packem_chunks/handler4.js +2 -90
- package/dist/packem_chunks/handler40.js +22 -1685
- package/dist/packem_chunks/handler41.js +6 -1088
- package/dist/packem_chunks/handler42.js +5 -797
- package/dist/packem_chunks/handler43.js +10 -2658
- package/dist/packem_chunks/handler44.js +51 -3784
- package/dist/packem_chunks/handler45.js +25 -2574
- package/dist/packem_chunks/handler46.js +3 -3769
- package/dist/packem_chunks/handler47.js +21 -1485
- package/dist/packem_chunks/handler48.js +42 -0
- package/dist/packem_chunks/handler5.js +8 -174
- package/dist/packem_chunks/handler6.js +1 -95
- package/dist/packem_chunks/handler7.js +1 -115
- package/dist/packem_chunks/handler8.js +1 -12
- package/dist/packem_chunks/handler9.js +1 -29
- package/dist/packem_chunks/heal-accept.js +10 -522
- package/dist/packem_chunks/heal.js +14 -673
- package/dist/packem_chunks/index.js +7 -873
- package/dist/packem_chunks/loader.js +1 -23
- package/dist/packem_chunks/tar.js +3 -0
- package/dist/packem_shared/ai-analysis-hm8d2W7z.js +67 -0
- package/dist/packem_shared/ai-cache-DoiF80AR.js +1 -0
- package/dist/packem_shared/ai-fix-nn4zOE95.js +43 -0
- package/dist/packem_shared/cache-directory-CwHlJhgx.js +1 -0
- package/dist/packem_shared/dependency-scan-COr5n63B.js +2 -0
- package/dist/packem_shared/docker-D6OGr5_S.js +2 -0
- package/dist/packem_shared/failure-log-iUVLf6ts.js +2 -0
- package/dist/packem_shared/flakiness-D9wf0t56.js +1 -0
- package/dist/packem_shared/giget-CcEy_Elm.js +2 -0
- package/dist/packem_shared/index-DH-5hsrC.js +1 -0
- package/dist/packem_shared/otel-DxDUPJJH.js +6 -0
- package/dist/packem_shared/otelPlugin-CQq6poq8.js +1 -0
- package/dist/packem_shared/registry-CkubDdiY.js +2 -0
- package/dist/packem_shared/run-summary-utils-BfBvjzhY.js +1 -0
- package/dist/packem_shared/runtime-check-BXZ43CBW.js +1 -0
- package/dist/packem_shared/selectors-BylODRiM.js +3 -0
- package/dist/packem_shared/symbols-CQmER5MT.js +1 -0
- package/dist/packem_shared/toolchain-BgBOUHII.js +5 -0
- package/dist/packem_shared/typosquats-CcZl99B1.js +1 -0
- package/dist/packem_shared/use-measured-height-DjYgUOKk.js +1 -0
- package/dist/packem_shared/utils-DrNg0XTR.js +1 -0
- package/dist/packem_shared/verify-Baj5mFJ7.js +1 -0
- package/dist/packem_shared/vis-update-app-D1jl0UZZ.js +1 -0
- package/dist/packem_shared/xxh3-DrAUNq4n.js +1 -0
- package/index.js +556 -727
- package/package.json +19 -29
- package/schemas/project.schema.json +739 -297
- package/schemas/vis-config.schema.json +3365 -384
- package/templates/buildkite-ci/template.yml +20 -20
- package/dist/packem_shared/VisUpdateApp-D-Yz_wvg.js +0 -1316
- package/dist/packem_shared/_commonjsHelpers-BqLXS_qQ.js +0 -5
- package/dist/packem_shared/ai-analysis-CHeB1joD.js +0 -367
- package/dist/packem_shared/ai-cache-Be_jexe4.js +0 -142
- package/dist/packem_shared/ai-fix-B9iQVcD2.js +0 -379
- package/dist/packem_shared/cache-directory-2qvs4goY.js +0 -98
- package/dist/packem_shared/catalog-BJTtyi-O.js +0 -1371
- package/dist/packem_shared/dependency-scan-A0KSklpG.js +0 -188
- package/dist/packem_shared/docker-2iZzc280.js +0 -181
- package/dist/packem_shared/failure-log-Cz3Z4SKL.js +0 -100
- package/dist/packem_shared/flakiness-goTxXuCX.js +0 -180
- package/dist/packem_shared/otel-DCvqCTz_.js +0 -158
- package/dist/packem_shared/otelPlugin-DFaLDvJf.js +0 -3
- package/dist/packem_shared/registry-CbqXI0rc.js +0 -272
- package/dist/packem_shared/run-summary-utils-PVMl4aIh.js +0 -130
- package/dist/packem_shared/runtime-check-Cobi3p6l.js +0 -127
- package/dist/packem_shared/selectors-SM69TfqC.js +0 -194
- package/dist/packem_shared/symbols-Ta7g2nU-.js +0 -14
- package/dist/packem_shared/toolchain-BdZd9eBi.js +0 -975
- package/dist/packem_shared/typosquats-C-bCh3PX.js +0 -1210
- package/dist/packem_shared/use-measured-height-CNP0vT4M.js +0 -20
- package/dist/packem_shared/utils-CthVdBPS.js +0 -40
- package/dist/packem_shared/xxh3-Ck8mXNg1.js +0 -239
|
@@ -1,916 +1,18 @@
|
|
|
1
|
-
import {
|
|
2
|
-
|
|
3
|
-
const
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
const
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
return
|
|
18
|
-
};
|
|
19
|
-
|
|
20
|
-
const {
|
|
21
|
-
readdir,
|
|
22
|
-
stat,
|
|
23
|
-
realpath,
|
|
24
|
-
rm,
|
|
25
|
-
mkdtemp
|
|
26
|
-
} = __cjs_getBuiltinModule("node:fs/promises");
|
|
27
|
-
const {
|
|
28
|
-
tmpdir
|
|
29
|
-
} = __cjs_getBuiltinModule("node:os");
|
|
30
|
-
const {
|
|
31
|
-
createInterface
|
|
32
|
-
} = __cjs_getBuiltinModule("node:readline");
|
|
33
|
-
import { isAccessibleSync } from '@visulima/fs';
|
|
34
|
-
import { formatBytes } from '@visulima/humanizer';
|
|
35
|
-
import { join, relative } from '@visulima/path';
|
|
36
|
-
import { Cache, readLastRunSummary, parseCacheSize, getLastRunSummaryPath, digestFile } from '@visulima/task-runner';
|
|
37
|
-
import { g as getCacheStats, c as clearCache } from '../packem_shared/ai-cache-Be_jexe4.js';
|
|
38
|
-
import { i as isCacheDirectoryInsideWorkspace, r as resolveSharedCacheDirectory } from '../packem_shared/cache-directory-2qvs4goY.js';
|
|
39
|
-
import { p as pail, u as getSocketCacheStats, v as clearSocketCache } from './bin.js';
|
|
40
|
-
import { r as readRunSummaryById, f as findTaskInSummary, a as readPreviousRunSummary, d as diffHashDetails } from '../packem_shared/run-summary-utils-PVMl4aIh.js';
|
|
41
|
-
|
|
42
|
-
const sumDirectorySize = async (directory) => {
|
|
43
|
-
let total = 0;
|
|
44
|
-
try {
|
|
45
|
-
const entries = await readdir(directory, { withFileTypes: true });
|
|
46
|
-
for (const entry of entries) {
|
|
47
|
-
const full = join(directory, entry.name);
|
|
48
|
-
if (entry.isDirectory()) {
|
|
49
|
-
total += await sumDirectorySize(full);
|
|
50
|
-
} else if (entry.isFile()) {
|
|
51
|
-
const s = await stat(full);
|
|
52
|
-
total += s.size;
|
|
53
|
-
}
|
|
54
|
-
}
|
|
55
|
-
} catch {
|
|
56
|
-
}
|
|
57
|
-
return total;
|
|
58
|
-
};
|
|
59
|
-
const collectCacheEntries = async (cacheDirectory) => {
|
|
60
|
-
const entries = [];
|
|
61
|
-
let dirents;
|
|
62
|
-
try {
|
|
63
|
-
dirents = await readdir(cacheDirectory);
|
|
64
|
-
} catch {
|
|
65
|
-
return [];
|
|
66
|
-
}
|
|
67
|
-
for (const name of dirents) {
|
|
68
|
-
if (name.startsWith(".")) {
|
|
69
|
-
continue;
|
|
70
|
-
}
|
|
71
|
-
const fullPath = join(cacheDirectory, name);
|
|
72
|
-
try {
|
|
73
|
-
const s = await stat(fullPath);
|
|
74
|
-
if (!s.isDirectory()) {
|
|
75
|
-
continue;
|
|
76
|
-
}
|
|
77
|
-
const sizeBytes = await sumDirectorySize(fullPath);
|
|
78
|
-
entries.push({
|
|
79
|
-
hash: name,
|
|
80
|
-
mtimeMs: s.mtimeMs,
|
|
81
|
-
path: fullPath,
|
|
82
|
-
sizeBytes
|
|
83
|
-
});
|
|
84
|
-
} catch {
|
|
85
|
-
}
|
|
86
|
-
}
|
|
87
|
-
entries.sort((a, b) => b.mtimeMs - a.mtimeMs);
|
|
88
|
-
return entries;
|
|
89
|
-
};
|
|
90
|
-
const formatAge = (mtimeMs, now = Date.now()) => {
|
|
91
|
-
const seconds = Math.max(0, Math.floor((now - mtimeMs) / 1e3));
|
|
92
|
-
if (seconds < 60) {
|
|
93
|
-
return `${String(seconds)}s`;
|
|
94
|
-
}
|
|
95
|
-
if (seconds < 3600) {
|
|
96
|
-
return `${String(Math.floor(seconds / 60))}m`;
|
|
97
|
-
}
|
|
98
|
-
if (seconds < 86400) {
|
|
99
|
-
return `${String(Math.floor(seconds / 3600))}h`;
|
|
100
|
-
}
|
|
101
|
-
return `${String(Math.floor(seconds / 86400))}d`;
|
|
102
|
-
};
|
|
103
|
-
const confirmPrompt = (question) => new Promise((resolvePromise) => {
|
|
104
|
-
const rl = createInterface({ input: process.stdin, output: process.stderr });
|
|
105
|
-
rl.question(`${question} (y/N) `, (answer) => {
|
|
106
|
-
rl.close();
|
|
107
|
-
const trimmed = answer.trim().toLowerCase();
|
|
108
|
-
resolvePromise(trimmed === "y" || trimmed === "yes");
|
|
109
|
-
});
|
|
110
|
-
});
|
|
111
|
-
const runList = async (cacheDirectory, format, logger) => {
|
|
112
|
-
if (!isAccessibleSync(cacheDirectory)) {
|
|
113
|
-
if (format === "json") {
|
|
114
|
-
process.stdout.write(`${JSON.stringify({ directory: cacheDirectory, entries: [], totalBytes: 0, totalCount: 0 }, void 0, 2)}
|
|
115
|
-
`);
|
|
116
|
-
return;
|
|
117
|
-
}
|
|
118
|
-
pail.info(`No cache directory found at ${cacheDirectory}`);
|
|
119
|
-
return;
|
|
120
|
-
}
|
|
121
|
-
const entries = await collectCacheEntries(cacheDirectory);
|
|
122
|
-
if (entries.length === 0) {
|
|
123
|
-
if (format === "json") {
|
|
124
|
-
process.stdout.write(`${JSON.stringify({ directory: cacheDirectory, entries: [], totalBytes: 0, totalCount: 0 }, void 0, 2)}
|
|
125
|
-
`);
|
|
126
|
-
return;
|
|
127
|
-
}
|
|
128
|
-
pail.info(`Cache directory is empty: ${cacheDirectory}`);
|
|
129
|
-
return;
|
|
130
|
-
}
|
|
131
|
-
const totalBytes = entries.reduce((sum, entry) => sum + entry.sizeBytes, 0);
|
|
132
|
-
if (format === "json") {
|
|
133
|
-
const now = Date.now();
|
|
134
|
-
process.stdout.write(
|
|
135
|
-
`${JSON.stringify(
|
|
136
|
-
{
|
|
137
|
-
directory: cacheDirectory,
|
|
138
|
-
entries: entries.map((entry) => {
|
|
139
|
-
return {
|
|
140
|
-
ageMs: now - entry.mtimeMs,
|
|
141
|
-
hash: entry.hash,
|
|
142
|
-
mtimeIso: new Date(entry.mtimeMs).toISOString(),
|
|
143
|
-
sizeBytes: entry.sizeBytes
|
|
144
|
-
};
|
|
145
|
-
}),
|
|
146
|
-
totalBytes,
|
|
147
|
-
totalCount: entries.length
|
|
148
|
-
},
|
|
149
|
-
void 0,
|
|
150
|
-
2
|
|
151
|
-
)}
|
|
152
|
-
`
|
|
153
|
-
);
|
|
154
|
-
return;
|
|
155
|
-
}
|
|
156
|
-
pail.info(`Cache directory: ${cacheDirectory}`);
|
|
157
|
-
pail.info(`Entries: ${String(entries.length)} (${formatBytes(totalBytes, { decimals: 1, space: false })})`);
|
|
158
|
-
logger.info("");
|
|
159
|
-
const renderedAt = Date.now();
|
|
160
|
-
const rows = entries.map((entry) => {
|
|
161
|
-
return {
|
|
162
|
-
age: formatAge(entry.mtimeMs, renderedAt),
|
|
163
|
-
hash: entry.hash.slice(0, 12),
|
|
164
|
-
size: formatBytes(entry.sizeBytes, { decimals: 1, space: false })
|
|
165
|
-
};
|
|
166
|
-
});
|
|
167
|
-
const hashWidth = Math.max(4, ...rows.map((r) => r.hash.length));
|
|
168
|
-
const sizeWidth = Math.max(4, ...rows.map((r) => r.size.length));
|
|
169
|
-
const ageWidth = Math.max(3, ...rows.map((r) => r.age.length));
|
|
170
|
-
logger.info(` ${"hash".padEnd(hashWidth)} ${"size".padEnd(sizeWidth)} ${"age".padEnd(ageWidth)}`);
|
|
171
|
-
logger.info(` ${"-".repeat(hashWidth)} ${"-".repeat(sizeWidth)} ${"-".repeat(ageWidth)}`);
|
|
172
|
-
for (const row of rows) {
|
|
173
|
-
logger.info(` ${row.hash.padEnd(hashWidth)} ${row.size.padEnd(sizeWidth)} ${row.age.padEnd(ageWidth)}`);
|
|
174
|
-
}
|
|
175
|
-
};
|
|
176
|
-
const clearAiCacheSafe = () => {
|
|
177
|
-
try {
|
|
178
|
-
const aiDeleted = clearCache();
|
|
179
|
-
if (aiDeleted > 0) {
|
|
180
|
-
pail.info(`Cleared ${String(aiDeleted)} cached AI response${aiDeleted === 1 ? "" : "s"}.`);
|
|
181
|
-
}
|
|
182
|
-
} catch (error) {
|
|
183
|
-
pail.warn(`Failed to clear AI response cache: ${error instanceof Error ? error.message : String(error)}`);
|
|
184
|
-
}
|
|
185
|
-
};
|
|
186
|
-
const clearSocketCacheSafe = () => {
|
|
187
|
-
try {
|
|
188
|
-
const socketDeleted = clearSocketCache();
|
|
189
|
-
if (socketDeleted > 0) {
|
|
190
|
-
pail.info(`Cleared ${String(socketDeleted)} cached Socket.dev report${socketDeleted === 1 ? "" : "s"}.`);
|
|
191
|
-
}
|
|
192
|
-
} catch (error) {
|
|
193
|
-
pail.warn(`Failed to clear Socket.dev cache: ${error instanceof Error ? error.message : String(error)}`);
|
|
194
|
-
}
|
|
195
|
-
};
|
|
196
|
-
const runClean = async (cacheDirectory, workspaceRoot, options) => {
|
|
197
|
-
if (!isAccessibleSync(cacheDirectory)) {
|
|
198
|
-
pail.info(`No cache directory to clean at ${cacheDirectory}`);
|
|
199
|
-
return;
|
|
200
|
-
}
|
|
201
|
-
if (options.dryRun) {
|
|
202
|
-
const entries = await collectCacheEntries(cacheDirectory);
|
|
203
|
-
const totalBytes = entries.reduce((sum, entry) => sum + entry.sizeBytes, 0);
|
|
204
|
-
pail.info(
|
|
205
|
-
`Would remove ${String(entries.length)} cache entr${entries.length === 1 ? "y" : "ies"} (${formatBytes(totalBytes, { decimals: 1, space: false })}) from ${cacheDirectory}`
|
|
206
|
-
);
|
|
207
|
-
return;
|
|
208
|
-
}
|
|
209
|
-
const insideWorkspace = isCacheDirectoryInsideWorkspace(cacheDirectory, workspaceRoot);
|
|
210
|
-
try {
|
|
211
|
-
const realCache = await realpath(cacheDirectory);
|
|
212
|
-
const realWorkspace = await realpath(workspaceRoot);
|
|
213
|
-
if (realCache === realWorkspace) {
|
|
214
|
-
pail.error("Refusing to delete the workspace root. The cache directory resolved to the same path as the workspace.");
|
|
215
|
-
process.exitCode = 1;
|
|
216
|
-
return;
|
|
217
|
-
}
|
|
218
|
-
} catch {
|
|
219
|
-
}
|
|
220
|
-
if (!insideWorkspace && !options.force) {
|
|
221
|
-
pail.warn(`Cache directory is outside the workspace root: ${cacheDirectory}`);
|
|
222
|
-
pail.warn("This will recursively delete the entire directory, including anything stored there by other tools.");
|
|
223
|
-
if (!process.stdin.isTTY) {
|
|
224
|
-
pail.error("Refusing to clean an out-of-workspace cache without --force (stdin is not a TTY).");
|
|
225
|
-
process.exitCode = 1;
|
|
226
|
-
return;
|
|
227
|
-
}
|
|
228
|
-
const confirmed = await confirmPrompt(" Continue?");
|
|
229
|
-
if (!confirmed) {
|
|
230
|
-
pail.info("Aborted.");
|
|
231
|
-
return;
|
|
232
|
-
}
|
|
233
|
-
}
|
|
234
|
-
if (insideWorkspace) {
|
|
235
|
-
const cache = new Cache({ cacheDirectory, workspaceRoot });
|
|
236
|
-
await cache.clear();
|
|
237
|
-
} else {
|
|
238
|
-
await rm(cacheDirectory, { force: true, recursive: true });
|
|
239
|
-
}
|
|
240
|
-
pail.success(`Cleared cache: ${cacheDirectory}`);
|
|
241
|
-
};
|
|
242
|
-
const runPrune = async (cacheDirectory, workspaceRoot, options) => {
|
|
243
|
-
if (!isAccessibleSync(cacheDirectory)) {
|
|
244
|
-
pail.info(`No cache directory to prune at ${cacheDirectory}`);
|
|
245
|
-
return;
|
|
246
|
-
}
|
|
247
|
-
if (options.maxCacheAgeDays !== void 0 && (!Number.isFinite(options.maxCacheAgeDays) || options.maxCacheAgeDays < 0)) {
|
|
248
|
-
pail.error(`Invalid --max-age-days value: expected a finite number >= 0, got ${String(options.maxCacheAgeDays)}`);
|
|
249
|
-
process.exitCode = 1;
|
|
250
|
-
return;
|
|
251
|
-
}
|
|
252
|
-
if (options.keepLast !== void 0 && (!Number.isFinite(options.keepLast) || options.keepLast < 0 || !Number.isInteger(options.keepLast))) {
|
|
253
|
-
pail.error(`Invalid --keep-last value: expected a non-negative integer, got ${String(options.keepLast)}`);
|
|
254
|
-
process.exitCode = 1;
|
|
255
|
-
return;
|
|
256
|
-
}
|
|
257
|
-
if (options.maxCacheSize !== void 0) {
|
|
258
|
-
let parsedBytes;
|
|
259
|
-
try {
|
|
260
|
-
parsedBytes = parseCacheSize(options.maxCacheSize);
|
|
261
|
-
} catch (error) {
|
|
262
|
-
pail.error(`Invalid --max-size value: ${error instanceof Error ? error.message : String(error)}`);
|
|
263
|
-
process.exitCode = 1;
|
|
264
|
-
return;
|
|
265
|
-
}
|
|
266
|
-
if (!Number.isFinite(parsedBytes) || parsedBytes <= 0) {
|
|
267
|
-
pail.error(`Invalid --max-size value: expected a positive size, got "${options.maxCacheSize}" (${String(parsedBytes)} bytes)`);
|
|
268
|
-
process.exitCode = 1;
|
|
269
|
-
return;
|
|
270
|
-
}
|
|
271
|
-
}
|
|
272
|
-
const maxCacheAge = options.maxCacheAgeDays === void 0 ? void 0 : options.maxCacheAgeDays * 24 * 60 * 60 * 1e3;
|
|
273
|
-
const before = await collectCacheEntries(cacheDirectory);
|
|
274
|
-
const beforeBytes = before.reduce((sum, entry) => sum + entry.sizeBytes, 0);
|
|
275
|
-
if (options.keepLast !== void 0 && before.length > options.keepLast) {
|
|
276
|
-
const stale = before.slice(options.keepLast);
|
|
277
|
-
await Promise.all(stale.map((entry) => rm(entry.path, { force: true, recursive: true })));
|
|
278
|
-
}
|
|
279
|
-
const cache = new Cache({
|
|
280
|
-
cacheDirectory,
|
|
281
|
-
maxCacheAge,
|
|
282
|
-
maxCacheSize: options.maxCacheSize,
|
|
283
|
-
workspaceRoot
|
|
284
|
-
});
|
|
285
|
-
await cache.removeOldEntries();
|
|
286
|
-
const after = await collectCacheEntries(cacheDirectory);
|
|
287
|
-
const afterBytes = after.reduce((sum, entry) => sum + entry.sizeBytes, 0);
|
|
288
|
-
const removed = before.length - after.length;
|
|
289
|
-
const reclaimedBytes = beforeBytes - afterBytes;
|
|
290
|
-
if (removed <= 0) {
|
|
291
|
-
pail.info("Nothing to prune — all entries are within the configured limits.");
|
|
292
|
-
return;
|
|
293
|
-
}
|
|
294
|
-
pail.success(`Pruned ${String(removed)} entr${removed === 1 ? "y" : "ies"}, freed ${formatBytes(reclaimedBytes, { decimals: 1, space: false })}.`);
|
|
295
|
-
};
|
|
296
|
-
const HASH_DISPLAY_PREFIX = 16;
|
|
297
|
-
const truncateHash = (value) => value.length > HASH_DISPLAY_PREFIX ? `${value.slice(0, HASH_DISPLAY_PREFIX)}…` : value;
|
|
298
|
-
const renderHashDetailsBucket = (label, bucket, logger) => {
|
|
299
|
-
const entries = Object.entries(bucket ?? {});
|
|
300
|
-
if (entries.length === 0) {
|
|
301
|
-
return;
|
|
302
|
-
}
|
|
303
|
-
logger.info(` ${label}:`);
|
|
304
|
-
entries.sort(([a], [b]) => a.localeCompare(b));
|
|
305
|
-
for (const [key, value] of entries) {
|
|
306
|
-
logger.info(` ${key.padEnd(40)} ${truncateHash(value)}`);
|
|
307
|
-
}
|
|
308
|
-
};
|
|
309
|
-
const runWhy = async (taskId, options, logger) => {
|
|
310
|
-
const { format, runId, workspaceRoot } = options;
|
|
311
|
-
const summary = runId === void 0 ? await readLastRunSummary(workspaceRoot) : await readRunSummaryById(workspaceRoot, runId);
|
|
312
|
-
if (!summary) {
|
|
313
|
-
if (format === "json") {
|
|
314
|
-
process.stdout.write(`${JSON.stringify({ error: "no-summary", runId: runId ?? null, taskId }, void 0, 2)}
|
|
315
|
-
`);
|
|
316
|
-
process.exitCode = 1;
|
|
317
|
-
return;
|
|
318
|
-
}
|
|
319
|
-
if (runId === void 0) {
|
|
320
|
-
pail.error("No previous run summary found. Run a task first to populate `.task-runner/last-summary.json`.");
|
|
321
|
-
} else {
|
|
322
|
-
pail.error(`Run summary "${runId}" not found in .task-runner/runs/.`);
|
|
323
|
-
}
|
|
324
|
-
process.exitCode = 1;
|
|
325
|
-
return;
|
|
326
|
-
}
|
|
327
|
-
const task = findTaskInSummary(summary, taskId);
|
|
328
|
-
if (!task) {
|
|
329
|
-
if (format === "json") {
|
|
330
|
-
process.stdout.write(`${JSON.stringify({ error: "task-not-in-summary", runId: summary.id, taskId }, void 0, 2)}
|
|
331
|
-
`);
|
|
332
|
-
process.exitCode = 1;
|
|
333
|
-
return;
|
|
334
|
-
}
|
|
335
|
-
pail.error(`Task "${taskId}" was not part of run ${summary.id}.`);
|
|
336
|
-
pail.info(`Tasks in this run: ${summary.tasks.map((t) => t.taskId).join(", ") || "(none)"}`);
|
|
337
|
-
process.exitCode = 1;
|
|
338
|
-
return;
|
|
339
|
-
}
|
|
340
|
-
const previousSummary = await readPreviousRunSummary(workspaceRoot, summary.id);
|
|
341
|
-
const previousTask = previousSummary ? findTaskInSummary(previousSummary, taskId) : void 0;
|
|
342
|
-
const diff = diffHashDetails(task.hashDetails, previousTask?.hashDetails);
|
|
343
|
-
if (format === "json") {
|
|
344
|
-
process.stdout.write(
|
|
345
|
-
`${JSON.stringify(
|
|
346
|
-
{
|
|
347
|
-
diff,
|
|
348
|
-
previousRunId: previousSummary?.id ?? null,
|
|
349
|
-
previousTask: previousTask ? {
|
|
350
|
-
cacheStatus: previousTask.cacheStatus,
|
|
351
|
-
hash: previousTask.hash ?? null,
|
|
352
|
-
hashDetails: previousTask.hashDetails ?? null
|
|
353
|
-
} : null,
|
|
354
|
-
runId: summary.id,
|
|
355
|
-
task: {
|
|
356
|
-
cacheStatus: task.cacheStatus,
|
|
357
|
-
hash: task.hash ?? null,
|
|
358
|
-
hashDetails: task.hashDetails ?? null,
|
|
359
|
-
taskId: task.taskId
|
|
360
|
-
}
|
|
361
|
-
},
|
|
362
|
-
void 0,
|
|
363
|
-
2
|
|
364
|
-
)}
|
|
365
|
-
`
|
|
366
|
-
);
|
|
367
|
-
return;
|
|
368
|
-
}
|
|
369
|
-
pail.info(`Why ${taskId}? (run ${summary.id})`);
|
|
370
|
-
logger.info("");
|
|
371
|
-
logger.info(` status: ${task.cacheStatus}`);
|
|
372
|
-
logger.info(` hash: ${task.hash ?? "(none)"}`);
|
|
373
|
-
if (previousTask) {
|
|
374
|
-
logger.info(` prev: ${previousTask.hash ?? "(none)"} [run ${previousSummary?.id ?? "?"}]`);
|
|
375
|
-
} else {
|
|
376
|
-
logger.info(` prev: (no prior run found)`);
|
|
377
|
-
}
|
|
378
|
-
logger.info("");
|
|
379
|
-
if (!previousTask) {
|
|
380
|
-
pail.info("No previous run to diff against — first time this task was recorded.");
|
|
381
|
-
return;
|
|
382
|
-
}
|
|
383
|
-
const noChanges = !diff.commandChanged && diff.nodes.added.length === 0 && diff.nodes.changed.length === 0 && diff.nodes.removed.length === 0 && diff.runtime.added.length === 0 && diff.runtime.changed.length === 0 && diff.runtime.removed.length === 0 && diff.implicitDeps.added.length === 0 && diff.implicitDeps.changed.length === 0 && diff.implicitDeps.removed.length === 0;
|
|
384
|
-
if (noChanges) {
|
|
385
|
-
pail.success("No hash inputs changed since the previous run.");
|
|
386
|
-
return;
|
|
387
|
-
}
|
|
388
|
-
logger.info("Hash inputs that changed since the previous run:");
|
|
389
|
-
logger.info("");
|
|
390
|
-
if (diff.commandChanged) {
|
|
391
|
-
logger.info(" command: changed");
|
|
392
|
-
}
|
|
393
|
-
for (const bucket of ["nodes", "runtime", "implicitDeps"]) {
|
|
394
|
-
const bucketDiff = diff[bucket];
|
|
395
|
-
if (bucketDiff.added.length === 0 && bucketDiff.changed.length === 0 && bucketDiff.removed.length === 0) {
|
|
396
|
-
continue;
|
|
397
|
-
}
|
|
398
|
-
logger.info(` ${bucket}:`);
|
|
399
|
-
for (const key of bucketDiff.added) {
|
|
400
|
-
logger.info(` + ${key}`);
|
|
401
|
-
}
|
|
402
|
-
for (const key of bucketDiff.changed) {
|
|
403
|
-
logger.info(` ~ ${key}`);
|
|
404
|
-
}
|
|
405
|
-
for (const key of bucketDiff.removed) {
|
|
406
|
-
logger.info(` - ${key}`);
|
|
407
|
-
}
|
|
408
|
-
}
|
|
409
|
-
logger.info("");
|
|
410
|
-
pail.info(`Last summary file: ${getLastRunSummaryPath(workspaceRoot)}`);
|
|
411
|
-
};
|
|
412
|
-
const runHash = async (taskId, options, logger) => {
|
|
413
|
-
const { format, runId, workspaceRoot } = options;
|
|
414
|
-
const summary = runId === void 0 ? await readLastRunSummary(workspaceRoot) : await readRunSummaryById(workspaceRoot, runId);
|
|
415
|
-
if (!summary) {
|
|
416
|
-
if (format === "json") {
|
|
417
|
-
process.stdout.write(`${JSON.stringify({ error: "no-summary", runId: runId ?? null, taskId }, void 0, 2)}
|
|
418
|
-
`);
|
|
419
|
-
process.exitCode = 1;
|
|
420
|
-
return;
|
|
421
|
-
}
|
|
422
|
-
if (runId === void 0) {
|
|
423
|
-
pail.error("No previous run summary found. Run a task first to populate `.task-runner/last-summary.json`.");
|
|
424
|
-
} else {
|
|
425
|
-
pail.error(`Run summary "${runId}" not found in .task-runner/runs/.`);
|
|
426
|
-
}
|
|
427
|
-
process.exitCode = 1;
|
|
428
|
-
return;
|
|
429
|
-
}
|
|
430
|
-
const task = findTaskInSummary(summary, taskId);
|
|
431
|
-
if (!task) {
|
|
432
|
-
if (format === "json") {
|
|
433
|
-
process.stdout.write(`${JSON.stringify({ error: "task-not-in-summary", runId: summary.id, taskId }, void 0, 2)}
|
|
434
|
-
`);
|
|
435
|
-
process.exitCode = 1;
|
|
436
|
-
return;
|
|
437
|
-
}
|
|
438
|
-
pail.error(`Task "${taskId}" was not part of run ${summary.id}.`);
|
|
439
|
-
process.exitCode = 1;
|
|
440
|
-
return;
|
|
441
|
-
}
|
|
442
|
-
if (format === "json") {
|
|
443
|
-
process.stdout.write(
|
|
444
|
-
`${JSON.stringify(
|
|
445
|
-
{
|
|
446
|
-
cacheStatus: task.cacheStatus,
|
|
447
|
-
hash: task.hash ?? null,
|
|
448
|
-
hashDetails: task.hashDetails ?? null,
|
|
449
|
-
runId: summary.id,
|
|
450
|
-
taskId: task.taskId
|
|
451
|
-
},
|
|
452
|
-
void 0,
|
|
453
|
-
2
|
|
454
|
-
)}
|
|
455
|
-
`
|
|
456
|
-
);
|
|
457
|
-
return;
|
|
458
|
-
}
|
|
459
|
-
pail.info(`Hash for ${taskId} (run ${summary.id})`);
|
|
460
|
-
logger.info("");
|
|
461
|
-
logger.info(` status: ${task.cacheStatus}`);
|
|
462
|
-
logger.info(` hash: ${task.hash ?? "(none)"}`);
|
|
463
|
-
if (task.hashDetails) {
|
|
464
|
-
logger.info("");
|
|
465
|
-
logger.info(` command: ${truncateHash(task.hashDetails.command)}`);
|
|
466
|
-
renderHashDetailsBucket("nodes", task.hashDetails.nodes, logger);
|
|
467
|
-
renderHashDetailsBucket("runtime", task.hashDetails.runtime, logger);
|
|
468
|
-
renderHashDetailsBucket("implicitDeps", task.hashDetails.implicitDeps, logger);
|
|
469
|
-
} else {
|
|
470
|
-
logger.info("");
|
|
471
|
-
pail.info("No hash details recorded for this task.");
|
|
472
|
-
}
|
|
473
|
-
};
|
|
474
|
-
const runSize = async (cacheDirectory, format) => {
|
|
475
|
-
if (!isAccessibleSync(cacheDirectory)) {
|
|
476
|
-
if (format === "json") {
|
|
477
|
-
process.stdout.write(`${JSON.stringify({ directory: cacheDirectory, exists: false, totalBytes: 0, totalCount: 0 })}
|
|
478
|
-
`);
|
|
479
|
-
return;
|
|
480
|
-
}
|
|
481
|
-
pail.info(`No cache directory at ${cacheDirectory}`);
|
|
482
|
-
return;
|
|
483
|
-
}
|
|
484
|
-
const entries = await collectCacheEntries(cacheDirectory);
|
|
485
|
-
const totalBytes = entries.reduce((sum, entry) => sum + entry.sizeBytes, 0);
|
|
486
|
-
if (format === "json") {
|
|
487
|
-
process.stdout.write(
|
|
488
|
-
`${JSON.stringify({
|
|
489
|
-
directory: cacheDirectory,
|
|
490
|
-
exists: true,
|
|
491
|
-
totalBytes,
|
|
492
|
-
totalCount: entries.length
|
|
493
|
-
})}
|
|
494
|
-
`
|
|
495
|
-
);
|
|
496
|
-
return;
|
|
497
|
-
}
|
|
498
|
-
pail.info(`Cache directory: ${cacheDirectory}`);
|
|
499
|
-
pail.info(`Entries: ${String(entries.length)}`);
|
|
500
|
-
pail.info(`Total size: ${formatBytes(totalBytes, { decimals: 1, space: false })}`);
|
|
501
|
-
};
|
|
502
|
-
const walkAndDigest = async (root) => {
|
|
503
|
-
const out = [];
|
|
504
|
-
const walk = async (absolute) => {
|
|
505
|
-
const items = (await readdir(absolute, { withFileTypes: true })).sort((a, b) => a.name.localeCompare(b.name));
|
|
506
|
-
for (const item of items) {
|
|
507
|
-
const childAbsolute = join(absolute, item.name);
|
|
508
|
-
if (item.isDirectory()) {
|
|
509
|
-
await walk(childAbsolute);
|
|
510
|
-
continue;
|
|
511
|
-
}
|
|
512
|
-
if (!item.isFile()) {
|
|
513
|
-
continue;
|
|
514
|
-
}
|
|
515
|
-
const [info, digest] = await Promise.all([stat(childAbsolute), digestFile(childAbsolute)]);
|
|
516
|
-
out.push({
|
|
517
|
-
hash: digest?.hash ?? "",
|
|
518
|
-
// eslint-disable-next-line no-bitwise -- low 12 bits hold the rwx triplet
|
|
519
|
-
mode: info.mode & 4095,
|
|
520
|
-
mtimeMs: info.mtimeMs,
|
|
521
|
-
relativePath: relative(root, childAbsolute).replaceAll("\\", "/"),
|
|
522
|
-
sizeBytes: info.size
|
|
523
|
-
});
|
|
524
|
-
}
|
|
525
|
-
};
|
|
526
|
-
await walk(root);
|
|
527
|
-
return out;
|
|
528
|
-
};
|
|
529
|
-
const compareSecondsTruncated = (a, b) => Math.floor(a / 1e3) === Math.floor(b / 1e3);
|
|
530
|
-
const VERIFY_DIFF_CONCURRENCY = 16;
|
|
531
|
-
const computeFileDiff = async (file, workspaceRoot) => {
|
|
532
|
-
const livePath = join(workspaceRoot, file.relativePath);
|
|
533
|
-
const liveStat = await stat(livePath).catch(() => void 0);
|
|
534
|
-
if (!liveStat) {
|
|
535
|
-
return {
|
|
536
|
-
expected: { hash: file.hash, mode: file.mode, mtimeMs: file.mtimeMs },
|
|
537
|
-
issues: ["missing"],
|
|
538
|
-
relativePath: file.relativePath
|
|
539
|
-
};
|
|
540
|
-
}
|
|
541
|
-
const liveDigest = await digestFile(livePath);
|
|
542
|
-
const issues = [];
|
|
543
|
-
if ((liveDigest?.hash ?? "") !== file.hash) {
|
|
544
|
-
issues.push("content");
|
|
545
|
-
}
|
|
546
|
-
const liveMode = liveStat.mode & 4095;
|
|
547
|
-
if (process.platform !== "win32" && liveMode !== file.mode) {
|
|
548
|
-
issues.push("mode");
|
|
549
|
-
}
|
|
550
|
-
if (!compareSecondsTruncated(liveStat.mtimeMs, file.mtimeMs)) {
|
|
551
|
-
issues.push("mtime");
|
|
552
|
-
}
|
|
553
|
-
if (issues.length === 0) {
|
|
554
|
-
return void 0;
|
|
555
|
-
}
|
|
556
|
-
return {
|
|
557
|
-
actual: { hash: liveDigest?.hash, mode: liveMode, mtimeMs: liveStat.mtimeMs },
|
|
558
|
-
expected: { hash: file.hash, mode: file.mode, mtimeMs: file.mtimeMs },
|
|
559
|
-
issues,
|
|
560
|
-
relativePath: file.relativePath
|
|
561
|
-
};
|
|
562
|
-
};
|
|
563
|
-
// Verify that a cached task entry is a faithful snapshot of the live workspace.
// Restores the cached outputs into a throwaway staging directory, then compares
// every restored file against the live copy (content hash, mode, mtime) via
// computeFileDiff. Emits either a JSON payload or human-readable text depending
// on options.format, and sets process.exitCode = 1 on any error or drift.
const runVerify = async (taskId, options, logger) => {
    const { cacheDirectories, format, workspaceRoot } = options;
    // No cache directory could be resolved at all — nothing to verify against.
    if (cacheDirectories.length === 0) {
        if (format === "json") {
            process.stdout.write(`${JSON.stringify({ error: "no-cache-directory", taskId }, void 0, 2)}
`);
        } else {
            pail.error("No cache directory resolved — pass --cache-dir or run inside a workspace.");
        }
        process.exitCode = 1;
        return;
    }
    // Search each candidate cache directory in order; the first hit wins.
    let cacheDirectory;
    let cached;
    for (const directory of cacheDirectories) {
        const found = await new Cache({ cacheDirectory: directory, workspaceRoot }).getByTaskId(taskId);
        if (found) {
            cacheDirectory = directory;
            cached = found;
            break;
        }
    }
    if (!cached || !cacheDirectory) {
        if (format === "json") {
            process.stdout.write(`${JSON.stringify({ error: "no-cached-entry", searchedCaches: cacheDirectories, taskId }, void 0, 2)}
`);
        } else {
            pail.error(`No cached entry found for task "${taskId}". Run it once before verifying.`);
        }
        process.exitCode = 1;
        return;
    }
    // Restore into a temp root so the live workspace is never touched;
    // cleaned up in the finally block below.
    const stagingRoot = await mkdtemp(join(tmpdir(), "vis-cache-verify-"));
    try {
        const stagingCache = new Cache({ cacheDirectory, workspaceRoot: stagingRoot });
        const restored = await stagingCache.restoreOutputs(cached.hash);
        if (!restored) {
            if (format === "json") {
                process.stdout.write(`${JSON.stringify({ error: "restore-failed", hash: cached.hash, taskId }, void 0, 2)}
`);
            } else {
                pail.error(`Cache restore failed for ${taskId} (hash ${cached.hash}).`);
            }
            process.exitCode = 1;
            return;
        }
        const cachedFiles = await walkAndDigest(stagingRoot);
        // An entry with no recorded outputs is not an error — exit code stays 0.
        if (cachedFiles.length === 0) {
            if (format === "json") {
                process.stdout.write(`${JSON.stringify({ diffs: [], hash: cached.hash, status: "no-outputs", taskId }, void 0, 2)}
`);
            } else {
                pail.info(`Cached entry for ${taskId} has no recorded outputs — nothing to verify.`);
            }
            return;
        }
        // Diff files in batches of VERIFY_DIFF_CONCURRENCY; diffSlots keeps
        // results in the same order as cachedFiles regardless of batching.
        const diffSlots = Array.from({ length: cachedFiles.length });
        for (let offset = 0; offset < cachedFiles.length; offset += VERIFY_DIFF_CONCURRENCY) {
            const chunk = cachedFiles.slice(offset, offset + VERIFY_DIFF_CONCURRENCY);
            const results = await Promise.all(chunk.map(async (file) => computeFileDiff(file, workspaceRoot)));
            for (const [index, result] of results.entries()) {
                diffSlots[offset + index] = result;
            }
        }
        // computeFileDiff returns undefined for files that match exactly.
        const diffs = diffSlots.filter((slot) => slot !== void 0);
        if (format === "json") {
            process.stdout.write(
                `${JSON.stringify(
                    {
                        cacheDirectory,
                        cachedFileCount: cachedFiles.length,
                        diffs,
                        hash: cached.hash,
                        status: diffs.length === 0 ? "ok" : "drift",
                        taskId
                    },
                    void 0,
                    2
                )}
`
            );
            if (diffs.length > 0) {
                process.exitCode = 1;
            }
            return;
        }
        // Human-readable report below.
        pail.info(`Verify ${taskId} (hash ${cached.hash})`);
        logger.info("");
        logger.info(`  cache: ${cacheDirectory}`);
        logger.info(`  files: ${String(cachedFiles.length)}`);
        if (diffs.length === 0) {
            logger.info("");
            pail.success("Cache restore is faithful — all files match content, mode, and mtime.");
            return;
        }
        logger.info(`  drift: ${String(diffs.length)} file(s)`);
        logger.info("");
        for (const diff of diffs) {
            const tag = diff.issues.includes("missing") ? "MISSING" : diff.issues.join(",").toUpperCase();
            logger.info(`  [${tag}] ${diff.relativePath}`);
            // For files that still exist, print expected/actual details per issue.
            if (!diff.issues.includes("missing") && diff.expected && diff.actual) {
                if (diff.issues.includes("content")) {
                    logger.info(`      expected hash: ${diff.expected.hash || "(none)"}`);
                    logger.info(`      actual hash:   ${diff.actual.hash ?? "(unreadable)"}`);
                }
                if (diff.issues.includes("mode")) {
                    logger.info(`      expected mode: ${diff.expected.mode.toString(8)}`);
                    logger.info(`      actual mode:   ${(diff.actual.mode ?? 0).toString(8)}`);
                }
                if (diff.issues.includes("mtime")) {
                    logger.info(`      expected mtime: ${new Date(diff.expected.mtimeMs).toISOString()}`);
                    logger.info(`      actual mtime:   ${diff.actual.mtimeMs === void 0 ? "(unreadable)" : new Date(diff.actual.mtimeMs).toISOString()}`);
                }
            }
        }
        process.exitCode = 1;
    } finally {
        // Best-effort cleanup of the staging directory; failures are ignored.
        await rm(stagingRoot, { force: true, recursive: true }).catch(() => {
        });
    }
};
|
|
684
|
-
/**
 * Normalize the `--type` CLI value to one of the known cache targets.
 * Unknown non-empty values trigger a warning and fall back to "all";
 * empty/absent values fall back silently.
 *
 * @param {string | undefined} raw - Raw `--type` option value.
 * @returns {"task" | "ai" | "socket" | "all"} Validated cache target.
 */
const parseCacheTarget = (raw) => {
    const knownTargets = ["task", "ai", "socket", "all"];
    if (knownTargets.includes(raw)) {
        return raw;
    }
    if (raw && raw.length > 0) {
        pail.warn(`Unknown --type value '${raw}'; falling back to 'all'.`);
    }
    return "all";
};
|
|
693
|
-
const includesTarget = (selected, kind) => selected === "all" || selected === kind;
|
|
694
|
-
const isoOrNull = (value) => value === void 0 ? null : new Date(value).toISOString();
|
|
695
|
-
/**
 * Print a labelled summary block for an auxiliary cache (entry count,
 * total size, oldest/newest entry timestamps) via pail.
 *
 * @param {string} label - Heading printed before the stats lines.
 * @param {{ entries: number, totalSizeBytes: number, oldestEntry?: number, newestEntry?: number }} stats
 */
const printAuxStatsBlock = (label, stats) => {
    const { entries, newestEntry, oldestEntry, totalSizeBytes } = stats;
    const stamp = (value) => (value ? new Date(value).toISOString() : "N/A");
    pail.info(`${label}:`);
    pail.info(`  Entries: ${String(entries)}`);
    pail.info(`  Total size: ${formatBytes(totalSizeBytes, { decimals: 1, space: false })}`);
    pail.info(`  Oldest: ${stamp(oldestEntry)}`);
    pail.info(`  Newest: ${stamp(newestEntry)}`);
};
|
|
702
|
-
/**
 * Normalize the `--scope` CLI value to one of the known cache scopes.
 * Unknown non-empty values trigger a warning and fall back to "shared";
 * empty/absent values fall back silently.
 *
 * @param {string | undefined} raw - Raw `--scope` option value.
 * @returns {"worktree" | "shared" | "all"} Validated scope.
 */
const parseScope = (raw) => {
    const knownScopes = ["worktree", "shared", "all"];
    if (knownScopes.includes(raw)) {
        return raw;
    }
    if (raw && raw.length > 0) {
        pail.warn(`Unknown --scope value '${raw}'; falling back to 'shared'.`);
    }
    return "shared";
};
|
|
711
|
-
/**
 * Resolve which cache directories a cache subcommand should operate on,
 * combining the workspace root, CLI options (--scope, --cache-dir) and the
 * vis config's task-runner settings.
 *
 * @returns {{ cacheDirectories: string[], cacheDirectory: string, scope: string, sharedWorktreeCache: unknown, workspaceRoot: string }}
 */
const resolveCacheDirectoryFromContext = (workspaceRoot, options, visConfig) => {
    const resolvedWorkspaceRoot = workspaceRoot ?? process.cwd();
    const cfg = visConfig ?? {};
    const taskRunnerOptions = cfg.taskRunnerOptions ?? {};
    const scope = parseScope(options.scope);
    const optionsCacheDir = options.cacheDir;
    // Worktree-local directory (shared-worktree behaviour forced off) versus
    // the directory honouring the configured sharedWorktreeCache flag.
    const worktreeDirectory = resolveSharedCacheDirectory(resolvedWorkspaceRoot, optionsCacheDir, taskRunnerOptions.cacheDirectory, false);
    const sharedDirectory = resolveSharedCacheDirectory(resolvedWorkspaceRoot, optionsCacheDir, taskRunnerOptions.cacheDirectory, cfg.sharedWorktreeCache);
    // Default ("shared" scope): shared directory only; override per scope.
    let primary = sharedDirectory;
    let directories = [sharedDirectory];
    if (scope === "worktree") {
        primary = worktreeDirectory;
        directories = [worktreeDirectory];
    } else if (scope === "all" && sharedDirectory !== worktreeDirectory) {
        // "all" keeps the shared directory primary and appends the worktree
        // directory when it is distinct.
        directories = [sharedDirectory, worktreeDirectory];
    }
    return {
        cacheDirectories: directories,
        cacheDirectory: primary,
        scope,
        sharedWorktreeCache: cfg.sharedWorktreeCache,
        workspaceRoot: resolvedWorkspaceRoot
    };
};
|
|
745
|
-
/**
 * `vis cache list` — list task-cache entries in every resolved cache
 * directory, prefixing each listing with its directory path when more
 * than one directory is searched.
 */
const cacheListExecute = async ({ logger, options, visConfig, workspaceRoot: wsRoot }) => {
    const { cacheDirectories } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
    const format = options.format ?? "table";
    const printHeader = cacheDirectories.length > 1;
    for (const directory of cacheDirectories) {
        if (printHeader) {
            pail.info(`# ${directory}`);
        }
        await runList(directory, format, logger);
    }
};
|
|
755
|
-
/**
 * `vis cache clean` — remove cached data for the selected cache type(s)
 * (--type task|ai|socket|all). Honors --dry-run (report only, nothing
 * deleted) and, for the task cache, --force.
 */
const cacheCleanExecute = async ({ options, visConfig, workspaceRoot: wsRoot }) => {
    const target = parseCacheTarget(options.type);
    const dryRun = Boolean(options.dryRun);
    if (includesTarget(target, "task")) {
        const { cacheDirectory, workspaceRoot } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
        const cleanOptions = { dryRun, force: Boolean(options.force) };
        await runClean(cacheDirectory, workspaceRoot, cleanOptions);
    }
    if (includesTarget(target, "ai")) {
        if (!dryRun) {
            clearAiCacheSafe();
        } else {
            const stats = getCacheStats();
            pail.info(`Would clear ${String(stats.entries)} cached AI response${stats.entries === 1 ? "" : "s"}.`);
        }
    }
    if (includesTarget(target, "socket")) {
        if (!dryRun) {
            clearSocketCacheSafe();
        } else {
            const stats = getSocketCacheStats();
            pail.info(`Would clear ${String(stats.entries)} cached Socket.dev report${stats.entries === 1 ? "" : "s"}.`);
        }
    }
};
|
|
782
|
-
/**
 * `vis cache prune` — prune old/oversized entries from every resolved
 * cache directory, honoring --keep-last, --max-age-days, and --max-size.
 */
const cachePruneExecute = async ({ options, visConfig, workspaceRoot: wsRoot }) => {
    const { cacheDirectories, workspaceRoot } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
    const printHeader = cacheDirectories.length > 1;
    for (const directory of cacheDirectories) {
        if (printHeader) {
            pail.info(`# ${directory}`);
        }
        // Only forward numeric limits; anything else means "not set".
        const pruneOptions = {
            keepLast: typeof options.keepLast === "number" ? options.keepLast : undefined,
            maxCacheAgeDays: typeof options.maxAgeDays === "number" ? options.maxAgeDays : undefined,
            maxCacheSize: options.maxSize
        };
        await runPrune(directory, workspaceRoot, pruneOptions);
    }
};
|
|
795
|
-
const resolveWorkspaceRoot = (workspaceRoot) => workspaceRoot ?? process.cwd();
|
|
796
|
-
/**
 * `vis cache why <project>:<target>` — explain a task's cache decision.
 * Requires a task ID argument; exits with code 1 when it is missing.
 */
const cacheWhyExecute = async ({ argument, logger, options, workspaceRoot: wsRoot }) => {
    const [taskId] = argument;
    if (!taskId) {
        pail.error("No task ID specified. Usage: vis cache why <project>:<target>");
        process.exitCode = 1;
        return;
    }
    const whyOptions = {
        format: options.format ?? "table",
        runId: options.run,
        workspaceRoot: resolveWorkspaceRoot(wsRoot)
    };
    await runWhy(taskId, whyOptions, logger);
};
|
|
813
|
-
/**
 * `vis cache hash <project>:<target>` — show a task's cache hash details.
 * Requires a task ID argument; exits with code 1 when it is missing.
 */
const cacheHashExecute = async ({ argument, logger, options, workspaceRoot: wsRoot }) => {
    const [taskId] = argument;
    if (!taskId) {
        pail.error("No task ID specified. Usage: vis cache hash <project>:<target>");
        process.exitCode = 1;
        return;
    }
    const hashOptions = {
        format: options.format ?? "table",
        runId: options.run,
        workspaceRoot: resolveWorkspaceRoot(wsRoot)
    };
    await runHash(taskId, hashOptions, logger);
};
|
|
830
|
-
// `vis cache size` — report entry counts and byte totals for the selected
// cache type(s) (--type task|ai|socket|all), either as one JSON payload or
// as human-readable blocks.
const cacheSizeExecute = async ({ options, visConfig, workspaceRoot: wsRoot }) => {
    const target = parseCacheTarget(options.type);
    const format = options.format ?? "table";
    if (format === "json") {
        // Build a single machine-readable payload with one key per cache type.
        const payload = {};
        if (includesTarget(target, "task")) {
            const { cacheDirectories } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
            // Stat every resolved task-cache directory in parallel.
            payload.task = await Promise.all(
                cacheDirectories.map(async (directory) => {
                    const exists = isAccessibleSync(directory);
                    const entries = exists ? await collectCacheEntries(directory) : [];
                    const totalBytes = entries.reduce((sum, entry) => sum + entry.sizeBytes, 0);
                    return {
                        directory,
                        entries: entries.length,
                        exists,
                        // NOTE(review): assumes collectCacheEntries returns entries
                        // sorted newest-first ([0] = newest, at(-1) = oldest) — confirm
                        // against its mtime sort.
                        newestEntry: isoOrNull(entries[0]?.mtimeMs),
                        oldestEntry: isoOrNull(entries.at(-1)?.mtimeMs),
                        totalBytes
                    };
                })
            );
        }
        if (includesTarget(target, "ai")) {
            const stats = getCacheStats();
            payload.ai = {
                entries: stats.entries,
                newestEntry: isoOrNull(stats.newestEntry),
                oldestEntry: isoOrNull(stats.oldestEntry),
                totalBytes: stats.totalSizeBytes
            };
        }
        if (includesTarget(target, "socket")) {
            const stats = getSocketCacheStats();
            payload.socket = {
                entries: stats.entries,
                newestEntry: isoOrNull(stats.newestEntry),
                oldestEntry: isoOrNull(stats.oldestEntry),
                totalBytes: stats.totalSizeBytes
            };
        }
        process.stdout.write(`${JSON.stringify(payload, void 0, 2)}
`);
        return;
    }
    // Table output: one section per selected cache type.
    if (includesTarget(target, "task")) {
        const { cacheDirectories } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
        for (const directory of cacheDirectories) {
            // Prefix each section with its directory when several are searched.
            if (cacheDirectories.length > 1) {
                pail.info(`# ${directory}`);
            }
            await runSize(directory, "table");
        }
    }
    if (includesTarget(target, "ai")) {
        printAuxStatsBlock("AI response cache", getCacheStats());
    }
    if (includesTarget(target, "socket")) {
        printAuxStatsBlock("Socket.dev report cache", getSocketCacheStats());
    }
};
|
|
891
|
-
/**
 * `vis cache verify <project>:<target>` — check that the cached outputs for a
 * task still match the live workspace. Requires a task ID argument; exits
 * with code 1 when it is missing.
 */
const cacheVerifyExecute = async ({
    argument,
    logger,
    options,
    visConfig,
    workspaceRoot: wsRoot
}) => {
    const [taskId] = argument;
    if (!taskId) {
        pail.error("No task ID specified. Usage: vis cache verify <project>:<target>");
        process.exitCode = 1;
        return;
    }
    const { cacheDirectories, workspaceRoot } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
    const verifyOptions = {
        cacheDirectories,
        format: options.format ?? "table",
        workspaceRoot
    };
    await runVerify(taskId, verifyOptions, logger);
};
|
|
915
|
-
|
|
916
|
-
export { cacheCleanExecute, cacheHashExecute, cacheListExecute, cachePruneExecute, cacheSizeExecute, cacheVerifyExecute, cacheWhyExecute, clearAiCacheSafe, clearSocketCacheSafe, collectCacheEntries, formatAge, runClean, runHash, runList, runPrune, runSize, runVerify, runWhy };
|
|
1
|
+
var G=Object.defineProperty;var E=(e,r)=>G(e,"name",{value:r,configurable:!0});import{createRequire as K}from"node:module";import{isAccessibleSync as S}from"@visulima/fs";import{formatBytes as w}from"@visulima/humanizer";import{join as C,relative as te}from"@visulima/path";import{Cache as x,parseCacheSize as se,readLastRunSummary as L,getLastRunSummaryPath as oe,digestFile as F}from"@visulima/task-runner";import{B as re,J as R}from"../packem_shared/ai-cache-DoiF80AR.js";import{m as ae,E as A}from"../packem_shared/cache-directory-CwHlJhgx.js";import{p as i,H as ie,J as z}from"./bin.js";import{r as W,f as M,a as ne,d as ce}from"../packem_shared/run-summary-utils-BfBvjzhY.js";const Q=K(import.meta.url),v=typeof globalThis<"u"&&typeof globalThis.process<"u"?globalThis.process:process,b=E(e=>{if(typeof v<"u"&&v.versions&&v.versions.node){const[r,t]=v.versions.node.split(".").map(Number);if(r>22||r===22&&t>=3||r===20&&t>=16)return v.getBuiltinModule(e)}return Q(e)},"__cjs_getBuiltinModule"),{readdir:j,stat:D,realpath:O,rm:B,mkdtemp:X}=b("node:fs/promises"),{tmpdir:Z}=b("node:os"),{createInterface:ee}=b("node:readline");var he=Object.defineProperty,f=E((e,r)=>he(e,"name",{value:r,configurable:!0}),"f");const _=f(async e=>{let r=0;try{const t=await j(e,{withFileTypes:!0});for(const o of t){const s=C(e,o.name);if(o.isDirectory())r+=await _(s);else if(o.isFile()){const a=await D(s);r+=a.size}}}catch{}return r},"sumDirectorySize"),$=f(async e=>{const r=[];let t;try{t=await j(e)}catch{return[]}for(const o of t){if(o.startsWith("."))continue;const s=C(e,o);try{const a=await D(s);if(!a.isDirectory())continue;const n=await _(s);r.push({hash:o,mtimeMs:a.mtimeMs,path:s,sizeBytes:n})}catch{}}return r.sort((o,s)=>s.mtimeMs-o.mtimeMs),r},"collectCacheEntries"),de=f((e,r=Date.now())=>{const t=Math.max(0,Math.floor((r-e)/1e3));return 
t<60?`${String(t)}s`:t<3600?`${String(Math.floor(t/60))}m`:t<86400?`${String(Math.floor(t/3600))}h`:`${String(Math.floor(t/86400))}d`},"formatAge"),ue=f(e=>new Promise(r=>{const t=ee({input:process.stdin,output:process.stderr});t.question(`${e} (y/N) `,o=>{t.close();const s=o.trim().toLowerCase();r(s==="y"||s==="yes")})}),"confirmPrompt"),fe=f(async(e,r,t)=>{if(!S(e)){if(r==="json"){process.stdout.write(`${JSON.stringify({directory:e,entries:[],totalBytes:0,totalCount:0},void 0,2)}
|
|
2
|
+
`);return}i.info(`No cache directory found at ${e}`);return}const o=await $(e);if(o.length===0){if(r==="json"){process.stdout.write(`${JSON.stringify({directory:e,entries:[],totalBytes:0,totalCount:0},void 0,2)}
|
|
3
|
+
`);return}i.info(`Cache directory is empty: ${e}`);return}const s=o.reduce((c,d)=>c+d.sizeBytes,0);if(r==="json"){const c=Date.now();process.stdout.write(`${JSON.stringify({directory:e,entries:o.map(d=>({ageMs:c-d.mtimeMs,hash:d.hash,mtimeIso:new Date(d.mtimeMs).toISOString(),sizeBytes:d.sizeBytes})),totalBytes:s,totalCount:o.length},void 0,2)}
|
|
4
|
+
`);return}i.info(`Cache directory: ${e}`),i.info(`Entries: ${String(o.length)} (${w(s,{decimals:1,space:!1})})`),t.info("");const a=Date.now(),n=o.map(c=>({age:de(c.mtimeMs,a),hash:c.hash.slice(0,12),size:w(c.sizeBytes,{decimals:1,space:!1})})),h=Math.max(4,...n.map(c=>c.hash.length)),m=Math.max(4,...n.map(c=>c.size.length)),u=Math.max(3,...n.map(c=>c.age.length));t.info(` ${"hash".padEnd(h)} ${"size".padEnd(m)} ${"age".padEnd(u)}`),t.info(` ${"-".repeat(h)} ${"-".repeat(m)} ${"-".repeat(u)}`);for(const c of n)t.info(` ${c.hash.padEnd(h)} ${c.size.padEnd(m)} ${c.age.padEnd(u)}`)},"runList"),le=f(()=>{try{const e=re();e>0&&i.info(`Cleared ${String(e)} cached AI response${e===1?"":"s"}.`)}catch(e){i.warn(`Failed to clear AI response cache: ${e instanceof Error?e.message:String(e)}`)}},"clearAiCacheSafe"),me=f(()=>{try{const e=ie();e>0&&i.info(`Cleared ${String(e)} cached Socket.dev report${e===1?"":"s"}.`)}catch(e){i.warn(`Failed to clear Socket.dev cache: ${e instanceof Error?e.message:String(e)}`)}},"clearSocketCacheSafe"),pe=f(async(e,r,t)=>{if(!S(e)){i.info(`No cache directory to clean at ${e}`);return}if(t.dryRun){const s=await $(e),a=s.reduce((n,h)=>n+h.sizeBytes,0);i.info(`Would remove ${String(s.length)} cache entr${s.length===1?"y":"ies"} (${w(a,{decimals:1,space:!1})}) from ${e}`);return}const o=ae(e,r);try{const s=await O(e),a=await O(r);if(s===a){i.error("Refusing to delete the workspace root. 
The cache directory resolved to the same path as the workspace."),process.exitCode=1;return}}catch{}if(!o&&!t.force){if(i.warn(`Cache directory is outside the workspace root: ${e}`),i.warn("This will recursively delete the entire directory, including anything stored there by other tools."),!process.stdin.isTTY){i.error("Refusing to clean an out-of-workspace cache without --force (stdin is not a TTY)."),process.exitCode=1;return}if(!await ue(" Continue?")){i.info("Aborted.");return}}o?await new x({cacheDirectory:e,workspaceRoot:r}).clear():await B(e,{force:!0,recursive:!0}),i.success(`Cleared cache: ${e}`)},"runClean"),ye=f(async(e,r,t)=>{if(!S(e)){i.info(`No cache directory to prune at ${e}`);return}if(t.maxCacheAgeDays!==void 0&&(!Number.isFinite(t.maxCacheAgeDays)||t.maxCacheAgeDays<0)){i.error(`Invalid --max-age-days value: expected a finite number >= 0, got ${String(t.maxCacheAgeDays)}`),process.exitCode=1;return}if(t.keepLast!==void 0&&(!Number.isFinite(t.keepLast)||t.keepLast<0||!Number.isInteger(t.keepLast))){i.error(`Invalid --keep-last value: expected a non-negative integer, got ${String(t.keepLast)}`),process.exitCode=1;return}if(t.maxCacheSize!==void 0){let c;try{c=se(t.maxCacheSize)}catch(d){i.error(`Invalid --max-size value: ${d instanceof Error?d.message:String(d)}`),process.exitCode=1;return}if(!Number.isFinite(c)||c<=0){i.error(`Invalid --max-size value: expected a positive size, got "${t.maxCacheSize}" (${String(c)} bytes)`),process.exitCode=1;return}}const o=t.maxCacheAgeDays===void 0?void 0:t.maxCacheAgeDays*24*60*60*1e3,s=await $(e),a=s.reduce((c,d)=>c+d.sizeBytes,0);if(t.keepLast!==void 0&&s.length>t.keepLast){const c=s.slice(t.keepLast);await Promise.all(c.map(d=>B(d.path,{force:!0,recursive:!0})))}await new x({cacheDirectory:e,maxCacheAge:o,maxCacheSize:t.maxCacheSize,workspaceRoot:r}).removeOldEntries();const n=await $(e),h=n.reduce((c,d)=>c+d.sizeBytes,0),m=s.length-n.length,u=a-h;if(m<=0){i.info("Nothing to prune — all entries are within 
the configured limits.");return}i.success(`Pruned ${String(m)} entr${m===1?"y":"ies"}, freed ${w(u,{decimals:1,space:!1})}.`)},"runPrune"),T=16,H=f(e=>e.length>T?`${e.slice(0,T)}…`:e,"truncateHash"),N=f((e,r,t)=>{const o=Object.entries(r??{});if(o.length!==0){t.info(` ${e}:`),o.sort(([s],[a])=>s.localeCompare(a));for(const[s,a]of o)t.info(` ${s.padEnd(40)} ${H(a)}`)}},"renderHashDetailsBucket"),ge=f(async(e,r,t)=>{const{format:o,runId:s,workspaceRoot:a}=r,n=s===void 0?await L(a):await W(a,s);if(!n){if(o==="json"){process.stdout.write(`${JSON.stringify({error:"no-summary",runId:s??null,taskId:e},void 0,2)}
|
|
5
|
+
`),process.exitCode=1;return}s===void 0?i.error("No previous run summary found. Run a task first to populate `.task-runner/last-summary.json`."):i.error(`Run summary "${s}" not found in .task-runner/runs/.`),process.exitCode=1;return}const h=M(n,e);if(!h){if(o==="json"){process.stdout.write(`${JSON.stringify({error:"task-not-in-summary",runId:n.id,taskId:e},void 0,2)}
|
|
6
|
+
`),process.exitCode=1;return}i.error(`Task "${e}" was not part of run ${n.id}.`),i.info(`Tasks in this run: ${n.tasks.map(d=>d.taskId).join(", ")||"(none)"}`),process.exitCode=1;return}const m=await ne(a,n.id),u=m?M(m,e):void 0,c=ce(h.hashDetails,u?.hashDetails);if(o==="json"){process.stdout.write(`${JSON.stringify({diff:c,previousRunId:m?.id??null,previousTask:u?{cacheStatus:u.cacheStatus,hash:u.hash??null,hashDetails:u.hashDetails??null}:null,runId:n.id,task:{cacheStatus:h.cacheStatus,hash:h.hash??null,hashDetails:h.hashDetails??null,taskId:h.taskId}},void 0,2)}
|
|
7
|
+
`);return}if(i.info(`Why ${e}? (run ${n.id})`),t.info(""),t.info(` status: ${h.cacheStatus}`),t.info(` hash: ${h.hash??"(none)"}`),u?t.info(` prev: ${u.hash??"(none)"} [run ${m?.id??"?"}]`):t.info(" prev: (no prior run found)"),t.info(""),!u){i.info("No previous run to diff against — first time this task was recorded.");return}if(!c.commandChanged&&c.nodes.added.length===0&&c.nodes.changed.length===0&&c.nodes.removed.length===0&&c.runtime.added.length===0&&c.runtime.changed.length===0&&c.runtime.removed.length===0&&c.implicitDeps.added.length===0&&c.implicitDeps.changed.length===0&&c.implicitDeps.removed.length===0){i.success("No hash inputs changed since the previous run.");return}t.info("Hash inputs that changed since the previous run:"),t.info(""),c.commandChanged&&t.info(" command: changed");for(const d of["nodes","runtime","implicitDeps"]){const l=c[d];if(!(l.added.length===0&&l.changed.length===0&&l.removed.length===0)){t.info(` ${d}:`);for(const p of l.added)t.info(` + ${p}`);for(const p of l.changed)t.info(` ~ ${p}`);for(const p of l.removed)t.info(` - ${p}`)}}t.info(""),i.info(`Last summary file: ${oe(a)}`)},"runWhy"),we=f(async(e,r,t)=>{const{format:o,runId:s,workspaceRoot:a}=r,n=s===void 0?await L(a):await W(a,s);if(!n){if(o==="json"){process.stdout.write(`${JSON.stringify({error:"no-summary",runId:s??null,taskId:e},void 0,2)}
|
|
8
|
+
`),process.exitCode=1;return}s===void 0?i.error("No previous run summary found. Run a task first to populate `.task-runner/last-summary.json`."):i.error(`Run summary "${s}" not found in .task-runner/runs/.`),process.exitCode=1;return}const h=M(n,e);if(!h){if(o==="json"){process.stdout.write(`${JSON.stringify({error:"task-not-in-summary",runId:n.id,taskId:e},void 0,2)}
|
|
9
|
+
`),process.exitCode=1;return}i.error(`Task "${e}" was not part of run ${n.id}.`),process.exitCode=1;return}if(o==="json"){process.stdout.write(`${JSON.stringify({cacheStatus:h.cacheStatus,hash:h.hash??null,hashDetails:h.hashDetails??null,runId:n.id,taskId:h.taskId},void 0,2)}
|
|
10
|
+
`);return}i.info(`Hash for ${e} (run ${n.id})`),t.info(""),t.info(` status: ${h.cacheStatus}`),t.info(` hash: ${h.hash??"(none)"}`),h.hashDetails?(t.info(""),t.info(` command: ${H(h.hashDetails.command)}`),N("nodes",h.hashDetails.nodes,t),N("runtime",h.hashDetails.runtime,t),N("implicitDeps",h.hashDetails.implicitDeps,t)):(t.info(""),i.info("No hash details recorded for this task."))},"runHash"),$e=f(async(e,r)=>{if(!S(e)){if(r==="json"){process.stdout.write(`${JSON.stringify({directory:e,exists:!1,totalBytes:0,totalCount:0})}
|
|
11
|
+
`);return}i.info(`No cache directory at ${e}`);return}const t=await $(e),o=t.reduce((s,a)=>s+a.sizeBytes,0);if(r==="json"){process.stdout.write(`${JSON.stringify({directory:e,exists:!0,totalBytes:o,totalCount:t.length})}
|
|
12
|
+
`);return}i.info(`Cache directory: ${e}`),i.info(`Entries: ${String(t.length)}`),i.info(`Total size: ${w(o,{decimals:1,space:!1})}`)},"runSize"),ke=f(async e=>{const r=[],t=f(async o=>{const s=(await j(o,{withFileTypes:!0})).toSorted((a,n)=>a.name.localeCompare(n.name));for(const a of s){const n=C(o,a.name);if(a.isDirectory()){await t(n);continue}if(!a.isFile())continue;const[h,m]=await Promise.all([D(n),F(n)]);r.push({hash:m?.hash??"",mode:h.mode&4095,mtimeMs:h.mtimeMs,relativePath:te(e,n).replaceAll("\\","/"),sizeBytes:h.size})}},"walk");return await t(e),r},"walkAndDigest"),ve=f((e,r)=>Math.floor(e/1e3)===Math.floor(r/1e3),"compareSecondsTruncated"),J=16,Se=f(async(e,r)=>{const t=C(r,e.relativePath),o=await D(t).catch(()=>{});if(!o)return{expected:{hash:e.hash,mode:e.mode,mtimeMs:e.mtimeMs},issues:["missing"],relativePath:e.relativePath};const s=await F(t),a=[];(s?.hash??"")!==e.hash&&a.push("content");const n=o.mode&4095;if(process.platform!=="win32"&&n!==e.mode&&a.push("mode"),ve(o.mtimeMs,e.mtimeMs)||a.push("mtime"),a.length!==0)return{actual:{hash:s?.hash,mode:n,mtimeMs:o.mtimeMs},expected:{hash:e.hash,mode:e.mode,mtimeMs:e.mtimeMs},issues:a,relativePath:e.relativePath}},"computeFileDiff"),Ce=f(async(e,r,t)=>{const{cacheDirectories:o,format:s,workspaceRoot:a}=r;if(o.length===0){s==="json"?process.stdout.write(`${JSON.stringify({error:"no-cache-directory",taskId:e},void 0,2)}
|
|
13
|
+
`):i.error("No cache directory resolved — pass --cache-dir or run inside a workspace."),process.exitCode=1;return}let n,h;for(const u of o){const c=await new x({cacheDirectory:u,workspaceRoot:a}).getByTaskId(e);if(c){n=u,h=c;break}}if(!h||!n){s==="json"?process.stdout.write(`${JSON.stringify({error:"no-cached-entry",searchedCaches:o,taskId:e},void 0,2)}
|
|
14
|
+
`):i.error(`No cached entry found for task "${e}". Run it once before verifying.`),process.exitCode=1;return}const m=await X(C(Z(),"vis-cache-verify-"));try{if(!await new x({cacheDirectory:n,workspaceRoot:m}).restoreOutputs(h.hash)){s==="json"?process.stdout.write(`${JSON.stringify({error:"restore-failed",hash:h.hash,taskId:e},void 0,2)}
|
|
15
|
+
`):i.error(`Cache restore failed for ${e} (hash ${h.hash}).`),process.exitCode=1;return}const u=await ke(m);if(u.length===0){s==="json"?process.stdout.write(`${JSON.stringify({diffs:[],hash:h.hash,status:"no-outputs",taskId:e},void 0,2)}
|
|
16
|
+
`):i.info(`Cached entry for ${e} has no recorded outputs — nothing to verify.`);return}const c=Array.from({length:u.length});for(let l=0;l<u.length;l+=J){const p=u.slice(l,l+J),q=await Promise.all(p.map(async I=>Se(I,a)));for(const[I,Y]of q.entries())c[l+I]=Y}const d=c.filter(l=>l!==void 0);if(s==="json"){process.stdout.write(`${JSON.stringify({cachedFileCount:u.length,cacheDirectory:n,diffs:d,hash:h.hash,status:d.length===0?"ok":"drift",taskId:e},void 0,2)}
|
|
17
|
+
`),d.length>0&&(process.exitCode=1);return}if(i.info(`Verify ${e} (hash ${h.hash})`),t.info(""),t.info(` cache: ${n}`),t.info(` files: ${String(u.length)}`),d.length===0){t.info(""),i.success("Cache restore is faithful — all files match content, mode, and mtime.");return}t.info(` drift: ${String(d.length)} file(s)`),t.info("");for(const l of d){const p=l.issues.includes("missing")?"MISSING":l.issues.join(",").toUpperCase();t.info(` [${p}] ${l.relativePath}`),!l.issues.includes("missing")&&l.expected&&l.actual&&(l.issues.includes("content")&&(t.info(` expected hash: ${l.expected.hash||"(none)"}`),t.info(` actual hash: ${l.actual.hash??"(unreadable)"}`)),l.issues.includes("mode")&&(t.info(` expected mode: ${l.expected.mode.toString(8)}`),t.info(` actual mode: ${(l.actual.mode??0).toString(8)}`)),l.issues.includes("mtime")&&(t.info(` expected mtime: ${new Date(l.expected.mtimeMs).toISOString()}`),t.info(` actual mtime: ${l.actual.mtimeMs===void 0?"(unreadable)":new Date(l.actual.mtimeMs).toISOString()}`)))}process.exitCode=1}finally{await B(m,{force:!0,recursive:!0}).catch(()=>{})}},"runVerify"),U=f(e=>e==="task"||e==="ai"||e==="socket"||e==="all"?e:(e&&e.length>0&&i.warn(`Unknown --type value '${e}'; falling back to 'all'.`),"all"),"parseCacheTarget"),y=f((e,r)=>e==="all"||e===r,"includesTarget"),g=f(e=>e===void 0?null:new Date(e).toISOString(),"isoOrNull"),P=f((e,r)=>{i.info(`${e}:`),i.info(` Entries: ${String(r.entries)}`),i.info(` Total size: ${w(r.totalSizeBytes,{decimals:1,space:!1})}`),i.info(` Oldest: ${r.oldestEntry?new Date(r.oldestEntry).toISOString():"N/A"}`),i.info(` Newest: ${r.newestEntry?new Date(r.newestEntry).toISOString():"N/A"}`)},"printAuxStatsBlock"),xe=f(e=>e==="worktree"||e==="shared"||e==="all"?e:(e&&e.length>0&&i.warn(`Unknown --scope value '${e}'; falling back to 'shared'.`),"shared"),"parseScope"),k=f((e,r,t)=>{const 
o=e??process.cwd(),s=t??{},a=s.taskRunnerOptions??{},n=xe(r.scope),h=r.cacheDir,m=A(o,h,a.cacheDirectory,!1),u=A(o,h,a.cacheDirectory,s.sharedWorktreeCache);let c,d;switch(n){case"all":{c=u,d=u===m?[u]:[u,m];break}case"worktree":{c=m,d=[m];break}default:c=u,d=[u]}return{cacheDirectories:d,cacheDirectory:c,scope:n,sharedWorktreeCache:s.sharedWorktreeCache,workspaceRoot:o}},"resolveCacheDirectoryFromContext"),Oe=f(async({logger:e,options:r,visConfig:t,workspaceRoot:o})=>{const{cacheDirectories:s}=k(o,r,t),a=r.format??"table";for(const n of s)s.length>1&&i.info(`# ${n}`),await fe(n,a,e)},"cacheListExecute"),Ae=f(async({options:e,visConfig:r,workspaceRoot:t})=>{const o=U(e.type),s=!!e.dryRun;if(y(o,"task")){const{cacheDirectory:a,workspaceRoot:n}=k(t,e,r);await pe(a,n,{dryRun:s,force:!!e.force})}if(y(o,"ai"))if(s){const a=R();i.info(`Would clear ${String(a.entries)} cached AI response${a.entries===1?"":"s"}.`)}else le();if(y(o,"socket"))if(s){const a=z();i.info(`Would clear ${String(a.entries)} cached Socket.dev report${a.entries===1?"":"s"}.`)}else me()},"cacheCleanExecute"),Te=f(async({options:e,visConfig:r,workspaceRoot:t})=>{const{cacheDirectories:o,workspaceRoot:s}=k(t,e,r);for(const a of o)o.length>1&&i.info(`# ${a}`),await ye(a,s,{keepLast:typeof e.keepLast=="number"?e.keepLast:void 0,maxCacheAgeDays:typeof e.maxAgeDays=="number"?e.maxAgeDays:void 0,maxCacheSize:e.maxSize})},"cachePruneExecute"),V=f(e=>e??process.cwd(),"resolveWorkspaceRoot"),Je=f(async({argument:e,logger:r,options:t,workspaceRoot:o})=>{const s=e[0];if(!s){i.error("No task ID specified. Usage: vis cache why <project>:<target>"),process.exitCode=1;return}await ge(s,{format:t.format??"table",runId:t.run,workspaceRoot:V(o)},r)},"cacheWhyExecute"),Pe=f(async({argument:e,logger:r,options:t,workspaceRoot:o})=>{const s=e[0];if(!s){i.error("No task ID specified. 
Usage: vis cache hash <project>:<target>"),process.exitCode=1;return}await we(s,{format:t.format??"table",runId:t.run,workspaceRoot:V(o)},r)},"cacheHashExecute"),Le=f(async({options:e,visConfig:r,workspaceRoot:t})=>{const o=U(e.type);if((e.format??"table")==="json"){const s={};if(y(o,"task")){const{cacheDirectories:a}=k(t,e,r);s.task=await Promise.all(a.map(async n=>{const h=S(n),m=h?await $(n):[],u=m.reduce((c,d)=>c+d.sizeBytes,0);return{directory:n,entries:m.length,exists:h,newestEntry:g(m[0]?.mtimeMs),oldestEntry:g(m.at(-1)?.mtimeMs),totalBytes:u}}))}if(y(o,"ai")){const a=R();s.ai={entries:a.entries,newestEntry:g(a.newestEntry),oldestEntry:g(a.oldestEntry),totalBytes:a.totalSizeBytes}}if(y(o,"socket")){const a=z();s.socket={entries:a.entries,newestEntry:g(a.newestEntry),oldestEntry:g(a.oldestEntry),totalBytes:a.totalSizeBytes}}process.stdout.write(`${JSON.stringify(s,void 0,2)}
|
|
18
|
+
`);return}if(y(o,"task")){const{cacheDirectories:s}=k(t,e,r);for(const a of s)s.length>1&&i.info(`# ${a}`),await $e(a,"table")}y(o,"ai")&&P("AI response cache",R()),y(o,"socket")&&P("Socket.dev report cache",z())},"cacheSizeExecute"),Fe=f(async({argument:e,logger:r,options:t,visConfig:o,workspaceRoot:s})=>{const a=e[0];if(!a){i.error("No task ID specified. Usage: vis cache verify <project>:<target>"),process.exitCode=1;return}const{cacheDirectories:n,workspaceRoot:h}=k(s,t,o);await Ce(a,{cacheDirectories:n,format:t.format??"table",workspaceRoot:h},r)},"cacheVerifyExecute");export{Ae as cacheCleanExecute,Pe as cacheHashExecute,Oe as cacheListExecute,Te as cachePruneExecute,Le as cacheSizeExecute,Fe as cacheVerifyExecute,Je as cacheWhyExecute,le as clearAiCacheSafe,me as clearSocketCacheSafe,$ as collectCacheEntries,de as formatAge,pe as runClean,we as runHash,fe as runList,ye as runPrune,$e as runSize,Ce as runVerify,ge as runWhy};
|