@visulima/vis 1.0.0-alpha.10 → 1.0.0-alpha.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +95 -42
- package/LICENSE.md +213 -0
- package/README.md +8 -4
- package/dist/bin.js +9 -1
- package/dist/config/index.d.ts +1818 -0
- package/dist/config/index.js +2 -0
- package/dist/generate/index.d.ts +1 -1
- package/dist/generate/index.js +3 -1
- package/dist/packem_chunks/applyDefaults.js +336 -0
- package/dist/packem_chunks/bin.js +9554 -64
- package/dist/packem_chunks/doctor-probe.js +112 -0
- package/dist/packem_chunks/fix.js +229 -48
- package/dist/packem_chunks/handler.js +99 -1
- package/dist/packem_chunks/handler10.js +53 -1
- package/dist/packem_chunks/handler11.js +32 -1
- package/dist/packem_chunks/handler12.js +100 -2
- package/dist/packem_chunks/handler13.js +25 -1
- package/dist/packem_chunks/handler14.js +916 -5
- package/dist/packem_chunks/handler15.js +206 -1
- package/dist/packem_chunks/handler16.js +122 -18
- package/dist/packem_chunks/handler17.js +13 -1
- package/dist/packem_chunks/handler18.js +106 -1
- package/dist/packem_chunks/handler19.js +19 -1
- package/dist/packem_chunks/handler2.js +75 -1
- package/dist/packem_chunks/handler20.js +29 -1
- package/dist/packem_chunks/handler21.js +222 -1
- package/dist/packem_chunks/handler22.js +237 -5
- package/dist/packem_chunks/handler23.js +101 -1
- package/dist/packem_chunks/handler24.js +110 -1
- package/dist/packem_chunks/handler25.js +402 -5
- package/dist/packem_chunks/handler26.js +13 -1
- package/dist/packem_chunks/handler27.js +63 -3
- package/dist/packem_chunks/handler28.js +34 -1
- package/dist/packem_chunks/handler29.js +458 -7
- package/dist/packem_chunks/handler3.js +95 -2
- package/dist/packem_chunks/handler30.js +168 -21
- package/dist/packem_chunks/handler31.js +530 -3
- package/dist/packem_chunks/handler32.js +214 -2
- package/dist/packem_chunks/handler33.js +119 -24
- package/dist/packem_chunks/handler34.js +630 -2
- package/dist/packem_chunks/handler35.js +283 -19
- package/dist/packem_chunks/handler36.js +521 -407
- package/dist/packem_chunks/handler37.js +762 -22
- package/dist/packem_chunks/handler38.js +989 -22
- package/dist/packem_chunks/handler39.js +574 -22
- package/dist/packem_chunks/handler4.js +90 -4
- package/dist/packem_chunks/handler40.js +1685 -3
- package/dist/packem_chunks/handler41.js +1088 -10
- package/dist/packem_chunks/handler42.js +785 -141
- package/dist/packem_chunks/handler43.js +2658 -42
- package/dist/packem_chunks/handler44.js +3886 -3
- package/dist/packem_chunks/handler45.js +2568 -21
- package/dist/packem_chunks/handler46.js +3769 -0
- package/dist/packem_chunks/handler47.js +1491 -0
- package/dist/packem_chunks/handler5.js +174 -2
- package/dist/packem_chunks/handler6.js +95 -13
- package/dist/packem_chunks/handler7.js +115 -8
- package/dist/packem_chunks/handler8.js +12 -1
- package/dist/packem_chunks/handler9.js +29 -1
- package/dist/packem_chunks/heal-accept.js +522 -0
- package/dist/packem_chunks/heal.js +673 -0
- package/dist/packem_chunks/index.js +873 -7
- package/dist/packem_chunks/loader.js +23 -1
- package/dist/packem_shared/VisUpdateApp-D-Yz_wvg.js +1316 -0
- package/dist/packem_shared/_commonjsHelpers-BqLXS_qQ.js +5 -0
- package/dist/packem_shared/ai-analysis-CHeB1joD.js +367 -0
- package/dist/packem_shared/ai-cache-Be_jexe4.js +142 -0
- package/dist/packem_shared/ai-fix-B9iQVcD2.js +379 -0
- package/dist/packem_shared/cache-directory-2qvs4goY.js +98 -0
- package/dist/packem_shared/catalog-BJTtyi-O.js +1371 -0
- package/dist/packem_shared/dependency-scan-A0KSklpG.js +188 -0
- package/dist/packem_shared/docker-2iZzc280.js +181 -0
- package/dist/packem_shared/failure-log-Cz3Z4SKL.js +100 -0
- package/dist/packem_shared/flakiness-goTxXuCX.js +180 -0
- package/dist/packem_shared/otel-DCvqCTz_.js +158 -0
- package/dist/packem_shared/otelPlugin-DFaLDvJf.js +3 -0
- package/dist/packem_shared/registry-CbqXI0rc.js +272 -0
- package/dist/packem_shared/run-summary-utils-PVMl4aIh.js +130 -0
- package/dist/packem_shared/runtime-check-Cobi3p6l.js +127 -0
- package/dist/packem_shared/selectors-SM69TfqC.js +194 -0
- package/dist/packem_shared/symbols-Ta7g2nU-.js +14 -0
- package/dist/packem_shared/toolchain-BdZd9eBi.js +975 -0
- package/dist/packem_shared/typosquats-C-bCh3PX.js +1210 -0
- package/dist/packem_shared/use-measured-height-CNP0vT4M.js +20 -0
- package/dist/packem_shared/utils-CthVdBPS.js +40 -0
- package/dist/packem_shared/xxh3-Ck8mXNg1.js +239 -0
- package/index.js +727 -555
- package/package.json +35 -17
- package/schemas/project.schema.json +8 -10
- package/schemas/vis-config.schema.json +132 -8
- package/skills/vis/SKILL.md +96 -0
- package/templates/buildkite-ci/.buildkite/pipeline.yml.tera +85 -0
- package/templates/buildkite-ci/template.yml +20 -0
- package/dist/errors/index.d.ts +0 -26
- package/dist/errors/index.js +0 -1
- package/dist/packem_chunks/config.js +0 -2
- package/dist/packem_shared/VisConfigCycleError-CAYNC7d-.js +0 -1
- package/dist/packem_shared/VisConfigError-B5LP1zRf.js +0 -1
- package/dist/packem_shared/VisConfigLoadError-CeqBSd2Z.js +0 -2
- package/dist/packem_shared/VisConfigNotFoundError-DZ9KC527.js +0 -5
- package/dist/packem_shared/VisUpdateApp-D-L4_-Iu.js +0 -1
- package/dist/packem_shared/_commonjsHelpers-D6W6KoPK.js +0 -1
- package/dist/packem_shared/ai-analysis-CGuy7dfE.js +0 -67
- package/dist/packem_shared/ai-cache-Bynt6Y9x.js +0 -1
- package/dist/packem_shared/cache-directory-D72ZEag2.js +0 -1
- package/dist/packem_shared/catalog-BVPerCwG.js +0 -12
- package/dist/packem_shared/dependency-scan-Du0tBu64.js +0 -2
- package/dist/packem_shared/docker-BcfqH4Av.js +0 -2
- package/dist/packem_shared/failure-log-DqYen0LC.js +0 -2
- package/dist/packem_shared/flakiness-DSIHZGBT.js +0 -1
- package/dist/packem_shared/run-summary-utils-C24Aaf9E.js +0 -1
- package/dist/packem_shared/runtime-check-CGHal8SO.js +0 -1
- package/dist/packem_shared/selectors-CfH9ZY08.js +0 -3
- package/dist/packem_shared/symbols-CQmER5MT.js +0 -1
- package/dist/packem_shared/target-merge-DNa-6eWu.js +0 -1
- package/dist/packem_shared/toolchain-DQfTQY8E.js +0 -5
- package/dist/packem_shared/typosquats-DOR8izpX.js +0 -1
- package/dist/packem_shared/use-measured-height-DjYgUOKk.js +0 -1
- package/dist/packem_shared/utils-DrNg0XTR.js +0 -1
- package/dist/packem_shared/xxh3-DrAUNq4n.js +0 -1
|
@@ -1,5 +1,916 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
1
|
+
// Bundler-generated CommonJS interop shim. Built-in modules are resolved
// through process.getBuiltinModule where the running Node.js supports it,
// with createRequire as the fallback for older runtimes.
import { createRequire as __cjs_createRequire } from "node:module";

const __cjs_require = __cjs_createRequire(import.meta.url);

// Resolve the process object via globalThis first; falls back to the bare
// `process` binding (NOTE(review): this fallback would throw in a runtime
// with no `process` global at all — presumably Node-only is assumed).
const __cjs_getProcess = typeof globalThis !== "undefined" && typeof globalThis.process !== "undefined" ? globalThis.process : process;

// Load a built-in module by specifier (e.g. "node:fs/promises").
const __cjs_getBuiltinModule = (module) => {
  // Check if we're in Node.js and version supports getBuiltinModule
  if (typeof __cjs_getProcess !== "undefined" && __cjs_getProcess.versions && __cjs_getProcess.versions.node) {
    const [major, minor] = __cjs_getProcess.versions.node.split(".").map(Number);
    // Node.js 20.16.0+ and 22.3.0+
    if (major > 22 || (major === 22 && minor >= 3) || (major === 20 && minor >= 16)) {
      return __cjs_getProcess.getBuiltinModule(module);
    }
  }
  // Fallback to createRequire
  return __cjs_require(module);
};

// Built-ins used throughout this module, loaded through the shim above.
const {
  readdir,
  stat,
  realpath,
  rm,
  mkdtemp
} = __cjs_getBuiltinModule("node:fs/promises");
const {
  tmpdir
} = __cjs_getBuiltinModule("node:os");
const {
  createInterface
} = __cjs_getBuiltinModule("node:readline");
|
|
33
|
+
import { isAccessibleSync } from '@visulima/fs';
|
|
34
|
+
import { formatBytes } from '@visulima/humanizer';
|
|
35
|
+
import { join, relative } from '@visulima/path';
|
|
36
|
+
import { Cache, readLastRunSummary, parseCacheSize, getLastRunSummaryPath, digestFile } from '@visulima/task-runner';
|
|
37
|
+
import { g as getCacheStats, c as clearCache } from '../packem_shared/ai-cache-Be_jexe4.js';
|
|
38
|
+
import { i as isCacheDirectoryInsideWorkspace, r as resolveSharedCacheDirectory } from '../packem_shared/cache-directory-2qvs4goY.js';
|
|
39
|
+
import { p as pail, u as getSocketCacheStats, v as clearSocketCache } from './bin.js';
|
|
40
|
+
import { r as readRunSummaryById, f as findTaskInSummary, a as readPreviousRunSummary, d as diffHashDetails } from '../packem_shared/run-summary-utils-PVMl4aIh.js';
|
|
41
|
+
|
|
42
|
+
/**
 * Recursively total the byte size of every regular file under `directory`.
 * Symlinks and other special entries are ignored; an unreadable or missing
 * directory contributes 0 (errors are deliberately swallowed).
 */
const sumDirectorySize = async (directory) => {
  let bytes = 0;
  try {
    for (const dirent of await readdir(directory, { withFileTypes: true })) {
      const entryPath = join(directory, dirent.name);
      if (dirent.isDirectory()) {
        bytes += await sumDirectorySize(entryPath);
        continue;
      }
      if (!dirent.isFile()) {
        continue;
      }
      const { size } = await stat(entryPath);
      bytes += size;
    }
  } catch {
    // best-effort sizing: treat unreadable directories as empty
  }
  return bytes;
};
|
|
59
|
+
/**
 * List the first-level entry directories of a cache, newest first.
 * Each entry records the directory name (the cache hash), its mtime,
 * absolute path, and recursive size in bytes. Hidden names and plain
 * files are skipped; a missing or unreadable cache yields [].
 */
const collectCacheEntries = async (cacheDirectory) => {
  let names;
  try {
    names = await readdir(cacheDirectory);
  } catch {
    return [];
  }
  const collected = [];
  for (const name of names) {
    if (name.startsWith(".")) {
      continue; // dotfiles (locks, metadata) are not cache entries
    }
    const entryPath = join(cacheDirectory, name);
    try {
      const info = await stat(entryPath);
      if (info.isDirectory()) {
        collected.push({
          hash: name,
          mtimeMs: info.mtimeMs,
          path: entryPath,
          sizeBytes: await sumDirectorySize(entryPath)
        });
      }
    } catch {
      // entry vanished or is unreadable between readdir and stat — skip it
    }
  }
  // Newest entry first.
  return collected.sort((a, b) => b.mtimeMs - a.mtimeMs);
};
|
|
90
|
+
/**
 * Render the age of a timestamp as a compact single-unit string
 * ("45s", "12m", "3h", "9d"). The elapsed time is clamped at zero,
 * so a timestamp in the future renders as "0s".
 */
const formatAge = (mtimeMs, now = Date.now()) => {
  const elapsed = Math.floor((now - mtimeMs) / 1e3);
  const seconds = elapsed > 0 ? elapsed : 0;
  let value;
  let unit;
  if (seconds < 60) {
    value = seconds;
    unit = "s";
  } else if (seconds < 3600) {
    value = Math.floor(seconds / 60);
    unit = "m";
  } else if (seconds < 86400) {
    value = Math.floor(seconds / 3600);
    unit = "h";
  } else {
    value = Math.floor(seconds / 86400);
    unit = "d";
  }
  return `${String(value)}${unit}`;
};
|
|
103
|
+
/**
 * Ask a yes/no question (prompt goes to stderr so stdout stays clean for
 * machine output) and resolve with the user's answer. Only "y"/"yes",
 * case-insensitive and trimmed, count as yes; anything else — including
 * an empty answer — resolves false.
 */
const confirmPrompt = (question) => new Promise((resolve) => {
  const prompt = createInterface({ input: process.stdin, output: process.stderr });
  prompt.question(`${question} (y/N) `, (reply) => {
    prompt.close();
    const normalized = reply.trim().toLowerCase();
    resolve(normalized === "y" || normalized === "yes");
  });
});
|
|
111
|
+
// `cache list`: print every cache entry (hash, size, age) either as
// pretty-printed JSON on stdout (format === "json") or as an aligned
// table through `logger`. `pail` carries the status/summary lines.
const runList = async (cacheDirectory, format, logger) => {
  // Missing directory: JSON callers still get a well-formed empty payload.
  if (!isAccessibleSync(cacheDirectory)) {
    if (format === "json") {
      process.stdout.write(`${JSON.stringify({ directory: cacheDirectory, entries: [], totalBytes: 0, totalCount: 0 }, void 0, 2)}
`);
      return;
    }
    pail.info(`No cache directory found at ${cacheDirectory}`);
    return;
  }
  const entries = await collectCacheEntries(cacheDirectory);
  // Directory exists but holds no entries: mirror the empty JSON shape above.
  if (entries.length === 0) {
    if (format === "json") {
      process.stdout.write(`${JSON.stringify({ directory: cacheDirectory, entries: [], totalBytes: 0, totalCount: 0 }, void 0, 2)}
`);
      return;
    }
    pail.info(`Cache directory is empty: ${cacheDirectory}`);
    return;
  }
  const totalBytes = entries.reduce((sum, entry) => sum + entry.sizeBytes, 0);
  if (format === "json") {
    // Snapshot "now" once so every entry's ageMs is relative to the same instant.
    const now = Date.now();
    process.stdout.write(
      `${JSON.stringify(
        {
          directory: cacheDirectory,
          entries: entries.map((entry) => {
            return {
              ageMs: now - entry.mtimeMs,
              hash: entry.hash,
              mtimeIso: new Date(entry.mtimeMs).toISOString(),
              sizeBytes: entry.sizeBytes
            };
          }),
          totalBytes,
          totalCount: entries.length
        },
        void 0,
        2
      )}
`
    );
    return;
  }
  // Human-readable table output.
  pail.info(`Cache directory: ${cacheDirectory}`);
  pail.info(`Entries: ${String(entries.length)} (${formatBytes(totalBytes, { decimals: 1, space: false })})`);
  logger.info("");
  // Snapshot render time so all rows show ages relative to the same instant.
  const renderedAt = Date.now();
  const rows = entries.map((entry) => {
    return {
      age: formatAge(entry.mtimeMs, renderedAt),
      hash: entry.hash.slice(0, 12),
      size: formatBytes(entry.sizeBytes, { decimals: 1, space: false })
    };
  });
  // Column widths: at least as wide as the header label, grown to fit rows.
  const hashWidth = Math.max(4, ...rows.map((r) => r.hash.length));
  const sizeWidth = Math.max(4, ...rows.map((r) => r.size.length));
  const ageWidth = Math.max(3, ...rows.map((r) => r.age.length));
  logger.info(` ${"hash".padEnd(hashWidth)} ${"size".padEnd(sizeWidth)} ${"age".padEnd(ageWidth)}`);
  logger.info(` ${"-".repeat(hashWidth)} ${"-".repeat(sizeWidth)} ${"-".repeat(ageWidth)}`);
  for (const row of rows) {
    logger.info(` ${row.hash.padEnd(hashWidth)} ${row.size.padEnd(sizeWidth)} ${row.age.padEnd(ageWidth)}`);
  }
};
|
|
176
|
+
/**
 * Clear the cached AI responses, logging how many entries were removed.
 * Failures are downgraded to a warning so cache cleanup never aborts the
 * surrounding command.
 */
const clearAiCacheSafe = () => {
  try {
    const removed = clearCache();
    if (removed > 0) {
      const suffix = removed === 1 ? "" : "s";
      pail.info(`Cleared ${String(removed)} cached AI response${suffix}.`);
    }
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    pail.warn(`Failed to clear AI response cache: ${reason}`);
  }
};
|
|
186
|
+
/**
 * Clear the cached Socket.dev reports, logging how many entries were
 * removed. Failures are downgraded to a warning so cache cleanup never
 * aborts the surrounding command.
 */
const clearSocketCacheSafe = () => {
  try {
    const removed = clearSocketCache();
    if (removed > 0) {
      const suffix = removed === 1 ? "" : "s";
      pail.info(`Cleared ${String(removed)} cached Socket.dev report${suffix}.`);
    }
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    pail.warn(`Failed to clear Socket.dev cache: ${reason}`);
  }
};
|
|
196
|
+
// `cache clean`: delete the whole cache directory. Several guard rails
// protect against destructive mistakes: --dry-run reports without deleting,
// a realpath check refuses to delete the workspace root itself, and an
// out-of-workspace cache requires --force or an interactive confirmation.
const runClean = async (cacheDirectory, workspaceRoot, options) => {
  if (!isAccessibleSync(cacheDirectory)) {
    pail.info(`No cache directory to clean at ${cacheDirectory}`);
    return;
  }
  // Dry run: only report what would be removed.
  if (options.dryRun) {
    const entries = await collectCacheEntries(cacheDirectory);
    const totalBytes = entries.reduce((sum, entry) => sum + entry.sizeBytes, 0);
    pail.info(
      `Would remove ${String(entries.length)} cache entr${entries.length === 1 ? "y" : "ies"} (${formatBytes(totalBytes, { decimals: 1, space: false })}) from ${cacheDirectory}`
    );
    return;
  }
  const insideWorkspace = isCacheDirectoryInsideWorkspace(cacheDirectory, workspaceRoot);
  // Safety check: resolve symlinks and refuse if the cache directory IS the
  // workspace root. realpath failures are ignored (path may not resolve).
  try {
    const realCache = await realpath(cacheDirectory);
    const realWorkspace = await realpath(workspaceRoot);
    if (realCache === realWorkspace) {
      pail.error("Refusing to delete the workspace root. The cache directory resolved to the same path as the workspace.");
      process.exitCode = 1;
      return;
    }
  } catch {
  }
  // Deleting outside the workspace is riskier: require --force, or an
  // interactive yes when stdin is a TTY.
  if (!insideWorkspace && !options.force) {
    pail.warn(`Cache directory is outside the workspace root: ${cacheDirectory}`);
    pail.warn("This will recursively delete the entire directory, including anything stored there by other tools.");
    if (!process.stdin.isTTY) {
      pail.error("Refusing to clean an out-of-workspace cache without --force (stdin is not a TTY).");
      process.exitCode = 1;
      return;
    }
    const confirmed = await confirmPrompt(" Continue?");
    if (!confirmed) {
      pail.info("Aborted.");
      return;
    }
  }
  // Inside the workspace, delegate to Cache.clear(); outside, remove the
  // directory tree directly.
  if (insideWorkspace) {
    const cache = new Cache({ cacheDirectory, workspaceRoot });
    await cache.clear();
  } else {
    await rm(cacheDirectory, { force: true, recursive: true });
  }
  pail.success(`Cleared cache: ${cacheDirectory}`);
};
|
|
242
|
+
// `cache prune`: remove cache entries exceeding the configured limits
// (--max-age-days, --keep-last, --max-size). Option values are validated
// up front; the keep-last trim happens here while age/size limits are
// delegated to Cache.removeOldEntries(). Before/after scans report how
// much was reclaimed.
const runPrune = async (cacheDirectory, workspaceRoot, options) => {
  if (!isAccessibleSync(cacheDirectory)) {
    pail.info(`No cache directory to prune at ${cacheDirectory}`);
    return;
  }
  // --max-age-days must be a finite number >= 0 when given.
  if (options.maxCacheAgeDays !== void 0 && (!Number.isFinite(options.maxCacheAgeDays) || options.maxCacheAgeDays < 0)) {
    pail.error(`Invalid --max-age-days value: expected a finite number >= 0, got ${String(options.maxCacheAgeDays)}`);
    process.exitCode = 1;
    return;
  }
  // --keep-last must be a non-negative integer when given.
  if (options.keepLast !== void 0 && (!Number.isFinite(options.keepLast) || options.keepLast < 0 || !Number.isInteger(options.keepLast))) {
    pail.error(`Invalid --keep-last value: expected a non-negative integer, got ${String(options.keepLast)}`);
    process.exitCode = 1;
    return;
  }
  // --max-size is a human-readable size string; parse and reject <= 0.
  if (options.maxCacheSize !== void 0) {
    let parsedBytes;
    try {
      parsedBytes = parseCacheSize(options.maxCacheSize);
    } catch (error) {
      pail.error(`Invalid --max-size value: ${error instanceof Error ? error.message : String(error)}`);
      process.exitCode = 1;
      return;
    }
    if (!Number.isFinite(parsedBytes) || parsedBytes <= 0) {
      pail.error(`Invalid --max-size value: expected a positive size, got "${options.maxCacheSize}" (${String(parsedBytes)} bytes)`);
      process.exitCode = 1;
      return;
    }
  }
  // Convert days to milliseconds for the Cache API.
  const maxCacheAge = options.maxCacheAgeDays === void 0 ? void 0 : options.maxCacheAgeDays * 24 * 60 * 60 * 1e3;
  const before = await collectCacheEntries(cacheDirectory);
  const beforeBytes = before.reduce((sum, entry) => sum + entry.sizeBytes, 0);
  // keep-last: entries are sorted newest-first, so everything past index
  // keepLast is stale and removed in parallel.
  if (options.keepLast !== void 0 && before.length > options.keepLast) {
    const stale = before.slice(options.keepLast);
    await Promise.all(stale.map((entry) => rm(entry.path, { force: true, recursive: true })));
  }
  // Age/size-based pruning is handled by the Cache implementation.
  const cache = new Cache({
    cacheDirectory,
    maxCacheAge,
    maxCacheSize: options.maxCacheSize,
    workspaceRoot
  });
  await cache.removeOldEntries();
  // Re-scan to report how many entries and bytes were reclaimed.
  const after = await collectCacheEntries(cacheDirectory);
  const afterBytes = after.reduce((sum, entry) => sum + entry.sizeBytes, 0);
  const removed = before.length - after.length;
  const reclaimedBytes = beforeBytes - afterBytes;
  if (removed <= 0) {
    pail.info("Nothing to prune — all entries are within the configured limits.");
    return;
  }
  pail.success(`Pruned ${String(removed)} entr${removed === 1 ? "y" : "ies"}, freed ${formatBytes(reclaimedBytes, { decimals: 1, space: false })}.`);
};
|
|
296
|
+
// Hashes are long; only a readable prefix is shown in human output.
const HASH_DISPLAY_PREFIX = 16;

/** Shorten `value` to HASH_DISPLAY_PREFIX characters, appending "…" when truncated. */
const truncateHash = (value) => {
  if (value.length <= HASH_DISPLAY_PREFIX) {
    return value;
  }
  return `${value.slice(0, HASH_DISPLAY_PREFIX)}…`;
};
|
|
298
|
+
/**
 * Print one hash-details bucket (e.g. "nodes") as an indented
 * key → truncated-hash listing via `logger`. A missing or empty bucket
 * produces no output; keys are sorted for stable, diffable output.
 */
const renderHashDetailsBucket = (label, bucket, logger) => {
  const pairs = Object.entries(bucket ?? {});
  if (pairs.length === 0) {
    return;
  }
  logger.info(` ${label}:`);
  pairs.sort(([leftKey], [rightKey]) => leftKey.localeCompare(rightKey));
  for (const [key, value] of pairs) {
    logger.info(` ${key.padEnd(40)} ${truncateHash(value)}`);
  }
};
|
|
309
|
+
// `cache why <taskId>`: explain why a task's cache hash changed by diffing
// its recorded hash inputs against the previous run. Reads the last run
// summary (or a specific one via --run-id), locates the task, and prints
// either a JSON payload or a human-readable +/~/- change listing.
const runWhy = async (taskId, options, logger) => {
  const { format, runId, workspaceRoot } = options;
  // No runId → latest summary; otherwise load the requested one.
  const summary = runId === void 0 ? await readLastRunSummary(workspaceRoot) : await readRunSummaryById(workspaceRoot, runId);
  if (!summary) {
    if (format === "json") {
      process.stdout.write(`${JSON.stringify({ error: "no-summary", runId: runId ?? null, taskId }, void 0, 2)}
`);
      process.exitCode = 1;
      return;
    }
    if (runId === void 0) {
      pail.error("No previous run summary found. Run a task first to populate `.task-runner/last-summary.json`.");
    } else {
      pail.error(`Run summary "${runId}" not found in .task-runner/runs/.`);
    }
    process.exitCode = 1;
    return;
  }
  const task = findTaskInSummary(summary, taskId);
  if (!task) {
    if (format === "json") {
      process.stdout.write(`${JSON.stringify({ error: "task-not-in-summary", runId: summary.id, taskId }, void 0, 2)}
`);
      process.exitCode = 1;
      return;
    }
    pail.error(`Task "${taskId}" was not part of run ${summary.id}.`);
    pail.info(`Tasks in this run: ${summary.tasks.map((t) => t.taskId).join(", ") || "(none)"}`);
    process.exitCode = 1;
    return;
  }
  // Diff this run's hash inputs against the same task in the previous run.
  const previousSummary = await readPreviousRunSummary(workspaceRoot, summary.id);
  const previousTask = previousSummary ? findTaskInSummary(previousSummary, taskId) : void 0;
  const diff = diffHashDetails(task.hashDetails, previousTask?.hashDetails);
  if (format === "json") {
    process.stdout.write(
      `${JSON.stringify(
        {
          diff,
          previousRunId: previousSummary?.id ?? null,
          previousTask: previousTask ? {
            cacheStatus: previousTask.cacheStatus,
            hash: previousTask.hash ?? null,
            hashDetails: previousTask.hashDetails ?? null
          } : null,
          runId: summary.id,
          task: {
            cacheStatus: task.cacheStatus,
            hash: task.hash ?? null,
            hashDetails: task.hashDetails ?? null,
            taskId: task.taskId
          }
        },
        void 0,
        2
      )}
`
    );
    return;
  }
  // Human-readable report.
  pail.info(`Why ${taskId}? (run ${summary.id})`);
  logger.info("");
  logger.info(` status: ${task.cacheStatus}`);
  logger.info(` hash: ${task.hash ?? "(none)"}`);
  if (previousTask) {
    logger.info(` prev: ${previousTask.hash ?? "(none)"} [run ${previousSummary?.id ?? "?"}]`);
  } else {
    logger.info(` prev: (no prior run found)`);
  }
  logger.info("");
  if (!previousTask) {
    pail.info("No previous run to diff against — first time this task was recorded.");
    return;
  }
  // "No changes" means every diff bucket (command, nodes, runtime,
  // implicitDeps) is empty.
  const noChanges = !diff.commandChanged && diff.nodes.added.length === 0 && diff.nodes.changed.length === 0 && diff.nodes.removed.length === 0 && diff.runtime.added.length === 0 && diff.runtime.changed.length === 0 && diff.runtime.removed.length === 0 && diff.implicitDeps.added.length === 0 && diff.implicitDeps.changed.length === 0 && diff.implicitDeps.removed.length === 0;
  if (noChanges) {
    pail.success("No hash inputs changed since the previous run.");
    return;
  }
  logger.info("Hash inputs that changed since the previous run:");
  logger.info("");
  if (diff.commandChanged) {
    logger.info(" command: changed");
  }
  // Render each non-empty bucket with +/~/- markers for added/changed/removed.
  for (const bucket of ["nodes", "runtime", "implicitDeps"]) {
    const bucketDiff = diff[bucket];
    if (bucketDiff.added.length === 0 && bucketDiff.changed.length === 0 && bucketDiff.removed.length === 0) {
      continue;
    }
    logger.info(` ${bucket}:`);
    for (const key of bucketDiff.added) {
      logger.info(` + ${key}`);
    }
    for (const key of bucketDiff.changed) {
      logger.info(` ~ ${key}`);
    }
    for (const key of bucketDiff.removed) {
      logger.info(` - ${key}`);
    }
  }
  logger.info("");
  pail.info(`Last summary file: ${getLastRunSummaryPath(workspaceRoot)}`);
};
|
|
412
|
+
// `cache hash <taskId>`: show the cache hash and its input breakdown for a
// task from the last (or a specific) run summary, as JSON or as a
// human-readable listing. Shares the summary/task lookup flow with runWhy.
const runHash = async (taskId, options, logger) => {
  const { format, runId, workspaceRoot } = options;
  // No runId → latest summary; otherwise load the requested one.
  const summary = runId === void 0 ? await readLastRunSummary(workspaceRoot) : await readRunSummaryById(workspaceRoot, runId);
  if (!summary) {
    if (format === "json") {
      process.stdout.write(`${JSON.stringify({ error: "no-summary", runId: runId ?? null, taskId }, void 0, 2)}
`);
      process.exitCode = 1;
      return;
    }
    if (runId === void 0) {
      pail.error("No previous run summary found. Run a task first to populate `.task-runner/last-summary.json`.");
    } else {
      pail.error(`Run summary "${runId}" not found in .task-runner/runs/.`);
    }
    process.exitCode = 1;
    return;
  }
  const task = findTaskInSummary(summary, taskId);
  if (!task) {
    if (format === "json") {
      process.stdout.write(`${JSON.stringify({ error: "task-not-in-summary", runId: summary.id, taskId }, void 0, 2)}
`);
      process.exitCode = 1;
      return;
    }
    pail.error(`Task "${taskId}" was not part of run ${summary.id}.`);
    process.exitCode = 1;
    return;
  }
  if (format === "json") {
    process.stdout.write(
      `${JSON.stringify(
        {
          cacheStatus: task.cacheStatus,
          hash: task.hash ?? null,
          hashDetails: task.hashDetails ?? null,
          runId: summary.id,
          taskId: task.taskId
        },
        void 0,
        2
      )}
`
    );
    return;
  }
  // Human-readable report.
  pail.info(`Hash for ${taskId} (run ${summary.id})`);
  logger.info("");
  logger.info(` status: ${task.cacheStatus}`);
  logger.info(` hash: ${task.hash ?? "(none)"}`);
  if (task.hashDetails) {
    logger.info("");
    logger.info(` command: ${truncateHash(task.hashDetails.command)}`);
    renderHashDetailsBucket("nodes", task.hashDetails.nodes, logger);
    renderHashDetailsBucket("runtime", task.hashDetails.runtime, logger);
    renderHashDetailsBucket("implicitDeps", task.hashDetails.implicitDeps, logger);
  } else {
    logger.info("");
    pail.info("No hash details recorded for this task.");
  }
};
|
|
474
|
+
// `cache size`: report entry count and total byte size of the cache
// directory, as compact JSON on stdout or as pail info lines. Note the
// JSON here is intentionally unindented, unlike the other subcommands.
const runSize = async (cacheDirectory, format) => {
  if (!isAccessibleSync(cacheDirectory)) {
    if (format === "json") {
      process.stdout.write(`${JSON.stringify({ directory: cacheDirectory, exists: false, totalBytes: 0, totalCount: 0 })}
`);
      return;
    }
    pail.info(`No cache directory at ${cacheDirectory}`);
    return;
  }
  const entries = await collectCacheEntries(cacheDirectory);
  const totalBytes = entries.reduce((sum, entry) => sum + entry.sizeBytes, 0);
  if (format === "json") {
    process.stdout.write(
      `${JSON.stringify({
        directory: cacheDirectory,
        exists: true,
        totalBytes,
        totalCount: entries.length
      })}
`
    );
    return;
  }
  pail.info(`Cache directory: ${cacheDirectory}`);
  pail.info(`Entries: ${String(entries.length)}`);
  pail.info(`Total size: ${formatBytes(totalBytes, { decimals: 1, space: false })}`);
};
|
|
502
|
+
/**
 * Recursively collect every regular file under `root`, in deterministic
 * name-sorted order, recording its content hash, permission bits, mtime,
 * size, and forward-slash-normalized path relative to `root`.
 */
const walkAndDigest = async (root) => {
  const files = [];
  const visit = async (directory) => {
    const children = await readdir(directory, { withFileTypes: true });
    children.sort((a, b) => a.name.localeCompare(b.name));
    for (const child of children) {
      const childPath = join(directory, child.name);
      if (child.isDirectory()) {
        await visit(childPath);
      } else if (child.isFile()) {
        // Stat and hash concurrently — they are independent reads.
        const [info, digest] = await Promise.all([stat(childPath), digestFile(childPath)]);
        files.push({
          hash: digest?.hash ?? "",
          // eslint-disable-next-line no-bitwise -- low 12 bits hold the rwx triplet
          mode: info.mode & 4095,
          mtimeMs: info.mtimeMs,
          relativePath: relative(root, childPath).replaceAll("\\", "/"),
          sizeBytes: info.size
        });
      }
    }
  };
  await visit(root);
  return files;
};
|
|
529
|
+
// Some filesystems/archives only keep second precision, so mtimes are
// compared after truncating to whole seconds.
const compareSecondsTruncated = (a, b) => {
  const secondsA = Math.floor(a / 1e3);
  const secondsB = Math.floor(b / 1e3);
  return secondsA === secondsB;
};
// Cap on how many per-file diffs run concurrently during verification.
const VERIFY_DIFF_CONCURRENCY = 16;
|
|
531
|
+
// Compare one cached file record against its live counterpart in the
// workspace. Returns undefined when they match, otherwise an object listing
// the detected issues ("missing", "content", "mode", "mtime") with the
// expected and (when present) actual metadata.
const computeFileDiff = async (file, workspaceRoot) => {
  const livePath = join(workspaceRoot, file.relativePath);
  // A failed stat is treated as "file missing", not an error.
  const liveStat = await stat(livePath).catch(() => void 0);
  if (!liveStat) {
    return {
      expected: { hash: file.hash, mode: file.mode, mtimeMs: file.mtimeMs },
      issues: ["missing"],
      relativePath: file.relativePath
    };
  }
  const liveDigest = await digestFile(livePath);
  const issues = [];
  if ((liveDigest?.hash ?? "") !== file.hash) {
    issues.push("content");
  }
  // Low 12 bits hold the permission (rwx) triplet.
  const liveMode = liveStat.mode & 4095;
  // Permission bits are not meaningful on Windows, so skip the mode check there.
  if (process.platform !== "win32" && liveMode !== file.mode) {
    issues.push("mode");
  }
  if (!compareSecondsTruncated(liveStat.mtimeMs, file.mtimeMs)) {
    issues.push("mtime");
  }
  if (issues.length === 0) {
    return void 0;
  }
  return {
    actual: { hash: liveDigest?.hash, mode: liveMode, mtimeMs: liveStat.mtimeMs },
    expected: { hash: file.hash, mode: file.mode, mtimeMs: file.mtimeMs },
    issues,
    relativePath: file.relativePath
  };
};
|
|
563
|
+
/**
 * `vis cache verify <taskId>`: checks that a cached task entry restores
 * faithfully by re-materialising its outputs into a temp staging directory
 * and diffing every restored file against the live workspace copy.
 *
 * Sets `process.exitCode = 1` on any failure (no cache dir, no entry,
 * restore failure, or detected drift); prints either JSON or a human table
 * depending on `options.format`.
 *
 * @param {string} taskId - "<project>:<target>" task identifier to verify.
 * @param {{ cacheDirectories: string[], format: string, workspaceRoot: string }} options - Resolved verify options.
 * @param {{ info: Function }} logger - Line-oriented logger for table output.
 */
const runVerify = async (taskId, options, logger) => {
    const { cacheDirectories, format, workspaceRoot } = options;
    // Guard: nothing to search at all.
    if (cacheDirectories.length === 0) {
        if (format === "json") {
            process.stdout.write(`${JSON.stringify({ error: "no-cache-directory", taskId }, void 0, 2)}
`);
        } else {
            pail.error("No cache directory resolved — pass --cache-dir or run inside a workspace.");
        }
        process.exitCode = 1;
        return;
    }
    // First cache directory with an entry for this task wins.
    let cacheDirectory;
    let cached;
    for (const directory of cacheDirectories) {
        const found = await new Cache({ cacheDirectory: directory, workspaceRoot }).getByTaskId(taskId);
        if (found) {
            cacheDirectory = directory;
            cached = found;
            break;
        }
    }
    if (!cached || !cacheDirectory) {
        if (format === "json") {
            process.stdout.write(`${JSON.stringify({ error: "no-cached-entry", searchedCaches: cacheDirectories, taskId }, void 0, 2)}
`);
        } else {
            pail.error(`No cached entry found for task "${taskId}". Run it once before verifying.`);
        }
        process.exitCode = 1;
        return;
    }
    // Restore into an isolated temp dir so the live workspace is never touched.
    const stagingRoot = await mkdtemp(join(tmpdir(), "vis-cache-verify-"));
    try {
        const stagingCache = new Cache({ cacheDirectory, workspaceRoot: stagingRoot });
        const restored = await stagingCache.restoreOutputs(cached.hash);
        if (!restored) {
            if (format === "json") {
                process.stdout.write(`${JSON.stringify({ error: "restore-failed", hash: cached.hash, taskId }, void 0, 2)}
`);
            } else {
                pail.error(`Cache restore failed for ${taskId} (hash ${cached.hash}).`);
            }
            process.exitCode = 1;
            return;
        }
        const cachedFiles = await walkAndDigest(stagingRoot);
        // An entry with no outputs is not an error — there is just nothing to diff.
        if (cachedFiles.length === 0) {
            if (format === "json") {
                process.stdout.write(`${JSON.stringify({ diffs: [], hash: cached.hash, status: "no-outputs", taskId }, void 0, 2)}
`);
            } else {
                pail.info(`Cached entry for ${taskId} has no recorded outputs — nothing to verify.`);
            }
            return;
        }
        // Diff in bounded-concurrency chunks; slots preserve input ordering so
        // the report is stable regardless of which diffs settle first.
        const diffSlots = Array.from({ length: cachedFiles.length });
        for (let offset = 0; offset < cachedFiles.length; offset += VERIFY_DIFF_CONCURRENCY) {
            const chunk = cachedFiles.slice(offset, offset + VERIFY_DIFF_CONCURRENCY);
            const results = await Promise.all(chunk.map(async (file) => computeFileDiff(file, workspaceRoot)));
            for (const [index, result] of results.entries()) {
                diffSlots[offset + index] = result;
            }
        }
        // computeFileDiff returns undefined for matching files; keep only drift.
        const diffs = diffSlots.filter((slot) => slot !== void 0);
        if (format === "json") {
            process.stdout.write(
                `${JSON.stringify(
                    {
                        cacheDirectory,
                        cachedFileCount: cachedFiles.length,
                        diffs,
                        hash: cached.hash,
                        status: diffs.length === 0 ? "ok" : "drift",
                        taskId
                    },
                    void 0,
                    2
                )}
`
            );
            if (diffs.length > 0) {
                process.exitCode = 1;
            }
            return;
        }
        // Human-readable table output.
        pail.info(`Verify ${taskId} (hash ${cached.hash})`);
        logger.info("");
        logger.info(` cache: ${cacheDirectory}`);
        logger.info(` files: ${String(cachedFiles.length)}`);
        if (diffs.length === 0) {
            logger.info("");
            pail.success("Cache restore is faithful — all files match content, mode, and mtime.");
            return;
        }
        logger.info(` drift: ${String(diffs.length)} file(s)`);
        logger.info("");
        for (const diff of diffs) {
            // A missing file gets a dedicated tag; otherwise join the issue kinds.
            const tag = diff.issues.includes("missing") ? "MISSING" : diff.issues.join(",").toUpperCase();
            logger.info(` [${tag}] ${diff.relativePath}`);
            // Detail lines only make sense when both sides of the diff exist.
            if (!diff.issues.includes("missing") && diff.expected && diff.actual) {
                if (diff.issues.includes("content")) {
                    logger.info(` expected hash: ${diff.expected.hash || "(none)"}`);
                    logger.info(` actual hash: ${diff.actual.hash ?? "(unreadable)"}`);
                }
                if (diff.issues.includes("mode")) {
                    logger.info(` expected mode: ${diff.expected.mode.toString(8)}`);
                    logger.info(` actual mode: ${(diff.actual.mode ?? 0).toString(8)}`);
                }
                if (diff.issues.includes("mtime")) {
                    logger.info(` expected mtime: ${new Date(diff.expected.mtimeMs).toISOString()}`);
                    logger.info(` actual mtime: ${diff.actual.mtimeMs === void 0 ? "(unreadable)" : new Date(diff.actual.mtimeMs).toISOString()}`);
                }
            }
        }
        process.exitCode = 1;
    } finally {
        // Best-effort cleanup of the staging dir; never mask the primary result.
        await rm(stagingRoot, { force: true, recursive: true }).catch(() => {
        });
    }
};
|
|
684
|
+
/**
 * Normalises the raw `--type` CLI value to a known cache target.
 *
 * Valid targets pass through unchanged; an unknown non-empty value emits a
 * warning and degrades to "all"; an absent/empty value silently defaults to
 * "all".
 *
 * @param {string | undefined} raw - Raw `--type` option value.
 * @returns {"task" | "ai" | "socket" | "all"} Normalised target.
 */
const parseCacheTarget = (raw) => {
    switch (raw) {
        case "task":
        case "ai":
        case "socket":
        case "all": {
            return raw;
        }
        default: {
            if (raw && raw.length > 0) {
                pail.warn(`Unknown --type value '${raw}'; falling back to 'all'.`);
            }

            return "all";
        }
    }
};
|
|
693
|
+
// True when the user's selected target covers a concrete cache kind —
// either an exact match or the catch-all "all".
const includesTarget = (selected, kind) => {
    if (selected === "all") {
        return true;
    }

    return selected === kind;
};
|
|
694
|
+
/**
 * Converts an epoch-milliseconds timestamp to an ISO-8601 string, or `null`
 * when no timestamp is present.
 *
 * Uses `== null` so both `undefined` AND `null` count as "absent"; the
 * previous `=== void 0` check let a `null` input fall through to
 * `new Date(null)`, which silently produces the epoch
 * ("1970-01-01T00:00:00.000Z") instead of an empty value. `0` is a valid
 * timestamp and is still converted.
 *
 * @param {number | null | undefined} value - Epoch milliseconds, or nullish.
 * @returns {string | null} ISO timestamp, or `null` when absent.
 */
const isoOrNull = (value) => (value == null ? null : new Date(value).toISOString());
|
|
695
|
+
/**
 * Prints a short human-readable stats summary for an auxiliary cache
 * (AI responses / Socket.dev reports): entry count, total size, and the
 * oldest/newest entry timestamps ("N/A" when the cache is empty).
 *
 * @param {string} label - Heading printed above the stats.
 * @param {{ entries: number, totalSizeBytes: number, oldestEntry?: number, newestEntry?: number }} stats - Cache statistics.
 */
const printAuxStatsBlock = (label, stats) => {
    const oldest = stats.oldestEntry ? new Date(stats.oldestEntry).toISOString() : "N/A";
    const newest = stats.newestEntry ? new Date(stats.newestEntry).toISOString() : "N/A";

    pail.info(`${label}:`);
    pail.info(` Entries: ${String(stats.entries)}`);
    pail.info(` Total size: ${formatBytes(stats.totalSizeBytes, { decimals: 1, space: false })}`);
    pail.info(` Oldest: ${oldest}`);
    pail.info(` Newest: ${newest}`);
};
|
|
702
|
+
/**
 * Normalises the raw `--scope` CLI value to a known cache scope.
 *
 * Valid scopes pass through unchanged; an unknown non-empty value emits a
 * warning and degrades to "shared"; an absent/empty value silently defaults
 * to "shared".
 *
 * @param {string | undefined} raw - Raw `--scope` option value.
 * @returns {"worktree" | "shared" | "all"} Normalised scope.
 */
const parseScope = (raw) => {
    if (["worktree", "shared", "all"].includes(raw)) {
        return raw;
    }

    if (raw && raw.length > 0) {
        pail.warn(`Unknown --scope value '${raw}'; falling back to 'shared'.`);
    }

    return "shared";
};
|
|
711
|
+
/**
 * Resolves which on-disk cache directories a cache subcommand operates on,
 * from CLI options and the loaded vis configuration.
 *
 * Scope "worktree" targets only the worktree-local directory; "shared"
 * (the default) targets only the shared directory; "all" targets both,
 * deduplicated when they resolve to the same path. The "primary" directory
 * is the one single-directory commands should use.
 *
 * @param {string | undefined} workspaceRoot - Detected workspace root, if any.
 * @param {{ scope?: string, cacheDir?: string }} options - CLI options.
 * @param {object | undefined} visConfig - Loaded vis configuration, if any.
 * @returns {{ cacheDirectories: string[], cacheDirectory: string, scope: string, sharedWorktreeCache: unknown, workspaceRoot: string }}
 */
const resolveCacheDirectoryFromContext = (workspaceRoot, options, visConfig) => {
    const root = workspaceRoot ?? process.cwd();
    const config = visConfig ?? {};
    const runnerOptions = config.taskRunnerOptions ?? {};
    const scope = parseScope(options.scope);
    const cacheDirOption = options.cacheDir;

    // The worktree-local directory never honours the shared-worktree flag;
    // the shared directory does.
    const worktreeDirectory = resolveSharedCacheDirectory(root, cacheDirOption, runnerOptions.cacheDirectory, false);
    const sharedDirectory = resolveSharedCacheDirectory(root, cacheDirOption, runnerOptions.cacheDirectory, config.sharedWorktreeCache);

    // Default ("shared" scope): shared directory only.
    let primary = sharedDirectory;
    let directories = [sharedDirectory];

    if (scope === "worktree") {
        primary = worktreeDirectory;
        directories = [worktreeDirectory];
    } else if (scope === "all") {
        // Avoid listing the same path twice when both resolve identically.
        directories = sharedDirectory === worktreeDirectory ? [sharedDirectory] : [sharedDirectory, worktreeDirectory];
    }

    return {
        cacheDirectories: directories,
        cacheDirectory: primary,
        scope,
        sharedWorktreeCache: config.sharedWorktreeCache,
        workspaceRoot: root
    };
};
|
|
745
|
+
/**
 * `vis cache list` — lists task-cache entries for every resolved cache
 * directory, printing a `# <dir>` heading only when more than one directory
 * is being listed.
 *
 * @param {{ logger: object, options: object, visConfig?: object, workspaceRoot?: string }} context - CLI command context.
 */
const cacheListExecute = async ({ logger, options, visConfig, workspaceRoot: wsRoot }) => {
    const { cacheDirectories } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
    const format = options.format ?? "table";
    const showHeading = cacheDirectories.length > 1;

    for (const directory of cacheDirectories) {
        if (showHeading) {
            pail.info(`# ${directory}`);
        }

        await runList(directory, format, logger);
    }
};
|
|
755
|
+
/**
 * `vis cache clean` — clears the caches selected by `--type` (task, ai,
 * socket, or all). With `--dry-run`, the task cache delegates dry-run
 * handling to `runClean`, while the auxiliary caches only report how many
 * entries would be removed.
 *
 * @param {{ options: object, visConfig?: object, workspaceRoot?: string }} context - CLI command context.
 */
const cacheCleanExecute = async ({ options, visConfig, workspaceRoot: wsRoot }) => {
    const target = parseCacheTarget(options.type);
    const dryRun = Boolean(options.dryRun);

    if (includesTarget(target, "task")) {
        const { cacheDirectory, workspaceRoot } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);

        await runClean(cacheDirectory, workspaceRoot, {
            dryRun,
            force: Boolean(options.force)
        });
    }

    if (includesTarget(target, "ai")) {
        if (dryRun) {
            const { entries } = getCacheStats();

            pail.info(`Would clear ${String(entries)} cached AI response${entries === 1 ? "" : "s"}.`);
        } else {
            clearAiCacheSafe();
        }
    }

    if (includesTarget(target, "socket")) {
        if (dryRun) {
            const { entries } = getSocketCacheStats();

            pail.info(`Would clear ${String(entries)} cached Socket.dev report${entries === 1 ? "" : "s"}.`);
        } else {
            clearSocketCacheSafe();
        }
    }
};
|
|
782
|
+
/**
 * `vis cache prune` — prunes every resolved task-cache directory by age,
 * entry count, and/or total size. Non-numeric `--keep-last` / `--max-age-days`
 * values are dropped (passed as undefined) rather than coerced.
 *
 * @param {{ options: object, visConfig?: object, workspaceRoot?: string }} context - CLI command context.
 */
const cachePruneExecute = async ({ options, visConfig, workspaceRoot: wsRoot }) => {
    const { cacheDirectories, workspaceRoot } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
    const showHeading = cacheDirectories.length > 1;

    for (const directory of cacheDirectories) {
        if (showHeading) {
            pail.info(`# ${directory}`);
        }

        const pruneOptions = {
            keepLast: typeof options.keepLast === "number" ? options.keepLast : void 0,
            maxCacheAgeDays: typeof options.maxAgeDays === "number" ? options.maxAgeDays : void 0,
            maxCacheSize: options.maxSize
        };

        await runPrune(directory, workspaceRoot, pruneOptions);
    }
};
|
|
795
|
+
// Falls back to the process's current working directory when no workspace
// root was detected (nullish input).
const resolveWorkspaceRoot = (workspaceRoot) => {
    if (workspaceRoot === undefined || workspaceRoot === null) {
        return process.cwd();
    }

    return workspaceRoot;
};
|
|
796
|
+
/**
 * `vis cache why <taskId>` — explains a task's cache state. Fails with exit
 * code 1 when no task ID was given; otherwise delegates to `runWhy`.
 *
 * @param {{ argument: string[], logger: object, options: object, workspaceRoot?: string }} context - CLI command context.
 */
const cacheWhyExecute = async ({ argument, logger, options, workspaceRoot: wsRoot }) => {
    const [taskId] = argument;

    if (!taskId) {
        pail.error("No task ID specified. Usage: vis cache why <project>:<target>");
        process.exitCode = 1;

        return;
    }

    const whyOptions = {
        format: options.format ?? "table",
        runId: options.run,
        workspaceRoot: resolveWorkspaceRoot(wsRoot)
    };

    await runWhy(taskId, whyOptions, logger);
};
|
|
813
|
+
/**
 * `vis cache hash <taskId>` — prints a task's cache hash breakdown. Fails
 * with exit code 1 when no task ID was given; otherwise delegates to
 * `runHash`.
 *
 * @param {{ argument: string[], logger: object, options: object, workspaceRoot?: string }} context - CLI command context.
 */
const cacheHashExecute = async ({ argument, logger, options, workspaceRoot: wsRoot }) => {
    const [taskId] = argument;

    if (!taskId) {
        pail.error("No task ID specified. Usage: vis cache hash <project>:<target>");
        process.exitCode = 1;

        return;
    }

    const hashOptions = {
        format: options.format ?? "table",
        runId: options.run,
        workspaceRoot: resolveWorkspaceRoot(wsRoot)
    };

    await runHash(taskId, hashOptions, logger);
};
|
|
830
|
+
/**
 * `vis cache size` — reports size statistics for the caches selected by
 * `--type`, either as one JSON payload (keys: task/ai/socket, only for the
 * selected targets) or as human-readable blocks.
 *
 * @param {{ options: object, visConfig?: object, workspaceRoot?: string }} context - CLI command context.
 */
const cacheSizeExecute = async ({ options, visConfig, workspaceRoot: wsRoot }) => {
    const target = parseCacheTarget(options.type);
    const format = options.format ?? "table";
    if (format === "json") {
        const payload = {};
        if (includesTarget(target, "task")) {
            const { cacheDirectories } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
            // One stats record per resolved cache directory, scanned in parallel.
            payload.task = await Promise.all(
                cacheDirectories.map(async (directory) => {
                    const exists = isAccessibleSync(directory);
                    const entries = exists ? await collectCacheEntries(directory) : [];
                    const totalBytes = entries.reduce((sum, entry) => sum + entry.sizeBytes, 0);
                    return {
                        directory,
                        entries: entries.length,
                        exists,
                        // NOTE(review): assumes collectCacheEntries returns entries
                        // sorted newest-first (first = newest, last = oldest) — confirm.
                        newestEntry: isoOrNull(entries[0]?.mtimeMs),
                        oldestEntry: isoOrNull(entries.at(-1)?.mtimeMs),
                        totalBytes
                    };
                })
            );
        }
        if (includesTarget(target, "ai")) {
            const stats = getCacheStats();
            payload.ai = {
                entries: stats.entries,
                newestEntry: isoOrNull(stats.newestEntry),
                oldestEntry: isoOrNull(stats.oldestEntry),
                totalBytes: stats.totalSizeBytes
            };
        }
        if (includesTarget(target, "socket")) {
            const stats = getSocketCacheStats();
            payload.socket = {
                entries: stats.entries,
                newestEntry: isoOrNull(stats.newestEntry),
                oldestEntry: isoOrNull(stats.oldestEntry),
                totalBytes: stats.totalSizeBytes
            };
        }
        process.stdout.write(`${JSON.stringify(payload, void 0, 2)}
`);
        return;
    }
    // Table output: task-cache sizes per directory, then auxiliary caches.
    if (includesTarget(target, "task")) {
        const { cacheDirectories } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
        for (const directory of cacheDirectories) {
            // Heading only needed when more than one directory is reported.
            if (cacheDirectories.length > 1) {
                pail.info(`# ${directory}`);
            }
            await runSize(directory, "table");
        }
    }
    if (includesTarget(target, "ai")) {
        printAuxStatsBlock("AI response cache", getCacheStats());
    }
    if (includesTarget(target, "socket")) {
        printAuxStatsBlock("Socket.dev report cache", getSocketCacheStats());
    }
};
|
|
891
|
+
/**
 * `vis cache verify <taskId>` — CLI entry point that validates the task ID
 * argument, resolves the cache directories, and delegates to `runVerify`.
 * Fails with exit code 1 when no task ID was given.
 *
 * @param {{ argument: string[], logger: object, options: object, visConfig?: object, workspaceRoot?: string }} context - CLI command context.
 */
const cacheVerifyExecute = async ({
    argument,
    logger,
    options,
    visConfig,
    workspaceRoot: wsRoot
}) => {
    const [taskId] = argument;

    if (!taskId) {
        pail.error("No task ID specified. Usage: vis cache verify <project>:<target>");
        process.exitCode = 1;

        return;
    }

    const { cacheDirectories, workspaceRoot } = resolveCacheDirectoryFromContext(wsRoot, options, visConfig);
    const verifyOptions = {
        cacheDirectories,
        format: options.format ?? "table",
        workspaceRoot
    };

    await runVerify(taskId, verifyOptions, logger);
};
|
|
915
|
+
|
|
916
|
+
export { cacheCleanExecute, cacheHashExecute, cacheListExecute, cachePruneExecute, cacheSizeExecute, cacheVerifyExecute, cacheWhyExecute, clearAiCacheSafe, clearSocketCacheSafe, collectCacheEntries, formatAge, runClean, runHash, runList, runPrune, runSize, runVerify, runWhy };
|