docs-cache 0.4.3 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.mjs +13 -13
- package/dist/esm/api.d.ts +14 -0
- package/dist/esm/api.mjs +14 -0
- package/dist/esm/cache/cache-layout.d.ts +1 -0
- package/dist/esm/cache/cache-layout.mjs +12 -0
- package/dist/esm/cache/lock.d.ts +21 -0
- package/dist/esm/cache/lock.mjs +91 -0
- package/dist/esm/cache/manifest.d.ts +11 -0
- package/dist/esm/cache/manifest.mjs +68 -0
- package/dist/esm/cache/materialize.d.ts +26 -0
- package/dist/esm/cache/materialize.mjs +442 -0
- package/dist/esm/cache/targets.d.ts +19 -0
- package/dist/esm/cache/targets.mjs +66 -0
- package/dist/esm/cache/toc.d.ts +12 -0
- package/dist/esm/cache/toc.mjs +167 -0
- package/dist/esm/cli/exit-code.d.ts +11 -0
- package/dist/esm/cli/exit-code.mjs +5 -0
- package/dist/esm/cli/index.d.ts +5 -0
- package/dist/esm/cli/index.mjs +345 -0
- package/dist/esm/cli/live-output.d.ts +12 -0
- package/dist/esm/cli/live-output.mjs +30 -0
- package/dist/esm/cli/parse-args.d.ts +13 -0
- package/dist/esm/cli/parse-args.mjs +295 -0
- package/dist/esm/cli/run.d.ts +1 -0
- package/dist/esm/cli/run.mjs +2 -0
- package/dist/esm/cli/task-reporter.d.ts +32 -0
- package/dist/esm/cli/task-reporter.mjs +122 -0
- package/dist/esm/cli/types.d.ts +51 -0
- package/dist/esm/cli/types.mjs +0 -0
- package/dist/esm/cli/ui.d.ts +21 -0
- package/dist/esm/cli/ui.mjs +64 -0
- package/dist/esm/commands/add.d.ts +20 -0
- package/dist/esm/commands/add.mjs +81 -0
- package/dist/esm/commands/clean-git-cache.d.ts +10 -0
- package/dist/esm/commands/clean-git-cache.mjs +48 -0
- package/dist/esm/commands/clean.d.ts +10 -0
- package/dist/esm/commands/clean.mjs +27 -0
- package/dist/esm/commands/init.d.ts +19 -0
- package/dist/esm/commands/init.mjs +179 -0
- package/dist/esm/commands/prune.d.ts +11 -0
- package/dist/esm/commands/prune.mjs +52 -0
- package/dist/esm/commands/remove.d.ts +12 -0
- package/dist/esm/commands/remove.mjs +87 -0
- package/dist/esm/commands/status.d.ts +16 -0
- package/dist/esm/commands/status.mjs +78 -0
- package/dist/esm/commands/sync.d.ts +33 -0
- package/dist/esm/commands/sync.mjs +730 -0
- package/dist/esm/commands/verify.d.ts +11 -0
- package/dist/esm/commands/verify.mjs +120 -0
- package/dist/esm/config/index.d.ts +15 -0
- package/dist/esm/config/index.mjs +196 -0
- package/dist/esm/config/io.d.ts +30 -0
- package/dist/esm/config/io.mjs +112 -0
- package/dist/esm/config/schema.d.ts +171 -0
- package/dist/esm/config/schema.mjs +69 -0
- package/dist/esm/errors.d.ts +3 -0
- package/dist/esm/errors.mjs +2 -0
- package/dist/esm/git/cache-dir.d.ts +16 -0
- package/dist/esm/git/cache-dir.mjs +23 -0
- package/dist/esm/git/fetch-source.d.ts +19 -0
- package/dist/esm/git/fetch-source.mjs +477 -0
- package/dist/esm/git/redact.d.ts +1 -0
- package/dist/esm/git/redact.mjs +4 -0
- package/dist/esm/git/resolve-remote.d.ts +15 -0
- package/dist/esm/git/resolve-remote.mjs +87 -0
- package/dist/esm/git/resolve-repo.d.ts +5 -0
- package/dist/esm/git/resolve-repo.mjs +52 -0
- package/dist/esm/gitignore.d.ts +18 -0
- package/dist/esm/gitignore.mjs +80 -0
- package/dist/esm/paths.d.ts +8 -0
- package/dist/esm/paths.mjs +34 -0
- package/dist/esm/source-id.d.ts +1 -0
- package/dist/esm/source-id.mjs +29 -0
- package/dist/esm/types/sync.d.ts +25 -0
- package/dist/esm/types/sync.mjs +0 -0
- package/package.json +138 -91
- package/dist/chunks/add.mjs +0 -3
- package/dist/chunks/clean-git-cache.mjs +0 -2
- package/dist/chunks/clean.mjs +0 -2
- package/dist/chunks/init.mjs +0 -3
- package/dist/chunks/prune.mjs +0 -2
- package/dist/chunks/remove.mjs +0 -3
- package/dist/chunks/status.mjs +0 -2
- package/dist/chunks/sync.mjs +0 -9
- package/dist/chunks/verify.mjs +0 -2
- package/dist/shared/docs-cache.BOr9BnyP.mjs +0 -5
- package/dist/shared/docs-cache.BSvQNKuf.mjs +0 -2
- package/dist/shared/docs-cache.CQiaFDb_.mjs +0 -7
- package/dist/shared/docs-cache.CaOcl4OS.mjs +0 -3
- package/dist/shared/docs-cache.kK1DPQIQ.mjs +0 -2
|
@@ -0,0 +1,442 @@
|
|
|
1
|
+
import { createHash, randomBytes } from "node:crypto";
|
|
2
|
+
import { constants, createReadStream, createWriteStream } from "node:fs";
|
|
3
|
+
import {
|
|
4
|
+
access,
|
|
5
|
+
lstat,
|
|
6
|
+
mkdir,
|
|
7
|
+
mkdtemp,
|
|
8
|
+
open,
|
|
9
|
+
rename,
|
|
10
|
+
rm,
|
|
11
|
+
writeFile
|
|
12
|
+
} from "node:fs/promises";
|
|
13
|
+
import os from "node:os";
|
|
14
|
+
import path from "node:path";
|
|
15
|
+
import { pipeline } from "node:stream/promises";
|
|
16
|
+
import fg from "fast-glob";
|
|
17
|
+
import { MANIFEST_FILENAME } from "#cache/manifest";
|
|
18
|
+
import { symbols, ui } from "#cli/ui";
|
|
19
|
+
import { getErrnoCode } from "#core/errors";
|
|
20
|
+
import { getCacheLayout, toPosixPath } from "#core/paths";
|
|
21
|
+
import { assertSafeSourceId } from "#core/source-id";
|
|
22
|
+
// Alias: convert OS-specific path separators to POSIX form so manifest entries
// and sort order are stable across platforms.
const normalizePath = (value) => toPosixPath(value);
|
|
23
|
+
// Backslash-escape bare "(" and ")" in a glob pattern so fast-glob treats them
// as literal characters. Characters already preceded by a backslash (and the
// backslash itself) pass through untouched.
const escapeParens = (value) => {
  const pieces = [];
  let pendingEscape = false;
  for (const ch of value) {
    if (pendingEscape) {
      // Previous char was "\": emit this char verbatim, whatever it is.
      pieces.push(ch);
      pendingEscape = false;
    } else if (ch === "\\") {
      pieces.push(ch);
      pendingEscape = true;
    } else if (ch === "(" || ch === ")") {
      pieces.push("\\", ch);
    } else {
      pieces.push(ch);
    }
  }
  return pieces.join("");
};
|
|
45
|
+
// Prepare user-supplied include globs: percent-decode when the pattern looks
// URL-encoded, then escape bare parens unless the pattern uses extglob syntax
// (e.g. "@(a|b)"), where parens are meaningful.
const normalizeIncludePatterns = (patterns) =>
  patterns.map((pattern) => {
    let candidate = pattern;
    if (pattern.includes("%")) {
      try {
        candidate = decodeURIComponent(pattern);
      } catch {
        // Not valid percent-encoding; keep the literal pattern.
      }
    }
    // An unescaped extglob marker ("@(", "!(", "+(", "?(", "*(") means the
    // parens are intentional glob syntax — leave them alone.
    if (/(^|[^\\])[@!+?*]\(/.test(candidate)) {
      return candidate;
    }
    return escapeParens(candidate);
  });
|
|
59
|
+
// Files at or above this size are copied via streams instead of a full
// in-memory read; tunable (in MiB) via DOCS_CACHE_STREAM_THRESHOLD_MB.
const STREAM_COPY_THRESHOLD_MB = Number(
  process.env.DOCS_CACHE_STREAM_THRESHOLD_MB ?? "2"
);
// Byte threshold derived from the MiB value; falls back to 1 MiB when the env
// value is not a positive finite number.
const STREAM_COPY_THRESHOLD_BYTES = Number.isFinite(STREAM_COPY_THRESHOLD_MB) && STREAM_COPY_THRESHOLD_MB > 0 ? Math.floor(STREAM_COPY_THRESHOLD_MB * 1024 * 1024) : 1024 * 1024;
|
|
63
|
+
// Guard against path traversal: `target` must resolve to a location strictly
// inside `root`, otherwise throw. (Equality with the root itself also throws.)
const ensureSafePath = (root, target) => {
  const base = path.resolve(root) + path.sep;
  if (!path.resolve(target).startsWith(base)) {
    throw new Error(`Path traversal detected: ${target}`);
  }
};
|
|
70
|
+
// Open a file read-only while refusing to follow a symlink at the final path
// component. Returns null when the path is a symlink (skipped by callers), and
// falls back to a plain open + lstat check on platforms without O_NOFOLLOW.
const openFileNoFollow = async (filePath) => {
  try {
    return await open(filePath, constants.O_RDONLY | constants.O_NOFOLLOW);
  } catch (error) {
    const code = getErrnoCode(error);
    // ELOOP: O_NOFOLLOW hit a symlink — treat as "do not materialize".
    if (code === "ELOOP") {
      return null;
    }
    // O_NOFOLLOW unsupported here: emulate the check with lstat.
    if (code === "EINVAL" || code === "ENOSYS" || code === "ENOTSUP") {
      const stats = await lstat(filePath);
      return stats.isSymbolicLink() ? null : await open(filePath, "r");
    }
    throw error;
  }
};
|
|
88
|
+
// Find the longest common leading directory chain ("a/b/") shared by every
// entry's normalized path, so a single wrapped root directory can be stripped.
// Returns null when unwrapping is disabled, there are no entries, or the
// entries do not all share a single root at each level.
const resolveUnwrapPrefix = (entries, unwrapSingleRootDir) => {
  if (!unwrapSingleRootDir || entries.length === 0) {
    return null;
  }
  let prefix = "";
  const finish = () => prefix || null;
  for (;;) {
    let sharedRoot = null;
    for (const entry of entries) {
      const rest = prefix ? entry.normalized.slice(prefix.length) : entry.normalized;
      const segments = rest.split("/");
      // A file sitting directly at this level blocks further unwrapping.
      if (segments.length < 2) {
        return finish();
      }
      const head = segments[0];
      if (!sharedRoot) {
        sharedRoot = head;
        continue;
      }
      if (sharedRoot !== head) {
        return finish();
      }
    }
    if (!sharedRoot) {
      return finish();
    }
    const extended = `${prefix}${sharedRoot}/`;
    // Defensive: stop if the prefix fails to grow (avoids an infinite loop).
    if (extended === prefix) {
      return finish();
    }
    prefix = extended;
  }
};
|
|
120
|
+
// Fill in defaults for optional materialize parameters. Uses ?? so that both
// undefined and null fall back, while falsy-but-valid values (0, false) stick.
const resolveMaterializeParams = (params) => {
  const defaults = {
    exclude: [],
    ignoreHidden: false,
    unwrapSingleRootDir: false,
    json: false,
    progressThrottleMs: 120
  };
  const out = { ...params };
  for (const [key, fallback] of Object.entries(defaults)) {
    out[key] = params[key] ?? fallback;
  }
  return out;
};
|
|
128
|
+
// Acquire an exclusive advisory lock by atomically creating `lockPath` with
// the "wx" flag. Polls every 100 ms until `timeoutMs` elapses, then throws.
// NOTE(review): a lock file left behind by a crashed process is never
// reclaimed here — contenders simply time out. Confirm that trade-off is
// acceptable for long-running syncs.
const acquireLock = async (lockPath, timeoutMs = 5e3) => {
  const start = Date.now();
  while (Date.now() - start < timeoutMs) {
    try {
      const fd = await open(lockPath, "wx");
      return {
        // Close the handle and remove the lock file. Removal sits in a
        // `finally` so that a failed close cannot strand a stale lock file
        // (the original skipped `rm` when `close()` threw).
        release: async () => {
          try {
            await fd.close();
          } finally {
            await rm(lockPath, { force: true });
          }
        }
      };
    } catch (error) {
      const code = getErrnoCode(error);
      if (code !== "EEXIST") {
        throw error;
      }
      // Another process holds the lock; back off briefly and retry.
      await new Promise((resolve) => setTimeout(resolve, 100));
    }
  }
  throw new Error(`Failed to acquire lock for ${lockPath}.`);
};
|
|
149
|
+
// Copy the files matched by `params.include` from a checked-out repo into the
// cache directory for `params.sourceId`, writing a JSONL manifest alongside
// them and returning { bytes, fileCount, manifestSha256 }.
//
// Strategy: everything is staged into a fresh temp dir inside the cache, then
// atomically swapped into place under a lock (rename with backup/rollback).
// On any failure the temp dir is removed and the error rethrown.
// Throws when the matched content exceeds maxFiles/maxBytes, or on path
// traversal / lock-timeout / filesystem errors.
export const materializeSource = async (params) => {
  const resolved = resolveMaterializeParams(params);
  // Reject source ids that could escape the cache directory when joined.
  assertSafeSourceId(resolved.sourceId, "sourceId");
  const layout = getCacheLayout(resolved.cacheDir, resolved.sourceId);
  await mkdir(resolved.cacheDir, { recursive: true });
  // Stage into a unique temp dir so a failed run never corrupts the live dir.
  const tempDir = await mkdtemp(
    path.join(resolved.cacheDir, `.tmp-${resolved.sourceId}-`)
  );
  let manifestStreamRef = null;
  // Best-effort close of the manifest stream during error cleanup; resolves
  // once the stream closes or errors, and never rejects.
  const closeManifestStream = async () => {
    const stream = manifestStreamRef;
    if (!stream || stream.closed || stream.destroyed) {
      return;
    }
    await new Promise((resolve) => {
      const cleanup = () => {
        stream.off("close", onClose);
        stream.off("error", onError);
        resolve();
      };
      const onClose = () => cleanup();
      const onError = () => cleanup();
      stream.once("close", onClose);
      stream.once("error", onError);
      try {
        stream.end();
      } catch {
        cleanup();
      }
    });
  };
  try {
    // Always skip .git; optionally skip dotfiles; then user excludes.
    const ignorePatterns = [
      ".git/**",
      ...resolved.ignoreHidden ? [".*", "**/.*", "**/.*/**"] : [],
      ...resolved.exclude
    ];
    const includePatterns = normalizeIncludePatterns(resolved.include);
    const files = await fg(includePatterns, {
      cwd: resolved.repoDir,
      ignore: ignorePatterns,
      dot: true,
      onlyFiles: true,
      followSymbolicLinks: false
    });
    if (!resolved.json && includePatterns.length > 0 && files.length === 0) {
      ui.line(
        `${symbols.warn} No files matched include patterns for ${resolved.sourceId}: ${includePatterns.join(", ")}`
      );
    }
    // Sort by normalized (POSIX) path so the manifest — and therefore its
    // hash — is deterministic across platforms.
    const entries = files.map((relativePath) => ({
      relativePath,
      normalized: normalizePath(relativePath)
    })).sort((left, right) => left.normalized.localeCompare(right.normalized));
    const totalEntries = entries.length;
    const unwrapPrefix = resolveUnwrapPrefix(
      entries,
      resolved.unwrapSingleRootDir
    );
    // Pre-create every destination directory in one parallel pass.
    const targetDirs = /* @__PURE__ */ new Set();
    for (const { normalized } of entries) {
      const rootPath = unwrapPrefix ? normalized.slice(unwrapPrefix.length) : normalized;
      targetDirs.add(path.posix.dirname(rootPath));
    }
    await Promise.all(
      Array.from(
        targetDirs,
        (dir) => mkdir(path.join(tempDir, dir), { recursive: true })
      )
    );
    let bytes = 0;
    let fileCount = 0;
    let lastProgressAt = 0;
    // Copy in batches: between 8 and 128 concurrent files, scaled by CPU
    // count, never more than the number of entries.
    const concurrency = Math.max(
      1,
      Math.min(
        entries.length,
        Math.max(8, Math.min(128, os.cpus().length * 8))
      )
    );
    const manifestPath = path.join(tempDir, MANIFEST_FILENAME);
    const manifestStream = createWriteStream(manifestPath, {
      encoding: "utf8"
    });
    manifestStreamRef = manifestStream;
    const manifestHash = createHash("sha256");
    // Write one manifest line honoring backpressure: wait for 'drain' when
    // write() returns false, and surface stream errors as rejections.
    const writeManifestLine = async (line) => {
      return new Promise((resolve, reject) => {
        const onError = (error) => {
          manifestStream.off("drain", onDrain);
          reject(error);
        };
        const onDrain = () => {
          manifestStream.off("error", onError);
          resolve();
        };
        manifestStream.once("error", onError);
        if (!manifestStream.write(line)) {
          manifestStream.once("drain", onDrain);
        } else {
          manifestStream.off("error", onError);
          resolve();
        }
      });
    };
    for (let i = 0; i < entries.length; i += concurrency) {
      const batch = entries.slice(i, i + concurrency);
      const results = await Promise.all(
        batch.map(async (entry) => {
          const filePath = path.join(resolved.repoDir, entry.relativePath);
          // null means the entry is a symlink (or vanished) — skip it.
          const fileHandle = await openFileNoFollow(filePath);
          if (!fileHandle) {
            return null;
          }
          try {
            const stats = await fileHandle.stat();
            if (!stats.isFile()) {
              return null;
            }
            const normalizedPath = unwrapPrefix ? entry.normalized.slice(unwrapPrefix.length) : entry.normalized;
            const targetPath = path.join(tempDir, normalizedPath);
            // Defense in depth: the joined path must stay inside tempDir.
            ensureSafePath(tempDir, targetPath);
            if (stats.size >= STREAM_COPY_THRESHOLD_BYTES) {
              // Large file: stream through the already-open (no-follow) fd.
              const reader = createReadStream(filePath, {
                fd: fileHandle.fd,
                autoClose: false
              });
              const writer = createWriteStream(targetPath);
              await pipeline(reader, writer);
            } else {
              // Small file: one buffered read/write round-trip.
              const data = await fileHandle.readFile();
              await writeFile(targetPath, data);
            }
            return {
              path: unwrapPrefix ? entry.normalized.slice(unwrapPrefix.length) : entry.normalized,
              size: stats.size
            };
          } finally {
            await fileHandle.close();
          }
        })
      );
      // Account/limit-check serially, in sorted order, so the manifest and
      // its hash are deterministic.
      for (const entry of results) {
        if (!entry) {
          continue;
        }
        if (resolved.maxFiles !== void 0 && fileCount + 1 > resolved.maxFiles) {
          throw new Error(
            `Materialized content exceeds maxFiles (${resolved.maxFiles}).`
          );
        }
        bytes += entry.size;
        if (bytes > resolved.maxBytes) {
          throw new Error(
            `Materialized content exceeds maxBytes (${resolved.maxBytes}).`
          );
        }
        const line = `${JSON.stringify(entry)}
`;
        manifestHash.update(line);
        await writeManifestLine(line);
        fileCount += 1;
      }
      // Throttled progress callback; always emits on the final file.
      if (resolved.progressLogger && totalEntries > 0) {
        const now = Date.now();
        const shouldEmit = now - lastProgressAt >= resolved.progressThrottleMs || fileCount === totalEntries;
        if (shouldEmit) {
          lastProgressAt = now;
          const percent = Math.min(
            100,
            Math.round(fileCount / totalEntries * 100)
          );
          resolved.progressLogger(
            `materializing ${fileCount}/${totalEntries} (${percent}%)`
          );
        }
      }
    }
    // Flush the manifest before hashing/swap.
    // NOTE(review): the 'error' listener is attached after end() is called;
    // an error emitted synchronously inside end() could be missed — confirm.
    await new Promise((resolve, reject) => {
      manifestStream.end(() => resolve());
      manifestStream.once("error", reject);
    });
    const manifestSha256 = manifestHash.digest("hex");
    const exists = async (target) => {
      try {
        await access(target);
        return true;
      } catch {
        return false;
      }
    };
    // Atomically swap `source` into `target` under a lock: move the old
    // target aside as a backup, rename the new one in, then drop the backup.
    // On rename failure the backup is restored (best effort).
    const replaceDirectory = async (source, target) => {
      const lock = await acquireLock(`${target}.lock`);
      try {
        const hasTarget = await exists(target);
        const backupPath = `${target}.bak-${randomBytes(8).toString("hex")}`;
        if (hasTarget) {
          await rename(target, backupPath);
        }
        try {
          await rename(source, target);
        } catch (error) {
          if (hasTarget) {
            try {
              await rename(backupPath, target);
            } catch (restoreError) {
              const restoreMsg = restoreError instanceof Error ? restoreError.message : String(restoreError);
              process.stderr.write(
                `Warning: Failed to restore backup: ${restoreMsg}
`
              );
            }
          }
          throw error;
        }
        if (hasTarget) {
          await rm(backupPath, { recursive: true, force: true });
        }
      } finally {
        await lock.release();
      }
    };
    await replaceDirectory(tempDir, layout.sourceDir);
    return {
      bytes,
      fileCount,
      manifestSha256
    };
  } catch (error) {
    // Cleanup path: close the manifest stream (ignore secondary failures),
    // remove the staging dir, then rethrow the original error.
    try {
      await closeManifestStream();
    } catch {
    }
    await rm(tempDir, { recursive: true, force: true });
    throw error;
  }
};
|
|
386
|
+
// Dry-run counterpart of materializeSource: walks the same file set with the
// same ordering and limits, but only hashes the would-be manifest instead of
// copying anything. Returns { bytes, fileCount, manifestSha256 } so callers
// can detect whether materialization would change the cache.
// Throws the same maxFiles/maxBytes errors as materializeSource.
export const computeManifestHash = async (params) => {
  assertSafeSourceId(params.sourceId, "sourceId");
  const includePatterns = normalizeIncludePatterns(params.include);
  const files = await fg(includePatterns, {
    cwd: params.repoDir,
    // Same ignore set as materializeSource: .git, optional dotfiles, excludes.
    ignore: [
      ".git/**",
      ...params.ignoreHidden ? [".*", "**/.*", "**/.*/**"] : [],
      ...params.exclude ?? []
    ],
    dot: true,
    onlyFiles: true,
    followSymbolicLinks: false
  });
  // Sort on normalized (POSIX) paths so the hash matches across platforms.
  files.sort(
    (left, right) => normalizePath(left).localeCompare(normalizePath(right))
  );
  let bytes = 0;
  let fileCount = 0;
  const manifestHash = createHash("sha256");
  for (const relativePath of files) {
    const relNormalized = normalizePath(relativePath);
    const filePath = path.join(params.repoDir, relativePath);
    // Skip symlinks exactly like materializeSource does.
    const fileHandle = await openFileNoFollow(filePath);
    if (!fileHandle) {
      continue;
    }
    try {
      const stats = await fileHandle.stat();
      if (!stats.isFile()) {
        continue;
      }
      if (params.maxFiles !== void 0 && fileCount + 1 > params.maxFiles) {
        throw new Error(
          `Materialized content exceeds maxFiles (${params.maxFiles}).`
        );
      }
      bytes += stats.size;
      if (bytes > params.maxBytes) {
        throw new Error(
          `Materialized content exceeds maxBytes (${params.maxBytes}).`
        );
      }
      // Must serialize identically to materializeSource's manifest lines
      // ({ path, size } + trailing newline) or the hashes will not agree.
      const line = `${JSON.stringify({ path: relNormalized, size: stats.size })}
`;
      manifestHash.update(line);
      fileCount += 1;
    } finally {
      await fileHandle.close();
    }
  }
  return {
    bytes,
    fileCount,
    manifestSha256: manifestHash.digest("hex")
  };
};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { cp, mkdir, readdir, rm, symlink } from "node:fs/promises";
|
|
2
|
+
// Injectable filesystem primitives so applyTargetDir can be exercised with
// fakes in tests; production code defaults to node:fs/promises + process.stderr.
type TargetDeps = {
  cp: typeof cp;
  mkdir: typeof mkdir;
  readdir: typeof readdir;
  rm: typeof rm;
  symlink: typeof symlink;
  stderr: NodeJS.WritableStream;
};
// Parameters for exposing a materialized cache source at a workspace path.
type TargetParams = {
  sourceDir: string;
  targetDir: string;
  // Link strategy; when omitted the implementation picks a platform default.
  mode?: "symlink" | "copy";
  // True when the user explicitly chose `mode` (affects fallback warnings).
  explicitTargetMode?: boolean;
  // When true, a lone root directory inside sourceDir is linked directly.
  unwrapSingleRootDir?: boolean;
  deps?: TargetDeps;
};
// Replaces targetDir with a symlink to (or copy of) the cached sourceDir.
export declare const applyTargetDir: (params: TargetParams) => Promise<void>;
export {};
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import { cp, mkdir, readdir, rm, symlink } from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { MANIFEST_FILENAME } from "#cache/manifest";
|
|
4
|
+
import { getErrnoCode } from "#core/errors";
|
|
5
|
+
import { DEFAULT_TOC_FILENAME } from "#core/paths";
|
|
6
|
+
// Remove targetDir and everything beneath it; `force` makes a missing path a
// no-op rather than an error.
const removeTarget = (targetDir, deps) => deps.rm(targetDir, { recursive: true, force: true });
|
|
9
|
+
// When unwrapping is enabled and sourceDir contains exactly one directory plus
// nothing but metadata files (manifest / TOC), return that inner directory so
// the wrapper level disappears from the target; otherwise return sourceDir.
const resolveSourceDir = async (params, deps) => {
  if (!params.unwrapSingleRootDir) {
    return params.sourceDir;
  }
  const metaNames = new Set([MANIFEST_FILENAME, DEFAULT_TOC_FILENAME]);
  const listing = await deps.readdir(params.sourceDir, { withFileTypes: true });
  // Drop cache-metadata files; everything else counts toward the decision.
  const relevant = listing.filter(
    (item) => !(item.isFile() && metaNames.has(item.name))
  );
  const dirs = relevant.filter((item) => item.isDirectory());
  const looseFiles = relevant.filter((item) => item.isFile());
  if (dirs.length === 1 && looseFiles.length === 0) {
    return path.join(params.sourceDir, dirs[0].name);
  }
  return params.sourceDir;
};
|
|
28
|
+
// Expose a cached source at params.targetDir: remove whatever is there, then
// either symlink to the (possibly unwrapped) sourceDir or copy it. Defaults
// to symlink on POSIX and copy on Windows; symlink failures from permission/
// support errors fall back to a copy. `deps` allows fakes for testing.
export const applyTargetDir = async (params) => {
  const deps = params.deps ?? {
    cp,
    mkdir,
    readdir,
    rm,
    symlink,
    stderr: process.stderr
  };
  // May descend into a single wrapped root dir (see resolveSourceDir).
  const sourceDir = await resolveSourceDir(params, deps);
  const parentDir = path.dirname(params.targetDir);
  await deps.mkdir(parentDir, { recursive: true });
  // Clear any previous link/copy before recreating it.
  await removeTarget(params.targetDir, deps);
  const defaultMode = process.platform === "win32" ? "copy" : "symlink";
  const mode = params.mode ?? defaultMode;
  if (mode === "copy") {
    await deps.cp(sourceDir, params.targetDir, { recursive: true });
    return;
  }
  // Windows directory links work best as junctions (no admin rights needed).
  const type = process.platform === "win32" ? "junction" : "dir";
  try {
    await deps.symlink(sourceDir, params.targetDir, type);
  } catch (error) {
    const code = getErrnoCode(error);
    // Errors that mean "symlinks unavailable here" rather than a real failure.
    const fallbackCodes = /* @__PURE__ */ new Set(["EPERM", "EACCES", "ENOTSUP", "EINVAL"]);
    if (code && fallbackCodes.has(code)) {
      // Only warn when the user explicitly requested symlink mode; the
      // implicit default degrades to copy silently.
      if (params.explicitTargetMode) {
        const message = error instanceof Error ? error.message : String(error);
        deps.stderr.write(
          `Warning: Failed to create symlink at ${params.targetDir}. Falling back to copy. ${message}
`
        );
      }
      await deps.cp(sourceDir, params.targetDir, { recursive: true });
      return;
    }
    throw error;
  }
};
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { DocsCacheLock } from "#cache/lock";
import type { DocsCacheResolvedSource } from "#config";
// Writes (or removes) a per-source table-of-contents file inside the cache
// for every source recorded in the lock, derived from its manifest.
export declare const writeToc: (params: {
  cacheDir: string;
  configPath: string;
  lock: DocsCacheLock;
  sources: DocsCacheResolvedSource[];
  // Optional per-source sync outcomes; an "up-to-date" source whose TOC is
  // already current is skipped.
  results?: Array<{
    id: string;
    status: "up-to-date" | "changed" | "missing";
  }>;
}) => Promise<void>;
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
import { access, readFile, rm, writeFile } from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { symbols, ui } from "#cli/ui";
|
|
4
|
+
import {
|
|
5
|
+
DEFAULT_TOC_FILENAME,
|
|
6
|
+
resolveTargetDir,
|
|
7
|
+
toPosixPath
|
|
8
|
+
} from "#core/paths";
|
|
9
|
+
// Build a directory tree from POSIX-style file paths. Each node holds a Map
// of child directories plus the files that live directly at that level
// ({ name, path } — `path` keeps the full original path for link targets).
const createTocTree = (files) => {
  const makeNode = () => ({ dirs: new Map(), files: [] });
  const root = makeNode();
  for (const file of files) {
    const segments = file.split("/").filter(Boolean);
    if (segments.length === 0) {
      continue;
    }
    // Walk/create intermediate directory nodes for all but the last segment.
    let cursor = root;
    for (const segment of segments.slice(0, -1)) {
      if (!cursor.dirs.has(segment)) {
        cursor.dirs.set(segment, makeNode());
      }
      cursor = cursor.dirs.get(segment);
    }
    cursor.files.push({ name: segments[segments.length - 1], path: file });
  }
  return root;
};
|
|
30
|
+
// Render a TOC tree as indented markdown list lines into `lines` (mutated in
// place): sorted directories first (recursing one level deeper), then sorted
// file links relative to the TOC location.
const renderTocTree = (tree, depth, lines) => {
  const pad = " ".repeat(depth);
  for (const dirName of [...tree.dirs.keys()].sort()) {
    lines.push(`${pad}- ${dirName}/`);
    const subtree = tree.dirs.get(dirName);
    if (subtree) {
      renderTocTree(subtree, depth + 1, lines);
    }
  }
  const sortedFiles = [...tree.files].sort((a, b) => a.name.localeCompare(b.name));
  for (const entry of sortedFiles) {
    lines.push(`${pad}- [${entry.name}](./${entry.path})`);
  }
};
|
|
45
|
+
// Render a compact one-line index: "[label]|dir:{a,b}|root:{c}" with
// directories and filenames sorted; files with no directory go under "root".
// Appends exactly one line to `lines`.
const renderCompressedToc = (files, lines, label) => {
  const byDir = new Map();
  for (const file of [...files].sort((a, b) => a.localeCompare(b))) {
    const cut = file.lastIndexOf("/");
    const dir = cut === -1 ? "" : file.substring(0, cut);
    const name = cut === -1 ? file : file.substring(cut + 1);
    const bucket = byDir.get(dir);
    if (bucket) {
      bucket.push(name);
    } else {
      byDir.set(dir, [name]);
    }
  }
  const segments = [`[${label}]`];
  for (const dir of [...byDir.keys()].sort()) {
    const names = byDir.get(dir);
    if (!names) continue;
    const joined = names.join(",");
    segments.push(dir === "" ? `root:{${joined}}` : `${dir}:{${joined}}`);
  }
  lines.push(segments.join("|"));
};
|
|
74
|
+
// Produce the TOC document for one source entry. "tree" yields a markdown
// heading plus nested list; the default "compressed" yields a single dense
// index line. Always ends with a trailing newline.
const generateSourceToc = (entry, format = "compressed") => {
  const lines = [];
  if (format === "tree") {
    lines.push(`# ${entry.id} - Documentation`, "", "## Files", "");
    renderTocTree(createTocTree(entry.files), 0, lines);
  } else {
    renderCompressedToc(entry.files, lines, `${entry.id} Docs Index`);
  }
  lines.push("");
  return lines.join("\n");
};
|
|
90
|
+
// Read the per-source manifest (one JSON object per line) and return the
// listed file paths. A missing/unreadable manifest yields []. Individual
// malformed lines are skipped instead of discarding the whole manifest (the
// previous whole-body try/catch returned [] on the first bad line).
// NOTE(review): the filename duplicates MANIFEST_FILENAME from #cache/manifest
// (not imported here) — keep the literals in sync.
const readManifest = async (sourceDir) => {
  const manifestPath = path.join(sourceDir, ".manifest.jsonl");
  let raw;
  try {
    raw = await readFile(manifestPath, "utf8");
  } catch {
    return [];
  }
  const files = [];
  for (const line of raw.split("\n")) {
    if (!line.trim()) {
      continue;
    }
    try {
      const entry = JSON.parse(line);
      if (entry.path) {
        files.push(entry.path);
      }
    } catch {
      // Skip a single corrupt line; the rest of the manifest is still useful.
    }
  }
  return files;
};
|
|
108
|
+
// For every source recorded in the lock file, regenerate (or remove) the TOC
// file inside that source's cache directory. Sources whose cache dir is
// missing are skipped. When a source's `toc` config is false the TOC file is
// deleted; otherwise it is rewritten unless the source is up-to-date and the
// existing content already matches.
export const writeToc = async (params) => {
  const sourcesById = new Map(
    params.sources.map((source) => [source.id, source])
  );
  const resultsById = new Map(
    (params.results ?? []).map((result) => [result.id, result])
  );
  // The lock file is the source of truth for which sources exist.
  for (const [id, lockEntry] of Object.entries(params.lock.sources)) {
    const source = sourcesById.get(id);
    // Workspace target path (POSIX form) if the source declares one.
    const targetDir = source?.targetDir ? toPosixPath(resolveTargetDir(params.configPath, source.targetDir)) : void 0;
    const sourceDir = path.join(params.cacheDir, id);
    // Skip sources that have not been materialized into the cache.
    try {
      await access(sourceDir);
    } catch {
      continue;
    }
    const files = await readManifest(sourceDir);
    // Everything generateSourceToc needs to describe this source.
    const entry = {
      id,
      repo: lockEntry.repo,
      ref: lockEntry.ref,
      resolvedCommit: lockEntry.resolvedCommit,
      fileCount: lockEntry.fileCount,
      cachePath: toPosixPath(path.join(params.cacheDir, id)),
      targetDir,
      files
    };
    // `toc` config: false disables; a string selects the format; anything
    // else (including undefined) enables the default "compressed" format.
    const sourceTocConfig = source?.toc;
    const tocEnabled = sourceTocConfig !== false;
    let tocFormat = "compressed";
    if (typeof sourceTocConfig === "string") {
      tocFormat = sourceTocConfig;
    }
    const sourceTocPath = path.join(sourceDir, DEFAULT_TOC_FILENAME);
    if (tocEnabled) {
      const result = resultsById.get(id);
      let existingContent = null;
      try {
        existingContent = await readFile(sourceTocPath, "utf8");
      } catch {
        existingContent = null;
      }
      const sourceTocContent = generateSourceToc(entry, tocFormat);
      // Nothing changed and the TOC is already current — avoid the write.
      if (result?.status === "up-to-date" && existingContent === sourceTocContent) {
        continue;
      }
      // Warn before clobbering a TOC whose content differs (e.g. user-edited).
      if (existingContent !== null && existingContent !== sourceTocContent) {
        ui.line(
          `${symbols.warn} Overwriting existing ${DEFAULT_TOC_FILENAME} for ${id}`
        );
      }
      await writeFile(sourceTocPath, sourceTocContent, "utf8");
    } else {
      // TOC disabled for this source: best-effort removal of any stale file.
      try {
        await rm(sourceTocPath, { force: true });
      } catch {
      }
    }
  }
};
|