docs-cache 0.4.3 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.mjs +13 -13
- package/dist/esm/api.d.ts +14 -0
- package/dist/esm/api.mjs +14 -0
- package/dist/esm/cache/cache-layout.d.ts +1 -0
- package/dist/esm/cache/cache-layout.mjs +12 -0
- package/dist/esm/cache/lock.d.ts +21 -0
- package/dist/esm/cache/lock.mjs +91 -0
- package/dist/esm/cache/manifest.d.ts +11 -0
- package/dist/esm/cache/manifest.mjs +68 -0
- package/dist/esm/cache/materialize.d.ts +26 -0
- package/dist/esm/cache/materialize.mjs +442 -0
- package/dist/esm/cache/targets.d.ts +19 -0
- package/dist/esm/cache/targets.mjs +67 -0
- package/dist/esm/cache/toc.d.ts +12 -0
- package/dist/esm/cache/toc.mjs +167 -0
- package/dist/esm/cli/exit-code.d.ts +11 -0
- package/dist/esm/cli/exit-code.mjs +5 -0
- package/dist/esm/cli/index.d.ts +5 -0
- package/dist/esm/cli/index.mjs +345 -0
- package/dist/esm/cli/live-output.d.ts +12 -0
- package/dist/esm/cli/live-output.mjs +30 -0
- package/dist/esm/cli/parse-args.d.ts +13 -0
- package/dist/esm/cli/parse-args.mjs +295 -0
- package/dist/esm/cli/run.d.ts +1 -0
- package/dist/esm/cli/run.mjs +2 -0
- package/dist/esm/cli/task-reporter.d.ts +32 -0
- package/dist/esm/cli/task-reporter.mjs +122 -0
- package/dist/esm/cli/types.d.ts +51 -0
- package/dist/esm/cli/types.mjs +0 -0
- package/dist/esm/cli/ui.d.ts +21 -0
- package/dist/esm/cli/ui.mjs +64 -0
- package/dist/esm/commands/add.d.ts +20 -0
- package/dist/esm/commands/add.mjs +81 -0
- package/dist/esm/commands/clean-git-cache.d.ts +10 -0
- package/dist/esm/commands/clean-git-cache.mjs +48 -0
- package/dist/esm/commands/clean.d.ts +10 -0
- package/dist/esm/commands/clean.mjs +27 -0
- package/dist/esm/commands/init.d.ts +19 -0
- package/dist/esm/commands/init.mjs +179 -0
- package/dist/esm/commands/prune.d.ts +11 -0
- package/dist/esm/commands/prune.mjs +52 -0
- package/dist/esm/commands/remove.d.ts +12 -0
- package/dist/esm/commands/remove.mjs +87 -0
- package/dist/esm/commands/status.d.ts +16 -0
- package/dist/esm/commands/status.mjs +78 -0
- package/dist/esm/commands/sync.d.ts +33 -0
- package/dist/esm/commands/sync.mjs +730 -0
- package/dist/esm/commands/verify.d.ts +11 -0
- package/dist/esm/commands/verify.mjs +120 -0
- package/dist/esm/config/index.d.ts +15 -0
- package/dist/esm/config/index.mjs +196 -0
- package/dist/esm/config/io.d.ts +30 -0
- package/dist/esm/config/io.mjs +112 -0
- package/dist/esm/config/schema.d.ts +171 -0
- package/dist/esm/config/schema.mjs +69 -0
- package/dist/esm/errors.d.ts +3 -0
- package/dist/esm/errors.mjs +2 -0
- package/dist/esm/git/cache-dir.d.ts +16 -0
- package/dist/esm/git/cache-dir.mjs +23 -0
- package/dist/esm/git/fetch-source.d.ts +19 -0
- package/dist/esm/git/fetch-source.mjs +477 -0
- package/dist/esm/git/redact.d.ts +1 -0
- package/dist/esm/git/redact.mjs +4 -0
- package/dist/esm/git/resolve-remote.d.ts +15 -0
- package/dist/esm/git/resolve-remote.mjs +87 -0
- package/dist/esm/git/resolve-repo.d.ts +5 -0
- package/dist/esm/git/resolve-repo.mjs +52 -0
- package/dist/esm/gitignore.d.ts +18 -0
- package/dist/esm/gitignore.mjs +80 -0
- package/dist/esm/paths.d.ts +8 -0
- package/dist/esm/paths.mjs +34 -0
- package/dist/esm/source-id.d.ts +1 -0
- package/dist/esm/source-id.mjs +29 -0
- package/dist/esm/types/sync.d.ts +25 -0
- package/dist/esm/types/sync.mjs +0 -0
- package/package.json +51 -7
- package/dist/chunks/add.mjs +0 -3
- package/dist/chunks/clean-git-cache.mjs +0 -2
- package/dist/chunks/clean.mjs +0 -2
- package/dist/chunks/init.mjs +0 -3
- package/dist/chunks/prune.mjs +0 -2
- package/dist/chunks/remove.mjs +0 -3
- package/dist/chunks/status.mjs +0 -2
- package/dist/chunks/sync.mjs +0 -9
- package/dist/chunks/verify.mjs +0 -2
- package/dist/shared/docs-cache.BOr9BnyP.mjs +0 -5
- package/dist/shared/docs-cache.BSvQNKuf.mjs +0 -2
- package/dist/shared/docs-cache.CQiaFDb_.mjs +0 -7
- package/dist/shared/docs-cache.CaOcl4OS.mjs +0 -3
- package/dist/shared/docs-cache.kK1DPQIQ.mjs +0 -2
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { promisify } from "node:util";
|
|
3
|
+
import { redactRepoUrl } from "#git/redact";
|
|
4
|
+
// Promisified execFile so git subprocesses can be awaited.
const execFileAsync = promisify(execFile);
// Default timeout for remote git operations (30 seconds).
const DEFAULT_TIMEOUT_MS = 3e4;
// URL protocols that must never reach git (local-file / exfiltration guards).
const BLOCKED_PROTOCOLS = /* @__PURE__ */ new Set(["file:", "ftp:", "data:", "javascript:"]);
|
|
7
|
+
// Reject repo specs whose URL protocol is on the blocklist. Strings that do
// not parse as URLs at all (e.g. scp-style "user@host:path") raise a
// TypeError from `new URL` and are deliberately let through — protocol
// screening does not apply to them.
const assertAllowedProtocol = (repo) => {
  let parsed;
  try {
    parsed = new URL(repo);
  } catch (error) {
    if (error instanceof TypeError) {
      return; // not URL-shaped; nothing to screen
    }
    throw error;
  }
  if (BLOCKED_PROTOCOLS.has(parsed.protocol)) {
    throw new Error(
      `Blocked protocol '${parsed.protocol}' in repo URL '${redactRepoUrl(repo)}'.`
    );
  }
};
|
|
22
|
+
// Extract the host name from a repo spec. Supports scp-style specs
// ("user@host:path") and https:/ssh: URLs; returns null when no host
// can be determined. Throws if the protocol is blocked.
const parseRepoHost = (repo) => {
  assertAllowedProtocol(repo);
  const scp = /^[^@]+@([^:]+):/.exec(repo);
  if (scp !== null) {
    return scp[1] || null;
  }
  try {
    const { protocol, hostname } = new URL(repo);
    const isSupported = protocol === "https:" || protocol === "ssh:";
    return isSupported ? hostname || null : null;
  } catch {
    return null;
  }
};
|
|
38
|
+
// Throw unless the repo's host matches an allowHosts entry exactly or is a
// subdomain of one (case-insensitive). Also rejects specs with no
// recognizable host.
export const enforceHostAllowlist = (repo, allowHosts) => {
  const host = parseRepoHost(repo);
  if (!host) {
    throw new Error(
      `Unsupported repo URL '${redactRepoUrl(repo)}'. Use HTTPS or SSH.`
    );
  }
  const candidate = host.toLowerCase();
  let matched = false;
  for (const rawEntry of allowHosts) {
    const entry = rawEntry.toLowerCase();
    if (candidate === entry || candidate.endsWith(`.${entry}`)) {
      matched = true;
      break;
    }
  }
  if (!matched) {
    throw new Error(
      `Host '${host}' is not in allowHosts for '${redactRepoUrl(repo)}'.`
    );
  }
};
|
|
56
|
+
// Pull the commit SHA out of `git ls-remote` output: the first
// whitespace-delimited token on the first non-empty line, or null
// when the output is empty.
export const parseLsRemote = (stdout) => {
  const [firstLine] = stdout.trim().split("\n").filter(Boolean);
  if (firstLine === undefined) {
    return null;
  }
  const [sha] = firstLine.split(/\s+/);
  return sha || null;
};
|
|
64
|
+
// Resolve a git ref to a commit SHA via `git ls-remote`, after enforcing the
// host allowlist. Throws when the ref cannot be resolved. Credentials in the
// repo URL are redacted before logging or inclusion in error messages.
export const resolveRemoteCommit = async (params) => {
  const { repo, ref, allowHosts, timeoutMs } = params;
  enforceHostAllowlist(repo, allowHosts);
  params.logger?.(`git ls-remote ${redactRepoUrl(repo)} ${ref}`);
  const execOptions = {
    timeout: timeoutMs ?? DEFAULT_TIMEOUT_MS,
    maxBuffer: 1024 * 1024
  };
  const { stdout } = await execFileAsync(
    "git",
    ["ls-remote", repo, ref],
    execOptions
  );
  const resolvedCommit = parseLsRemote(stdout);
  if (!resolvedCommit) {
    throw new Error(
      `Unable to resolve ref '${ref}' for ${redactRepoUrl(repo)}.`
    );
  }
  return { repo, ref, resolvedCommit };
};
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
// Normalize a user-supplied repo spec into { repoUrl, ref, inferredId }.
// Branch order matters: scp-style, then owner/name shorthand, then
// github:/gitlab: shortcuts, then full https:/ssh: URLs, then passthrough.
// A "#ref" suffix selects a ref in the first three forms; inferredId is the
// last path segment with any ".git" suffix stripped.
export const resolveRepoInput = (repo) => {
  const trimmed = repo.trim();
  // scp-style SSH spec: git@host:path[#ref]
  const sshMatch = trimmed.match(/^git@([^:]+):(.+)$/);
  if (sshMatch) {
    const host = sshMatch[1];
    const rawPath = sshMatch[2];
    const [pathPart, rawRef] = rawPath.split("#", 2);
    const sanitizedPath = pathPart.replace(/^\//, "");
    const inferredId = sanitizedPath.split("/").filter(Boolean).pop()?.replace(/\.git$/i, "");
    const repoUrl = `git@${host}:${sanitizedPath}`;
    const ref = rawRef?.trim() || void 0;
    return { repoUrl, ref, inferredId };
  }
  // Bare "owner/name[#ref]" shorthand — expanded to a GitHub HTTPS URL.
  // The ":" exclusion in the first group keeps "github:…" / URLs out of
  // this branch.
  const plainMatch = trimmed.match(/^([^\s/:]+)\/([^\s#]+)(?:#(.+))?$/);
  if (plainMatch) {
    const [, owner, name, rawRef] = plainMatch;
    const sanitizedPath = `${owner}/${name}`.replace(/\.git$/i, "");
    const repoUrl = `https://github.com/${sanitizedPath}.git`;
    return {
      repoUrl,
      ref: rawRef?.trim() || void 0,
      inferredId: name.replace(/\.git$/i, "")
    };
  }
  // Provider shortcut: github:path[#ref] or gitlab:path[#ref] (case-insensitive).
  const shortcutMatch = trimmed.match(/^(github|gitlab):(.+)$/i);
  if (shortcutMatch) {
    const provider = shortcutMatch[1].toLowerCase();
    const rawPath = shortcutMatch[2];
    const [pathPart, rawRef] = rawPath.split("#", 2);
    const sanitizedPath = pathPart.replace(/^\//, "");
    const inferredId = sanitizedPath.split("/").filter(Boolean).pop()?.replace(/\.git$/i, "");
    const host = provider === "gitlab" ? "gitlab.com" : "github.com";
    // Only append ".git" when the path does not already end with it.
    const suffix = sanitizedPath.endsWith(".git") ? "" : ".git";
    const repoUrl = `https://${host}/${sanitizedPath}${suffix}`;
    const ref = rawRef?.trim() || void 0;
    return { repoUrl, ref, inferredId };
  }
  // Full URL: accepted as-is for https:/ssh:; only the id is inferred.
  try {
    const url = new URL(trimmed);
    if (url.protocol === "https:" || url.protocol === "ssh:") {
      const parts = url.pathname.split("/").filter(Boolean);
      const inferredId = parts.pop()?.replace(/\.git$/i, "");
      return {
        repoUrl: trimmed,
        ref: void 0,
        inferredId
      };
    }
  } catch {
  }
  // Fallback: pass the spec through untouched; later validation decides.
  return { repoUrl: trimmed, ref: void 0, inferredId: void 0 };
};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
export declare const getGitignoreStatus: (rootDir: string, cacheDir: string) => Promise<{
|
|
2
|
+
gitignorePath: any;
|
|
3
|
+
entry: null;
|
|
4
|
+
hasEntry: boolean;
|
|
5
|
+
} | {
|
|
6
|
+
gitignorePath: any;
|
|
7
|
+
entry: string;
|
|
8
|
+
hasEntry: boolean;
|
|
9
|
+
}>;
|
|
10
|
+
export declare const ensureGitignoreEntry: (rootDir: string, cacheDir: string) => Promise<{
|
|
11
|
+
updated: boolean;
|
|
12
|
+
gitignorePath: any;
|
|
13
|
+
entry: null;
|
|
14
|
+
} | {
|
|
15
|
+
updated: boolean;
|
|
16
|
+
gitignorePath: any;
|
|
17
|
+
entry: string;
|
|
18
|
+
}>;
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { access, readFile, writeFile } from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { toPosixPath } from "#core/paths";
|
|
4
|
+
// Resolve to true when `target` is accessible on disk, false otherwise.
// Never rejects — any access failure maps to false.
const exists = (target) =>
  access(target).then(
    () => true,
    () => false
  );
|
|
12
|
+
// Canonicalize one .gitignore line for comparison: blanks, comments ("#")
// and negations ("!") become "", a leading "/" or "./" and trailing slashes
// are stripped, and separators are forced to forward slashes.
const normalizeEntry = (value) => {
  const trimmed = value.trim();
  const isIgnorable =
    trimmed.length === 0 || trimmed.startsWith("#") || trimmed.startsWith("!");
  if (isIgnorable) {
    return "";
  }
  const stripped = trimmed
    .replace(/^\//, "")
    .replace(/^\.\//, "")
    .replace(/\/+$/, "");
  return toPosixPath(stripped);
};
|
|
22
|
+
// Compute the .gitignore entry (path relative to rootDir) for cacheDir, or
// null when cacheDir lies outside rootDir. Returns "." when cacheDir is
// rootDir itself.
const resolveGitignoreEntry = (rootDir, cacheDir) => {
  const absolute = path.isAbsolute(cacheDir)
    ? path.resolve(cacheDir)
    : path.resolve(rootDir, cacheDir);
  const relative = path.relative(rootDir, absolute);
  if (path.isAbsolute(relative)) {
    return null; // different root/drive
  }
  if (relative === ".." || relative.startsWith(`..${path.sep}`)) {
    return null; // escapes rootDir
  }
  return relative === "" ? "." : relative;
};
|
|
31
|
+
// Inspect the project's .gitignore and report whether the cache directory is
// already listed. `entry` (the value callers would append, with trailing "/")
// is null when cacheDir is outside rootDir or normalizes to nothing.
export const getGitignoreStatus = async (rootDir, cacheDir) => {
  const gitignorePath = path.resolve(rootDir, ".gitignore");
  const rawEntry = resolveGitignoreEntry(rootDir, cacheDir);
  const normalizedEntry = rawEntry ? normalizeEntry(rawEntry) : "";
  if (!normalizedEntry) {
    return { gitignorePath, entry: null, hasEntry: false };
  }
  // Missing .gitignore is treated as empty rather than an error.
  const contents = (await exists(gitignorePath))
    ? await readFile(gitignorePath, "utf8")
    : "";
  const existing = new Set(
    contents
      .split(/\r?\n/)
      .map((line) => normalizeEntry(line))
      .filter(Boolean)
  );
  return {
    gitignorePath,
    entry: `${normalizedEntry}/`,
    hasEntry: existing.has(normalizedEntry)
  };
};
|
|
55
|
+
// Append the cache-directory entry to .gitignore when it is not already
// present. No-op (updated: false) when the entry cannot be computed or
// already exists; otherwise writes the file and reports updated: true.
export const ensureGitignoreEntry = async (rootDir, cacheDir) => {
  const { gitignorePath, entry, hasEntry } = await getGitignoreStatus(rootDir, cacheDir);
  if (!entry) {
    return { updated: false, gitignorePath, entry: null };
  }
  if (hasEntry) {
    return { updated: false, gitignorePath, entry };
  }
  const contents = (await exists(gitignorePath))
    ? await readFile(gitignorePath, "utf8")
    : "";
  // Separate from existing content with a newline unless the file is empty
  // or already ends with one.
  const needsSeparator = contents.length > 0 && !contents.endsWith("\n");
  const next = `${contents}${needsSeparator ? "\n" : ""}${entry}\n`;
  await writeFile(gitignorePath, next, "utf8");
  return { updated: true, gitignorePath, entry };
};
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
export declare const DEFAULT_TOC_FILENAME = "TOC.md";
|
|
2
|
+
export declare const toPosixPath: (value: string) => string;
|
|
3
|
+
export declare const resolveTargetDir: (configPath: string, targetDir: string) => any;
|
|
4
|
+
export declare const resolveCacheDir: (configPath: string, cacheDir: string, overrideCacheDir?: string) => any;
|
|
5
|
+
export declare const getCacheLayout: (cacheDir: string, sourceId: string) => {
|
|
6
|
+
cacheDir: string;
|
|
7
|
+
sourceDir: any;
|
|
8
|
+
};
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
// File name used for the generated table of contents.
export const DEFAULT_TOC_FILENAME = "TOC.md";
// Convert backslash separators to forward slashes (no-op on POSIX paths).
export const toPosixPath = (value) => value.split("\\").join("/");
|
|
4
|
+
// Resolve targetDir relative to the config file's directory, rejecting paths
// that escape the project directory or contain a .git segment. Returns the
// absolute resolved path.
export const resolveTargetDir = (configPath, targetDir) => {
  const configDir = path.dirname(path.resolve(configPath));
  const resolved = path.resolve(configDir, targetDir);
  const relative = path.relative(configDir, resolved);
  const escapes =
    path.isAbsolute(relative) ||
    relative === ".." ||
    relative.startsWith(`..${path.sep}`);
  if (escapes) {
    throw new Error(
      `targetDir '${targetDir}' escapes project directory. Must be within ${configDir}.`
    );
  }
  const hasGitSegment = toPosixPath(relative)
    .split("/")
    .filter(Boolean)
    .includes(".git");
  if (hasGitSegment) {
    throw new Error("targetDir cannot be within .git directory.");
  }
  return resolved;
};
|
|
20
|
+
// Resolve the absolute cache directory: an explicit override wins; otherwise
// cacheDir is interpreted relative to the config file's directory.
export const resolveCacheDir = (configPath, cacheDir, overrideCacheDir) =>
  overrideCacheDir
    ? path.resolve(overrideCacheDir)
    : path.resolve(path.dirname(configPath), cacheDir);
|
|
27
|
+
/**
 * Describe the on-disk layout for one cached source.
 *
 * Fix: the original computed an unused `_reposDir` (`path.join(cacheDir,
 * "repos")`) that was never returned or referenced — dead code removed.
 *
 * @param {string} cacheDir - Absolute cache root directory.
 * @param {string} sourceId - Source identifier (a single path segment).
 * @returns {{ cacheDir: string, sourceDir: string }} The cache root and the
 *   directory holding this source's materialized docs.
 */
export const getCacheLayout = (cacheDir, sourceId) => ({
  cacheDir,
  sourceDir: path.join(cacheDir, sourceId)
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Validate that `value` is a filesystem-safe source id (alphanumerics,
// hyphens, underscores; bounded length; no reserved names) and return it;
// throws an Error naming `label` otherwise.
export declare const assertSafeSourceId: (value: unknown, label: string) => string;
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
// Ids are used verbatim as directory names, so they must be a single safe
// path segment.
const SAFE_ID_PATTERN = /^[a-zA-Z0-9_-]+$/;
const MAX_ID_LENGTH = 200;
// Unsafe directory names: "."/".." are path navigation; the rest are Windows
// reserved device names. Fix: the original listed only COM1 and LPT1, but
// Windows reserves COM1-COM9 and LPT1-LPT9 — all are included now.
const RESERVED_NAMES = /* @__PURE__ */ new Set([
  ".",
  "..",
  "CON",
  "PRN",
  "AUX",
  "NUL",
  ...Array.from({ length: 9 }, (_, i) => `COM${i + 1}`),
  ...Array.from({ length: 9 }, (_, i) => `LPT${i + 1}`)
]);
/**
 * Assert that `value` is safe to use as a cache directory name.
 *
 * @param {unknown} value - Candidate source id.
 * @param {string} label - Name used in error messages (e.g. "source id").
 * @returns {string} The validated id, unchanged.
 * @throws {Error} When the id is empty, too long, contains characters outside
 *   [a-zA-Z0-9_-], or matches a reserved name (case-insensitive).
 */
export const assertSafeSourceId = (value, label) => {
  if (typeof value !== "string" || value.length === 0) {
    throw new Error(`${label} must be a non-empty string.`);
  }
  if (value.length > MAX_ID_LENGTH) {
    throw new Error(`${label} exceeds maximum length of ${MAX_ID_LENGTH}.`);
  }
  if (!SAFE_ID_PATTERN.test(value)) {
    throw new Error(
      `${label} must contain only alphanumeric characters, hyphens, and underscores.`
    );
  }
  if (RESERVED_NAMES.has(value.toUpperCase())) {
    throw new Error(`${label} uses reserved name '${value}'.`);
  }
  return value;
};
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Options for a documentation sync run.
 * NOTE(review): field semantics below are inferred from names and CLI
 * conventions — confirm against the sync command implementation.
 */
export type SyncOptions = {
    /** Path to the config file; presumably auto-discovered when omitted. */
    configPath?: string;
    /** Overrides the configured cache directory. */
    cacheDirOverride?: string;
    json: boolean;
    lockOnly: boolean;
    offline: boolean;
    failOnMiss: boolean;
    verbose?: boolean;
    concurrency?: number;
    /** Presumably restricts the run to the listed source ids — verify. */
    sourceFilter?: string[];
    timeoutMs?: number;
};
/** Outcome of syncing a single source. */
export type SyncResult = {
    id: string;
    repo: string;
    ref: string;
    resolvedCommit: string;
    /** null presumably means the source has no lock entry yet — verify. */
    lockCommit: string | null;
    lockRulesSha256?: string;
    /** One of three terminal states for the source in this run. */
    status: "up-to-date" | "changed" | "missing";
    bytes?: number;
    fileCount?: number;
    manifestSha256?: string;
    rulesSha256?: string;
};
|
|
File without changes
|
package/package.json
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
"name": "docs-cache",
|
|
3
3
|
"private": false,
|
|
4
4
|
"type": "module",
|
|
5
|
-
"version": "0.
|
|
5
|
+
"version": "0.5.2",
|
|
6
6
|
"description": "CLI for deterministic local caching of external documentation for agents and tools",
|
|
7
7
|
"author": "Frederik Bosch",
|
|
8
8
|
"license": "MIT",
|
|
@@ -33,32 +33,70 @@
|
|
|
33
33
|
"files": [
|
|
34
34
|
"bin",
|
|
35
35
|
"dist/cli.mjs",
|
|
36
|
-
"dist/
|
|
36
|
+
"dist/esm/**/*.mjs",
|
|
37
|
+
"dist/esm/**/*.d.ts",
|
|
37
38
|
"dist/lock.mjs",
|
|
38
39
|
"dist/shared/*.mjs",
|
|
39
40
|
"README.md",
|
|
40
41
|
"LICENSE"
|
|
41
42
|
],
|
|
43
|
+
"imports": {
|
|
44
|
+
"#cache/*": {
|
|
45
|
+
"types": "./dist/esm/cache/*.d.ts",
|
|
46
|
+
"default": "./dist/esm/cache/*.mjs"
|
|
47
|
+
},
|
|
48
|
+
"#cli/*": {
|
|
49
|
+
"types": "./dist/esm/cli/*.d.ts",
|
|
50
|
+
"default": "./dist/esm/cli/*.mjs"
|
|
51
|
+
},
|
|
52
|
+
"#commands/*": {
|
|
53
|
+
"types": "./dist/esm/commands/*.d.ts",
|
|
54
|
+
"default": "./dist/esm/commands/*.mjs"
|
|
55
|
+
},
|
|
56
|
+
"#core/*": {
|
|
57
|
+
"types": "./dist/esm/*.d.ts",
|
|
58
|
+
"default": "./dist/esm/*.mjs"
|
|
59
|
+
},
|
|
60
|
+
"#config": {
|
|
61
|
+
"types": "./dist/esm/config/index.d.ts",
|
|
62
|
+
"default": "./dist/esm/config/index.mjs"
|
|
63
|
+
},
|
|
64
|
+
"#config/*": {
|
|
65
|
+
"types": "./dist/esm/config/*.d.ts",
|
|
66
|
+
"default": "./dist/esm/config/*.mjs"
|
|
67
|
+
},
|
|
68
|
+
"#git/*": {
|
|
69
|
+
"types": "./dist/esm/git/*.d.ts",
|
|
70
|
+
"default": "./dist/esm/git/*.mjs"
|
|
71
|
+
},
|
|
72
|
+
"#types/*": {
|
|
73
|
+
"types": "./dist/esm/types/*.d.ts",
|
|
74
|
+
"default": "./dist/esm/types/*.mjs"
|
|
75
|
+
}
|
|
76
|
+
},
|
|
42
77
|
"dependencies": {
|
|
43
78
|
"@clack/prompts": "^1.0.0",
|
|
44
79
|
"cac": "^6.7.14",
|
|
80
|
+
"cli-truncate": "^4.0.0",
|
|
45
81
|
"execa": "^9.6.1",
|
|
46
82
|
"fast-glob": "^3.3.2",
|
|
83
|
+
"log-update": "^7.0.2",
|
|
47
84
|
"picocolors": "^1.1.1",
|
|
48
|
-
"picomatch": "^
|
|
85
|
+
"picomatch": "^4.0.3",
|
|
49
86
|
"zod": "^4.3.6"
|
|
50
87
|
},
|
|
51
88
|
"devDependencies": {
|
|
52
|
-
"@biomejs/biome": "^2.3.
|
|
53
|
-
"@size-limit/file": "^
|
|
54
|
-
"@types/node": "^
|
|
89
|
+
"@biomejs/biome": "^2.3.14",
|
|
90
|
+
"@size-limit/file": "^12.0.0",
|
|
91
|
+
"@types/node": "^25.2.0",
|
|
55
92
|
"bumpp": "^10.3.2",
|
|
56
93
|
"c8": "^10.1.3",
|
|
57
94
|
"jiti": "^2.5.1",
|
|
58
95
|
"lint-staged": "^16.2.7",
|
|
59
96
|
"simple-git-hooks": "^2.13.1",
|
|
60
|
-
"size-limit": "^
|
|
97
|
+
"size-limit": "^12.0.0",
|
|
61
98
|
"tinybench": "^6.0.0",
|
|
99
|
+
"ts-complex": "^1.0.0",
|
|
62
100
|
"typescript": "^5.9.3",
|
|
63
101
|
"unbuild": "^3.6.1"
|
|
64
102
|
},
|
|
@@ -68,6 +106,11 @@
|
|
|
68
106
|
"limit": "10 kB"
|
|
69
107
|
}
|
|
70
108
|
],
|
|
109
|
+
"complexity": {
|
|
110
|
+
"maxCyclomatic": 20,
|
|
111
|
+
"minMaintainability": 60,
|
|
112
|
+
"top": 10
|
|
113
|
+
},
|
|
71
114
|
"simple-git-hooks": {
|
|
72
115
|
"pre-commit": "pnpm lint-staged && pnpm typecheck"
|
|
73
116
|
},
|
|
@@ -84,6 +127,7 @@
|
|
|
84
127
|
"test": "pnpm build && node --test",
|
|
85
128
|
"test:coverage": "pnpm build && c8 --include dist --exclude bin --reporter=text node --test",
|
|
86
129
|
"bench": "pnpm build && node scripts/benchmarks/run.mjs",
|
|
130
|
+
"complexity": "node scripts/complexity/run.mjs",
|
|
87
131
|
"schema:build": "node scripts/generate-schema.mjs",
|
|
88
132
|
"size": "size-limit",
|
|
89
133
|
"test:watch": "node --test --watch",
|
package/dist/chunks/add.mjs
DELETED
|
@@ -1,3 +0,0 @@
|
|
|
1
|
-
import{readFile as D,writeFile as k,access as E}from"node:fs/promises";import l from"node:path";import{v,D as P,a as A,s as F,w as I,b as U,r as S}from"../shared/docs-cache.CaOcl4OS.mjs";import{e as N}from"../shared/docs-cache.BOr9BnyP.mjs";import{r as O}from"../shared/docs-cache.CQiaFDb_.mjs";import{r as x}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const u=async o=>{try{return await E(o),!0}catch{return!1}},y="package.json",C=async o=>{const i=await D(o,"utf8"),e=JSON.parse(i),a=e["docs-cache"];return a?{parsed:e,config:v(a)}:{parsed:e,config:null}},J=async o=>{if(o){const a=S(o);return{resolvedPath:a,mode:l.basename(a)===y?"package":"config"}}const i=S();if(await u(i))return{resolvedPath:i,mode:"config"};const e=l.resolve(process.cwd(),y);return await u(e)&&(await C(e)).config?{resolvedPath:e,mode:"package"}:{resolvedPath:i,mode:"config"}},T=async o=>{const i=await J(o.configPath),e=i.resolvedPath;let a=P,t=null,g=null,f=!1;if(await u(e))if(i.mode==="package"){const r=await C(e);g=r.parsed,t=r.config,a=t??P,f=!!t}else{const r=await D(e,"utf8");t=JSON.parse(r.toString()),a=v(t),f=!0}const b="https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",p=new Set(a.sources.map(r=>r.id)),m=[],d=o.entries.map(r=>{const n=x(r.repo),w=r.id||n.inferredId;if(!w)throw new Error("Unable to infer id. Provide an explicit id.");const c=A(w,"source id");return p.has(c)?(m.push(c),null):(p.add(c),r.targetDir&&O(e,r.targetDir),{id:c,repo:n.repoUrl,...r.targetDir?{targetDir:r.targetDir}:{},...n.ref?{ref:n.ref}:{}})}).filter(Boolean);if(d.length===0)throw new Error("All sources already exist in config.");const s={$schema:b,sources:[...a.sources,...d]};if(t?.cacheDir&&(s.cacheDir=t.cacheDir),t?.defaults&&(s.defaults=t.defaults),i.mode==="package"){const r=g??{};r["docs-cache"]=F(s),await k(e,`${JSON.stringify(r,null,2)}
|
|
2
|
-
`,"utf8")}else await I(e,s);const h=f?null:await N(l.dirname(e),t?.cacheDir??U);return{configPath:e,sources:d,skipped:m,created:!0,gitignoreUpdated:h?.updated??!1,gitignorePath:h?.gitignorePath??null}};export{T as addSources};
|
|
3
|
-
//# sourceMappingURL=add.mjs.map
|
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{rm as n,readdir as o,stat as m}from"node:fs/promises";import u from"node:path";import{r as f,e as w}from"../shared/docs-cache.kK1DPQIQ.mjs";import"node:os";const c=async t=>{try{const e=await o(t,{withFileTypes:!0});let r=0;for(const a of e){const i=u.join(t,a.name);if(a.isDirectory())r+=await c(i);else{const s=await m(i);r+=s.size}}return r}catch{return 0}},h=async t=>{try{return(await o(t)).length}catch{return 0}},p=async()=>{const t=f();if(!await w(t))return{removed:!1,cacheDir:t};const e=await h(t),r=await c(t);return await n(t,{recursive:!0,force:!0}),{removed:!0,cacheDir:t,repoCount:e,bytesFreed:r}};export{p as cleanGitCache};
|
|
2
|
-
//# sourceMappingURL=clean-git-cache.mjs.map
|
package/dist/chunks/clean.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{rm as o,access as i}from"node:fs/promises";import{l as s,b as m}from"../shared/docs-cache.CaOcl4OS.mjs";import{c as n}from"../shared/docs-cache.CQiaFDb_.mjs";import"node:path";import"zod";import"node:process";import"cac";import"picocolors";const f=async r=>{try{return await i(r),!0}catch{return!1}},p=async r=>{const{config:t,resolvedPath:c}=await s(r.configPath),a=n(c,t.cacheDir??m,r.cacheDirOverride),e=await f(a);return e&&await o(a,{recursive:!0,force:!0}),{cacheDir:a,removed:e}};export{p as cleanCache};
|
|
2
|
-
//# sourceMappingURL=clean.mjs.map
|
package/dist/chunks/init.mjs
DELETED
|
@@ -1,3 +0,0 @@
|
|
|
1
|
-
import{readFile as C,writeFile as N,access as S}from"node:fs/promises";import c from"node:path";import{confirm as V,isCancel as G,select as J,text as U}from"@clack/prompts";import{c as _,b as r,s as L,w as T}from"../shared/docs-cache.CaOcl4OS.mjs";import{g as H,e as k}from"../shared/docs-cache.BOr9BnyP.mjs";import"zod";import"../shared/docs-cache.CQiaFDb_.mjs";import"node:process";import"cac";import"picocolors";const h=async n=>{try{return await S(n),!0}catch{return!1}},M=async(n,s={})=>{const y=s.confirm??V,l=s.isCancel??G,x=s.select??J,F=s.text??U,f=n.cwd??process.cwd(),d=c.resolve(f,_),i=c.resolve(f,"package.json"),g=[];if(await h(d)&&g.push(d),await h(i)){const e=await C(i,"utf8");JSON.parse(e)["docs-cache"]&&g.push(i)}if(g.length>0)throw new Error(`Config already exists at ${g.join(", ")}. Init aborted.`);let b=!1;if(await h(i)){const e=await C(i,"utf8");if(!JSON.parse(e)["docs-cache"]){const o=await x({message:"Config location",options:[{value:"config",label:"docs.config.json"},{value:"package",label:"package.json"}],initialValue:"config"});if(l(o))throw new Error("Init cancelled.");b=o==="package"}}const $=b?i:d,v=n.cacheDirOverride??r,u=await F({message:"Cache directory",initialValue:v});if(l(u))throw new Error("Init cancelled.");const A=u||r,D=await y({message:"Generate TOC.md (table of contents with links to all documentation)",initialValue:!0});if(l(D))throw new Error("Init cancelled.");const I=await H(f,A);let P=!1;if(I.entry&&!I.hasEntry){const e=await y({message:"Add cache directory to .gitignore",initialValue:!0});if(l(e))throw new Error("Init cancelled.");P=e}const a={configPath:$,cacheDir:u,toc:D,gitignore:P},t=c.resolve(f,a.configPath);if(c.basename(t)==="package.json"){const e=await C(t,"utf8"),o=JSON.parse(e);if(o["docs-cache"])throw new Error(`docs-cache config already exists in ${t}.`);const 
p={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},E=a.cacheDir||r;E!==r&&(p.cacheDir=E),a.toc||(p.defaults={toc:!1}),o["docs-cache"]=L(p),await N(t,`${JSON.stringify(o,null,2)}
|
|
2
|
-
`,"utf8");const O=a.gitignore?await k(c.dirname(t),E):null;return{configPath:t,created:!0,gitignoreUpdated:O?.updated??!1,gitignorePath:O?.gitignorePath??null}}if(await h(t))throw new Error(`Config already exists at ${t}.`);const w={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},m=a.cacheDir||r;m!==r&&(w.cacheDir=m),a.toc||(w.defaults={toc:!1}),await T(t,w);const j=a.gitignore?await k(c.dirname(t),m):null;return{configPath:t,created:!0,gitignoreUpdated:j?.updated??!1,gitignorePath:j?.gitignorePath??null}};export{M as initConfig};
|
|
3
|
-
//# sourceMappingURL=init.mjs.map
|
package/dist/chunks/prune.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{readdir as p,rm as f,access as h}from"node:fs/promises";import u from"node:path";import{l as d,b as D}from"../shared/docs-cache.CaOcl4OS.mjs";import{c as v}from"../shared/docs-cache.CQiaFDb_.mjs";import"zod";import"node:process";import"cac";import"picocolors";const w=async t=>{try{return await h(t),!0}catch{return!1}},l=async t=>{const{config:c,resolvedPath:s,sources:a}=await d(t.configPath),e=v(s,c.cacheDir??D,t.cacheDirOverride);if(!await w(e))return{cacheDir:e,removed:[],kept:a.map(r=>r.id)};const n=new Set(a.map(r=>r.id)),m=await p(e,{withFileTypes:!0}),o=[];for(const r of m){if(!r.isDirectory())continue;const i=r.name;n.has(i)||i.startsWith(".tmp-")||(await f(u.join(e,i),{recursive:!0,force:!0}),o.push(i))}return{cacheDir:e,removed:o,kept:a.map(r=>r.id)}};export{l as pruneCache};
|
|
2
|
-
//# sourceMappingURL=prune.mjs.map
|
package/dist/chunks/remove.mjs
DELETED
|
@@ -1,3 +0,0 @@
|
|
|
1
|
-
import{readFile as w,writeFile as N,rm as $,access as k}from"node:fs/promises";import v from"node:path";import{v as D,D as E,s as F,w as I,r as P}from"../shared/docs-cache.CaOcl4OS.mjs";import{r as O}from"../shared/docs-cache.CQiaFDb_.mjs";import{r as U}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const f=async a=>{try{return await k(a),!0}catch{return!1}},y="package.json",S=async a=>{const s=await w(a,"utf8"),o=JSON.parse(s),r=o["docs-cache"];return r?{parsed:o,config:D(r)}:{parsed:o,config:null}},b=async a=>{if(a){const r=P(a);return{resolvedPath:r,mode:v.basename(r)===y?"package":"config"}}const s=P();if(await f(s))return{resolvedPath:s,mode:"config"};const o=v.resolve(process.cwd(),y);return await f(o)&&(await S(o)).config?{resolvedPath:o,mode:"package"}:{resolvedPath:s,mode:"config"}},J=async a=>{if(a.ids.length===0)throw new Error("No sources specified to remove.");const s=await b(a.configPath),o=s.resolvedPath;let r=E,t=null,d=null;if(await f(o))if(s.mode==="package"){const e=await S(o);if(d=e.parsed,t=e.config,!t)throw new Error(`Missing docs-cache config in ${o}.`);r=t}else{const e=await w(o,"utf8");t=JSON.parse(e.toString()),r=D(t)}else throw new Error(`Config not found at ${o}.`);const u=new Map(r.sources.map(e=>[e.id,e])),g=new Map(r.sources.map(e=>[e.repo,e])),n=new Set,l=[];for(const e of a.ids){if(u.has(e)){n.add(e);continue}const i=U(e);if(i.repoUrl&&g.has(i.repoUrl)){const p=g.get(i.repoUrl);p&&n.add(p.id);continue}if(i.inferredId&&u.has(i.inferredId)){n.add(i.inferredId);continue}l.push(e)}const C=r.sources.filter(e=>!n.has(e.id)),h=r.sources.filter(e=>n.has(e.id)).map(e=>e.id),M=r.sources.filter(e=>n.has(e.id));if(h.length===0)throw new Error("No matching sources found to remove.");const 
c={$schema:t?.$schema??"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:C};if(t?.cacheDir&&(c.cacheDir=t.cacheDir),t?.defaults&&(c.defaults=t.defaults),t?.targetMode&&(c.targetMode=t.targetMode),s.mode==="package"){const e=d??{};e["docs-cache"]=F(c),await N(o,`${JSON.stringify(e,null,2)}
|
|
2
|
-
`,"utf8")}else await I(o,c);const m=[];for(const e of M){if(!e.targetDir)continue;const i=O(o,e.targetDir);await $(i,{recursive:!0,force:!0}),m.push({id:e.id,targetDir:i})}return{configPath:o,removed:h,missing:l,targetsRemoved:m}};export{J as removeSources};
|
|
3
|
-
//# sourceMappingURL=remove.mjs.map
|
package/dist/chunks/status.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{access as y}from"node:fs/promises";import t from"picocolors";import{u as o,b as u,c as D,g as w}from"../shared/docs-cache.CQiaFDb_.mjs";import{l as C,b as L}from"../shared/docs-cache.CaOcl4OS.mjs";import{DEFAULT_LOCK_FILENAME as v,resolveLockPath as x,readLock as P}from"../lock.mjs";import"node:process";import"cac";import"node:path";import"zod";const h=async r=>{try{return await y(r),!0}catch{return!1}},A=async r=>{const{config:c,resolvedPath:a,sources:n}=await C(r.configPath),s=D(a,c.cacheDir??L,r.cacheDirOverride),l=await h(s),e=x(a),i=await h(e);let d=!1,f=null;if(i)try{f=await P(e),d=!0}catch{d=!1}const E=await Promise.all(n.map(async m=>{const p=w(s,m.id),g=await h(p.sourceDir),k=f?.sources?.[m.id]??null;return{id:m.id,docsPath:p.sourceDir,docsExists:g,lockEntry:k}}));return{configPath:a,cacheDir:s,cacheDirExists:l,lockPath:e,lockExists:i,lockValid:d,sources:E}},_=r=>{const c=o.path(r.cacheDir),a=r.cacheDirExists?t.green("present"):t.red("missing"),n=r.lockExists?r.lockValid?t.green("valid"):t.red("invalid"):t.yellow("missing");if(o.header("Cache",`${c} (${a})`),o.header("Lock",`${v} (${n})`),r.sources.length===0){o.line(),o.line(`${u.warn} No sources configured.`);return}o.line();for(const s of r.sources){const l=s.docsExists?u.success:u.error,e=s.lockEntry?t.green("locked"):t.yellow("new"),i=o.hash(s.lockEntry?.resolvedCommit);o.item(l,s.id.padEnd(20),`${e.padEnd(10)} ${i}`)}};export{A as getStatus,_ as printStatus};
|
|
2
|
-
//# sourceMappingURL=status.mjs.map
|
package/dist/chunks/sync.mjs
DELETED
|
@@ -1,9 +0,0 @@
|
|
|
1
|
-
import{createHash as N,randomBytes as Me}from"node:crypto";import{mkdtemp as ie,mkdir as b,rm as j,readFile as H,writeFile as se,access as _,rename as J,open as q,lstat as Pe,symlink as xe,readdir as Ee,cp as Te}from"node:fs/promises";import g from"node:path";import F from"picocolors";import{g as Oe,u as $,b as T,t as K,D as V,r as Z,c as Fe}from"../shared/docs-cache.CQiaFDb_.mjs";import{a as Q,l as ke,D as Re,b as be}from"../shared/docs-cache.CaOcl4OS.mjs";import je,{tmpdir as He}from"node:os";import{pathToFileURL as Ae}from"node:url";import{execa as Ie}from"execa";import{g as B,M as U,v as ae}from"./verify.mjs";import{e as Le,r as ne}from"../shared/docs-cache.kK1DPQIQ.mjs";import{execFile as Ne}from"node:child_process";import{promisify as _e}from"node:util";import{writeLock as Be,resolveLockPath as Ue,readLock as ze}from"../lock.mjs";import{createWriteStream as ce,createReadStream as Ye,constants as le}from"node:fs";import{pipeline as Ge}from"node:stream/promises";import ue from"fast-glob";const Xe=/^(https?:\/\/)([^@]+)@/i,A=e=>e.replace(Xe,"$1***@"),We=_e(Ne),Je=3e4,qe=new Set(["file:","ftp:","data:","javascript:"]),Ke=e=>{try{const r=new URL(e);if(qe.has(r.protocol))throw new Error(`Blocked protocol '${r.protocol}' in repo URL '${A(e)}'.`)}catch(r){if(r instanceof TypeError)return;throw r}},Ve=e=>{Ke(e);const r=e.match(/^[^@]+@([^:]+):/);if(r)return r[1]||null;try{const t=new URL(e);return t.protocol!=="https:"&&t.protocol!=="ssh:"?null:t.hostname||null}catch{return null}},fe=(e,r)=>{const t=Ve(e);if(!t)throw new Error(`Unsupported repo URL '${A(e)}'. Use HTTPS or SSH.`);const i=t.toLowerCase();if(!r.map(o=>o.toLowerCase()).some(o=>i===o||i.endsWith(`.${o}`)))throw new Error(`Host '${t}' is not in allowHosts for '${A(e)}'.`)},me=e=>{const r=e.trim().split(`
|
|
2
|
-
`).filter(Boolean);return r.length===0?null:r[0].split(/\s+/)[0]||null},Ze=async e=>{fe(e.repo,e.allowHosts);const r=A(e.repo);e.logger?.(`git ls-remote ${r} ${e.ref}`);const{stdout:t}=await We("git",["ls-remote",e.repo,e.ref],{timeout:e.timeoutMs??Je,maxBuffer:1024*1024}),i=me(t);if(!i)throw new Error(`Unable to resolve ref '${e.ref}' for ${A(e.repo)}.`);return{repo:e.repo,ref:e.ref,resolvedCommit:i}},Qe=12e4,z=1,et=3,tt=100,k=async(e,r)=>{const t=process.env.PATH??process.env.Path,i=process.env.PATHEXT??(process.platform==="win32"?".COM;.EXE;.BAT;.CMD":void 0),o=["-c","core.hooksPath=/dev/null","-c","submodule.recurse=false","-c","protocol.ext.allow=never"];r?.allowFileProtocol?o.push("-c","protocol.file.allow=always"):o.push("-c","protocol.file.allow=never");const n=[...o,...e],s=`git ${n.join(" ")}`;r?.logger?.(s);const a=Ie("git",n,{cwd:r?.cwd,timeout:r?.timeoutMs??Qe,maxBuffer:10*1024*1024,stdout:"pipe",stderr:"pipe",env:{...process.env,...t?{PATH:t,Path:t}:{},...i?{PATHEXT:i}:{},HOME:process.env.HOME,USER:process.env.USER,USERPROFILE:process.env.USERPROFILE,TMPDIR:process.env.TMPDIR,TMP:process.env.TMP,TEMP:process.env.TEMP,SYSTEMROOT:process.env.SYSTEMROOT,WINDIR:process.env.WINDIR,SSH_AUTH_SOCK:process.env.SSH_AUTH_SOCK,SSH_AGENT_PID:process.env.SSH_AGENT_PID,HTTP_PROXY:process.env.HTTP_PROXY,HTTPS_PROXY:process.env.HTTPS_PROXY,NO_PROXY:process.env.NO_PROXY,GIT_TERMINAL_PROMPT:"0",GIT_CONFIG_NOSYSTEM:"1",GIT_CONFIG_NOGLOBAL:"1",...process.platform==="win32"?{}:{GIT_ASKPASS:"/bin/false"}}});if(r?.logger){const l=u=>{u&&u.on("data",f=>{const m=f instanceof Buffer?f.toString("utf8"):String(f);for(const y of m.split(/\r?\n/))y&&r.logger?.(`${s} | ${y}`)})};l(a.stdout),l(a.stderr)}await a},I=async(e,r=et)=>{for(let t=0;t<=r;t+=1)try{await j(e,{recursive:!0,force:!0});return}catch(i){const o=B(i);if(o!=="ENOTEMPTY"&&o!=="EBUSY"&&o!=="EPERM"||t===r)throw i;await new 
Promise(n=>setTimeout(n,tt*(t+1)))}},rt=e=>N("sha256").update(e).digest("hex").substring(0,16),ot=e=>{const r=rt(e);return g.join(ne(),r)},it=async e=>{try{return await k(["rev-parse","--git-dir"],{cwd:e}),!0}catch{return!1}},de=async e=>{try{const r=g.join(e,".git","config"),t=(await H(r,"utf8")).toLowerCase();return t.includes("partialclone")||t.includes("promisor")||t.includes("partialclonefilter")}catch{return!1}},ee=async(e,r,t)=>{try{await k(["-C",e,"cat-file","-e",`${r}^{commit}`],{timeoutMs:t?.timeoutMs,allowFileProtocol:t?.allowFileProtocol,logger:t?.logger});return}catch{}await k(["-C",e,"fetch","origin",r],{timeoutMs:t?.timeoutMs,allowFileProtocol:t?.allowFileProtocol,logger:t?.logger})},he=e=>{if(!e||e.length===0)return!1;for(const r of e)if(!r||r.includes("**"))return!1;return!0},pe=e=>{if(!e)return[];const r=e.map(t=>{const i=t.replace(/\\/g,"/"),o=i.indexOf("*");return(o===-1?i:i.slice(0,o)).replace(/\/+$|\/$/,"")});return Array.from(new Set(r.filter(t=>t.length>0)))},te=async(e,r)=>{const t=/^[0-9a-f]{7,40}$/i.test(e.ref),i=he(e.include),o=["clone","--no-checkout","--depth",String(z),"--recurse-submodules=no","--no-tags"];if(i&&o.push("--sparse"),t||(o.push("--single-branch"),e.ref!=="HEAD"&&o.push("--branch",e.ref)),o.push(e.repo,r),await k(o,{timeoutMs:e.timeoutMs,logger:e.logger}),await ee(r,e.resolvedCommit,{timeoutMs:e.timeoutMs,logger:e.logger}),i){const n=pe(e.include);n.length>0&&await k(["-C",r,"sparse-checkout","set",...n],{timeoutMs:e.timeoutMs,logger:e.logger})}await k(["-C",r,"checkout","--quiet","--detach",e.resolvedCommit],{timeoutMs:e.timeoutMs,logger:e.logger})},st=async(e,r)=>{const t=ot(e.repo),i=await Le(t),o=i&&await it(t),n=/^[0-9a-f]{7,40}$/i.test(e.ref),s=he(e.include);let a=o;const l=ne();if(await b(l,{recursive:!0}),o)if(await de(t))await I(t),await te(e,t),a=!1;else try{const m=["fetch","origin"];if(n)m.push("--depth",String(z));else{const 
y=e.ref==="HEAD"?"HEAD":`${e.ref}:refs/remotes/origin/${e.ref}`;m.push(y,"--depth",String(z))}await k(["-C",t,...m],{timeoutMs:e.timeoutMs,logger:e.logger}),await ee(t,e.resolvedCommit,{timeoutMs:e.timeoutMs,logger:e.logger})}catch{await I(t),await te(e,t),a=!1}else i&&await I(t),await te(e,t),a=!1;await b(r,{recursive:!0});const u=["clone","--no-checkout","--depth",String(z),"--recurse-submodules=no","--no-tags"];await de(t)&&u.splice(2,0,"--filter=blob:none"),s&&u.push("--sparse"),n||(u.push("--single-branch"),e.ref!=="HEAD"&&u.push("--branch",e.ref));const f=Ae(t).href;if(u.push(f,r),await k(u,{timeoutMs:e.timeoutMs,allowFileProtocol:!0,logger:e.logger}),s){const m=pe(e.include);m.length>0&&await k(["-C",r,"sparse-checkout","set",...m],{timeoutMs:e.timeoutMs,allowFileProtocol:!0,logger:e.logger})}return await ee(r,e.resolvedCommit,{timeoutMs:e.timeoutMs,allowFileProtocol:!0,logger:e.logger}),await k(["-C",r,"checkout","--quiet","--detach",e.resolvedCommit],{timeoutMs:e.timeoutMs,allowFileProtocol:!0,logger:e.logger}),{usedCache:a}},at=async e=>{Q(e.sourceId,"sourceId");const r=await ie(g.join(He(),`docs-cache-${e.sourceId}-`));try{const{usedCache:t}=await st(e,r);return{repoDir:r,cleanup:async()=>{await I(r)},fromCache:t}}catch(t){throw await I(r),t}},Y=e=>K(e),nt=e=>{let r="",t=!1;for(const i of e){if(t){r+=i,t=!1;continue}if(i==="\\"){r+=i,t=!0;continue}if(i==="("||i===")"){r+=`\\${i}`;continue}r+=i}return r},ge=e=>e.map(r=>{let t=r;if(r.includes("%"))try{t=decodeURIComponent(r)}catch{t=r}return/(^|[^\\])[@!+?*]\(/.test(t)?t:nt(t)}),re=Number(process.env.DOCS_CACHE_STREAM_THRESHOLD_MB??"2"),ct=Number.isFinite(re)&&re>0?Math.floor(re*1024*1024):1024*1024,lt=(e,r)=>{const t=g.resolve(e);if(!g.resolve(r).startsWith(t+g.sep))throw new Error(`Path traversal detected: ${r}`)},we=async e=>{try{return await q(e,le.O_RDONLY|le.O_NOFOLLOW)}catch(r){const t=B(r);if(t==="ELOOP")return null;if(t==="EINVAL"||t==="ENOSYS"||t==="ENOTSUP")return(await 
Pe(e)).isSymbolicLink()?null:await q(e,"r");throw r}},ut=(e,r)=>{if(!r||e.length===0)return null;let t="";for(;;){let i=null;for(const n of e){const s=(t?n.normalized.slice(t.length):n.normalized).split("/");if(s.length<2)return t||null;const a=s[0];if(!i){i=a;continue}if(i!==a)return t||null}if(!i)return t||null;const o=`${t}${i}/`;if(o===t)return t||null;t=o}},ft=e=>({...e,exclude:e.exclude??[],ignoreHidden:e.ignoreHidden??!1,unwrapSingleRootDir:e.unwrapSingleRootDir??!1,json:e.json??!1}),mt=async(e,r=5e3)=>{const t=Date.now();for(;Date.now()-t<r;)try{const i=await q(e,"wx");return{release:async()=>{await i.close(),await j(e,{force:!0})}}}catch(i){if(B(i)!=="EEXIST")throw i;await new Promise(o=>setTimeout(o,100))}throw new Error(`Failed to acquire lock for ${e}.`)},dt=async e=>{const r=ft(e);Q(r.sourceId,"sourceId");const t=Oe(r.cacheDir,r.sourceId);await b(r.cacheDir,{recursive:!0});const i=await ie(g.join(r.cacheDir,`.tmp-${r.sourceId}-`));let o=null;const n=async()=>{const s=o;!s||s.closed||s.destroyed||await new Promise(a=>{const l=()=>{s.off("close",u),s.off("error",f),a()},u=()=>l(),f=()=>l();s.once("close",u),s.once("error",f);try{s.end()}catch{l()}})};try{const s=[".git/**",...r.ignoreHidden?[".*","**/.*","**/.*/**"]:[],...r.exclude],a=ge(r.include),l=await ue(a,{cwd:r.repoDir,ignore:s,dot:!0,onlyFiles:!0,followSymbolicLinks:!1});!r.json&&a.length>0&&l.length===0&&$.line(`${T.warn} No files matched include patterns for ${r.sourceId}: ${a.join(", ")}`);const u=l.map(c=>({relativePath:c,normalized:Y(c)})).sort((c,D)=>c.normalized.localeCompare(D.normalized)),f=ut(u,r.unwrapSingleRootDir),m=new Set;for(const{normalized:c}of u){const D=f?c.slice(f.length):c;m.add(g.posix.dirname(D))}await Promise.all(Array.from(m,c=>b(g.join(i,c),{recursive:!0})));let y=0,d=0;const h=Math.max(1,Math.min(u.length,Math.max(8,Math.min(128,je.cpus().length*8)))),S=g.join(i,U),w=ce(S,{encoding:"utf8"});o=w;const x=N("sha256"),p=async c=>new Promise((D,O)=>{const 
C=P=>{w.off("drain",M),O(P)},M=()=>{w.off("error",C),D()};w.once("error",C),w.write(c)?(w.off("error",C),D()):w.once("drain",M)});for(let c=0;c<u.length;c+=h){const D=u.slice(c,c+h),O=await Promise.all(D.map(async C=>{const M=g.join(r.repoDir,C.relativePath),P=await we(M);if(!P)return null;try{const R=await P.stat();if(!R.isFile())return null;const G=f?C.normalized.slice(f.length):C.normalized,X=g.join(i,G);if(lt(i,X),R.size>=ct){const W=Ye(M,{fd:P.fd,autoClose:!1}),$e=ce(X);await Ge(W,$e)}else{const W=await P.readFile();await se(X,W)}return{path:f?C.normalized.slice(f.length):C.normalized,size:R.size}}finally{await P.close()}}));for(const C of O){if(!C)continue;if(r.maxFiles!==void 0&&d+1>r.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${r.maxFiles}).`);if(y+=C.size,y>r.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${r.maxBytes}).`);const M=`${JSON.stringify(C)}
|
|
3
|
-
`;x.update(M),await p(M),d+=1}}await new Promise((c,D)=>{w.end(()=>c()),w.once("error",D)});const E=x.digest("hex"),v=async c=>{try{return await _(c),!0}catch{return!1}};return await(async(c,D)=>{const O=await mt(`${D}.lock`);try{const C=await v(D),M=`${D}.bak-${Me(8).toString("hex")}`;C&&await J(D,M);try{await J(c,D)}catch(P){if(C)try{await J(M,D)}catch(R){const G=R instanceof Error?R.message:String(R);process.stderr.write(`Warning: Failed to restore backup: ${G}
|
|
4
|
-
`)}throw P}C&&await j(M,{recursive:!0,force:!0})}finally{await O.release()}})(i,t.sourceDir),{bytes:y,fileCount:d,manifestSha256:E}}catch(s){try{await n()}catch{}throw await j(i,{recursive:!0,force:!0}),s}},ht=async e=>{Q(e.sourceId,"sourceId");const r=ge(e.include),t=await ue(r,{cwd:e.repoDir,ignore:[".git/**",...e.ignoreHidden?[".*","**/.*","**/.*/**"]:[],...e.exclude??[]],dot:!0,onlyFiles:!0,followSymbolicLinks:!1});t.sort((s,a)=>Y(s).localeCompare(Y(a)));let i=0,o=0;const n=N("sha256");for(const s of t){const a=Y(s),l=g.join(e.repoDir,s),u=await we(l);if(u)try{const f=await u.stat();if(!f.isFile())continue;if(e.maxFiles!==void 0&&o+1>e.maxFiles)throw new Error(`Materialized content exceeds maxFiles (${e.maxFiles}).`);if(i+=f.size,i>e.maxBytes)throw new Error(`Materialized content exceeds maxBytes (${e.maxBytes}).`);const m=`${JSON.stringify({path:a,size:f.size})}
|
|
5
|
-
`;n.update(m),o+=1}finally{await u.close()}}return{bytes:i,fileCount:o,manifestSha256:n.digest("hex")}},pt=async(e,r)=>{await r.rm(e,{recursive:!0,force:!0})},gt=async(e,r)=>{if(!e.unwrapSingleRootDir)return e.sourceDir;const t=await r.readdir(e.sourceDir,{withFileTypes:!0}),i=new Set([U,V]),o=t.filter(a=>!(a.isFile()&&i.has(a.name))),n=o.filter(a=>a.isDirectory()),s=o.filter(a=>a.isFile());return n.length!==1||s.length>0?e.sourceDir:g.join(e.sourceDir,n[0].name)},oe=async e=>{const r=e.deps??{cp:Te,mkdir:b,readdir:Ee,rm:j,symlink:xe,stderr:process.stderr},t=await gt(e,r),i=g.dirname(e.targetDir);await r.mkdir(i,{recursive:!0}),await pt(e.targetDir,r);const o=process.platform==="win32"?"copy":"symlink";if((e.mode??o)==="copy"){await r.cp(t,e.targetDir,{recursive:!0});return}const n=process.platform==="win32"?"junction":"dir";try{await r.symlink(t,e.targetDir,n)}catch(s){const a=B(s);if(a&&new Set(["EPERM","EACCES","ENOTSUP","EINVAL"]).has(a)){if(e.explicitTargetMode){const l=s instanceof Error?s.message:String(s);r.stderr.write(`Warning: Failed to create symlink at ${e.targetDir}. Falling back to copy. ${l}
|
|
6
|
-
`)}await r.cp(t,e.targetDir,{recursive:!0});return}throw s}},wt=e=>{const r={dirs:new Map,files:[]};for(const t of e){const i=t.split("/").filter(Boolean);if(i.length===0)continue;let o=r;for(const s of i.slice(0,-1)){let a=o.dirs.get(s);a||(a={dirs:new Map,files:[]},o.dirs.set(s,a)),o=a}const n=i[i.length-1];o.files.push({name:n,path:t})}return r},ye=(e,r,t)=>{const i=" ".repeat(r),o=Array.from(e.dirs.keys()).sort(),n=[...e.files].sort((s,a)=>s.name.localeCompare(a.name));for(const s of o){t.push(`${i}- ${s}/`);const a=e.dirs.get(s);a&&ye(a,r+1,t)}for(const s of n)t.push(`${i}- [${s.name}](./${s.path})`)},yt=(e,r,t)=>{const i=[...e].sort((a,l)=>a.localeCompare(l)),o=new Map;for(const a of i){const l=a.lastIndexOf("/"),u=l===-1?"":a.substring(0,l),f=l===-1?a:a.substring(l+1),m=o.get(u);m?m.push(f):o.set(u,[f])}const n=Array.from(o.keys()).sort(),s=[];s.push(`[${t}]`);for(const a of n){const l=o.get(a);if(!l)continue;const u=l.join(",");a===""?s.push(`root:{${u}}`):s.push(`${a}:{${u}}`)}r.push(s.join("|"))},St=(e,r="compressed")=>{const t=[];if(r==="tree"){t.push(`# ${e.id} - Documentation`),t.push(""),t.push("## Files"),t.push("");const i=wt(e.files);ye(i,0,t)}else{const i=`${e.id} Docs Index`;yt(e.files,t,i)}return t.push(""),t.join(`
|
|
7
|
-
`)},Dt=async e=>{const r=g.join(e,".manifest.jsonl");try{const t=await H(r,"utf8"),i=[];for(const o of t.split(`
|
|
8
|
-
`))if(o.trim()){const n=JSON.parse(o);n.path&&i.push(n.path)}return i}catch{return[]}},vt=async e=>{const r=new Map(e.sources.map(i=>[i.id,i])),t=new Map((e.results??[]).map(i=>[i.id,i]));for(const[i,o]of Object.entries(e.lock.sources)){const n=r.get(i);n?.targetDir&&K(Z(e.configPath,n.targetDir));const s=g.join(e.cacheDir,i);try{await _(s)}catch{continue}const a=await Dt(s),l={id:i,repo:o.repo,ref:o.ref,resolvedCommit:o.resolvedCommit,fileCount:o.fileCount,cachePath:K(g.join(e.cacheDir,i)),files:a},u=n?.toc,f=u!==!1;let m="compressed";typeof u=="string"&&(m=u);const y=g.join(s,V);if(f){if(t.get(i)?.status==="up-to-date")try{await _(y);continue}catch{}let d=null;try{d=await H(y,"utf8")}catch{d=null}const h=St(l,m);d!==null&&d!==h&&$.line(`${T.warn} Overwriting existing ${V} for ${i}`),await se(y,h,"utf8")}else try{await j(y,{force:!0})}catch{}}},Ct=e=>{if(e<1024)return`${e} B`;const r=["KB","MB","GB","TB"];let t=e,i=-1;for(;t>=1024&&i<r.length-1;)t/=1024,i+=1;return`${t.toFixed(1)} ${r[i]}`},L=async e=>{try{return await _(e),!0}catch{return!1}},Se=async(e,r)=>{const t=g.join(e,r);return await L(t)?await L(g.join(t,U)):!1},$t=e=>{if(!e||e.length===0)return[];const r=e.map(t=>t.trim()).filter(t=>t.length>0);return Array.from(new Set(r)).sort()},Mt=["mode","include","exclude","maxBytes","maxFiles","ignoreHidden","unwrapSingleRootDir"],Pt=(e,r)=>e==="include"&&Array.isArray(r)||e==="exclude"&&Array.isArray(r)?$t(r):r,xt=e=>{const r=Mt.map(o=>[o,Pt(o,e[o])]);r.sort(([o],[n])=>o.localeCompare(n));const t=Object.fromEntries(r),i=N("sha256");return i.update(JSON.stringify(t)),i.digest("hex")},De=async(e,r={})=>{const{config:t,resolvedPath:i,sources:o}=await ke(e.configPath),n=t.defaults??Re.defaults,s=Fe(i,t.cacheDir??be,e.cacheDirOverride),a=Ue(i),l=await L(a);let u=null;l&&(u=await ze(a));const f=r.resolveRemoteCommit??Ze,m=e.sourceFilter?.length?o.filter(d=>e.sourceFilter?.includes(d.id)):o,y=await Promise.all(m.map(async d=>{const 
h=u?.sources?.[d.id],S=d.include??n.include,w=d.exclude??n.exclude,x=xt({...d,include:S,exclude:w});if(e.offline){const c=await Se(s,d.id);return{id:d.id,repo:h?.repo??d.repo,ref:h?.ref??d.ref??n.ref,resolvedCommit:h?.resolvedCommit??"offline",lockCommit:h?.resolvedCommit??null,lockRulesSha256:h?.rulesSha256,status:h&&c?"up-to-date":"missing",bytes:h?.bytes,fileCount:h?.fileCount,manifestSha256:h?.manifestSha256,rulesSha256:x}}const p=await f({repo:d.repo,ref:d.ref,allowHosts:n.allowHosts,timeoutMs:e.timeoutMs,logger:e.verbose&&!e.json?$.debug:void 0}),E=h?.resolvedCommit===p.resolvedCommit&&h?.rulesSha256===x,v=h?E?"up-to-date":"changed":"missing";return{id:d.id,repo:p.repo,ref:p.ref,resolvedCommit:p.resolvedCommit,lockCommit:h?.resolvedCommit??null,lockRulesSha256:h?.rulesSha256,status:v,bytes:h?.bytes,fileCount:h?.fileCount,manifestSha256:h?.manifestSha256,rulesSha256:x}}));return{config:t,configPath:i,cacheDir:s,lockPath:a,lockExists:l,lockData:u,results:y,sources:m,defaults:n}},Et=async()=>{const e=g.resolve(process.cwd(),"package.json");try{const r=await H(e,"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{}try{const r=await H(new URL("../package.json",import.meta.url),"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{}try{const r=await H(new URL("../../package.json",import.meta.url),"utf8"),t=JSON.parse(r.toString());return typeof t.version=="string"?t.version:"0.0.0"}catch{return"0.0.0"}},Tt=async(e,r)=>{const t=await Et(),i=new Date().toISOString(),o={...r?.sources??{}};for(const n of e.results){const s=o[n.id];o[n.id]={repo:n.repo,ref:n.ref,resolvedCommit:n.resolvedCommit,bytes:n.bytes??s?.bytes??0,fileCount:n.fileCount??s?.fileCount??0,manifestSha256:n.manifestSha256??s?.manifestSha256??n.resolvedCommit,rulesSha256:n.rulesSha256??s?.rulesSha256,updatedAt:i}}return{version:1,generatedAt:i,toolVersion:t,sources:o}},ve=async(e,r={})=>{const t=process.hrtime.bigint();let 
i=0;const o=await De(e,r);await b(o.cacheDir,{recursive:!0});const n=o.lockData,s=o.results.filter(l=>{const u=o.sources.find(f=>f.id===l.id);return l.status==="missing"&&(u?.required??!0)});if(e.failOnMiss&&s.length>0)throw new Error(`Missing required source(s): ${s.map(l=>l.id).join(", ")}.`);if(!e.lockOnly){const l=o.defaults,u=r.fetchSource??at,f=r.materializeSource??dt,m=new Map,y=async(S,w)=>{const x=S?.length?o.results.filter(p=>S.includes(p.id)):o.results;return(await Promise.all(x.map(async p=>{const E=o.sources.find(c=>c.id===p.id);if(!E)return null;if(w)return{result:p,source:E};let v=m.get(p.id);return v===void 0&&(v=await Se(o.cacheDir,p.id),m.set(p.id,v)),p.status!=="up-to-date"||!v?{result:p,source:E}:null}))).filter(Boolean)},d=async()=>{await Promise.all(o.sources.map(async S=>{if(!S.targetDir)return;const w=Z(o.configPath,S.targetDir);await L(w)||await oe({sourceDir:g.join(o.cacheDir,S.id),targetDir:w,mode:S.targetMode??l.targetMode,explicitTargetMode:S.targetMode!==void 0,unwrapSingleRootDir:S.unwrapSingleRootDir})}))},h=async S=>{const w=e.concurrency??4;let x=0;const p=async()=>{const E=S[x];if(!E||!E.source)return;x+=1;const{result:v,source:c}=E,D=o.lockData?.sources?.[c.id],O=await u({sourceId:c.id,repo:c.repo,ref:c.ref,resolvedCommit:v.resolvedCommit,cacheDir:o.cacheDir,include:c.include??l.include,timeoutMs:e.timeoutMs,logger:e.verbose&&!e.json?$.debug:void 0});e.json||$.step(O.fromCache?"Restoring from cache":"Downloading repo",c.id);try{const C=g.join(o.cacheDir,c.id,U);if(v.status!=="up-to-date"&&D?.manifestSha256&&D?.rulesSha256===v.rulesSha256&&await L(C)){const P=await 
ht({sourceId:c.id,repoDir:O.repoDir,cacheDir:o.cacheDir,include:c.include??l.include,exclude:c.exclude,maxBytes:c.maxBytes??l.maxBytes,maxFiles:c.maxFiles??l.maxFiles,ignoreHidden:c.ignoreHidden??l.ignoreHidden});if(P.manifestSha256===D.manifestSha256){v.bytes=P.bytes,v.fileCount=P.fileCount,v.manifestSha256=P.manifestSha256,v.status="up-to-date",e.json||$.item(T.success,c.id,"no content changes"),await p();return}}e.json||$.step("Materializing",c.id);const M=await f({sourceId:c.id,repoDir:O.repoDir,cacheDir:o.cacheDir,include:c.include??l.include,exclude:c.exclude,maxBytes:c.maxBytes??l.maxBytes,maxFiles:c.maxFiles??l.maxFiles,ignoreHidden:c.ignoreHidden??l.ignoreHidden,unwrapSingleRootDir:c.unwrapSingleRootDir,json:e.json});if(c.targetDir){const P=Z(o.configPath,c.targetDir);await oe({sourceDir:g.join(o.cacheDir,c.id),targetDir:P,mode:c.targetMode??l.targetMode,explicitTargetMode:c.targetMode!==void 0,unwrapSingleRootDir:c.unwrapSingleRootDir})}v.bytes=M.bytes,v.fileCount=M.fileCount,v.manifestSha256=M.manifestSha256,e.json||$.item(T.success,c.id,`synced ${M.fileCount} files`)}finally{await O.cleanup()}await p()};await Promise.all(Array.from({length:Math.min(w,S.length)},p))};if(e.offline)await d();else{const S=await y();await h(S),await d()}if(!e.offline){const S=(await ae({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(w=>!w.ok);if(S.length>0){const w=await y(S.map(p=>p.id),!0);w.length>0&&(await h(w),await d());const x=(await ae({configPath:o.configPath,cacheDirOverride:o.cacheDir})).results.filter(p=>!p.ok);if(x.length>0&&(i+=1,!e.json)){const p=x.map(E=>`${E.id} (${E.issues.join("; ")})`).join(", ");$.line(`${T.warn} Verify failed for ${x.length} source(s): ${p}`)}}}}const a=await Tt(o,n);if(await Be(o.lockPath,a),!e.json){const l=Number(process.hrtime.bigint()-t)/1e6,u=o.results.reduce((m,y)=>m+(y.bytes??0),0),f=o.results.reduce((m,y)=>m+(y.fileCount??0),0);$.line(`${T.info} Completed in ${l.toFixed(0)}ms \xB7 ${Ct(u)} \xB7 ${f} 
files${i?` \xB7 ${i} warning${i===1?"":"s"}`:""}`)}return await vt({cacheDir:o.cacheDir,configPath:o.configPath,lock:a,sources:o.sources,results:o.results}),o.lockExists=!0,o},Ce=e=>{const r={upToDate:e.results.filter(t=>t.status==="up-to-date").length,changed:e.results.filter(t=>t.status==="changed").length,missing:e.results.filter(t=>t.status==="missing").length};if(e.results.length===0){$.line(`${T.info} No sources to sync.`);return}$.line(`${T.info} ${e.results.length} sources (${r.upToDate} up-to-date, ${r.changed} changed, ${r.missing} missing)`);for(const t of e.results){const i=$.hash(t.resolvedCommit),o=$.hash(t.lockCommit),n=!!t.lockRulesSha256&&!!t.rulesSha256&&t.lockRulesSha256!==t.rulesSha256;if(t.status==="up-to-date"){$.item(T.success,t.id,`${F.dim("up-to-date")} ${F.gray(i)}`);continue}if(t.status==="changed"){if(t.lockCommit===t.resolvedCommit&&n){$.item(T.warn,t.id,`${F.dim("rules changed")} ${F.gray(i)}`);continue}$.item(T.warn,t.id,`${F.dim("changed")} ${F.gray(o)} ${F.dim("->")} ${F.gray(i)}`);continue}$.item(T.warn,t.id,`${F.dim("missing")} ${F.gray(i)}`)}},Ot={__proto__:null,getSyncPlan:De,printSyncPlan:Ce,runSync:ve};export{oe as a,Ce as b,ve as c,fe as e,me as p,A as r,Ot as s};
|
|
9
|
-
//# sourceMappingURL=sync.mjs.map
|
package/dist/chunks/verify.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{stat as N,access as z}from"node:fs/promises";import h from"node:path";import{c as $,r as v,u as l,b as m}from"../shared/docs-cache.CQiaFDb_.mjs";import{l as M,b as j}from"../shared/docs-cache.CaOcl4OS.mjs";import{createReadStream as k}from"node:fs";import C from"node:readline";const T=t=>typeof t=="object"&&t!==null&&"code"in t&&(typeof t.code=="string"||typeof t.code=="number"||t.code===void 0),y=t=>T(t)&&typeof t.code=="string"?t.code:void 0,_=t=>{if(!t||typeof t!="object")throw new Error("Manifest entry must be an object.");const e=t;if(typeof e.path!="string"||e.path.length===0)throw new Error("Manifest entry path must be a non-empty string.");if(typeof e.size!="number"||Number.isNaN(e.size))throw new Error("Manifest entry size must be a number.");if(e.size<0)throw new Error("Manifest entry size must be zero or greater.");return{path:e.path,size:e.size}},w=".manifest.jsonl",I=async function*(t){const e=h.join(t,w),a=k(e,{encoding:"utf8"}),s=C.createInterface({input:a,crlfDelay:1/0});try{for await(const u of s){const f=u.trim();f&&(yield _(JSON.parse(f)))}}finally{s.close(),a.destroy()}},O=async t=>{try{return await z(t),!0}catch{return!1}},E=async t=>{const{config:e,resolvedPath:a,sources:s}=await M(t.configPath),u=$(a,e.cacheDir??j,t.cacheDirOverride),f=async(i,n)=>{if(!await O(i))return{ok:!1,issues:[n==="source"?"missing source directory":"missing target directory"]};try{let r=0,o=0;for await(const g of I(i)){const D=h.join(i,g.path);try{(await N(D)).size!==g.size&&(o+=1)}catch(p){const d=y(p);if(d==="ENOENT"||d==="ENOTDIR"){r+=1;continue}throw p}}const c=[];return r>0&&c.push(n==="source"?`missing files: ${r}`:`target missing files: ${r}`),o>0&&c.push(n==="source"?`size mismatch: ${o}`:`target size mismatch: ${o}`),{ok:c.length===0,issues:c}}catch(r){const o=y(r);if(o==="ENOENT"||o==="ENOTDIR")return{ok:!1,issues:[n==="source"?"missing manifest":"missing target manifest"]};throw r}},b=await Promise.all(s.map(async i=>{const 
n=h.join(u,i.id),r=[...(await f(n,"source")).issues];if(i.targetDir&&i.targetMode==="copy"){const o=v(a,i.targetDir),c=await f(o,"target");r.push(...c.issues)}return{id:i.id,ok:r.length===0,issues:r}}));return{cacheDir:u,results:b}},R=t=>{const e=t.results.filter(s=>s.ok).length,a=t.results.length-e;if(t.results.length===0){l.line(`${m.warn} No sources to verify.`);return}l.line(`${m.info} Verified ${t.results.length} sources (${e} ok, ${a} failed)`);for(const s of t.results)s.ok?l.item(m.success,s.id):l.item(m.warn,s.id,s.issues.join(", "))},A={__proto__:null,printVerify:R,verifyCache:E};export{w as M,A as a,y as g,E as v};
|
|
2
|
-
//# sourceMappingURL=verify.mjs.map
|
|
@@ -1,5 +0,0 @@
|
|
|
1
|
-
import{readFile as o,writeFile as f,access as P}from"node:fs/promises";import a from"node:path";import{t as p}from"./docs-cache.CQiaFDb_.mjs";const g=async n=>{try{return await P(n),!0}catch{return!1}},l=n=>{const r=n.trim();if(!r||r.startsWith("#")||r.startsWith("!"))return"";let t=r.replace(/^\//,"");return t=t.replace(/^\.\//,""),t=t.replace(/\/+$/,""),p(t)},d=(n,r)=>{const t=a.isAbsolute(r)?a.resolve(r):a.resolve(n,r),e=a.relative(n,t);return e===".."||e.startsWith(`..${a.sep}`)||a.isAbsolute(e)?null:e.length===0?".":e},u=async(n,r)=>{const t=a.resolve(n,".gitignore"),e=d(n,r);if(!e)return{gitignorePath:t,entry:null,hasEntry:!1};const i=l(e);if(!i)return{gitignorePath:t,entry:null,hasEntry:!1};let s="";await g(t)&&(s=await o(t,"utf8"));const h=s.split(/\r?\n/),c=new Set(h.map(y=>l(y)).filter(Boolean));return{gitignorePath:t,entry:`${i}/`,hasEntry:c.has(i)}},w=async(n,r)=>{const t=await u(n,r);if(!t.entry)return{updated:!1,gitignorePath:t.gitignorePath,entry:null};if(t.hasEntry)return{updated:!1,gitignorePath:t.gitignorePath,entry:t.entry};let e="";await g(t.gitignorePath)&&(e=await o(t.gitignorePath,"utf8"));const i=e.length===0||e.endsWith(`
|
|
2
|
-
`)?"":`
|
|
3
|
-
`,s=`${e}${i}${t.entry}
|
|
4
|
-
`;return await f(t.gitignorePath,s,"utf8"),{updated:!0,gitignorePath:t.gitignorePath,entry:t.entry}};export{w as e,u as g};
|
|
5
|
-
//# sourceMappingURL=docs-cache.BOr9BnyP.mjs.map
|
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
const h=f=>{const e=f.trim(),p=e.match(/^git@([^:]+):(.+)$/);if(p){const t=p[1],r=p[2],[o,n]=r.split("#",2),i=o.replace(/^\//,""),c=i.split("/").filter(Boolean).pop()?.replace(/\.git$/i,""),s=`git@${t}:${i}`,a=n?.trim()||void 0;return{repoUrl:s,ref:a,inferredId:c}}const d=e.match(/^([^\s/:]+)\/([^\s#]+)(?:#(.+))?$/);if(d){const[,t,r,o]=d;return{repoUrl:`https://github.com/${`${t}/${r}`.replace(/\.git$/i,"")}.git`,ref:o?.trim()||void 0,inferredId:r.replace(/\.git$/i,"")}}const l=e.match(/^(github|gitlab):(.+)$/i);if(l){const t=l[1].toLowerCase(),r=l[2],[o,n]=r.split("#",2),i=o.replace(/^\//,""),c=i.split("/").filter(Boolean).pop()?.replace(/\.git$/i,""),s=t==="gitlab"?"gitlab.com":"github.com",a=i.endsWith(".git")?"":".git",g=`https://${s}/${i}${a}`,$=n?.trim()||void 0;return{repoUrl:g,ref:$,inferredId:c}}try{const t=new URL(e);if(t.protocol==="https:"||t.protocol==="ssh:"){const r=t.pathname.split("/").filter(Boolean).pop()?.replace(/\.git$/i,"");return{repoUrl:e,ref:void 0,inferredId:r}}}catch{}return{repoUrl:e,ref:void 0,inferredId:void 0}};export{h as r};
|
|
2
|
-
//# sourceMappingURL=docs-cache.BSvQNKuf.mjs.map
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
// docs-cache (minified dist): CLI argument parsing + terminal UI helpers.
//
// NOTE(review): this rendering is diff-viewer residue. The bare "|" / digit /
// "-" lines near the end appear inside the `I` object's template literals
// (presumably plain "\n" in the real source), and two error-message template
// literals are display-wrapped mid-string across line boundaries — TODO
// confirm exact string bytes against the published tarball. Code is kept
// byte-identical here; only this header comment is added.
//
// h  — exported as `E`: ExitCode pseudo-enum built IIFE-style with reverse
//      mapping (Success=0, FatalError=1, InvalidArgument=9).
// E  — list of known command names; $ — flags valid only for `add`;
// k  — global flags that consume a following value.
// w(argv)  — parses `add` arguments after the command token into entries
//      {repo, id?, targetDir?}: pairs `--id` with the NEXT source (or the
//      previous one when it immediately follows a source without an id),
//      `--source <repo>` and bare positionals push entries, `--target` /
//      `--target-dir` attach to the most recent entry; throws on dangling
//      or valueless flags.
// v(argv)  — collects bare positionals after the command, skipping the value
//      of any flag in `k` and all other `--` flags.
// W(cmd, argv) — throws when an add-only flag ($ members or their `=` forms)
//      is used with a command other than `add`.
// S(argv = process.argv) — exported as `p`: main parse entry. Builds a cac
//      program with global options and the command set, validates the
//      command name against `E`, coerces option values into a normalized
//      options object (Number() for --concurrency/--timeout-ms, with
//      positivity checks), runs W, then dispatches to w/v to build the
//      parsed command object; on any error prints the message and exits
//      with ExitCode.InvalidArgument (9) — so S returns undefined on the
//      error path.
// D  — "TOC.md" constant (exported).
// f  — exported as `t`: backslash→forward-slash path normalizer.
// M(configPath, targetDir) — exported as `r`: resolves targetDir against the
//      config file's directory; throws if it escapes that directory or
//      contains a ".git" path segment.
// x(configPath, dflt, override) — exported as `c`: cache-dir resolution
//      (override wins, else dflt relative to the config's directory).
// A(cacheDir, id) — exported as `g`: cache layout paths; the leading
//      u.join(e,"repos") result is computed but discarded (NOTE(review):
//      looks like dead code left by the minifier — verify upstream).
// O  — exported as `b`: status icons (error/success/info/warn) via picocolors.
// p/y — module-level silent/verbose flags; C (exported `s`) and j (exported
//      `a`) are their setters.
// I  — exported as `u`: terminal output helpers (path shortening relative to
//      cwd, 7-char hash abbreviation, padding, line/header/item/step/debug
//      writers that respect the silent/verbose flags).
import g from"node:process";import b from"cac";import u from"node:path";import d from"picocolors";var h=(e=>(e[e.Success=0]="Success",e[e.FatalError=1]="FatalError",e[e.InvalidArgument=9]="InvalidArgument",e))(h||{});const E=["add","remove","sync","status","clean","clean-cache","prune","verify","init"],$=new Set(["--source","--target","--target-dir","--id"]),k=new Set(["--config","--cache-dir","--concurrency","--timeout-ms"]),w=e=>{const t=e.findIndex(l=>!l.startsWith("-")),o=t===-1?[]:e.slice(t+1),r=[];let n=-1,c=null,i=!1;const m=new Set(["--config","--cache-dir","--concurrency","--timeout-ms"]);for(let l=0;l<o.length;l+=1){const s=o[l];if(s==="--id"||s.startsWith("--id=")){const a=s==="--id"?o[l+1]:s.slice(5);if(!a||a.startsWith("-"))throw new Error("--id expects a value.");if(s==="--id"&&(l+=1),i&&n!==-1&&r[n]?.id===void 0&&c===null){r[n].id=a,i=!1;continue}if(c!==null)throw new Error("--id must be followed by a source.");c=a,i=!1;continue}if(s==="--source"){const a=o[l+1];if(!a||a.startsWith("-"))throw new Error("--source expects a value.");r.push({repo:a,...c?{id:c}:{}}),n=r.length-1,c=null,i=!0,l+=1;continue}if(s==="--target"||s==="--target-dir"){const a=o[l+1];if(!a||a.startsWith("-"))throw new Error("--target expects a value.");if(n===-1)throw new Error("--target must follow a --source entry.");r[n].targetDir=a,l+=1,i=!1;continue}if(m.has(s)){l+=1,i=!1;continue}if(s.startsWith("--")){i=!1;continue}r.push({repo:s,...c?{id:c}:{}}),n=r.length-1,c=null,i=!0}if(c!==null)throw new Error("--id must be followed by a source.");return r},v=e=>{const t=e.findIndex(n=>!n.startsWith("-")),o=t===-1?[]:e.slice(t+1),r=[];for(let n=0;n<o.length;n+=1){const c=o[n];if(k.has(c)){n+=1;continue}c.startsWith("--")||r.push(c)}return r},W=(e,t)=>{if(e!=="add")for(const o of t){if($.has(o))throw new Error(`${o} is only valid for add.`);if(o.startsWith("--id=")||o.startsWith("--source=")||o.startsWith("--target=")||o.startsWith("--target-dir="))throw new Error(`${o.split("=")[0]} is 
only valid for add.`)}},S=(e=g.argv)=>{try{const t=b("docs-cache");t.option("--config <path>","Path to config file").option("--cache-dir <path>","Override cache directory").option("--offline","Disable network access").option("--fail-on-miss","Fail when required sources are missing").option("--lock-only","Update lock without materializing files").option("--prune","Prune cache on remove").option("--concurrency <n>","Concurrency limit").option("--json","Output JSON").option("--timeout-ms <n>","Network timeout in milliseconds").option("--silent","Suppress non-error output").option("--verbose","Enable verbose logging").help(),t.command("add [repo...]","Add sources to the config").option("--source <repo>","Source repo").option("--target <dir>","Target directory for source").option("--target-dir <path>","Target directory for source").option("--id <id>","Source id"),t.command("remove <id...>","Remove sources from the config and targets"),t.command("sync","Synchronize cache with config"),t.command("status","Show cache status"),t.command("clean","Remove project cache"),t.command("clean-cache","Clear global git cache"),t.command("prune","Remove unused data"),t.command("verify","Validate cache integrity"),t.command("init","Create a new config interactively");const o=t.parse(e,{run:!1}),r=e.slice(2),n=r.findIndex(a=>!a.startsWith("-")),c=n===-1?void 0:r[n];if(c&&!E.includes(c))throw new Error(`Unknown command '${c}'.`);const i={config:o.options.config,cacheDir:o.options.cacheDir,offline:!!o.options.offline,failOnMiss:!!o.options.failOnMiss,lockOnly:!!o.options.lockOnly,prune:!!o.options.prune,concurrency:o.options.concurrency?Number(o.options.concurrency):void 0,json:!!o.options.json,timeoutMs:o.options.timeoutMs?Number(o.options.timeoutMs):void 0,silent:!!o.options.silent,verbose:!!o.options.verbose};if(i.concurrency!==void 0&&i.concurrency<1)throw new Error("--concurrency must be a positive number.");if(i.timeoutMs!==void 0&&i.timeoutMs<1)throw new Error("--timeout-ms must be 
a positive number.");W(c??null,r);let m=null;const l=(()=>{switch(c??null){case"add":return m=w(r),m.map(a=>a.repo);case"remove":return v(r);default:return v(r)}})();let s;switch(c??null){case"add":s={command:"add",entries:m??w(r),options:i};break;case"remove":s={command:"remove",ids:l,options:i};break;case"sync":s={command:"sync",options:i};break;case"status":s={command:"status",options:i};break;case"clean":s={command:"clean",options:i};break;case"clean-cache":s={command:"clean-cache",options:i};break;case"prune":s={command:"prune",options:i};break;case"verify":s={command:"verify",options:i};break;case"init":s={command:"init",options:i};break;default:s={command:null,options:i};break}return{command:c??null,options:i,positionals:l,rawArgs:r,help:!!o.options.help,parsed:s}}catch(t){const o=t instanceof Error?t.message:String(t);console.error(o),g.exit(h.InvalidArgument)}},D="TOC.md",f=e=>e.replace(/\\/g,"/"),M=(e,t)=>{const o=u.dirname(u.resolve(e)),r=u.resolve(o,t),n=u.relative(o,r);if(n===".."||n.startsWith(`..${u.sep}`)||u.isAbsolute(n))throw new Error(`targetDir '${t}' escapes project directory. Must be within ${o}.`);if(f(n).split("/").filter(Boolean).includes(".git"))throw new Error("targetDir cannot be within .git directory.");return r},x=(e,t,o)=>{if(o)return u.resolve(o);const r=u.dirname(e);return u.resolve(r,t)},A=(e,t)=>{u.join(e,"repos");const o=u.join(e,t);return{cacheDir:e,sourceDir:o}},O={error:d.red("\u2716"),success:d.green("\u2714"),info:d.blue("\u2139"),warn:d.yellow("\u26A0")};let p=!1,y=!1;const C=e=>{p=e},j=e=>{y=e},I={path:e=>{const t=u.relative(process.cwd(),e),o=t.length<e.length?t:e;return f(o)},hash:e=>e?e.slice(0,7):"-",pad:(e,t)=>e.padEnd(t),line:(e="")=>{p||process.stdout.write(`${e}
|
|
2
|
-
`)},header:(e,t)=>{p||process.stdout.write(`${d.blue("\u2139")} ${e.padEnd(10)} ${t}
|
|
3
|
-
`)},item:(e,t,o)=>{if(p)return;const r=d.bold(t),n=o?d.gray(o):"";process.stdout.write(` ${e} ${r} ${n}
|
|
4
|
-
`)},step:(e,t,o)=>{if(p)return;const r=d.cyan("\u2192");process.stdout.write(` ${r} ${e} ${d.bold(t)}${o?` ${d.dim(o)}`:""}
|
|
5
|
-
`)},debug:e=>{p||!y||process.stdout.write(`${d.dim("\u2022")} ${d.dim(e)}
|
|
6
|
-
`)}};export{D,h as E,j as a,O as b,x as c,A as g,S as p,M as r,C as s,f as t,I as u};
|
|
7
|
-
//# sourceMappingURL=docs-cache.CQiaFDb_.mjs.map
|