docs-cache 0.4.3 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.mjs +13 -13
- package/dist/esm/api.d.ts +14 -0
- package/dist/esm/api.mjs +14 -0
- package/dist/esm/cache/cache-layout.d.ts +1 -0
- package/dist/esm/cache/cache-layout.mjs +12 -0
- package/dist/esm/cache/lock.d.ts +21 -0
- package/dist/esm/cache/lock.mjs +91 -0
- package/dist/esm/cache/manifest.d.ts +11 -0
- package/dist/esm/cache/manifest.mjs +68 -0
- package/dist/esm/cache/materialize.d.ts +26 -0
- package/dist/esm/cache/materialize.mjs +442 -0
- package/dist/esm/cache/targets.d.ts +19 -0
- package/dist/esm/cache/targets.mjs +67 -0
- package/dist/esm/cache/toc.d.ts +12 -0
- package/dist/esm/cache/toc.mjs +167 -0
- package/dist/esm/cli/exit-code.d.ts +11 -0
- package/dist/esm/cli/exit-code.mjs +5 -0
- package/dist/esm/cli/index.d.ts +5 -0
- package/dist/esm/cli/index.mjs +345 -0
- package/dist/esm/cli/live-output.d.ts +12 -0
- package/dist/esm/cli/live-output.mjs +30 -0
- package/dist/esm/cli/parse-args.d.ts +13 -0
- package/dist/esm/cli/parse-args.mjs +295 -0
- package/dist/esm/cli/run.d.ts +1 -0
- package/dist/esm/cli/run.mjs +2 -0
- package/dist/esm/cli/task-reporter.d.ts +32 -0
- package/dist/esm/cli/task-reporter.mjs +122 -0
- package/dist/esm/cli/types.d.ts +51 -0
- package/dist/esm/cli/types.mjs +0 -0
- package/dist/esm/cli/ui.d.ts +21 -0
- package/dist/esm/cli/ui.mjs +64 -0
- package/dist/esm/commands/add.d.ts +20 -0
- package/dist/esm/commands/add.mjs +81 -0
- package/dist/esm/commands/clean-git-cache.d.ts +10 -0
- package/dist/esm/commands/clean-git-cache.mjs +48 -0
- package/dist/esm/commands/clean.d.ts +10 -0
- package/dist/esm/commands/clean.mjs +27 -0
- package/dist/esm/commands/init.d.ts +19 -0
- package/dist/esm/commands/init.mjs +179 -0
- package/dist/esm/commands/prune.d.ts +11 -0
- package/dist/esm/commands/prune.mjs +52 -0
- package/dist/esm/commands/remove.d.ts +12 -0
- package/dist/esm/commands/remove.mjs +87 -0
- package/dist/esm/commands/status.d.ts +16 -0
- package/dist/esm/commands/status.mjs +78 -0
- package/dist/esm/commands/sync.d.ts +33 -0
- package/dist/esm/commands/sync.mjs +730 -0
- package/dist/esm/commands/verify.d.ts +11 -0
- package/dist/esm/commands/verify.mjs +120 -0
- package/dist/esm/config/index.d.ts +15 -0
- package/dist/esm/config/index.mjs +196 -0
- package/dist/esm/config/io.d.ts +30 -0
- package/dist/esm/config/io.mjs +112 -0
- package/dist/esm/config/schema.d.ts +171 -0
- package/dist/esm/config/schema.mjs +69 -0
- package/dist/esm/errors.d.ts +3 -0
- package/dist/esm/errors.mjs +2 -0
- package/dist/esm/git/cache-dir.d.ts +16 -0
- package/dist/esm/git/cache-dir.mjs +23 -0
- package/dist/esm/git/fetch-source.d.ts +19 -0
- package/dist/esm/git/fetch-source.mjs +477 -0
- package/dist/esm/git/redact.d.ts +1 -0
- package/dist/esm/git/redact.mjs +4 -0
- package/dist/esm/git/resolve-remote.d.ts +15 -0
- package/dist/esm/git/resolve-remote.mjs +87 -0
- package/dist/esm/git/resolve-repo.d.ts +5 -0
- package/dist/esm/git/resolve-repo.mjs +52 -0
- package/dist/esm/gitignore.d.ts +18 -0
- package/dist/esm/gitignore.mjs +80 -0
- package/dist/esm/paths.d.ts +8 -0
- package/dist/esm/paths.mjs +34 -0
- package/dist/esm/source-id.d.ts +1 -0
- package/dist/esm/source-id.mjs +29 -0
- package/dist/esm/types/sync.d.ts +25 -0
- package/dist/esm/types/sync.mjs +0 -0
- package/package.json +51 -7
- package/dist/chunks/add.mjs +0 -3
- package/dist/chunks/clean-git-cache.mjs +0 -2
- package/dist/chunks/clean.mjs +0 -2
- package/dist/chunks/init.mjs +0 -3
- package/dist/chunks/prune.mjs +0 -2
- package/dist/chunks/remove.mjs +0 -3
- package/dist/chunks/status.mjs +0 -2
- package/dist/chunks/sync.mjs +0 -9
- package/dist/chunks/verify.mjs +0 -2
- package/dist/shared/docs-cache.BOr9BnyP.mjs +0 -5
- package/dist/shared/docs-cache.BSvQNKuf.mjs +0 -2
- package/dist/shared/docs-cache.CQiaFDb_.mjs +0 -7
- package/dist/shared/docs-cache.CaOcl4OS.mjs +0 -3
- package/dist/shared/docs-cache.kK1DPQIQ.mjs +0 -2
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import * as z from "zod";
|
|
2
|
+
import { assertSafeSourceId } from "#core/source-id";
|
|
3
|
+
export const TargetModeSchema = z.enum(["symlink", "copy"]);
|
|
4
|
+
export const CacheModeSchema = z.enum(["materialize"]);
|
|
5
|
+
export const TocFormatSchema = z.enum(["tree", "compressed"]);
|
|
6
|
+
export const IntegritySchema = z.object({
|
|
7
|
+
type: z.enum(["commit", "manifest"]),
|
|
8
|
+
value: z.string().nullable()
|
|
9
|
+
}).strict();
|
|
10
|
+
const CommonOptionsSchema = z.object({
|
|
11
|
+
ref: z.string().min(1),
|
|
12
|
+
mode: CacheModeSchema,
|
|
13
|
+
include: z.array(z.string().min(1)).min(1),
|
|
14
|
+
exclude: z.array(z.string().min(1)).optional(),
|
|
15
|
+
targetMode: TargetModeSchema.optional(),
|
|
16
|
+
required: z.boolean(),
|
|
17
|
+
maxBytes: z.number().min(1),
|
|
18
|
+
maxFiles: z.number().min(1).optional(),
|
|
19
|
+
ignoreHidden: z.boolean(),
|
|
20
|
+
toc: z.union([z.boolean(), TocFormatSchema]).optional(),
|
|
21
|
+
unwrapSingleRootDir: z.boolean().optional()
|
|
22
|
+
});
|
|
23
|
+
export const DefaultsSchema = CommonOptionsSchema.extend({
|
|
24
|
+
allowHosts: z.array(z.string().min(1)).min(1)
|
|
25
|
+
}).strict();
|
|
26
|
+
export const SourceSchema = CommonOptionsSchema.partial().extend({
|
|
27
|
+
id: z.string().min(1).superRefine((value, ctx) => {
|
|
28
|
+
try {
|
|
29
|
+
assertSafeSourceId(value, "id");
|
|
30
|
+
} catch (error) {
|
|
31
|
+
ctx.addIssue({
|
|
32
|
+
code: z.ZodIssueCode.custom,
|
|
33
|
+
message: error instanceof Error ? error.message : "Invalid source id."
|
|
34
|
+
});
|
|
35
|
+
}
|
|
36
|
+
}),
|
|
37
|
+
repo: z.string().min(1),
|
|
38
|
+
targetDir: z.string().min(1).optional(),
|
|
39
|
+
integrity: IntegritySchema.optional()
|
|
40
|
+
}).extend({
|
|
41
|
+
include: z.array(z.string().min(1)).min(1, { message: "include must be a non-empty array" }).optional()
|
|
42
|
+
}).strict();
|
|
43
|
+
export const ResolvedSourceSchema = SourceSchema.extend(
|
|
44
|
+
CommonOptionsSchema.shape
|
|
45
|
+
).strict();
|
|
46
|
+
export const ConfigSchema = z.object({
|
|
47
|
+
$schema: z.string().min(1).optional(),
|
|
48
|
+
cacheDir: z.string().min(1).optional(),
|
|
49
|
+
targetMode: TargetModeSchema.optional(),
|
|
50
|
+
defaults: DefaultsSchema.partial().optional(),
|
|
51
|
+
sources: z.array(SourceSchema)
|
|
52
|
+
}).strict().superRefine((value, ctx) => {
|
|
53
|
+
const seen = /* @__PURE__ */ new Set();
|
|
54
|
+
const duplicates = /* @__PURE__ */ new Set();
|
|
55
|
+
value.sources.forEach((source) => {
|
|
56
|
+
if (seen.has(source.id)) {
|
|
57
|
+
duplicates.add(source.id);
|
|
58
|
+
} else {
|
|
59
|
+
seen.add(source.id);
|
|
60
|
+
}
|
|
61
|
+
});
|
|
62
|
+
if (duplicates.size > 0) {
|
|
63
|
+
ctx.addIssue({
|
|
64
|
+
code: z.ZodIssueCode.custom,
|
|
65
|
+
path: ["sources"],
|
|
66
|
+
message: `Duplicate source IDs found: ${Array.from(duplicates).join(", ")}.`
|
|
67
|
+
});
|
|
68
|
+
}
|
|
69
|
+
});
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
/**
 * Duck-type check for Node.js errno-style errors: a non-null object that
 * carries a `code` property whose value is a string, a number, or undefined.
 */
export const isErrnoException = (error) => {
  if (typeof error !== "object" || error === null) {
    return false;
  }
  if (!("code" in error)) {
    return false;
  }
  const { code } = error;
  return typeof code === "string" || typeof code === "number" || code === undefined;
};
/**
 * Extract the string error code (e.g. "ENOENT") from an errno-style error;
 * returns undefined for non-errno values and for numeric codes.
 */
export const getErrnoCode = (error) => {
  if (!isErrnoException(error) || typeof error.code !== "string") {
    return undefined;
  }
  return error.code;
};
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
 * Get platform-specific cache directory
 * - macOS: ~/Library/Caches
 * - Windows: %LOCALAPPDATA% or ~/AppData/Local
 * - Linux: $XDG_CACHE_HOME or ~/.cache
 */
export declare const getCacheBaseDir: () => string;
/**
 * Resolve the git cache directory
 * Can be overridden via DOCS_CACHE_GIT_DIR environment variable
 */
export declare const resolveGitCacheDir: () => string;
/**
 * Check if a file or directory exists
 * @returns true when the path is accessible; the promise never rejects.
 */
export declare const exists: (filePath: string) => Promise<boolean>;
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { access } from "node:fs/promises";
|
|
2
|
+
import { homedir } from "node:os";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
/**
 * Platform-specific user cache root:
 * - macOS:   ~/Library/Caches
 * - Windows: %LOCALAPPDATA%, falling back to ~/AppData/Local
 * - other:   $XDG_CACHE_HOME, falling back to ~/.cache
 */
export const getCacheBaseDir = () => {
  const home = homedir();
  const platform = process.platform;
  if (platform === "darwin") {
    return path.join(home, "Library", "Caches");
  }
  if (platform === "win32") {
    return process.env.LOCALAPPDATA || path.join(home, "AppData", "Local");
  }
  return process.env.XDG_CACHE_HOME || path.join(home, ".cache");
};
/**
 * Directory holding persistent git clones. A truthy DOCS_CACHE_GIT_DIR
 * environment variable overrides the default under the platform cache root.
 */
export const resolveGitCacheDir = () => {
  const override = process.env.DOCS_CACHE_GIT_DIR;
  if (override) {
    return override;
  }
  return path.join(getCacheBaseDir(), "docs-cache-git");
};
|
|
16
|
+
/**
 * Resolve to true when filePath is accessible on disk; any access error
 * (missing path, permissions) maps to false — the promise never rejects.
 */
export const exists = async (filePath) => {
  return access(filePath).then(
    () => true,
    () => false
  );
};
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/** Inputs for fetching a repository checkout into a temporary directory. */
type FetchParams = {
  /** Safe identifier for the source; embedded in the temp-dir name. */
  sourceId: string;
  /** Repository URL (or local path) to fetch from. */
  repo: string;
  /** Branch, tag, "HEAD", or commit-ish the source is pinned to. */
  ref: string;
  /** Fully resolved commit SHA to check out. */
  resolvedCommit: string;
  /** NOTE(review): not read by the visible fetch-source implementation
   * (resolveGitCacheDir is used instead) — confirm whether still needed. */
  cacheDir: string;
  /** Optional include globs; simple path prefixes enable sparse checkout. */
  include?: string[];
  /** Per-git-command timeout in milliseconds. */
  timeoutMs?: number;
  /** Receives every git command line and output line. */
  logger?: (message: string) => void;
  /** Receives throttled transfer-progress lines. */
  progressLogger?: (message: string) => void;
  /** When true, no network access is attempted; cache misses throw. */
  offline?: boolean;
};
/** Result of a fetch: a checkout directory plus its cleanup hook. */
type FetchResult = {
  /** Directory containing the checked-out repository. */
  repoDir: string;
  /** Releases the checkout (worktree registration and temp dir). */
  cleanup: () => Promise<void>;
  /** True when the persistent git cache satisfied the request. */
  fromCache: boolean;
};
export declare const fetchSource: (params: FetchParams) => Promise<FetchResult>;
export {};
|
|
@@ -0,0 +1,477 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import { mkdir, mkdtemp, readFile, rm } from "node:fs/promises";
|
|
3
|
+
import { tmpdir } from "node:os";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import { pathToFileURL } from "node:url";
|
|
6
|
+
import { execa } from "execa";
|
|
7
|
+
import { getErrnoCode } from "#core/errors";
|
|
8
|
+
import { assertSafeSourceId } from "#core/source-id";
|
|
9
|
+
import { exists, resolveGitCacheDir } from "#git/cache-dir";
|
|
10
|
+
// Default timeout for any single git subprocess (120 seconds).
const DEFAULT_TIMEOUT_MS = 12e4;
// History depth used for shallow clones and fetches.
const DEFAULT_GIT_DEPTH = 1;
// removeDir retries transient rm() failures up to this many extra times...
const DEFAULT_RM_RETRIES = 3;
// ...with a linearly growing backoff starting at this many milliseconds.
const DEFAULT_RM_BACKOFF_MS = 100;
|
|
14
|
+
// Environment for spawned git processes: forwards the caller's env, then
// re-asserts the variables git needs (PATH lookup, home/temp dirs, SSH agent,
// proxies) and disables interactive/config-injection channels so a fetch can
// never hang on a credential prompt or pick up system/global git config.
const buildGitEnv = () => {
  // Windows may expose the search path as "Path"; mirror it to both spellings.
  const pathValue = process.env.PATH ?? process.env.Path;
  // PATHEXT is needed for .exe/.cmd resolution on Windows; supply a default.
  const pathExtValue = process.env.PATHEXT ?? (process.platform === "win32" ? ".COM;.EXE;.BAT;.CMD" : void 0);
  return {
    ...process.env,
    ...pathValue ? { PATH: pathValue, Path: pathValue } : {},
    ...pathExtValue ? { PATHEXT: pathExtValue } : {},
    HOME: process.env.HOME,
    USER: process.env.USER,
    USERPROFILE: process.env.USERPROFILE,
    TMPDIR: process.env.TMPDIR,
    TMP: process.env.TMP,
    TEMP: process.env.TEMP,
    SYSTEMROOT: process.env.SYSTEMROOT,
    WINDIR: process.env.WINDIR,
    SSH_AUTH_SOCK: process.env.SSH_AUTH_SOCK,
    SSH_AGENT_PID: process.env.SSH_AGENT_PID,
    HTTP_PROXY: process.env.HTTP_PROXY,
    HTTPS_PROXY: process.env.HTTPS_PROXY,
    NO_PROXY: process.env.NO_PROXY,
    // Never prompt for credentials on the terminal.
    GIT_TERMINAL_PROMPT: "0",
    // Ignore system-level and global git configuration.
    GIT_CONFIG_NOSYSTEM: "1",
    GIT_CONFIG_NOGLOBAL: "1",
    // On POSIX, point askpass at a failing binary so auth prompts fail fast.
    ...process.platform === "win32" ? {} : { GIT_ASKPASS: "/bin/false" }
  };
};
|
|
40
|
+
// Hardened per-invocation git config: disable hooks and submodule recursion,
// forbid the ext:: transport, and only allow file:// when explicitly requested.
const buildGitConfigs = (allowFileProtocol) => {
  const settings = [
    "core.hooksPath=/dev/null",
    "submodule.recurse=false",
    "protocol.ext.allow=never",
    `protocol.file.allow=${allowFileProtocol ? "always" : "never"}`
  ];
  return settings.flatMap((setting) => ["-c", setting]);
};
// Prepend the hardened config flags to args; append --progress on demand.
const buildCommandArgs = (args, allowFileProtocol, forceProgress) => {
  const assembled = buildGitConfigs(allowFileProtocol).concat(args);
  return forceProgress ? [...assembled, "--progress"] : assembled;
};
|
|
58
|
+
// git stderr phase labels that count as transfer-progress output.
const PROGRESS_MARKERS = [
  "Receiving objects",
  "Resolving deltas",
  "Compressing objects",
  "Updating files",
  "Counting objects"
];
const isProgressLine = (line) => PROGRESS_MARKERS.some((marker) => line.includes(marker));
// Throttle progress emission to one line per throttleMs, but always let
// terminal lines ("100%"/"done") through immediately.
const shouldEmitProgress = (line, now, lastProgressAt, throttleMs) => {
  if (now - lastProgressAt >= throttleMs) {
    return true;
  }
  return line.includes("100%") || line.includes("done");
};
|
|
60
|
+
// Wire a git subprocess's stdout/stderr into the optional loggers. Every
// non-empty line goes to `logger` (prefixed with the command label); lines
// recognized by isProgressLine are additionally sent to `progressLogger`,
// throttled via shouldEmitProgress (default window 120 ms).
const attachLoggers = (subprocess, commandLabel, options) => {
  if (!options?.logger && !options?.progressLogger) {
    return;
  }
  // Shared across both streams so progress is throttled globally.
  let lastProgressAt = 0;
  const forward = (stream) => {
    if (!stream) return;
    stream.on("data", (chunk) => {
      const text = chunk instanceof Buffer ? chunk.toString("utf8") : String(chunk);
      // git emits progress with carriage returns; split on either terminator.
      for (const line of text.split(/\r?\n/)) {
        if (!line) continue;
        options.logger?.(`${commandLabel} | ${line}`);
        if (!options?.progressLogger) continue;
        if (!isProgressLine(line)) continue;
        const now = Date.now();
        const throttleMs = options.progressThrottleMs ?? 120;
        if (shouldEmitProgress(line, now, lastProgressAt, throttleMs)) {
          lastProgressAt = now;
          options.progressLogger(line);
        }
      }
    });
  };
  forward(subprocess.stdout);
  forward(subprocess.stderr);
};
|
|
86
|
+
// Run one git command with the hardened config flags and sanitized env,
// forwarding output to the optional loggers. Resolves when the subprocess
// exits successfully; rejects (execa error) on non-zero exit or timeout.
const git = async (args, options) => {
  const commandArgs = buildCommandArgs(
    args,
    options?.allowFileProtocol,
    options?.forceProgress
  );
  const commandLabel = `git ${commandArgs.join(" ")}`;
  options?.logger?.(commandLabel);
  const subprocess = execa("git", commandArgs, {
    cwd: options?.cwd,
    timeout: options?.timeoutMs ?? DEFAULT_TIMEOUT_MS,
    // 10 MiB output cap per stream.
    maxBuffer: 10 * 1024 * 1024,
    stdout: "pipe",
    stderr: "pipe",
    env: buildGitEnv()
  });
  attachLoggers(subprocess, commandLabel, options);
  await subprocess;
};
|
|
105
|
+
/**
 * Remove dirPath recursively, retrying with a linearly growing backoff on
 * transient errno codes (ENOTEMPTY/EBUSY/EPERM) such as Windows/antivirus
 * file locks. Performs at most `retries + 1` rm attempts, then rethrows.
 */
const removeDir = async (dirPath, retries = DEFAULT_RM_RETRIES) => {
  let attempt = 0;
  while (true) {
    try {
      await rm(dirPath, { recursive: true, force: true });
      return;
    } catch (error) {
      const code = getErrnoCode(error);
      const transient = code === "ENOTEMPTY" || code === "EBUSY" || code === "EPERM";
      if (!transient || attempt === retries) {
        throw error;
      }
      attempt += 1;
      await new Promise((resolve) => {
        setTimeout(resolve, DEFAULT_RM_BACKOFF_MS * attempt);
      });
    }
  }
};
|
|
124
|
+
// Stable 16-hex-char key for a repo URL, used as the on-disk cache folder name.
const hashRepoUrl = (repo) => createHash("sha256").update(repo).digest("hex").slice(0, 16);
|
|
127
|
+
// Absolute path of the persistent clone for `repo` inside the git cache root.
const getPersistentCachePath = (repo) => path.join(resolveGitCacheDir(), hashRepoUrl(repo));
|
|
131
|
+
// True when repoPath is (inside) a git repository, probed with
// `git rev-parse --git-dir`; any git failure counts as "not a repo".
const isValidGitRepo = async (repoPath) => {
  try {
    await git(["rev-parse", "--git-dir"], { cwd: repoPath });
    return true;
  } catch {
    return false;
  }
};
|
|
139
|
+
/**
 * Heuristic partial-clone detection: read .git/config and look for
 * partial-clone / promisor markers (case-insensitive). Any read failure
 * (missing file, bare repo layout) counts as "not partial".
 */
const isPartialClone = async (repoPath) => {
  try {
    const configText = await readFile(path.join(repoPath, ".git", "config"), "utf8");
    const needle = configText.toLowerCase();
    return ["partialclone", "promisor", "partialclonefilter"].some((marker) => needle.includes(marker));
  } catch {
    return false;
  }
};
|
|
149
|
+
// True when `commit` is already present locally in repoPath, probed with
// `git cat-file -e <commit>^{commit}`. Never throws — any git failure
// simply reports the commit as absent.
const hasCommitInRepo = async (repoPath, commit, options) => {
  try {
    await git(["-C", repoPath, "cat-file", "-e", `${commit}^{commit}`], {
      timeoutMs: options?.timeoutMs,
      allowFileProtocol: options?.allowFileProtocol,
      logger: options?.logger
    });
    return true;
  } catch {
    return false;
  }
};
|
|
161
|
+
// Ensure `commit` exists in repoPath: return immediately when it is already
// present, otherwise fetch it from origin. In offline mode (and when not
// operating on a local file:// cache) a missing commit is fatal.
// Consistency fix: reuse the sibling hasCommitInRepo probe instead of
// duplicating the identical cat-file try/catch inline.
const ensureCommitAvailable = async (repoPath, commit, options) => {
  const alreadyPresent = await hasCommitInRepo(repoPath, commit, {
    timeoutMs: options?.timeoutMs,
    allowFileProtocol: options?.allowFileProtocol,
    logger: options?.logger
  });
  if (alreadyPresent) {
    return;
  }
  if (options?.offline && !options?.allowFileProtocol) {
    throw new Error(`Commit ${commit} not found in cache (offline).`);
  }
  await git(["-C", repoPath, "fetch", "origin", commit], {
    timeoutMs: options?.timeoutMs,
    allowFileProtocol: options?.allowFileProtocol,
    logger: options?.logger
  });
};
|
|
180
|
+
/**
 * Sparse checkout is only attempted for simple path-prefix patterns:
 * a non-empty include list where no entry is empty or uses globstar ("**").
 */
const isSparseEligible = (include) => {
  if (!include?.length) {
    return false;
  }
  return include.every((pattern) => Boolean(pattern) && !pattern.includes("**"));
};
|
|
191
|
+
/**
 * Derive unique directory prefixes for git sparse-checkout from include
 * globs: backslashes are normalized to "/", everything from the first "*"
 * onward is dropped, trailing slashes are trimmed, and empty results are
 * discarded. Order follows first occurrence.
 */
const extractSparsePaths = (include) => {
  if (!include) {
    return [];
  }
  const bases = new Set();
  for (const pattern of include) {
    const normalized = pattern.replace(/\\/g, "/");
    const starIndex = normalized.indexOf("*");
    const prefix = starIndex >= 0 ? normalized.slice(0, starIndex) : normalized;
    const trimmed = prefix.replace(/\/+$/, "");
    if (trimmed) {
      bases.add(trimmed);
    }
  }
  return [...bases];
};
|
|
203
|
+
// Fresh shallow clone of params.repo into outDir, finishing with a detached
// checkout of params.resolvedCommit. Sparse mode (--sparse + sparse-checkout
// set) is used when the include patterns are simple prefixes; branch/tag
// refs are cloned --single-branch.
const cloneRepo = async (params, outDir) => {
  if (params.offline) {
    throw new Error(`Cannot clone ${params.repo} while offline.`);
  }
  // A ref that looks like a (possibly abbreviated) commit SHA cannot be
  // passed to --branch; the commit is fetched/checked out after the clone.
  const isCommitRef = /^[0-9a-f]{7,40}$/i.test(params.ref);
  const useSparse = isSparseEligible(params.include);
  const buildCloneArgs = () => {
    const cloneArgs2 = [
      "clone",
      "--no-checkout",
      "--depth",
      String(DEFAULT_GIT_DEPTH),
      "--recurse-submodules=no",
      "--no-tags"
    ];
    return cloneArgs2;
  };
  const cloneArgs = buildCloneArgs();
  if (useSparse) {
    cloneArgs.push("--sparse");
  }
  if (!isCommitRef) {
    cloneArgs.push("--single-branch");
    // "HEAD" means the remote default branch; no --branch flag needed.
    if (params.ref !== "HEAD") {
      cloneArgs.push("--branch", params.ref);
    }
  }
  cloneArgs.push(params.repo, outDir);
  await git(cloneArgs, {
    timeoutMs: params.timeoutMs,
    logger: params.logger,
    progressLogger: params.progressLogger,
    forceProgress: Boolean(params.progressLogger)
  });
  // The shallow clone may not contain resolvedCommit (e.g. the branch moved
  // since resolution); fetch it explicitly when missing.
  await ensureCommitAvailable(outDir, params.resolvedCommit, {
    timeoutMs: params.timeoutMs,
    logger: params.logger,
    offline: params.offline
  });
  if (useSparse) {
    const sparsePaths = extractSparsePaths(params.include);
    if (sparsePaths.length > 0) {
      await git(["-C", outDir, "sparse-checkout", "set", ...sparsePaths], {
        timeoutMs: params.timeoutMs,
        logger: params.logger
      });
    }
  }
  await git(
    ["-C", outDir, "checkout", "--quiet", "--detach", params.resolvedCommit],
    {
      timeoutMs: params.timeoutMs,
      logger: params.logger
    }
  );
};
|
|
259
|
+
// Expose params.resolvedCommit from the persistent cache repo as a detached
// worktree at outDir (no network, no copy of the object store). Returns
// usedCache=true plus a cleanup that removes the worktree registration;
// cleanup failures are deliberately swallowed (best-effort only).
const addWorktreeFromCache = async (params, cachePath, outDir) => {
  await git(
    [
      "-C",
      cachePath,
      "worktree",
      "add",
      "--detach",
      outDir,
      params.resolvedCommit
    ],
    {
      timeoutMs: params.timeoutMs,
      logger: params.logger,
      allowFileProtocol: true
    }
  );
  await git(
    ["-C", outDir, "checkout", "--quiet", "--detach", params.resolvedCommit],
    {
      timeoutMs: params.timeoutMs,
      logger: params.logger,
      allowFileProtocol: true
    }
  );
  // Narrow the worktree to the include prefixes when sparse rules apply.
  const sparsePaths = isSparseEligible(params.include) ? extractSparsePaths(params.include) : [];
  if (sparsePaths.length > 0) {
    await git(["-C", outDir, "sparse-checkout", "set", ...sparsePaths], {
      timeoutMs: params.timeoutMs,
      logger: params.logger,
      allowFileProtocol: true
    });
  }
  return {
    usedCache: true,
    cleanup: async () => {
      try {
        await git(["-C", cachePath, "worktree", "remove", "--force", outDir], {
          timeoutMs: params.timeoutMs,
          logger: params.logger,
          allowFileProtocol: true
        });
      } catch {
        // Best-effort cleanup; errors here are intentionally ignored.
      }
    }
  };
};
|
|
306
|
+
/**
 * Shallow fetch argument list for the cache repo. Branch/tag refs fetch an
 * explicit refspec into refs/remotes/origin ("HEAD" is fetched as-is);
 * commit refs fetch with depth only — the commit itself is requested
 * separately by the caller.
 */
const buildFetchArgs = (ref, isCommitRef) => {
  const depth = ["--depth", String(DEFAULT_GIT_DEPTH)];
  if (isCommitRef) {
    return ["fetch", "origin", ...depth];
  }
  const refSpec = ref === "HEAD" ? "HEAD" : `${ref}:refs/remotes/origin/${ref}`;
  return ["fetch", "origin", refSpec, ...depth];
};
|
|
316
|
+
// Refresh the persistent cache repo from its origin, then verify that
// params.resolvedCommit is present (fetching it directly if the ref fetch
// did not bring it in). allowFileProtocol is enabled on these cache-repo
// operations — NOTE(review): presumably because the cache is later consumed
// via file:// clones; confirm intent.
const fetchCommitFromOrigin = async (params, cachePath, isCommitRef) => {
  const fetchArgs = buildFetchArgs(params.ref, isCommitRef);
  await git(["-C", cachePath, ...fetchArgs], {
    timeoutMs: params.timeoutMs,
    logger: params.logger,
    progressLogger: params.progressLogger,
    forceProgress: Boolean(params.progressLogger),
    allowFileProtocol: true
  });
  await ensureCommitAvailable(cachePath, params.resolvedCommit, {
    timeoutMs: params.timeoutMs,
    logger: params.logger,
    offline: params.offline
  });
};
|
|
331
|
+
// Decide how to use an existing, valid cache repo. Partial clones are
// rebuilt from scratch; otherwise the cache is used directly when the commit
// is present, refreshed from origin when it is not, and recreated wholesale
// if the refresh path fails. Offline mode turns every repair path into an
// error. Returns { usedCache, worktreeUsed } consumed by cloneOrUpdateRepo.
const handleValidCache = async (params, cachePath, isCommitRef) => {
  if (await isPartialClone(cachePath)) {
    if (params.offline) {
      throw new Error(`Cache for ${params.repo} is partial (offline).`);
    }
    // NOTE(review): partial caches are rebuilt rather than reused —
    // presumably because promisor objects may be missing locally; confirm.
    await removeDir(cachePath);
    await cloneRepo(params, cachePath);
    return { usedCache: false, worktreeUsed: false };
  }
  try {
    const commitExists = await hasCommitInRepo(
      cachePath,
      params.resolvedCommit,
      {
        timeoutMs: params.timeoutMs,
        logger: params.logger
      }
    );
    if (commitExists) {
      // Fast path: the caller can attach a worktree straight from the cache.
      return { usedCache: true, worktreeUsed: true };
    }
    if (params.offline) {
      throw new Error(
        `Commit ${params.resolvedCommit} not found in cache (offline).`
      );
    }
    await fetchCommitFromOrigin(params, cachePath, isCommitRef);
    return { usedCache: true, worktreeUsed: false };
  } catch (_error) {
    // Note: the offline throw above is caught here too and re-raised as the
    // generic "unavailable" error; online failures trigger a full rebuild.
    if (params.offline) {
      throw new Error(`Cache for ${params.repo} is unavailable (offline).`);
    }
    await removeDir(cachePath);
    await cloneRepo(params, cachePath);
    return { usedCache: false, worktreeUsed: false };
  }
};
|
|
368
|
+
// Cache absent or not a valid repo: clear any leftover directory and clone
// fresh from the remote (impossible offline).
// Returns { usedCache, worktreeUsed } consumed by cloneOrUpdateRepo.
const handleMissingCache = async (params, cachePath, cacheExists) => {
  if (cacheExists) {
    await removeDir(cachePath);
  }
  if (params.offline) {
    throw new Error(`Cache for ${params.repo} is missing (offline).`);
  }
  await cloneRepo(params, cachePath);
  return { usedCache: false, worktreeUsed: false };
};
|
|
378
|
+
// Produce a checkout of params.resolvedCommit at outDir via the persistent
// cache. Strategy, in order:
//   1. valid non-partial cache already holding the commit -> git worktree;
//   2. otherwise refresh or recreate the cache repo;
//   3. then clone outDir from the cache over file:// (local objects only).
// Returns { usedCache, cleanup } — cleanup is a no-op except on the
// worktree path (addWorktreeFromCache supplies its own).
const cloneOrUpdateRepo = async (params, outDir) => {
  const cachePath = getPersistentCachePath(params.repo);
  const cacheExists = await exists(cachePath);
  const cacheValid = cacheExists && await isValidGitRepo(cachePath);
  const isCommitRef = /^[0-9a-f]{7,40}$/i.test(params.ref);
  const useSparse = isSparseEligible(params.include);
  let usedCache = cacheValid;
  let worktreeUsed = false;
  const cacheRoot = resolveGitCacheDir();
  await mkdir(cacheRoot, { recursive: true });
  if (cacheValid) {
    const result = await handleValidCache(params, cachePath, isCommitRef);
    usedCache = result.usedCache;
    worktreeUsed = result.worktreeUsed;
  }
  if (!cacheValid) {
    const result = await handleMissingCache(params, cachePath, cacheExists);
    usedCache = result.usedCache;
    worktreeUsed = result.worktreeUsed;
  }
  // Fast path: commit already cached — expose it as a detached worktree.
  if (worktreeUsed && cacheValid) {
    return addWorktreeFromCache(params, cachePath, outDir);
  }
  await mkdir(outDir, { recursive: true });
  const localCloneArgs = [
    "clone",
    "--no-checkout",
    "--depth",
    String(DEFAULT_GIT_DEPTH),
    "--recurse-submodules=no",
    "--no-tags"
  ];
  // Mirror the cache's partial-clone filter so the local clone does not
  // attempt to copy blobs the cache never downloaded.
  if (await isPartialClone(cachePath)) {
    localCloneArgs.splice(2, 0, "--filter=blob:none");
  }
  if (useSparse) {
    localCloneArgs.push("--sparse");
  }
  if (!isCommitRef) {
    localCloneArgs.push("--single-branch");
    if (params.ref !== "HEAD") {
      localCloneArgs.push("--branch", params.ref);
    }
  }
  // Clone from the cache over file:// so this step never touches the network.
  const cacheUrl = pathToFileURL(cachePath).href;
  localCloneArgs.push(cacheUrl, outDir);
  await git(localCloneArgs, {
    timeoutMs: params.timeoutMs,
    allowFileProtocol: true,
    logger: params.logger,
    progressLogger: params.progressLogger,
    forceProgress: Boolean(params.progressLogger)
  });
  if (useSparse) {
    const sparsePaths = extractSparsePaths(params.include);
    if (sparsePaths.length > 0) {
      await git(["-C", outDir, "sparse-checkout", "set", ...sparsePaths], {
        timeoutMs: params.timeoutMs,
        allowFileProtocol: true,
        logger: params.logger
      });
    }
  }
  await ensureCommitAvailable(outDir, params.resolvedCommit, {
    timeoutMs: params.timeoutMs,
    allowFileProtocol: true,
    logger: params.logger,
    offline: params.offline
  });
  await git(
    ["-C", outDir, "checkout", "--quiet", "--detach", params.resolvedCommit],
    {
      timeoutMs: params.timeoutMs,
      allowFileProtocol: true,
      logger: params.logger
    }
  );
  return { usedCache, cleanup: async () => void 0 };
};
|
|
457
|
+
// Public entry point: materialize params.repo at params.resolvedCommit in a
// fresh temp directory, preferring the persistent git cache. The returned
// cleanup removes both the worktree registration (if one was used) and the
// temp directory; the caller is responsible for invoking it.
export const fetchSource = async (params) => {
  // Validate the id before it is embedded in a filesystem path (assumes
  // assertSafeSourceId rejects path-unsafe ids — see #core/source-id).
  assertSafeSourceId(params.sourceId, "sourceId");
  const tempRoot = await mkdtemp(
    path.join(tmpdir(), `docs-cache-${params.sourceId}-`)
  );
  const tempDir = path.join(tempRoot, "repo");
  try {
    const { usedCache, cleanup } = await cloneOrUpdateRepo(params, tempDir);
    return {
      repoDir: tempDir,
      cleanup: async () => {
        await cleanup();
        await removeDir(tempRoot);
      },
      fromCache: usedCache
    };
  } catch (error) {
    // Never leak temp directories on failure.
    await removeDir(tempRoot);
    throw error;
  }
};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** Returns a loggable form of a repo URL — presumably with embedded
 * credentials stripped; confirm against the redact.mjs implementation. */
export declare const redactRepoUrl: (repo: string) => string;
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/** Inputs for resolving a remote ref to a concrete commit SHA. */
type ResolveRemoteParams = {
  /** Repository URL to query. */
  repo: string;
  /** Branch, tag, or "HEAD" to resolve. */
  ref: string;
  /** Hostnames the repo URL must match. */
  allowHosts: string[];
  timeoutMs?: number;
  logger?: (message: string) => void;
};
/** Validates repo's host against allowHosts (void return — presumably
 * throws on violation; confirm in resolve-remote.mjs). */
export declare const enforceHostAllowlist: (repo: string, allowHosts: string[]) => void;
/** Parses `git ls-remote` stdout into a commit SHA, or null when absent. */
export declare const parseLsRemote: (stdout: string) => string | null;
/** Resolves ref against the remote, returning the input pair plus the
 * pinned commit SHA. */
export declare const resolveRemoteCommit: (params: ResolveRemoteParams) => Promise<{
  repo: string;
  ref: string;
  resolvedCommit: string;
}>;
export {};
|