docs-cache 0.4.3 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.mjs +13 -13
- package/dist/esm/api.d.ts +14 -0
- package/dist/esm/api.mjs +14 -0
- package/dist/esm/cache/cache-layout.d.ts +1 -0
- package/dist/esm/cache/cache-layout.mjs +12 -0
- package/dist/esm/cache/lock.d.ts +21 -0
- package/dist/esm/cache/lock.mjs +91 -0
- package/dist/esm/cache/manifest.d.ts +11 -0
- package/dist/esm/cache/manifest.mjs +68 -0
- package/dist/esm/cache/materialize.d.ts +26 -0
- package/dist/esm/cache/materialize.mjs +442 -0
- package/dist/esm/cache/targets.d.ts +19 -0
- package/dist/esm/cache/targets.mjs +66 -0
- package/dist/esm/cache/toc.d.ts +12 -0
- package/dist/esm/cache/toc.mjs +167 -0
- package/dist/esm/cli/exit-code.d.ts +11 -0
- package/dist/esm/cli/exit-code.mjs +5 -0
- package/dist/esm/cli/index.d.ts +5 -0
- package/dist/esm/cli/index.mjs +345 -0
- package/dist/esm/cli/live-output.d.ts +12 -0
- package/dist/esm/cli/live-output.mjs +30 -0
- package/dist/esm/cli/parse-args.d.ts +13 -0
- package/dist/esm/cli/parse-args.mjs +295 -0
- package/dist/esm/cli/run.d.ts +1 -0
- package/dist/esm/cli/run.mjs +2 -0
- package/dist/esm/cli/task-reporter.d.ts +32 -0
- package/dist/esm/cli/task-reporter.mjs +122 -0
- package/dist/esm/cli/types.d.ts +51 -0
- package/dist/esm/cli/types.mjs +0 -0
- package/dist/esm/cli/ui.d.ts +21 -0
- package/dist/esm/cli/ui.mjs +64 -0
- package/dist/esm/commands/add.d.ts +20 -0
- package/dist/esm/commands/add.mjs +81 -0
- package/dist/esm/commands/clean-git-cache.d.ts +10 -0
- package/dist/esm/commands/clean-git-cache.mjs +48 -0
- package/dist/esm/commands/clean.d.ts +10 -0
- package/dist/esm/commands/clean.mjs +27 -0
- package/dist/esm/commands/init.d.ts +19 -0
- package/dist/esm/commands/init.mjs +179 -0
- package/dist/esm/commands/prune.d.ts +11 -0
- package/dist/esm/commands/prune.mjs +52 -0
- package/dist/esm/commands/remove.d.ts +12 -0
- package/dist/esm/commands/remove.mjs +87 -0
- package/dist/esm/commands/status.d.ts +16 -0
- package/dist/esm/commands/status.mjs +78 -0
- package/dist/esm/commands/sync.d.ts +33 -0
- package/dist/esm/commands/sync.mjs +730 -0
- package/dist/esm/commands/verify.d.ts +11 -0
- package/dist/esm/commands/verify.mjs +120 -0
- package/dist/esm/config/index.d.ts +15 -0
- package/dist/esm/config/index.mjs +196 -0
- package/dist/esm/config/io.d.ts +30 -0
- package/dist/esm/config/io.mjs +112 -0
- package/dist/esm/config/schema.d.ts +171 -0
- package/dist/esm/config/schema.mjs +69 -0
- package/dist/esm/errors.d.ts +3 -0
- package/dist/esm/errors.mjs +2 -0
- package/dist/esm/git/cache-dir.d.ts +16 -0
- package/dist/esm/git/cache-dir.mjs +23 -0
- package/dist/esm/git/fetch-source.d.ts +19 -0
- package/dist/esm/git/fetch-source.mjs +477 -0
- package/dist/esm/git/redact.d.ts +1 -0
- package/dist/esm/git/redact.mjs +4 -0
- package/dist/esm/git/resolve-remote.d.ts +15 -0
- package/dist/esm/git/resolve-remote.mjs +87 -0
- package/dist/esm/git/resolve-repo.d.ts +5 -0
- package/dist/esm/git/resolve-repo.mjs +52 -0
- package/dist/esm/gitignore.d.ts +18 -0
- package/dist/esm/gitignore.mjs +80 -0
- package/dist/esm/paths.d.ts +8 -0
- package/dist/esm/paths.mjs +34 -0
- package/dist/esm/source-id.d.ts +1 -0
- package/dist/esm/source-id.mjs +29 -0
- package/dist/esm/types/sync.d.ts +25 -0
- package/dist/esm/types/sync.mjs +0 -0
- package/package.json +138 -91
- package/dist/chunks/add.mjs +0 -3
- package/dist/chunks/clean-git-cache.mjs +0 -2
- package/dist/chunks/clean.mjs +0 -2
- package/dist/chunks/init.mjs +0 -3
- package/dist/chunks/prune.mjs +0 -2
- package/dist/chunks/remove.mjs +0 -3
- package/dist/chunks/status.mjs +0 -2
- package/dist/chunks/sync.mjs +0 -9
- package/dist/chunks/verify.mjs +0 -2
- package/dist/shared/docs-cache.BOr9BnyP.mjs +0 -5
- package/dist/shared/docs-cache.BSvQNKuf.mjs +0 -2
- package/dist/shared/docs-cache.CQiaFDb_.mjs +0 -7
- package/dist/shared/docs-cache.CaOcl4OS.mjs +0 -3
- package/dist/shared/docs-cache.kK1DPQIQ.mjs +0 -2
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
|
|
2
|
+
import { promisify } from "node:util";
|
|
3
|
+
import { redactRepoUrl } from "#git/redact";
|
|
4
|
+
const execFileAsync = promisify(execFile);
|
|
5
|
+
const DEFAULT_TIMEOUT_MS = 3e4;
|
|
6
|
+
// URL protocols that must never be used for a documentation repo.
const BLOCKED_PROTOCOLS = /* @__PURE__ */ new Set(["file:", "ftp:", "data:", "javascript:"]);

/**
 * Throws when `repo` parses as a URL with an explicitly blocked protocol.
 * Strings that are not valid URLs (e.g. scp-style `git@host:path`) pass
 * silently — protocol screening only applies to parseable URLs.
 */
const assertAllowedProtocol = (repo) => {
  let url;
  try {
    url = new URL(repo);
  } catch (error) {
    // new URL() throws TypeError for non-URL inputs; those are allowed here.
    if (error instanceof TypeError) {
      return;
    }
    throw error;
  }
  if (BLOCKED_PROTOCOLS.has(url.protocol)) {
    throw new Error(
      `Blocked protocol '${url.protocol}' in repo URL '${redactRepoUrl(repo)}'.`
    );
  }
};

/**
 * Extracts the host component from a git repo reference.
 * Handles scp-style syntax (`user@host:path`) and https/ssh URLs;
 * returns null when no host can be determined.
 */
const parseRepoHost = (repo) => {
  assertAllowedProtocol(repo);
  const scpMatch = repo.match(/^[^@]+@([^:]+):/);
  if (scpMatch) {
    return scpMatch[1] || null;
  }
  let url;
  try {
    url = new URL(repo);
  } catch {
    return null;
  }
  if (url.protocol !== "https:" && url.protocol !== "ssh:") {
    return null;
  }
  return url.hostname || null;
};
|
|
38
|
+
/**
 * Validates that the host of `repo` is covered by `allowHosts`.
 * A host matches an allowlist entry when it equals the entry or is a
 * subdomain of it (comparison is case-insensitive). Throws when the URL
 * is unsupported or the host is outside the allowlist.
 */
export const enforceHostAllowlist = (repo, allowHosts) => {
  const host = parseRepoHost(repo);
  if (!host) {
    throw new Error(
      `Unsupported repo URL '${redactRepoUrl(repo)}'. Use HTTPS or SSH.`
    );
  }
  const candidate = host.toLowerCase();
  const matchesEntry = (entry) => {
    const allowedHost = entry.toLowerCase();
    // Exact match, or `candidate` is a subdomain of the allowed host.
    return candidate === allowedHost || candidate.endsWith(`.${allowedHost}`);
  };
  if (!allowHosts.some(matchesEntry)) {
    throw new Error(
      `Host '${host}' is not in allowHosts for '${redactRepoUrl(repo)}'.`
    );
  }
};
|
|
56
|
+
/**
 * Parses `git ls-remote` output and returns the commit hash (first
 * whitespace-delimited token of the first non-empty line), or null when
 * the output contains nothing usable.
 */
export const parseLsRemote = (stdout) => {
  const firstLine = stdout
    .trim()
    .split("\n")
    .find((line) => line.length > 0);
  if (firstLine === undefined) {
    return null;
  }
  const [commit] = firstLine.split(/\s+/);
  return commit || null;
};
|
|
64
|
+
/**
 * Resolves `params.ref` to a commit hash by querying the remote with
 * `git ls-remote`, after checking the repo host against the allowlist.
 *
 * params: { repo, ref, allowHosts, timeoutMs?, logger? }
 * Returns { repo, ref, resolvedCommit }.
 * Throws when the host is not allowed or the ref cannot be resolved.
 */
export const resolveRemoteCommit = async (params) => {
  const { repo, ref, allowHosts, timeoutMs, logger } = params;
  enforceHostAllowlist(repo, allowHosts);
  const repoLabel = redactRepoUrl(repo);
  // Credentials are redacted in all logging/error output.
  logger?.(`git ls-remote ${repoLabel} ${ref}`);
  const execOptions = {
    timeout: timeoutMs ?? DEFAULT_TIMEOUT_MS,
    maxBuffer: 1024 * 1024
  };
  const { stdout } = await execFileAsync(
    "git",
    ["ls-remote", repo, ref],
    execOptions
  );
  const resolvedCommit = parseLsRemote(stdout);
  if (!resolvedCommit) {
    throw new Error(
      `Unable to resolve ref '${ref}' for ${redactRepoUrl(repo)}.`
    );
  }
  return { repo, ref, resolvedCommit };
};
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
/**
 * Normalizes a user-supplied repo reference into { repoUrl, ref, inferredId }.
 *
 * Accepted forms:
 *   - scp-style ssh:       git@host:owner/name[#ref]
 *   - owner/name shorthand (expands to GitHub HTTPS): owner/name[#ref]
 *   - provider shortcut:   github:owner/name[#ref], gitlab:owner/name[#ref]
 *   - full https:// or ssh:// URL, optionally carrying a #ref fragment
 *
 * `inferredId` is the final path segment with any trailing `.git` removed;
 * it is undefined when nothing sensible can be inferred.
 */
export const resolveRepoInput = (repo) => {
  const trimmed = repo.trim();
  // scp-style ssh: git@host:owner/name[#ref]
  const sshMatch = trimmed.match(/^git@([^:]+):(.+)$/);
  if (sshMatch) {
    const host = sshMatch[1];
    const rawPath = sshMatch[2];
    const [pathPart, rawRef] = rawPath.split("#", 2);
    const sanitizedPath = pathPart.replace(/^\//, "");
    const inferredId = sanitizedPath.split("/").filter(Boolean).pop()?.replace(/\.git$/i, "");
    return {
      repoUrl: `git@${host}:${sanitizedPath}`,
      ref: rawRef?.trim() || void 0,
      inferredId
    };
  }
  // Plain owner/name shorthand — defaults to GitHub over HTTPS.
  const plainMatch = trimmed.match(/^([^\s/:]+)\/([^\s#]+)(?:#(.+))?$/);
  if (plainMatch) {
    const [, owner, name, rawRef] = plainMatch;
    const sanitizedPath = `${owner}/${name}`.replace(/\.git$/i, "");
    return {
      repoUrl: `https://github.com/${sanitizedPath}.git`,
      ref: rawRef?.trim() || void 0,
      inferredId: name.replace(/\.git$/i, "")
    };
  }
  // Provider shortcut: github:owner/name or gitlab:owner/name.
  const shortcutMatch = trimmed.match(/^(github|gitlab):(.+)$/i);
  if (shortcutMatch) {
    const provider = shortcutMatch[1].toLowerCase();
    const [pathPart, rawRef] = shortcutMatch[2].split("#", 2);
    const sanitizedPath = pathPart.replace(/^\//, "");
    const inferredId = sanitizedPath.split("/").filter(Boolean).pop()?.replace(/\.git$/i, "");
    const host = provider === "gitlab" ? "gitlab.com" : "github.com";
    const suffix = sanitizedPath.endsWith(".git") ? "" : ".git";
    return {
      repoUrl: `https://${host}/${sanitizedPath}${suffix}`,
      ref: rawRef?.trim() || void 0,
      inferredId
    };
  }
  // Full URL (https/ssh only).
  try {
    const url = new URL(trimmed);
    if (url.protocol === "https:" || url.protocol === "ssh:") {
      const parts = url.pathname.split("/").filter(Boolean);
      const inferredId = parts.pop()?.replace(/\.git$/i, "");
      // Fix: honor a '#ref' fragment on full URLs, consistent with the
      // other input forms. Previously the fragment leaked into repoUrl
      // and no ref was extracted.
      const hashPos = trimmed.indexOf("#");
      if (hashPos === -1) {
        return { repoUrl: trimmed, ref: void 0, inferredId };
      }
      return {
        repoUrl: trimmed.slice(0, hashPos),
        ref: trimmed.slice(hashPos + 1).trim() || void 0,
        inferredId
      };
    }
  } catch {
  }
  // Fallback: pass the input through untouched and let callers validate.
  return { repoUrl: trimmed, ref: void 0, inferredId: void 0 };
};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
 * Reports whether rootDir/.gitignore already contains an entry covering
 * cacheDir. `entry` is null when the cache directory resolves outside
 * rootDir (nothing this .gitignore could cover); otherwise it is the
 * normalized entry with a trailing slash.
 */
export declare const getGitignoreStatus: (rootDir: string, cacheDir: string) => Promise<{
    gitignorePath: any;
    entry: null;
    hasEntry: boolean;
} | {
    gitignorePath: any;
    entry: string;
    hasEntry: boolean;
}>;
/**
 * Appends the cache-directory entry to rootDir/.gitignore when it is
 * missing. `updated` is true only when the file was actually written.
 */
export declare const ensureGitignoreEntry: (rootDir: string, cacheDir: string) => Promise<{
    updated: boolean;
    gitignorePath: any;
    entry: null;
} | {
    updated: boolean;
    gitignorePath: any;
    entry: string;
}>;
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { access, readFile, writeFile } from "node:fs/promises";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { toPosixPath } from "#core/paths";
|
|
4
|
+
// Resolves to true when `target` is accessible on disk, false otherwise.
// Any access error (missing path, permissions) is treated as "absent".
const exists = (target) =>
  access(target).then(
    () => true,
    () => false
  );
|
|
12
|
+
/**
 * Canonicalizes a .gitignore line for comparison: trims it; discards
 * blank lines, comments (#) and negations (!) by returning ""; strips a
 * leading "/" or "./" and trailing slashes; converts to POSIX separators.
 */
const normalizeEntry = (value) => {
  const trimmed = value.trim();
  const isIgnorable =
    trimmed.length === 0 || trimmed.startsWith("#") || trimmed.startsWith("!");
  if (isIgnorable) {
    return "";
  }
  const stripped = trimmed
    .replace(/^\//, "")
    .replace(/^\.\//, "")
    .replace(/\/+$/, "");
  return toPosixPath(stripped);
};
|
|
22
|
+
/**
 * Computes the path, relative to rootDir, that .gitignore should cover
 * for `cacheDir`. Returns null when the cache dir resolves outside
 * rootDir, and "." when it is rootDir itself.
 */
const resolveGitignoreEntry = (rootDir, cacheDir) => {
  const absolute = path.isAbsolute(cacheDir)
    ? path.resolve(cacheDir)
    : path.resolve(rootDir, cacheDir);
  const relative = path.relative(rootDir, absolute);
  const escapesRoot =
    relative === ".." ||
    relative.startsWith(`..${path.sep}`) ||
    path.isAbsolute(relative);
  if (escapesRoot) {
    return null;
  }
  return relative === "" ? "." : relative;
};
|
|
31
|
+
/**
 * Inspects rootDir/.gitignore for an entry covering cacheDir.
 * Resolves to { gitignorePath, entry, hasEntry }; `entry` (reported with
 * a trailing slash) is null when cacheDir lies outside rootDir or the
 * entry normalizes away to nothing.
 */
export const getGitignoreStatus = async (rootDir, cacheDir) => {
  const gitignorePath = path.resolve(rootDir, ".gitignore");
  const rawEntry = resolveGitignoreEntry(rootDir, cacheDir);
  const normalizedEntry = rawEntry ? normalizeEntry(rawEntry) : "";
  if (!normalizedEntry) {
    return { gitignorePath, entry: null, hasEntry: false };
  }
  // A missing .gitignore is treated as an empty one.
  const contents = (await exists(gitignorePath))
    ? await readFile(gitignorePath, "utf8")
    : "";
  const knownEntries = new Set(
    contents
      .split(/\r?\n/)
      .map((line) => normalizeEntry(line))
      .filter(Boolean)
  );
  return {
    gitignorePath,
    entry: `${normalizedEntry}/`,
    hasEntry: knownEntries.has(normalizedEntry)
  };
};
|
|
55
|
+
/**
 * Makes sure rootDir/.gitignore ignores the cache directory, appending
 * the entry when needed. Resolves to { updated, gitignorePath, entry }.
 * Nothing is written when no entry applies or it is already present.
 */
export const ensureGitignoreEntry = async (rootDir, cacheDir) => {
  const status = await getGitignoreStatus(rootDir, cacheDir);
  const { gitignorePath, entry } = status;
  if (!entry) {
    return { updated: false, gitignorePath, entry: null };
  }
  if (status.hasEntry) {
    return { updated: false, gitignorePath, entry };
  }
  const contents = (await exists(gitignorePath))
    ? await readFile(gitignorePath, "utf8")
    : "";
  // Keep the appended entry on its own line without touching existing content.
  const separator = contents.length === 0 || contents.endsWith("\n") ? "" : "\n";
  await writeFile(gitignorePath, `${contents}${separator}${entry}\n`, "utf8");
  return { updated: true, gitignorePath, entry };
};
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
/** Default filename for the generated table of contents. */
export declare const DEFAULT_TOC_FILENAME = "TOC.md";
/** Converts backslash separators to forward slashes. */
export declare const toPosixPath: (value: string) => string;
/**
 * Resolves targetDir against the config file's directory; throws when the
 * result escapes the project directory or falls under a .git directory.
 */
export declare const resolveTargetDir: (configPath: string, targetDir: string) => any;
/** Resolves the cache directory; overrideCacheDir wins when provided. */
export declare const resolveCacheDir: (configPath: string, cacheDir: string, overrideCacheDir?: string) => any;
/** Computes the per-source directory layout inside the cache directory. */
export declare const getCacheLayout: (cacheDir: string, sourceId: string) => {
    cacheDir: string;
    sourceDir: any;
};
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
/** Default filename for the generated table of contents. */
export const DEFAULT_TOC_FILENAME = "TOC.md";

/** Converts Windows-style backslash separators to POSIX forward slashes. */
export const toPosixPath = (value) => value.split("\\").join("/");

/**
 * Resolves `targetDir` relative to the config file's directory and
 * validates it: the result must stay inside the project directory and
 * must not fall under a .git directory. Returns the absolute path.
 */
export const resolveTargetDir = (configPath, targetDir) => {
  const configDir = path.dirname(path.resolve(configPath));
  const resolved = path.resolve(configDir, targetDir);
  const relative = path.relative(configDir, resolved);
  const escapesProject =
    relative === ".." ||
    relative.startsWith(`..${path.sep}`) ||
    path.isAbsolute(relative);
  if (escapesProject) {
    throw new Error(
      `targetDir '${targetDir}' escapes project directory. Must be within ${configDir}.`
    );
  }
  const segments = toPosixPath(relative).split("/").filter(Boolean);
  if (segments.includes(".git")) {
    throw new Error("targetDir cannot be within .git directory.");
  }
  return resolved;
};
|
|
20
|
+
/**
 * Resolves the cache directory. An explicit `overrideCacheDir` wins
 * (resolved against the current working directory); otherwise `cacheDir`
 * is resolved relative to the config file's directory.
 */
export const resolveCacheDir = (configPath, cacheDir, overrideCacheDir) => {
  if (overrideCacheDir) {
    return path.resolve(overrideCacheDir);
  }
  return path.resolve(path.dirname(configPath), cacheDir);
};
|
|
27
|
+
/**
 * Computes the on-disk layout for one source inside the cache directory.
 * Returns { cacheDir, sourceDir } where sourceDir is cacheDir/<sourceId>.
 * (Removed the unused `_reposDir` local that was computed and never read.)
 */
export const getCacheLayout = (cacheDir, sourceId) => ({
  cacheDir,
  sourceDir: path.join(cacheDir, sourceId)
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** Validates a source id (throws with `label` in the message on failure) and returns it. */
export declare const assertSafeSourceId: (value: unknown, label: string) => string;
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
// Source ids double as directory names, so they must be filesystem-safe.
const SAFE_ID_PATTERN = /^[a-zA-Z0-9_-]+$/;
const MAX_ID_LENGTH = 200;
// Windows reserved device names, compared case-insensitively.
// Fix: the full COM1-COM9 and LPT1-LPT9 ranges are reserved on Windows,
// not just COM1/LPT1.
const RESERVED_NAMES = /* @__PURE__ */ new Set([
  ".",
  "..",
  "CON",
  "PRN",
  "AUX",
  "NUL",
  ...Array.from({ length: 9 }, (_, i) => `COM${i + 1}`),
  ...Array.from({ length: 9 }, (_, i) => `LPT${i + 1}`)
]);

/**
 * Asserts that `value` is a safe source id: a non-empty string of at
 * most MAX_ID_LENGTH characters, containing only [a-zA-Z0-9_-], and not
 * a reserved filesystem name. Returns the validated id.
 *
 * @throws {Error} with `label` in the message when validation fails.
 */
export const assertSafeSourceId = (value, label) => {
  if (typeof value !== "string" || value.length === 0) {
    throw new Error(`${label} must be a non-empty string.`);
  }
  if (value.length > MAX_ID_LENGTH) {
    throw new Error(`${label} exceeds maximum length of ${MAX_ID_LENGTH}.`);
  }
  if (!SAFE_ID_PATTERN.test(value)) {
    throw new Error(
      `${label} must contain only alphanumeric characters, hyphens, and underscores.`
    );
  }
  if (RESERVED_NAMES.has(value.toUpperCase())) {
    throw new Error(`${label} uses reserved name '${value}'.`);
  }
  return value;
};
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/** Options controlling a sync run. */
export type SyncOptions = {
    configPath?: string;
    cacheDirOverride?: string;
    json: boolean;
    lockOnly: boolean;
    offline: boolean;
    failOnMiss: boolean;
    verbose?: boolean;
    concurrency?: number;
    /** When set, only the sources whose ids are listed are synced — TODO confirm against sync.mjs. */
    sourceFilter?: string[];
    timeoutMs?: number;
};
/** Per-source outcome of a sync run. */
export type SyncResult = {
    id: string;
    repo: string;
    ref: string;
    resolvedCommit: string;
    /** Commit previously recorded in the lock file; null when the source had no lock entry. */
    lockCommit: string | null;
    lockRulesSha256?: string;
    status: "up-to-date" | "changed" | "missing";
    bytes?: number;
    fileCount?: number;
    manifestSha256?: string;
    rulesSha256?: string;
};
|
|
File without changes
|
package/package.json
CHANGED
|
@@ -1,92 +1,139 @@
|
|
|
1
1
|
{
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
}
|
|
2
|
+
"name": "docs-cache",
|
|
3
|
+
"private": false,
|
|
4
|
+
"type": "module",
|
|
5
|
+
"version": "0.5.1",
|
|
6
|
+
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
|
|
7
|
+
"description": "CLI for deterministic local caching of external documentation for agents and tools",
|
|
8
|
+
"author": "Frederik Bosch",
|
|
9
|
+
"license": "MIT",
|
|
10
|
+
"homepage": "https://github.com/fbosch/docs-cache#readme",
|
|
11
|
+
"repository": {
|
|
12
|
+
"type": "git",
|
|
13
|
+
"url": "https://github.com/fbosch/docs-cache.git"
|
|
14
|
+
},
|
|
15
|
+
"bugs": {
|
|
16
|
+
"url": "https://github.com/fbosch/docs-cache/issues"
|
|
17
|
+
},
|
|
18
|
+
"keywords": [
|
|
19
|
+
"docs",
|
|
20
|
+
"documentation",
|
|
21
|
+
"cache",
|
|
22
|
+
"agent",
|
|
23
|
+
"ai",
|
|
24
|
+
"git",
|
|
25
|
+
"cli"
|
|
26
|
+
],
|
|
27
|
+
"sideEffects": false,
|
|
28
|
+
"engines": {
|
|
29
|
+
"node": ">=18"
|
|
30
|
+
},
|
|
31
|
+
"bin": {
|
|
32
|
+
"docs-cache": "./bin/docs-cache.mjs"
|
|
33
|
+
},
|
|
34
|
+
"files": [
|
|
35
|
+
"bin",
|
|
36
|
+
"dist/cli.mjs",
|
|
37
|
+
"dist/esm/**/*.mjs",
|
|
38
|
+
"dist/esm/**/*.d.ts",
|
|
39
|
+
"dist/lock.mjs",
|
|
40
|
+
"dist/shared/*.mjs",
|
|
41
|
+
"README.md",
|
|
42
|
+
"LICENSE"
|
|
43
|
+
],
|
|
44
|
+
"scripts": {
|
|
45
|
+
"build": "unbuild",
|
|
46
|
+
"dev": "unbuild --stub",
|
|
47
|
+
"lint": "biome check .",
|
|
48
|
+
"prepublishOnly": "pnpm audit --audit-level=high && pnpm build && pnpm size && pnpm schema:build",
|
|
49
|
+
"release": "pnpm run lint && pnpm run typecheck && bumpp && pnpm publish --access public",
|
|
50
|
+
"test": "pnpm build && node --test",
|
|
51
|
+
"test:coverage": "pnpm build && c8 --include dist --exclude bin --reporter=text node --test",
|
|
52
|
+
"bench": "pnpm build && node scripts/benchmarks/run.mjs",
|
|
53
|
+
"complexity": "node scripts/complexity/run.mjs",
|
|
54
|
+
"schema:build": "node scripts/generate-schema.mjs",
|
|
55
|
+
"size": "size-limit",
|
|
56
|
+
"test:watch": "node --test --watch",
|
|
57
|
+
"typecheck": "tsc --noEmit",
|
|
58
|
+
"prepare": "simple-git-hooks"
|
|
59
|
+
},
|
|
60
|
+
"imports": {
|
|
61
|
+
"#cache/*": {
|
|
62
|
+
"types": "./dist/esm/cache/*.d.ts",
|
|
63
|
+
"default": "./dist/esm/cache/*.mjs"
|
|
64
|
+
},
|
|
65
|
+
"#cli/*": {
|
|
66
|
+
"types": "./dist/esm/cli/*.d.ts",
|
|
67
|
+
"default": "./dist/esm/cli/*.mjs"
|
|
68
|
+
},
|
|
69
|
+
"#commands/*": {
|
|
70
|
+
"types": "./dist/esm/commands/*.d.ts",
|
|
71
|
+
"default": "./dist/esm/commands/*.mjs"
|
|
72
|
+
},
|
|
73
|
+
"#core/*": {
|
|
74
|
+
"types": "./dist/esm/*.d.ts",
|
|
75
|
+
"default": "./dist/esm/*.mjs"
|
|
76
|
+
},
|
|
77
|
+
"#config": {
|
|
78
|
+
"types": "./dist/esm/config/index.d.ts",
|
|
79
|
+
"default": "./dist/esm/config/index.mjs"
|
|
80
|
+
},
|
|
81
|
+
"#config/*": {
|
|
82
|
+
"types": "./dist/esm/config/*.d.ts",
|
|
83
|
+
"default": "./dist/esm/config/*.mjs"
|
|
84
|
+
},
|
|
85
|
+
"#git/*": {
|
|
86
|
+
"types": "./dist/esm/git/*.d.ts",
|
|
87
|
+
"default": "./dist/esm/git/*.mjs"
|
|
88
|
+
},
|
|
89
|
+
"#types/*": {
|
|
90
|
+
"types": "./dist/esm/types/*.d.ts",
|
|
91
|
+
"default": "./dist/esm/types/*.mjs"
|
|
92
|
+
}
|
|
93
|
+
},
|
|
94
|
+
"dependencies": {
|
|
95
|
+
"@clack/prompts": "^1.0.0",
|
|
96
|
+
"cac": "^6.7.14",
|
|
97
|
+
"cli-truncate": "^4.0.0",
|
|
98
|
+
"execa": "^9.6.1",
|
|
99
|
+
"fast-glob": "^3.3.2",
|
|
100
|
+
"log-update": "^7.0.2",
|
|
101
|
+
"picocolors": "^1.1.1",
|
|
102
|
+
"picomatch": "^4.0.3",
|
|
103
|
+
"zod": "^4.3.6"
|
|
104
|
+
},
|
|
105
|
+
"devDependencies": {
|
|
106
|
+
"@biomejs/biome": "^2.3.14",
|
|
107
|
+
"@size-limit/file": "^12.0.0",
|
|
108
|
+
"@types/node": "^25.2.0",
|
|
109
|
+
"bumpp": "^10.3.2",
|
|
110
|
+
"c8": "^10.1.3",
|
|
111
|
+
"jiti": "^2.5.1",
|
|
112
|
+
"lint-staged": "^16.2.7",
|
|
113
|
+
"simple-git-hooks": "^2.13.1",
|
|
114
|
+
"size-limit": "^12.0.0",
|
|
115
|
+
"tinybench": "^6.0.0",
|
|
116
|
+
"ts-complex": "^1.0.0",
|
|
117
|
+
"typescript": "^5.9.3",
|
|
118
|
+
"unbuild": "^3.6.1"
|
|
119
|
+
},
|
|
120
|
+
"size-limit": [
|
|
121
|
+
{
|
|
122
|
+
"path": "dist/cli.mjs",
|
|
123
|
+
"limit": "10 kB"
|
|
124
|
+
}
|
|
125
|
+
],
|
|
126
|
+
"complexity": {
|
|
127
|
+
"maxCyclomatic": 20,
|
|
128
|
+
"minMaintainability": 60,
|
|
129
|
+
"top": 10
|
|
130
|
+
},
|
|
131
|
+
"simple-git-hooks": {
|
|
132
|
+
"pre-commit": "pnpm lint-staged && pnpm typecheck"
|
|
133
|
+
},
|
|
134
|
+
"lint-staged": {
|
|
135
|
+
"*.{js,ts,cjs,mjs,d.cts,d.mts,jsx,tsx,json,jsonc}": [
|
|
136
|
+
"biome check --write --no-errors-on-unmatched"
|
|
137
|
+
]
|
|
138
|
+
}
|
|
139
|
+
}
|
package/dist/chunks/add.mjs
DELETED
|
@@ -1,3 +0,0 @@
|
|
|
1
|
-
import{readFile as D,writeFile as k,access as E}from"node:fs/promises";import l from"node:path";import{v,D as P,a as A,s as F,w as I,b as U,r as S}from"../shared/docs-cache.CaOcl4OS.mjs";import{e as N}from"../shared/docs-cache.BOr9BnyP.mjs";import{r as O}from"../shared/docs-cache.CQiaFDb_.mjs";import{r as x}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const u=async o=>{try{return await E(o),!0}catch{return!1}},y="package.json",C=async o=>{const i=await D(o,"utf8"),e=JSON.parse(i),a=e["docs-cache"];return a?{parsed:e,config:v(a)}:{parsed:e,config:null}},J=async o=>{if(o){const a=S(o);return{resolvedPath:a,mode:l.basename(a)===y?"package":"config"}}const i=S();if(await u(i))return{resolvedPath:i,mode:"config"};const e=l.resolve(process.cwd(),y);return await u(e)&&(await C(e)).config?{resolvedPath:e,mode:"package"}:{resolvedPath:i,mode:"config"}},T=async o=>{const i=await J(o.configPath),e=i.resolvedPath;let a=P,t=null,g=null,f=!1;if(await u(e))if(i.mode==="package"){const r=await C(e);g=r.parsed,t=r.config,a=t??P,f=!!t}else{const r=await D(e,"utf8");t=JSON.parse(r.toString()),a=v(t),f=!0}const b="https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",p=new Set(a.sources.map(r=>r.id)),m=[],d=o.entries.map(r=>{const n=x(r.repo),w=r.id||n.inferredId;if(!w)throw new Error("Unable to infer id. Provide an explicit id.");const c=A(w,"source id");return p.has(c)?(m.push(c),null):(p.add(c),r.targetDir&&O(e,r.targetDir),{id:c,repo:n.repoUrl,...r.targetDir?{targetDir:r.targetDir}:{},...n.ref?{ref:n.ref}:{}})}).filter(Boolean);if(d.length===0)throw new Error("All sources already exist in config.");const s={$schema:b,sources:[...a.sources,...d]};if(t?.cacheDir&&(s.cacheDir=t.cacheDir),t?.defaults&&(s.defaults=t.defaults),i.mode==="package"){const r=g??{};r["docs-cache"]=F(s),await k(e,`${JSON.stringify(r,null,2)}
|
|
2
|
-
`,"utf8")}else await I(e,s);const h=f?null:await N(l.dirname(e),t?.cacheDir??U);return{configPath:e,sources:d,skipped:m,created:!0,gitignoreUpdated:h?.updated??!1,gitignorePath:h?.gitignorePath??null}};export{T as addSources};
|
|
3
|
-
//# sourceMappingURL=add.mjs.map
|
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{rm as n,readdir as o,stat as m}from"node:fs/promises";import u from"node:path";import{r as f,e as w}from"../shared/docs-cache.kK1DPQIQ.mjs";import"node:os";const c=async t=>{try{const e=await o(t,{withFileTypes:!0});let r=0;for(const a of e){const i=u.join(t,a.name);if(a.isDirectory())r+=await c(i);else{const s=await m(i);r+=s.size}}return r}catch{return 0}},h=async t=>{try{return(await o(t)).length}catch{return 0}},p=async()=>{const t=f();if(!await w(t))return{removed:!1,cacheDir:t};const e=await h(t),r=await c(t);return await n(t,{recursive:!0,force:!0}),{removed:!0,cacheDir:t,repoCount:e,bytesFreed:r}};export{p as cleanGitCache};
|
|
2
|
-
//# sourceMappingURL=clean-git-cache.mjs.map
|
package/dist/chunks/clean.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{rm as o,access as i}from"node:fs/promises";import{l as s,b as m}from"../shared/docs-cache.CaOcl4OS.mjs";import{c as n}from"../shared/docs-cache.CQiaFDb_.mjs";import"node:path";import"zod";import"node:process";import"cac";import"picocolors";const f=async r=>{try{return await i(r),!0}catch{return!1}},p=async r=>{const{config:t,resolvedPath:c}=await s(r.configPath),a=n(c,t.cacheDir??m,r.cacheDirOverride),e=await f(a);return e&&await o(a,{recursive:!0,force:!0}),{cacheDir:a,removed:e}};export{p as cleanCache};
|
|
2
|
-
//# sourceMappingURL=clean.mjs.map
|
package/dist/chunks/init.mjs
DELETED
|
@@ -1,3 +0,0 @@
|
|
|
1
|
-
import{readFile as C,writeFile as N,access as S}from"node:fs/promises";import c from"node:path";import{confirm as V,isCancel as G,select as J,text as U}from"@clack/prompts";import{c as _,b as r,s as L,w as T}from"../shared/docs-cache.CaOcl4OS.mjs";import{g as H,e as k}from"../shared/docs-cache.BOr9BnyP.mjs";import"zod";import"../shared/docs-cache.CQiaFDb_.mjs";import"node:process";import"cac";import"picocolors";const h=async n=>{try{return await S(n),!0}catch{return!1}},M=async(n,s={})=>{const y=s.confirm??V,l=s.isCancel??G,x=s.select??J,F=s.text??U,f=n.cwd??process.cwd(),d=c.resolve(f,_),i=c.resolve(f,"package.json"),g=[];if(await h(d)&&g.push(d),await h(i)){const e=await C(i,"utf8");JSON.parse(e)["docs-cache"]&&g.push(i)}if(g.length>0)throw new Error(`Config already exists at ${g.join(", ")}. Init aborted.`);let b=!1;if(await h(i)){const e=await C(i,"utf8");if(!JSON.parse(e)["docs-cache"]){const o=await x({message:"Config location",options:[{value:"config",label:"docs.config.json"},{value:"package",label:"package.json"}],initialValue:"config"});if(l(o))throw new Error("Init cancelled.");b=o==="package"}}const $=b?i:d,v=n.cacheDirOverride??r,u=await F({message:"Cache directory",initialValue:v});if(l(u))throw new Error("Init cancelled.");const A=u||r,D=await y({message:"Generate TOC.md (table of contents with links to all documentation)",initialValue:!0});if(l(D))throw new Error("Init cancelled.");const I=await H(f,A);let P=!1;if(I.entry&&!I.hasEntry){const e=await y({message:"Add cache directory to .gitignore",initialValue:!0});if(l(e))throw new Error("Init cancelled.");P=e}const a={configPath:$,cacheDir:u,toc:D,gitignore:P},t=c.resolve(f,a.configPath);if(c.basename(t)==="package.json"){const e=await C(t,"utf8"),o=JSON.parse(e);if(o["docs-cache"])throw new Error(`docs-cache config already exists in ${t}.`);const 
p={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},E=a.cacheDir||r;E!==r&&(p.cacheDir=E),a.toc||(p.defaults={toc:!1}),o["docs-cache"]=L(p),await N(t,`${JSON.stringify(o,null,2)}
|
|
2
|
-
`,"utf8");const O=a.gitignore?await k(c.dirname(t),E):null;return{configPath:t,created:!0,gitignoreUpdated:O?.updated??!1,gitignorePath:O?.gitignorePath??null}}if(await h(t))throw new Error(`Config already exists at ${t}.`);const w={$schema:"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:[]},m=a.cacheDir||r;m!==r&&(w.cacheDir=m),a.toc||(w.defaults={toc:!1}),await T(t,w);const j=a.gitignore?await k(c.dirname(t),m):null;return{configPath:t,created:!0,gitignoreUpdated:j?.updated??!1,gitignorePath:j?.gitignorePath??null}};export{M as initConfig};
|
|
3
|
-
//# sourceMappingURL=init.mjs.map
|
package/dist/chunks/prune.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{readdir as p,rm as f,access as h}from"node:fs/promises";import u from"node:path";import{l as d,b as D}from"../shared/docs-cache.CaOcl4OS.mjs";import{c as v}from"../shared/docs-cache.CQiaFDb_.mjs";import"zod";import"node:process";import"cac";import"picocolors";const w=async t=>{try{return await h(t),!0}catch{return!1}},l=async t=>{const{config:c,resolvedPath:s,sources:a}=await d(t.configPath),e=v(s,c.cacheDir??D,t.cacheDirOverride);if(!await w(e))return{cacheDir:e,removed:[],kept:a.map(r=>r.id)};const n=new Set(a.map(r=>r.id)),m=await p(e,{withFileTypes:!0}),o=[];for(const r of m){if(!r.isDirectory())continue;const i=r.name;n.has(i)||i.startsWith(".tmp-")||(await f(u.join(e,i),{recursive:!0,force:!0}),o.push(i))}return{cacheDir:e,removed:o,kept:a.map(r=>r.id)}};export{l as pruneCache};
|
|
2
|
-
//# sourceMappingURL=prune.mjs.map
|
package/dist/chunks/remove.mjs
DELETED
|
@@ -1,3 +0,0 @@
|
|
|
1
|
-
import{readFile as w,writeFile as N,rm as $,access as k}from"node:fs/promises";import v from"node:path";import{v as D,D as E,s as F,w as I,r as P}from"../shared/docs-cache.CaOcl4OS.mjs";import{r as O}from"../shared/docs-cache.CQiaFDb_.mjs";import{r as U}from"../shared/docs-cache.BSvQNKuf.mjs";import"zod";import"node:process";import"cac";import"picocolors";const f=async a=>{try{return await k(a),!0}catch{return!1}},y="package.json",S=async a=>{const s=await w(a,"utf8"),o=JSON.parse(s),r=o["docs-cache"];return r?{parsed:o,config:D(r)}:{parsed:o,config:null}},b=async a=>{if(a){const r=P(a);return{resolvedPath:r,mode:v.basename(r)===y?"package":"config"}}const s=P();if(await f(s))return{resolvedPath:s,mode:"config"};const o=v.resolve(process.cwd(),y);return await f(o)&&(await S(o)).config?{resolvedPath:o,mode:"package"}:{resolvedPath:s,mode:"config"}},J=async a=>{if(a.ids.length===0)throw new Error("No sources specified to remove.");const s=await b(a.configPath),o=s.resolvedPath;let r=E,t=null,d=null;if(await f(o))if(s.mode==="package"){const e=await S(o);if(d=e.parsed,t=e.config,!t)throw new Error(`Missing docs-cache config in ${o}.`);r=t}else{const e=await w(o,"utf8");t=JSON.parse(e.toString()),r=D(t)}else throw new Error(`Config not found at ${o}.`);const u=new Map(r.sources.map(e=>[e.id,e])),g=new Map(r.sources.map(e=>[e.repo,e])),n=new Set,l=[];for(const e of a.ids){if(u.has(e)){n.add(e);continue}const i=U(e);if(i.repoUrl&&g.has(i.repoUrl)){const p=g.get(i.repoUrl);p&&n.add(p.id);continue}if(i.inferredId&&u.has(i.inferredId)){n.add(i.inferredId);continue}l.push(e)}const C=r.sources.filter(e=>!n.has(e.id)),h=r.sources.filter(e=>n.has(e.id)).map(e=>e.id),M=r.sources.filter(e=>n.has(e.id));if(h.length===0)throw new Error("No matching sources found to remove.");const 
c={$schema:t?.$schema??"https://raw.githubusercontent.com/fbosch/docs-cache/main/docs.config.schema.json",sources:C};if(t?.cacheDir&&(c.cacheDir=t.cacheDir),t?.defaults&&(c.defaults=t.defaults),t?.targetMode&&(c.targetMode=t.targetMode),s.mode==="package"){const e=d??{};e["docs-cache"]=F(c),await N(o,`${JSON.stringify(e,null,2)}
|
|
2
|
-
`,"utf8")}else await I(o,c);const m=[];for(const e of M){if(!e.targetDir)continue;const i=O(o,e.targetDir);await $(i,{recursive:!0,force:!0}),m.push({id:e.id,targetDir:i})}return{configPath:o,removed:h,missing:l,targetsRemoved:m}};export{J as removeSources};
|
|
3
|
-
//# sourceMappingURL=remove.mjs.map
|
package/dist/chunks/status.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
import{access as y}from"node:fs/promises";import t from"picocolors";import{u as o,b as u,c as D,g as w}from"../shared/docs-cache.CQiaFDb_.mjs";import{l as C,b as L}from"../shared/docs-cache.CaOcl4OS.mjs";import{DEFAULT_LOCK_FILENAME as v,resolveLockPath as x,readLock as P}from"../lock.mjs";import"node:process";import"cac";import"node:path";import"zod";const h=async r=>{try{return await y(r),!0}catch{return!1}},A=async r=>{const{config:c,resolvedPath:a,sources:n}=await C(r.configPath),s=D(a,c.cacheDir??L,r.cacheDirOverride),l=await h(s),e=x(a),i=await h(e);let d=!1,f=null;if(i)try{f=await P(e),d=!0}catch{d=!1}const E=await Promise.all(n.map(async m=>{const p=w(s,m.id),g=await h(p.sourceDir),k=f?.sources?.[m.id]??null;return{id:m.id,docsPath:p.sourceDir,docsExists:g,lockEntry:k}}));return{configPath:a,cacheDir:s,cacheDirExists:l,lockPath:e,lockExists:i,lockValid:d,sources:E}},_=r=>{const c=o.path(r.cacheDir),a=r.cacheDirExists?t.green("present"):t.red("missing"),n=r.lockExists?r.lockValid?t.green("valid"):t.red("invalid"):t.yellow("missing");if(o.header("Cache",`${c} (${a})`),o.header("Lock",`${v} (${n})`),r.sources.length===0){o.line(),o.line(`${u.warn} No sources configured.`);return}o.line();for(const s of r.sources){const l=s.docsExists?u.success:u.error,e=s.lockEntry?t.green("locked"):t.yellow("new"),i=o.hash(s.lockEntry?.resolvedCommit);o.item(l,s.id.padEnd(20),`${e.padEnd(10)} ${i}`)}};export{A as getStatus,_ as printStatus};
|
|
2
|
-
//# sourceMappingURL=status.mjs.map
|