docs-cache 0.5.4 → 0.5.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -15
- package/dist/cli.mjs +26 -13
- package/dist/esm/api.d.ts +2 -0
- package/dist/esm/api.mjs +2 -0
- package/dist/esm/cli/index.mjs +104 -1
- package/dist/esm/cli/parse-args.d.ts +1 -1
- package/dist/esm/cli/parse-args.mjs +40 -4
- package/dist/esm/cli/types.d.ts +12 -0
- package/dist/esm/commands/pin.d.ts +26 -0
- package/dist/esm/commands/pin.mjs +111 -0
- package/dist/esm/commands/sync.mjs +12 -0
- package/dist/esm/commands/update.d.ts +28 -0
- package/dist/esm/commands/update.mjs +51 -0
- package/dist/esm/git/fetch-source.mjs +45 -28
- package/dist/esm/git/git-env.d.ts +3 -0
- package/dist/esm/git/git-env.mjs +33 -0
- package/dist/esm/git/resolve-remote.mjs +6 -6
- package/dist/esm/types/sync.d.ts +1 -0
- package/package.json +135 -138
package/README.md
CHANGED
|
@@ -27,31 +27,38 @@ Documentation is cached in a gitignored location, exposed to agent and tool targ
|
|
|
27
27
|
# Initialize (optional)
|
|
28
28
|
npx docs-cache init
|
|
29
29
|
|
|
30
|
-
# Add
|
|
30
|
+
# Add source(s)
|
|
31
31
|
npx docs-cache add github:owner/repo#main
|
|
32
|
-
npx docs-cache add gitlab:framework/core
|
|
33
|
-
npx docs-cache add https://github.com/framework/core.git
|
|
34
|
-
npx docs-cache add framework/core framework/other-repo
|
|
35
32
|
|
|
36
|
-
# Sync
|
|
33
|
+
# Sync and lock
|
|
37
34
|
npx docs-cache sync
|
|
35
|
+
npx docs-cache sync --frozen
|
|
38
36
|
|
|
39
|
-
#
|
|
40
|
-
npx docs-cache
|
|
41
|
-
|
|
42
|
-
# Check Status
|
|
43
|
-
npx docs-cache status
|
|
37
|
+
# Refresh tracked refs (write lock/materialized output)
|
|
38
|
+
npx docs-cache update <source-id>
|
|
39
|
+
npx docs-cache update --all --dry-run
|
|
44
40
|
|
|
45
|
-
#
|
|
46
|
-
npx docs-cache
|
|
47
|
-
npx docs-cache remove framework/other-repo --prune
|
|
41
|
+
# Optional: pin config ref(s) to commit SHA
|
|
42
|
+
npx docs-cache pin <source-id>
|
|
48
43
|
|
|
49
|
-
#
|
|
44
|
+
# Inspect / maintain
|
|
45
|
+
npx docs-cache verify
|
|
46
|
+
npx docs-cache status
|
|
47
|
+
npx docs-cache remove <source-id>
|
|
50
48
|
npx docs-cache clean
|
|
51
49
|
```
|
|
52
50
|
|
|
53
51
|
> for more options: `npx docs-cache --help`
|
|
54
52
|
|
|
53
|
+
## Recommended Workflow
|
|
54
|
+
|
|
55
|
+
Use this flow to keep behavior predictable (similar to package manager manifest + lock workflows):
|
|
56
|
+
|
|
57
|
+
1. Keep source intent in config (`ref: "main"`, `ref: "v1"`, or a commit SHA).
|
|
58
|
+
2. Run `npx docs-cache update <id...>` (or `--all`) to refresh selected sources and lock data.
|
|
59
|
+
3. Use `npx docs-cache sync --frozen` in CI to fail fast when lock data drifts.
|
|
60
|
+
4. Use `npx docs-cache pin <id...>` only when you explicitly want to rewrite config refs to commit SHAs.
|
|
61
|
+
|
|
55
62
|
## Configuration
|
|
56
63
|
|
|
57
64
|
`docs.config.json` at project root (or a `docs-cache` field in `package.json`):
|
|
@@ -100,10 +107,12 @@ These fields can be set in `defaults` and are inherited by every source unless o
|
|
|
100
107
|
| `maxBytes` | Maximum total bytes to materialize. Default: `200000000` (200 MB). |
|
|
101
108
|
| `maxFiles` | Maximum total files to materialize. |
|
|
102
109
|
| `ignoreHidden` | Skip hidden files and directories (dotfiles). Default: `false`. |
|
|
103
|
-
| `allowHosts` | Allowed Git hosts. Default: `["github.com", "gitlab.com", "visualstudio.com"]`.
|
|
110
|
+
| `allowHosts` | Allowed Git hosts. Default: `["github.com", "gitlab.com", "visualstudio.com"]`. |
|
|
104
111
|
| `toc` | Generate per-source `TOC.md`. Default: `true`. Supports `true`, `false`, or a format: `"tree"` (human readable), `"compressed"` |
|
|
105
112
|
| `unwrapSingleRootDir` | If the materialized output is nested under a single directory, unwrap it (recursively). Default: `true`. |
|
|
106
113
|
|
|
114
|
+
> Brace expansion in `include` supports comma-separated lists (including multiple groups) like `**/*.{md,mdx}` and is capped at 500 expanded patterns per include entry. It does not support nested braces or numeric ranges.
|
|
115
|
+
|
|
107
116
|
### Source options
|
|
108
117
|
|
|
109
118
|
#### Required
|
package/dist/cli.mjs
CHANGED
|
@@ -1,9 +1,11 @@
|
|
|
1
|
-
import
|
|
2
|
-
Usage: ${
|
|
1
|
+
import l from"node:path";import a from"node:process";import d from"picocolors";import{ExitCode as g}from"#cli/exit-code";import{parseArgs as w}from"#cli/parse-args";import{setSilentMode as y,setVerboseMode as v,symbols as r,ui as o}from"#cli/ui";const u="docs-cache",j=`
|
|
2
|
+
Usage: ${u} <command> [options]
|
|
3
3
|
|
|
4
4
|
Commands:
|
|
5
5
|
add Add sources to the config (supports github:org/repo#ref)
|
|
6
6
|
remove Remove sources from the config and targets
|
|
7
|
+
pin Pin source refs to current commits
|
|
8
|
+
update Refresh selected sources and lock data
|
|
7
9
|
sync Synchronize cache with config
|
|
8
10
|
status Show cache status
|
|
9
11
|
clean Remove project cache
|
|
@@ -15,6 +17,7 @@ Commands:
|
|
|
15
17
|
Global options:
|
|
16
18
|
--config <path>
|
|
17
19
|
--cache-dir <path>
|
|
20
|
+
--frozen
|
|
18
21
|
--offline
|
|
19
22
|
--fail-on-miss
|
|
20
23
|
--lock-only
|
|
@@ -29,15 +32,25 @@ Add options:
|
|
|
29
32
|
--target <dir>
|
|
30
33
|
--target-dir <path>
|
|
31
34
|
--id <id>
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
`)
|
|
42
|
-
`);return}o.line(`${
|
|
35
|
+
|
|
36
|
+
Pin options:
|
|
37
|
+
--all
|
|
38
|
+
--dry-run
|
|
39
|
+
|
|
40
|
+
Update options:
|
|
41
|
+
--all
|
|
42
|
+
--dry-run
|
|
43
|
+
`,h=()=>{a.stdout.write(j.trimStart())},m=i=>{a.stderr.write(`${r.error} ${i}
|
|
44
|
+
`)},O=async i=>{const e=i.options,{addSources:s}=await import("#commands/add"),{runSync:t}=await import("#commands/sync");if(i.entries.length===0)throw new Error("Usage: docs-cache add [--source <repo> --target <dir>] <repo...>");const n=await s({configPath:e.config,entries:i.entries});if(e.offline?e.json||o.line(`${r.warn} Offline: skipped sync`):await t({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json,lockOnly:e.lockOnly,offline:e.offline,failOnMiss:e.failOnMiss,sourceFilter:n.sources.map(c=>c.id),timeoutMs:e.timeoutMs,verbose:e.verbose}),e.json){a.stdout.write(`${JSON.stringify(n,null,2)}
|
|
45
|
+
`);return}for(const c of n.sources){const f=c.repo.replace(/^https?:\/\//,"").replace(/\.git$/,""),$=c.targetDir?` ${d.dim("->")} ${d.magenta(c.targetDir)}`:"";o.item(r.success,c.id,`${d.blue(f)}${$}`)}n.skipped?.length&&o.line(`${r.warn} Skipped ${n.skipped.length} existing source${n.skipped.length===1?"":"s"}: ${n.skipped.join(", ")}`),o.line(`${r.info} Updated ${d.gray(l.relative(a.cwd(),n.configPath)||"docs.config.json")}`),n.gitignoreUpdated&&n.gitignorePath&&o.line(`${r.info} Updated ${d.gray(o.path(n.gitignorePath))}`)},S=async i=>{const e=i.options,{removeSources:s}=await import("#commands/remove"),{pruneCache:t}=await import("#commands/prune");if(i.ids.length===0)throw new Error("Usage: docs-cache remove <id...>");const n=await s({configPath:e.config,ids:i.ids});if(e.json){a.stdout.write(`${JSON.stringify(n,null,2)}
|
|
46
|
+
`);return}if(n.removed.length>0&&o.line(`${r.success} Removed ${n.removed.length} source${n.removed.length===1?"":"s"}: ${n.removed.join(", ")}`),n.missing.length>0&&o.line(`${r.warn} Missing ${n.missing.length} source${n.missing.length===1?"":"s"}: ${n.missing.join(", ")}`),n.targetsRemoved.length>0){const c=n.targetsRemoved.map(f=>`${f.id} -> ${o.path(f.targetDir)}`).join(", ");o.line(`${r.success} Removed ${n.targetsRemoved.length} target${n.targetsRemoved.length===1?"":"s"}: ${c}`)}o.line(`${r.info} Updated ${d.gray(l.relative(a.cwd(),n.configPath)||"docs.config.json")}`),e.prune&&await t({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json})},D=async i=>{const e=i.options;if(e.offline)throw new Error("Pin does not support --offline.");if(!e.all&&i.ids.length===0)throw new Error("Usage: docs-cache pin <id...> [--all]");const{pinSources:s}=await import("#commands/pin"),t=await s({configPath:e.config,ids:i.ids,all:e.all,dryRun:e.dryRun,timeoutMs:e.timeoutMs});if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
|
|
47
|
+
`);return}for(const n of t.updated)o.item(r.success,n.id,`${n.fromRef} -> ${n.toRef}`);for(const n of t.unchanged)o.item(r.info,n,"already pinned");if(t.missing.length>0&&o.line(`${r.warn} Missing ${t.missing.length} source${t.missing.length===1?"":"s"}: ${t.missing.join(", ")}`),t.dryRun){o.line(`${r.info} Dry run: no changes written to ${d.gray(l.relative(a.cwd(),t.configPath)||"docs.config.json")}`);return}o.line(`${r.info} Updated ${d.gray(l.relative(a.cwd(),t.configPath)||"docs.config.json")}`)},P=async i=>{const e=i.options;if(e.offline)throw new Error("Update does not support --offline.");if(!e.all&&i.ids.length===0)throw new Error("Usage: docs-cache update <id...> [--all]");const{printSyncPlan:s}=await import("#commands/sync"),{updateSources:t}=await import("#commands/update"),n=await t({configPath:e.config,cacheDirOverride:e.cacheDir,ids:i.ids,all:e.all,dryRun:e.dryRun,json:e.json,lockOnly:e.lockOnly,failOnMiss:e.failOnMiss,timeoutMs:e.timeoutMs,verbose:e.verbose,concurrency:e.concurrency,frozen:e.frozen});if(e.json){a.stdout.write(`${JSON.stringify(n,null,2)}
|
|
48
|
+
`);return}s(n.plan),n.missing.length>0&&o.line(`${r.warn} Missing ${n.missing.length} source${n.missing.length===1?"":"s"}: ${n.missing.join(", ")}`),n.dryRun&&o.line(`${r.info} Dry run: no changes written to ${d.gray(l.relative(a.cwd(),n.plan.configPath)||"docs.config.json")}`)},M=async i=>{const e=i.options,{getStatus:s,printStatus:t}=await import("#commands/status"),n=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){a.stdout.write(`${JSON.stringify(n,null,2)}
|
|
49
|
+
`);return}t(n)},R=async i=>{const e=i.options,{cleanCache:s}=await import("#commands/clean"),t=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
|
|
50
|
+
`);return}if(t.removed){o.line(`${r.success} Removed cache at ${o.path(t.cacheDir)}`);return}o.line(`${r.info} Cache already missing at ${o.path(t.cacheDir)}`)},b=async i=>{const e=i.options,{cleanGitCache:s}=await import("#commands/clean-git-cache"),t=await s();if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
|
|
51
|
+
`);return}if(!t.removed){o.line(`${r.info} Global git cache already empty at ${o.path(t.cacheDir)}`);return}const n=t.bytesFreed!==void 0?`${(t.bytesFreed/1024/1024).toFixed(2)} MB`:"unknown size",c=t.repoCount!==void 0?` (${t.repoCount} cached repositor${t.repoCount===1?"y":"ies"})`:"";o.line(`${r.success} Cleared global git cache${c}: ${n} freed`),o.line(`${r.info} Cache location: ${o.path(t.cacheDir)}`)},C=async i=>{const e=i.options,{pruneCache:s}=await import("#commands/prune"),t=await s({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
|
|
52
|
+
`);return}if(t.removed.length===0){o.line(`${r.info} No cache entries to prune.`);return}o.line(`${r.success} Pruned ${t.removed.length} cache entr${t.removed.length===1?"y":"ies"}: ${t.removed.join(", ")}`)},k=async i=>{const e=i.options,{printSyncPlan:s,runSync:t}=await import("#commands/sync"),n=i.ids.length>0?i.ids:void 0,c=await t({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json,lockOnly:e.lockOnly,offline:e.offline,failOnMiss:e.failOnMiss,frozen:e.frozen,sourceFilter:n,timeoutMs:e.timeoutMs,verbose:e.verbose});if(e.json){a.stdout.write(`${JSON.stringify(c,null,2)}
|
|
53
|
+
`);return}s(c)},U=async i=>{const e=i.options,{printVerify:s,verifyCache:t}=await import("#commands/verify"),n=await t({configPath:e.config,cacheDirOverride:e.cacheDir,json:e.json});e.json?a.stdout.write(`${JSON.stringify(n,null,2)}
|
|
54
|
+
`):s(n),n.results.some(c=>!c.ok)&&a.exit(g.FatalError)},E=async i=>{const e=i.options,{initConfig:s}=await import("#commands/init");if(e.config)throw new Error("Init does not accept --config. Use the project root.");const t=await s({cacheDirOverride:e.cacheDir,json:e.json});if(e.json){a.stdout.write(`${JSON.stringify(t,null,2)}
|
|
55
|
+
`);return}o.line(`${r.success} Wrote ${d.gray(o.path(t.configPath))}`),t.gitignoreUpdated&&t.gitignorePath&&o.line(`${r.info} Updated ${d.gray(o.path(t.gitignorePath))}`)},N=async i=>{switch(i.command){case"add":await O(i);return;case"remove":await S(i);return;case"pin":await D(i);return;case"update":await P(i);return;case"status":await M(i);return;case"clean":await R(i);return;case"clean-cache":await b(i);return;case"prune":await C(i);return;case"sync":await k(i);return;case"verify":await U(i);return;case"init":await E(i);return;default:o.line(`${u} ${i.command}: not implemented yet.`)}};async function x(){try{a.on("uncaughtException",p),a.on("unhandledRejection",p);const i=w();y(i.options.silent),v(i.options.verbose),i.help&&(h(),a.exit(g.Success)),i.command||(h(),a.exit(g.InvalidArgument)),i.command!=="add"&&i.command!=="remove"&&i.command!=="pin"&&i.command!=="update"&&i.command!=="sync"&&i.positionals.length>0&&(m(`${u}: unexpected arguments.`),h(),a.exit(g.InvalidArgument)),await N(i.parsed)}catch(i){p(i)}}function p(i){const e=i instanceof Error?i.message:String(i);m(e),a.exit(g.FatalError)}export{u as CLI_NAME,x as main};
|
|
43
56
|
//# sourceMappingURL=cli.mjs.map
|
package/dist/esm/api.d.ts
CHANGED
|
@@ -4,9 +4,11 @@ export { parseArgs } from "#cli/parse-args";
|
|
|
4
4
|
export { cleanCache } from "#commands/clean";
|
|
5
5
|
export { cleanGitCache } from "#commands/clean-git-cache";
|
|
6
6
|
export { initConfig } from "#commands/init";
|
|
7
|
+
export { pinSources } from "#commands/pin";
|
|
7
8
|
export { pruneCache } from "#commands/prune";
|
|
8
9
|
export { removeSources } from "#commands/remove";
|
|
9
10
|
export { printSyncPlan, runSync } from "#commands/sync";
|
|
11
|
+
export { updateSources } from "#commands/update";
|
|
10
12
|
export { verifyCache } from "#commands/verify";
|
|
11
13
|
export { loadConfig } from "#config";
|
|
12
14
|
export { redactRepoUrl } from "#git/redact";
|
package/dist/esm/api.mjs
CHANGED
|
@@ -4,9 +4,11 @@ export { parseArgs } from "#cli/parse-args";
|
|
|
4
4
|
export { cleanCache } from "#commands/clean";
|
|
5
5
|
export { cleanGitCache } from "#commands/clean-git-cache";
|
|
6
6
|
export { initConfig } from "#commands/init";
|
|
7
|
+
export { pinSources } from "#commands/pin";
|
|
7
8
|
export { pruneCache } from "#commands/prune";
|
|
8
9
|
export { removeSources } from "#commands/remove";
|
|
9
10
|
export { printSyncPlan, runSync } from "#commands/sync";
|
|
11
|
+
export { updateSources } from "#commands/update";
|
|
10
12
|
export { verifyCache } from "#commands/verify";
|
|
11
13
|
export { loadConfig } from "#config";
|
|
12
14
|
export { redactRepoUrl } from "#git/redact";
|
package/dist/esm/cli/index.mjs
CHANGED
|
@@ -11,6 +11,8 @@ Usage: ${CLI_NAME} <command> [options]
|
|
|
11
11
|
Commands:
|
|
12
12
|
add Add sources to the config (supports github:org/repo#ref)
|
|
13
13
|
remove Remove sources from the config and targets
|
|
14
|
+
pin Pin source refs to current commits
|
|
15
|
+
update Refresh selected sources and lock data
|
|
14
16
|
sync Synchronize cache with config
|
|
15
17
|
status Show cache status
|
|
16
18
|
clean Remove project cache
|
|
@@ -22,6 +24,7 @@ Commands:
|
|
|
22
24
|
Global options:
|
|
23
25
|
--config <path>
|
|
24
26
|
--cache-dir <path>
|
|
27
|
+
--frozen
|
|
25
28
|
--offline
|
|
26
29
|
--fail-on-miss
|
|
27
30
|
--lock-only
|
|
@@ -36,6 +39,14 @@ Add options:
|
|
|
36
39
|
--target <dir>
|
|
37
40
|
--target-dir <path>
|
|
38
41
|
--id <id>
|
|
42
|
+
|
|
43
|
+
Pin options:
|
|
44
|
+
--all
|
|
45
|
+
--dry-run
|
|
46
|
+
|
|
47
|
+
Update options:
|
|
48
|
+
--all
|
|
49
|
+
--dry-run
|
|
39
50
|
`;
|
|
40
51
|
const printHelp = () => {
|
|
41
52
|
process.stdout.write(HELP_TEXT.trimStart());
|
|
@@ -141,6 +152,89 @@ const runRemove = async (parsed) => {
|
|
|
141
152
|
});
|
|
142
153
|
}
|
|
143
154
|
};
|
|
155
|
+
const runPin = async (parsed) => {
|
|
156
|
+
const options = parsed.options;
|
|
157
|
+
if (options.offline) {
|
|
158
|
+
throw new Error("Pin does not support --offline.");
|
|
159
|
+
}
|
|
160
|
+
if (!options.all && parsed.ids.length === 0) {
|
|
161
|
+
throw new Error("Usage: docs-cache pin <id...> [--all]");
|
|
162
|
+
}
|
|
163
|
+
const { pinSources } = await import("#commands/pin");
|
|
164
|
+
const result = await pinSources({
|
|
165
|
+
configPath: options.config,
|
|
166
|
+
ids: parsed.ids,
|
|
167
|
+
all: options.all,
|
|
168
|
+
dryRun: options.dryRun,
|
|
169
|
+
timeoutMs: options.timeoutMs
|
|
170
|
+
});
|
|
171
|
+
if (options.json) {
|
|
172
|
+
process.stdout.write(`${JSON.stringify(result, null, 2)}
|
|
173
|
+
`);
|
|
174
|
+
return;
|
|
175
|
+
}
|
|
176
|
+
for (const entry of result.updated) {
|
|
177
|
+
ui.item(symbols.success, entry.id, `${entry.fromRef} -> ${entry.toRef}`);
|
|
178
|
+
}
|
|
179
|
+
for (const id of result.unchanged) {
|
|
180
|
+
ui.item(symbols.info, id, "already pinned");
|
|
181
|
+
}
|
|
182
|
+
if (result.missing.length > 0) {
|
|
183
|
+
ui.line(
|
|
184
|
+
`${symbols.warn} Missing ${result.missing.length} source${result.missing.length === 1 ? "" : "s"}: ${result.missing.join(", ")}`
|
|
185
|
+
);
|
|
186
|
+
}
|
|
187
|
+
if (result.dryRun) {
|
|
188
|
+
ui.line(
|
|
189
|
+
`${symbols.info} Dry run: no changes written to ${pc.gray(path.relative(process.cwd(), result.configPath) || "docs.config.json")}`
|
|
190
|
+
);
|
|
191
|
+
return;
|
|
192
|
+
}
|
|
193
|
+
ui.line(
|
|
194
|
+
`${symbols.info} Updated ${pc.gray(path.relative(process.cwd(), result.configPath) || "docs.config.json")}`
|
|
195
|
+
);
|
|
196
|
+
};
|
|
197
|
+
const runUpdate = async (parsed) => {
|
|
198
|
+
const options = parsed.options;
|
|
199
|
+
if (options.offline) {
|
|
200
|
+
throw new Error("Update does not support --offline.");
|
|
201
|
+
}
|
|
202
|
+
if (!options.all && parsed.ids.length === 0) {
|
|
203
|
+
throw new Error("Usage: docs-cache update <id...> [--all]");
|
|
204
|
+
}
|
|
205
|
+
const { printSyncPlan } = await import("#commands/sync");
|
|
206
|
+
const { updateSources } = await import("#commands/update");
|
|
207
|
+
const result = await updateSources({
|
|
208
|
+
configPath: options.config,
|
|
209
|
+
cacheDirOverride: options.cacheDir,
|
|
210
|
+
ids: parsed.ids,
|
|
211
|
+
all: options.all,
|
|
212
|
+
dryRun: options.dryRun,
|
|
213
|
+
json: options.json,
|
|
214
|
+
lockOnly: options.lockOnly,
|
|
215
|
+
failOnMiss: options.failOnMiss,
|
|
216
|
+
timeoutMs: options.timeoutMs,
|
|
217
|
+
verbose: options.verbose,
|
|
218
|
+
concurrency: options.concurrency,
|
|
219
|
+
frozen: options.frozen
|
|
220
|
+
});
|
|
221
|
+
if (options.json) {
|
|
222
|
+
process.stdout.write(`${JSON.stringify(result, null, 2)}
|
|
223
|
+
`);
|
|
224
|
+
return;
|
|
225
|
+
}
|
|
226
|
+
printSyncPlan(result.plan);
|
|
227
|
+
if (result.missing.length > 0) {
|
|
228
|
+
ui.line(
|
|
229
|
+
`${symbols.warn} Missing ${result.missing.length} source${result.missing.length === 1 ? "" : "s"}: ${result.missing.join(", ")}`
|
|
230
|
+
);
|
|
231
|
+
}
|
|
232
|
+
if (result.dryRun) {
|
|
233
|
+
ui.line(
|
|
234
|
+
`${symbols.info} Dry run: no changes written to ${pc.gray(path.relative(process.cwd(), result.plan.configPath) || "docs.config.json")}`
|
|
235
|
+
);
|
|
236
|
+
}
|
|
237
|
+
};
|
|
144
238
|
const runStatus = async (parsed) => {
|
|
145
239
|
const options = parsed.options;
|
|
146
240
|
const { getStatus, printStatus } = await import("#commands/status");
|
|
@@ -223,6 +317,7 @@ const runPrune = async (parsed) => {
|
|
|
223
317
|
const runSyncCommand = async (parsed) => {
|
|
224
318
|
const options = parsed.options;
|
|
225
319
|
const { printSyncPlan, runSync } = await import("#commands/sync");
|
|
320
|
+
const sourceFilter = parsed.ids.length > 0 ? parsed.ids : void 0;
|
|
226
321
|
const plan = await runSync({
|
|
227
322
|
configPath: options.config,
|
|
228
323
|
cacheDirOverride: options.cacheDir,
|
|
@@ -230,6 +325,8 @@ const runSyncCommand = async (parsed) => {
|
|
|
230
325
|
lockOnly: options.lockOnly,
|
|
231
326
|
offline: options.offline,
|
|
232
327
|
failOnMiss: options.failOnMiss,
|
|
328
|
+
frozen: options.frozen,
|
|
329
|
+
sourceFilter,
|
|
233
330
|
timeoutMs: options.timeoutMs,
|
|
234
331
|
verbose: options.verbose
|
|
235
332
|
});
|
|
@@ -288,6 +385,12 @@ const runCommand = async (parsed) => {
|
|
|
288
385
|
case "remove":
|
|
289
386
|
await runRemove(parsed);
|
|
290
387
|
return;
|
|
388
|
+
case "pin":
|
|
389
|
+
await runPin(parsed);
|
|
390
|
+
return;
|
|
391
|
+
case "update":
|
|
392
|
+
await runUpdate(parsed);
|
|
393
|
+
return;
|
|
291
394
|
case "status":
|
|
292
395
|
await runStatus(parsed);
|
|
293
396
|
return;
|
|
@@ -328,7 +431,7 @@ export async function main() {
|
|
|
328
431
|
printHelp();
|
|
329
432
|
process.exit(ExitCode.InvalidArgument);
|
|
330
433
|
}
|
|
331
|
-
if (parsed.command !== "add" && parsed.command !== "remove" && parsed.positionals.length > 0) {
|
|
434
|
+
if (parsed.command !== "add" && parsed.command !== "remove" && parsed.command !== "pin" && parsed.command !== "update" && parsed.command !== "sync" && parsed.positionals.length > 0) {
|
|
332
435
|
printError(`${CLI_NAME}: unexpected arguments.`);
|
|
333
436
|
printHelp();
|
|
334
437
|
process.exit(ExitCode.InvalidArgument);
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import type { CliCommand, CliOptions } from "./types";
|
|
2
|
-
declare const COMMANDS: readonly ["add", "remove", "sync", "status", "clean", "clean-cache", "prune", "verify", "init"];
|
|
2
|
+
declare const COMMANDS: readonly ["add", "remove", "pin", "update", "sync", "status", "clean", "clean-cache", "prune", "verify", "init"];
|
|
3
3
|
type Command = (typeof COMMANDS)[number];
|
|
4
4
|
export type ParsedArgs = {
|
|
5
5
|
command: Command | null;
|
|
@@ -4,6 +4,8 @@ import { ExitCode } from "#cli/exit-code";
|
|
|
4
4
|
const COMMANDS = [
|
|
5
5
|
"add",
|
|
6
6
|
"remove",
|
|
7
|
+
"pin",
|
|
8
|
+
"update",
|
|
7
9
|
"sync",
|
|
8
10
|
"status",
|
|
9
11
|
"clean",
|
|
@@ -18,6 +20,7 @@ const ADD_ONLY_OPTIONS = /* @__PURE__ */ new Set([
|
|
|
18
20
|
"--target-dir",
|
|
19
21
|
"--id"
|
|
20
22
|
]);
|
|
23
|
+
const SCOPED_SOURCE_OPTIONS = /* @__PURE__ */ new Set(["--all", "--dry-run"]);
|
|
21
24
|
const POSITIONAL_SKIP_OPTIONS = /* @__PURE__ */ new Set([
|
|
22
25
|
"--config",
|
|
23
26
|
"--cache-dir",
|
|
@@ -168,16 +171,40 @@ const parsePositionals = (rawArgs) => {
|
|
|
168
171
|
};
|
|
169
172
|
const assertAddOnlyOptions = (command, rawArgs) => {
|
|
170
173
|
if (command === "add") {
|
|
174
|
+
for (const arg of rawArgs) {
|
|
175
|
+
const [flag] = arg.split("=");
|
|
176
|
+
if (SCOPED_SOURCE_OPTIONS.has(flag)) {
|
|
177
|
+
throw new Error(`${arg} is only valid for pin or update.`);
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
return;
|
|
181
|
+
}
|
|
182
|
+
if (command === "pin" || command === "update") {
|
|
183
|
+
for (const arg of rawArgs) {
|
|
184
|
+
if (ADD_ONLY_OPTIONS.has(arg)) {
|
|
185
|
+
throw new Error(`${arg} is only valid for add.`);
|
|
186
|
+
}
|
|
187
|
+
if (!arg.startsWith("--")) {
|
|
188
|
+
continue;
|
|
189
|
+
}
|
|
190
|
+
const [flag] = arg.split("=");
|
|
191
|
+
if (ADD_ONLY_OPTIONS_WITH_VALUES.has(flag)) {
|
|
192
|
+
throw new Error(`${flag} is only valid for add.`);
|
|
193
|
+
}
|
|
194
|
+
}
|
|
171
195
|
return;
|
|
172
196
|
}
|
|
173
197
|
for (const arg of rawArgs) {
|
|
198
|
+
const [flag] = arg.split("=");
|
|
199
|
+
if (SCOPED_SOURCE_OPTIONS.has(flag)) {
|
|
200
|
+
throw new Error(`${arg} is only valid for pin or update.`);
|
|
201
|
+
}
|
|
174
202
|
if (ADD_ONLY_OPTIONS.has(arg)) {
|
|
175
203
|
throw new Error(`${arg} is only valid for add.`);
|
|
176
204
|
}
|
|
177
205
|
if (!arg.startsWith("--")) {
|
|
178
206
|
continue;
|
|
179
207
|
}
|
|
180
|
-
const [flag] = arg.split("=");
|
|
181
208
|
if (ADD_ONLY_OPTIONS_WITH_VALUES.has(flag)) {
|
|
182
209
|
throw new Error(`${flag} is only valid for add.`);
|
|
183
210
|
}
|
|
@@ -191,6 +218,9 @@ const buildOptions = (result) => {
|
|
|
191
218
|
failOnMiss: Boolean(result.options.failOnMiss),
|
|
192
219
|
lockOnly: Boolean(result.options.lockOnly),
|
|
193
220
|
prune: Boolean(result.options.prune),
|
|
221
|
+
all: Boolean(result.options.all),
|
|
222
|
+
dryRun: Boolean(result.options.dryRun),
|
|
223
|
+
frozen: Boolean(result.options.frozen),
|
|
194
224
|
concurrency: result.options.concurrency ? Number(result.options.concurrency) : void 0,
|
|
195
225
|
json: Boolean(result.options.json),
|
|
196
226
|
timeoutMs: result.options.timeoutMs ? Number(result.options.timeoutMs) : void 0,
|
|
@@ -236,8 +266,12 @@ const buildParsedCommand = (command, options, positionals, addEntries) => {
|
|
|
236
266
|
};
|
|
237
267
|
case "remove":
|
|
238
268
|
return { command: "remove", ids: positionals, options };
|
|
269
|
+
case "pin":
|
|
270
|
+
return { command: "pin", ids: positionals, options };
|
|
271
|
+
case "update":
|
|
272
|
+
return { command: "update", ids: positionals, options };
|
|
239
273
|
case "sync":
|
|
240
|
-
return { command: "sync", options };
|
|
274
|
+
return { command: "sync", ids: positionals, options };
|
|
241
275
|
case "status":
|
|
242
276
|
return { command: "status", options };
|
|
243
277
|
case "clean":
|
|
@@ -257,10 +291,12 @@ const buildParsedCommand = (command, options, positionals, addEntries) => {
|
|
|
257
291
|
export const parseArgs = (argv = process.argv) => {
|
|
258
292
|
try {
|
|
259
293
|
const cli = cac("docs-cache");
|
|
260
|
-
cli.option("--config <path>", "Path to config file").option("--cache-dir <path>", "Override cache directory").option("--offline", "Disable network access").option("--fail-on-miss", "Fail when required sources are missing").option("--lock-only", "Update lock without materializing files").option("--prune", "Prune cache on remove").option("--concurrency <n>", "Concurrency limit").option("--json", "Output JSON").option("--timeout-ms <n>", "Network timeout in milliseconds").option("--silent", "Suppress non-error output").option("--verbose", "Enable verbose logging").help();
|
|
294
|
+
cli.option("--config <path>", "Path to config file").option("--cache-dir <path>", "Override cache directory").option("--all", "Apply command to all sources").option("--dry-run", "Preview changes without writing files").option("--frozen", "Fail if lock and resolved refs differ").option("--offline", "Disable network access").option("--fail-on-miss", "Fail when required sources are missing").option("--lock-only", "Update lock without materializing files").option("--prune", "Prune cache on remove").option("--concurrency <n>", "Concurrency limit").option("--json", "Output JSON").option("--timeout-ms <n>", "Network timeout in milliseconds").option("--silent", "Suppress non-error output").option("--verbose", "Enable verbose logging").help();
|
|
261
295
|
cli.command("add [repo...]", "Add sources to the config").option("--source <repo>", "Source repo").option("--target <dir>", "Target directory for source").option("--target-dir <path>", "Target directory for source").option("--id <id>", "Source id");
|
|
262
296
|
cli.command("remove <id...>", "Remove sources from the config and targets");
|
|
263
|
-
cli.command("
|
|
297
|
+
cli.command("pin [id...]", "Pin source refs to current commit");
|
|
298
|
+
cli.command("update [id...]", "Refresh selected sources and lock data");
|
|
299
|
+
cli.command("sync [id...]", "Synchronize cache with config");
|
|
264
300
|
cli.command("status", "Show cache status");
|
|
265
301
|
cli.command("clean", "Remove project cache");
|
|
266
302
|
cli.command("clean-cache", "Clear global git cache");
|
package/dist/esm/cli/types.d.ts
CHANGED
|
@@ -5,6 +5,9 @@ export type CliOptions = {
|
|
|
5
5
|
failOnMiss: boolean;
|
|
6
6
|
lockOnly: boolean;
|
|
7
7
|
prune: boolean;
|
|
8
|
+
all: boolean;
|
|
9
|
+
dryRun: boolean;
|
|
10
|
+
frozen: boolean;
|
|
8
11
|
concurrency?: number;
|
|
9
12
|
json: boolean;
|
|
10
13
|
timeoutMs?: number;
|
|
@@ -24,8 +27,17 @@ export type CliCommand = {
|
|
|
24
27
|
command: "remove";
|
|
25
28
|
ids: string[];
|
|
26
29
|
options: CliOptions;
|
|
30
|
+
} | {
|
|
31
|
+
command: "pin";
|
|
32
|
+
ids: string[];
|
|
33
|
+
options: CliOptions;
|
|
34
|
+
} | {
|
|
35
|
+
command: "update";
|
|
36
|
+
ids: string[];
|
|
37
|
+
options: CliOptions;
|
|
27
38
|
} | {
|
|
28
39
|
command: "sync";
|
|
40
|
+
ids: string[];
|
|
29
41
|
options: CliOptions;
|
|
30
42
|
} | {
|
|
31
43
|
command: "status";
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { resolveRemoteCommit } from "#git/resolve-remote";
|
|
2
|
+
type PinParams = {
|
|
3
|
+
configPath?: string;
|
|
4
|
+
ids: string[];
|
|
5
|
+
all: boolean;
|
|
6
|
+
dryRun?: boolean;
|
|
7
|
+
timeoutMs?: number;
|
|
8
|
+
};
|
|
9
|
+
type PinDeps = {
|
|
10
|
+
resolveRemoteCommit?: typeof resolveRemoteCommit;
|
|
11
|
+
};
|
|
12
|
+
type PinResultEntry = {
|
|
13
|
+
id: string;
|
|
14
|
+
fromRef: string;
|
|
15
|
+
toRef: string;
|
|
16
|
+
repo: string;
|
|
17
|
+
};
|
|
18
|
+
export declare const pinSources: (params: PinParams, deps?: PinDeps) => Promise<{
|
|
19
|
+
configPath: any;
|
|
20
|
+
dryRun: boolean;
|
|
21
|
+
pinned: PinResultEntry[];
|
|
22
|
+
updated: PinResultEntry[];
|
|
23
|
+
unchanged: string[];
|
|
24
|
+
missing: string[];
|
|
25
|
+
}>;
|
|
26
|
+
export {};
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
import { resolveSources } from "#config";
|
|
2
|
+
import {
|
|
3
|
+
mergeConfigBase,
|
|
4
|
+
readConfigAtPath,
|
|
5
|
+
resolveConfigTarget,
|
|
6
|
+
writeConfigFile
|
|
7
|
+
} from "#config/io";
|
|
8
|
+
import { resolveRemoteCommit } from "#git/resolve-remote";
|
|
9
|
+
const DEFAULT_ALLOW_HOSTS = ["github.com", "gitlab.com", "visualstudio.com"];
|
|
10
|
+
const PIN_RESOLVE_CONCURRENCY = 4;
|
|
11
|
+
const isPinnedCommitRef = (ref) => /^[0-9a-f]{40}$/i.test(ref.trim());
|
|
12
|
+
/**
 * Pin selected sources to concrete commit SHAs.
 *
 * Resolves each selected source's ref to a 40-hex commit (via `git ls-remote`,
 * unless the ref is already a commit SHA), rewrites the config so those
 * sources carry the pinned ref, and returns a summary.
 *
 * params: { ids, all, configPath?, dryRun?, timeoutMs? }
 * deps:   { resolveRemoteCommit? } — injection point for tests.
 * Returns { configPath, dryRun, pinned, updated, unchanged, missing }.
 * Throws when no selection was given or when nothing matched.
 */
export const pinSources = async (params, deps = {}) => {
  // Require an explicit selection: either --all or at least one id.
  if (!params.all && params.ids.length === 0) {
    throw new Error("Usage: docs-cache pin <id...> [--all]");
  }
  const target = await resolveConfigTarget(params.configPath);
  const resolvedPath = target.resolvedPath;
  const { config, rawConfig, rawPackage } = await readConfigAtPath(target);
  const selectedIds = params.all ? new Set(config.sources.map((source) => source.id)) : new Set(params.ids);
  // Ids the caller asked for that are not in the config — reported, not fatal.
  const missing = params.all ? [] : params.ids.filter(
    (id) => !config.sources.some((source) => source.id === id)
  );
  const resolvedSources = resolveSources(config);
  const resolvedById = new Map(
    resolvedSources.map((source) => [source.id, source])
  );
  const allowHosts = config.defaults?.allowHosts ?? DEFAULT_ALLOW_HOSTS;
  // Dependency-injection point; defaults to the real ls-remote helper.
  const resolveCommit = deps.resolveRemoteCommit ?? resolveRemoteCommit;
  const entriesById = /* @__PURE__ */ new Map();
  const sourcesToProcess = config.sources.filter(
    (source) => selectedIds.has(source.id)
  );
  // Bounded worker pool: up to PIN_RESOLVE_CONCURRENCY chains pull work via
  // the shared `cursor`; each worker recurses until the list is exhausted.
  const queue = [];
  let cursor = 0;
  const runNext = async () => {
    const index = cursor;
    cursor += 1;
    const source = sourcesToProcess[index];
    if (!source) {
      return;
    }
    const resolved = resolvedById.get(source.id);
    if (!resolved) {
      // No resolved counterpart for this source; skip it silently.
      return runNext();
    }
    const fromRef = source.ref ?? resolved.ref;
    const trimmedFromRef = fromRef.trim();
    // Already a full 40-hex commit SHA: record as-is, no network round-trip.
    if (isPinnedCommitRef(trimmedFromRef)) {
      entriesById.set(source.id, {
        id: source.id,
        fromRef,
        toRef: trimmedFromRef,
        repo: resolved.repo
      });
      return runNext();
    }
    const remote = await resolveCommit({
      repo: resolved.repo,
      ref: resolved.ref,
      allowHosts,
      timeoutMs: params.timeoutMs
    });
    entriesById.set(source.id, {
      id: source.id,
      fromRef,
      toRef: remote.resolvedCommit,
      repo: remote.repo
    });
    return runNext();
  };
  for (let worker = 0; worker < Math.min(PIN_RESOLVE_CONCURRENCY, sourcesToProcess.length); worker += 1) {
    queue.push(runNext());
  }
  await Promise.all(queue);
  if (entriesById.size === 0) {
    throw new Error("No matching sources found to pin.");
  }
  // Rewrite refs; untouched and already-pinned sources keep object identity.
  const nextSources = config.sources.map((source) => {
    const pin = entriesById.get(source.id);
    if (!pin) {
      return source;
    }
    if (source.ref === pin.toRef) {
      return source;
    }
    return {
      ...source,
      ref: pin.toRef
    };
  });
  if (!params.dryRun) {
    const nextConfig = mergeConfigBase(rawConfig ?? config, nextSources);
    await writeConfigFile({
      mode: target.mode,
      resolvedPath,
      config: nextConfig,
      rawPackage
    });
  }
  const pinned = Array.from(entriesById.values());
  const updated = pinned.filter((entry) => entry.fromRef !== entry.toRef);
  const unchanged = pinned.filter((entry) => entry.fromRef === entry.toRef).map((entry) => entry.id);
  return {
    configPath: resolvedPath,
    dryRun: Boolean(params.dryRun),
    pinned,
    updated,
    unchanged,
    missing
  };
};
|
|
@@ -646,6 +646,18 @@ export const runSync = async (options, deps = {}) => {
|
|
|
646
646
|
`Missing required source(s): ${requiredMissing.map((result) => result.id).join(", ")}.`
|
|
647
647
|
);
|
|
648
648
|
}
|
|
649
|
+
if (options.frozen) {
|
|
650
|
+
const drifted = plan.results.filter(
|
|
651
|
+
(result) => result.status !== "up-to-date"
|
|
652
|
+
);
|
|
653
|
+
if (drifted.length > 0) {
|
|
654
|
+
throw new Error(
|
|
655
|
+
`Frozen sync failed: lock is out of date for source(s): ${drifted.map((result) => result.id).join(
|
|
656
|
+
", "
|
|
657
|
+
)}. Run docs-cache update or docs-cache sync to refresh the lock.`
|
|
658
|
+
);
|
|
659
|
+
}
|
|
660
|
+
}
|
|
649
661
|
if (!options.lockOnly) {
|
|
650
662
|
const defaults = plan.defaults;
|
|
651
663
|
const runFetch = deps.fetchSource ?? fetchSource;
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import type { materializeSource } from "#cache/materialize";
|
|
2
|
+
import type { fetchSource } from "#git/fetch-source";
|
|
3
|
+
import type { resolveRemoteCommit } from "#git/resolve-remote";
|
|
4
|
+
/** Options accepted by `docs-cache update` (see `updateSources`). */
type UpdateOptions = {
  configPath?: string;
  cacheDirOverride?: string;
  /** Source ids to update; ignored when `all` is true. */
  ids: string[];
  /** Update every configured source. */
  all: boolean;
  /** Plan only; do not run the sync. */
  dryRun: boolean;
  json: boolean;
  lockOnly: boolean;
  failOnMiss: boolean;
  timeoutMs?: number;
  verbose?: boolean;
  concurrency?: number;
  /** Fail instead of writing when the lock would change — TODO confirm against sync docs. */
  frozen?: boolean;
};
/** Injectable implementations, used by tests to stub git/cache access. */
type UpdateDeps = {
  resolveRemoteCommit?: typeof resolveRemoteCommit;
  fetchSource?: typeof fetchSource;
  materializeSource?: typeof materializeSource;
};
/**
 * Refresh tracked refs for the selected sources by running a scoped sync.
 * Resolves with the executed (or planned, when `dryRun`) sync result plus
 * any requested ids that were not found in the config.
 */
export declare const updateSources: (options: UpdateOptions, deps?: UpdateDeps) => Promise<{
  dryRun: boolean;
  missing: string[];
  plan: any;
}>;
export {};
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { getSyncPlan, runSync } from "#commands/sync";
|
|
2
|
+
import { loadConfig } from "#config";
|
|
3
|
+
/**
 * Determine which configured source ids an update should touch.
 * With `options.all`, every configured id is selected. Otherwise the
 * requested ids are split into known (`selectedIds`) and unknown (`missing`);
 * selecting nothing at all is an error.
 */
const resolveSelectedSourceIds = async (options) => {
  const { sources } = await loadConfig(options.configPath);
  const allIds = sources.map((source) => source.id);
  if (options.all) {
    return { selectedIds: allIds, missing: [] };
  }
  const known = new Set(allIds);
  const selectedIds = [];
  const missing = [];
  // Preserve the caller's ordering while partitioning known vs unknown ids.
  for (const id of options.ids) {
    if (known.has(id)) {
      selectedIds.push(id);
    } else {
      missing.push(id);
    }
  }
  if (selectedIds.length === 0) {
    throw new Error("No matching sources found to update.");
  }
  return { selectedIds, missing };
};
|
|
19
|
+
/**
 * Refresh the tracked refs for the selected sources.
 *
 * Validates the selection, then runs a sync scoped to those sources
 * (always online). With `options.dryRun` only the sync plan is computed;
 * otherwise the sync is executed. Returns { dryRun, missing, plan }.
 */
export const updateSources = async (options, deps = {}) => {
  // Require an explicit selection: either --all or at least one id.
  if (!options.all && options.ids.length === 0) {
    throw new Error("Usage: docs-cache update <id...> [--all]");
  }
  const { selectedIds, missing } = await resolveSelectedSourceIds(options);
  // Updates are always online and filtered to the selected sources.
  const syncOptions = {
    configPath: options.configPath,
    cacheDirOverride: options.cacheDirOverride,
    json: options.json,
    lockOnly: options.lockOnly,
    offline: false,
    failOnMiss: options.failOnMiss,
    frozen: options.frozen,
    verbose: options.verbose,
    concurrency: options.concurrency,
    sourceFilter: selectedIds,
    timeoutMs: options.timeoutMs
  };
  // Dry runs only compute the plan; real runs execute the sync.
  const runner = options.dryRun ? getSyncPlan : runSync;
  const plan = await runner(syncOptions, deps);
  return {
    dryRun: Boolean(options.dryRun),
    missing,
    plan
  };
};
|
|
@@ -7,36 +7,12 @@ import { execa } from "execa";
|
|
|
7
7
|
import { getErrnoCode } from "#core/errors";
|
|
8
8
|
import { assertSafeSourceId } from "#core/source-id";
|
|
9
9
|
import { exists, resolveGitCacheDir } from "#git/cache-dir";
|
|
10
|
+
import { buildGitEnv, resolveGitCommand } from "#git/git-env";
|
|
10
11
|
const DEFAULT_TIMEOUT_MS = 12e4;
|
|
11
12
|
const DEFAULT_GIT_DEPTH = 1;
|
|
12
13
|
const DEFAULT_RM_RETRIES = 3;
|
|
13
14
|
const DEFAULT_RM_BACKOFF_MS = 100;
|
|
14
|
-
const
|
|
15
|
-
const pathValue = process.env.PATH ?? process.env.Path;
|
|
16
|
-
const pathExtValue = process.env.PATHEXT ?? (process.platform === "win32" ? ".COM;.EXE;.BAT;.CMD" : void 0);
|
|
17
|
-
return {
|
|
18
|
-
...process.env,
|
|
19
|
-
...pathValue ? { PATH: pathValue, Path: pathValue } : {},
|
|
20
|
-
...pathExtValue ? { PATHEXT: pathExtValue } : {},
|
|
21
|
-
HOME: process.env.HOME,
|
|
22
|
-
USER: process.env.USER,
|
|
23
|
-
USERPROFILE: process.env.USERPROFILE,
|
|
24
|
-
TMPDIR: process.env.TMPDIR,
|
|
25
|
-
TMP: process.env.TMP,
|
|
26
|
-
TEMP: process.env.TEMP,
|
|
27
|
-
SYSTEMROOT: process.env.SYSTEMROOT,
|
|
28
|
-
WINDIR: process.env.WINDIR,
|
|
29
|
-
SSH_AUTH_SOCK: process.env.SSH_AUTH_SOCK,
|
|
30
|
-
SSH_AGENT_PID: process.env.SSH_AGENT_PID,
|
|
31
|
-
HTTP_PROXY: process.env.HTTP_PROXY,
|
|
32
|
-
HTTPS_PROXY: process.env.HTTPS_PROXY,
|
|
33
|
-
NO_PROXY: process.env.NO_PROXY,
|
|
34
|
-
GIT_TERMINAL_PROMPT: "0",
|
|
35
|
-
GIT_CONFIG_NOSYSTEM: "1",
|
|
36
|
-
GIT_CONFIG_NOGLOBAL: "1",
|
|
37
|
-
...process.platform === "win32" ? {} : { GIT_ASKPASS: "/bin/false" }
|
|
38
|
-
};
|
|
39
|
-
};
|
|
15
|
+
const MAX_BRACE_EXPANSIONS = 500;
|
|
40
16
|
const buildGitConfigs = (allowFileProtocol) => [
|
|
41
17
|
"-c",
|
|
42
18
|
"core.hooksPath=/dev/null",
|
|
@@ -91,7 +67,7 @@ const git = async (args, options) => {
|
|
|
91
67
|
);
|
|
92
68
|
const commandLabel = `git ${commandArgs.join(" ")}`;
|
|
93
69
|
options?.logger?.(commandLabel);
|
|
94
|
-
const subprocess = execa(
|
|
70
|
+
const subprocess = execa(resolveGitCommand(), commandArgs, {
|
|
95
71
|
cwd: options?.cwd,
|
|
96
72
|
timeout: options?.timeoutMs ?? DEFAULT_TIMEOUT_MS,
|
|
97
73
|
maxBuffer: 10 * 1024 * 1024,
|
|
@@ -178,7 +154,48 @@ const ensureCommitAvailable = async (repoPath, commit, options) => {
|
|
|
178
154
|
});
|
|
179
155
|
};
|
|
180
156
|
const patternHasGlob = (pattern) => pattern.includes("*") || pattern.includes("?") || pattern.includes("[");
|
|
181
|
-
const
|
|
157
|
+
/**
 * Expand a shell-style brace pattern (e.g. "docs/{a,b}/**") into the list of
 * concrete patterns it denotes; sequential/nested groups multiply out.
 * A group whose entries are all empty after trimming (e.g. "{,}") is kept
 * verbatim rather than expanded.
 *
 * @param {string} pattern - pattern that may contain `{x,y,...}` groups
 * @param {number} [maxExpansions=MAX_BRACE_EXPANSIONS] - safety cap on the
 *   number of produced patterns (guards against combinatorial blow-up)
 * @returns {string[]} expanded patterns in left-to-right expansion order
 * @throws {Error} when expansion would exceed `maxExpansions` patterns
 */
const expandBracePattern = (pattern, maxExpansions = MAX_BRACE_EXPANSIONS) => {
  const results = [];
  // Capped push, shared by both terminal cases (was duplicated inline).
  const emit = (value) => {
    if (results.length >= maxExpansions) {
      throw new Error(
        `Brace expansion exceeded ${maxExpansions} patterns for '${pattern}'.`
      );
    }
    results.push(value);
  };
  const expand = (value) => {
    // Lazy prefix matches the FIRST brace group, keeping expansion left-to-right.
    const braceMatch = value.match(/^(.*?){([^}]+)}(.*)$/);
    if (!braceMatch) {
      emit(value);
      return;
    }
    const [, prefix, values, suffix] = braceMatch;
    const valueList = values.split(",").map((entry) => entry.trim()).filter((entry) => entry.length > 0);
    if (valueList.length === 0) {
      // Degenerate group (only empty entries): keep the original text.
      emit(value);
      return;
    }
    for (const entry of valueList) {
      expand(`${prefix}${entry}${suffix}`);
    }
  };
  expand(pattern);
  return results;
};
|
|
189
|
+
/**
 * Normalize sparse-checkout include patterns: convert backslashes to forward
 * slashes, drop empty entries, and expand any brace groups. A nullish input
 * yields an empty list.
 */
const normalizeSparsePatterns = (include) =>
  (include ?? [])
    .map((pattern) => pattern.replace(/\\/g, "/"))
    .filter((pattern) => pattern.length > 0)
    .flatMap((pattern) => expandBracePattern(pattern));
|
|
182
199
|
const isDirectoryLiteral = (pattern) => pattern.endsWith("/");
|
|
183
200
|
const toNoConePattern = (pattern) => {
|
|
184
201
|
if (!patternHasGlob(pattern) && isDirectoryLiteral(pattern)) {
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
// Git executable to invoke. DOCS_CACHE_GIT_COMMAND overrides it (e.g. to point
// at a wrapper or pinned binary); an unset or empty value falls back to the
// "git" found on PATH.
const resolveGitCommand = () => process.env.DOCS_CACHE_GIT_COMMAND || "git";
|
|
8
|
+
/**
 * Build the environment for spawned git processes: start from the current
 * process env, normalize PATH/PATHEXT casing (Windows exposes `Path`), and
 * force non-interactive git behaviour (no terminal prompts, no system config,
 * and a failing askpass on POSIX so credential prompts can never block).
 */
const buildGitEnv = () => {
  const pathValue = process.env.PATH ?? process.env.Path;
  const pathExtValue =
    process.env.PATHEXT ?? (process.platform === "win32" ? ".COM;.EXE;.BAT;.CMD" : void 0);
  const pathOverrides = pathValue ? { PATH: pathValue, Path: pathValue } : {};
  const pathExtOverrides = pathExtValue ? { PATHEXT: pathExtValue } : {};
  // On POSIX, point GIT_ASKPASS at a command that always fails so git errors
  // out instead of prompting; Windows keeps its default behaviour.
  const askpassOverride =
    process.platform === "win32" ? {} : { GIT_ASKPASS: "/bin/false" };
  return {
    ...process.env,
    ...pathOverrides,
    ...pathExtOverrides,
    HOME: process.env.HOME,
    USER: process.env.USER,
    USERPROFILE: process.env.USERPROFILE,
    TMPDIR: process.env.TMPDIR,
    TMP: process.env.TMP,
    TEMP: process.env.TEMP,
    SYSTEMROOT: process.env.SYSTEMROOT,
    WINDIR: process.env.WINDIR,
    SSH_AUTH_SOCK: process.env.SSH_AUTH_SOCK,
    SSH_AGENT_PID: process.env.SSH_AGENT_PID,
    HTTP_PROXY: process.env.HTTP_PROXY,
    HTTPS_PROXY: process.env.HTTPS_PROXY,
    NO_PROXY: process.env.NO_PROXY,
    GIT_TERMINAL_PROMPT: "0",
    GIT_CONFIG_NOSYSTEM: "1",
    ...askpassOverride
  };
};
|
|
33
|
+
export { buildGitEnv, resolveGitCommand };
|
|
@@ -1,7 +1,6 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import {
|
|
1
|
+
import { execa } from "execa";
|
|
2
|
+
import { buildGitEnv, resolveGitCommand } from "#git/git-env";
|
|
3
3
|
import { redactRepoUrl } from "#git/redact";
|
|
4
|
-
const execFileAsync = promisify(execFile);
|
|
5
4
|
const DEFAULT_TIMEOUT_MS = 3e4;
|
|
6
5
|
const BLOCKED_PROTOCOLS = /* @__PURE__ */ new Set(["file:", "ftp:", "data:", "javascript:"]);
|
|
7
6
|
const assertAllowedProtocol = (repo) => {
|
|
@@ -65,12 +64,13 @@ export const resolveRemoteCommit = async (params) => {
|
|
|
65
64
|
enforceHostAllowlist(params.repo, params.allowHosts);
|
|
66
65
|
const repoLabel = redactRepoUrl(params.repo);
|
|
67
66
|
params.logger?.(`git ls-remote ${repoLabel} ${params.ref}`);
|
|
68
|
-
const { stdout } = await
|
|
69
|
-
|
|
67
|
+
const { stdout } = await execa(
|
|
68
|
+
resolveGitCommand(),
|
|
70
69
|
["ls-remote", params.repo, params.ref],
|
|
71
70
|
{
|
|
72
71
|
timeout: params.timeoutMs ?? DEFAULT_TIMEOUT_MS,
|
|
73
|
-
maxBuffer: 1024 * 1024
|
|
72
|
+
maxBuffer: 1024 * 1024,
|
|
73
|
+
env: buildGitEnv()
|
|
74
74
|
}
|
|
75
75
|
);
|
|
76
76
|
const resolvedCommit = parseLsRemote(stdout);
|
package/dist/esm/types/sync.d.ts
CHANGED
package/package.json
CHANGED
|
@@ -1,139 +1,136 @@
|
|
|
1
1
|
{
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
]
|
|
138
|
-
}
|
|
139
|
-
}
|
|
2
|
+
"name": "docs-cache",
|
|
3
|
+
"private": false,
|
|
4
|
+
"type": "module",
|
|
5
|
+
"version": "0.5.6",
|
|
6
|
+
"description": "CLI for deterministic local caching of external documentation for agents and tools",
|
|
7
|
+
"author": "Frederik Bosch",
|
|
8
|
+
"license": "MIT",
|
|
9
|
+
"homepage": "https://github.com/fbosch/docs-cache#readme",
|
|
10
|
+
"repository": {
|
|
11
|
+
"type": "git",
|
|
12
|
+
"url": "https://github.com/fbosch/docs-cache.git"
|
|
13
|
+
},
|
|
14
|
+
"bugs": {
|
|
15
|
+
"url": "https://github.com/fbosch/docs-cache/issues"
|
|
16
|
+
},
|
|
17
|
+
"keywords": [
|
|
18
|
+
"docs",
|
|
19
|
+
"documentation",
|
|
20
|
+
"cache",
|
|
21
|
+
"agent",
|
|
22
|
+
"ai",
|
|
23
|
+
"git",
|
|
24
|
+
"cli"
|
|
25
|
+
],
|
|
26
|
+
"sideEffects": false,
|
|
27
|
+
"engines": {
|
|
28
|
+
"node": ">=18"
|
|
29
|
+
},
|
|
30
|
+
"bin": {
|
|
31
|
+
"docs-cache": "./bin/docs-cache.mjs"
|
|
32
|
+
},
|
|
33
|
+
"files": [
|
|
34
|
+
"bin",
|
|
35
|
+
"dist/cli.mjs",
|
|
36
|
+
"dist/esm/**/*.mjs",
|
|
37
|
+
"dist/esm/**/*.d.ts",
|
|
38
|
+
"dist/lock.mjs",
|
|
39
|
+
"dist/shared/*.mjs",
|
|
40
|
+
"README.md",
|
|
41
|
+
"LICENSE"
|
|
42
|
+
],
|
|
43
|
+
"imports": {
|
|
44
|
+
"#cache/*": {
|
|
45
|
+
"types": "./dist/esm/cache/*.d.ts",
|
|
46
|
+
"default": "./dist/esm/cache/*.mjs"
|
|
47
|
+
},
|
|
48
|
+
"#cli/*": {
|
|
49
|
+
"types": "./dist/esm/cli/*.d.ts",
|
|
50
|
+
"default": "./dist/esm/cli/*.mjs"
|
|
51
|
+
},
|
|
52
|
+
"#commands/*": {
|
|
53
|
+
"types": "./dist/esm/commands/*.d.ts",
|
|
54
|
+
"default": "./dist/esm/commands/*.mjs"
|
|
55
|
+
},
|
|
56
|
+
"#core/*": {
|
|
57
|
+
"types": "./dist/esm/*.d.ts",
|
|
58
|
+
"default": "./dist/esm/*.mjs"
|
|
59
|
+
},
|
|
60
|
+
"#config": {
|
|
61
|
+
"types": "./dist/esm/config/index.d.ts",
|
|
62
|
+
"default": "./dist/esm/config/index.mjs"
|
|
63
|
+
},
|
|
64
|
+
"#config/*": {
|
|
65
|
+
"types": "./dist/esm/config/*.d.ts",
|
|
66
|
+
"default": "./dist/esm/config/*.mjs"
|
|
67
|
+
},
|
|
68
|
+
"#git/*": {
|
|
69
|
+
"types": "./dist/esm/git/*.d.ts",
|
|
70
|
+
"default": "./dist/esm/git/*.mjs"
|
|
71
|
+
},
|
|
72
|
+
"#types/*": {
|
|
73
|
+
"types": "./dist/esm/types/*.d.ts",
|
|
74
|
+
"default": "./dist/esm/types/*.mjs"
|
|
75
|
+
}
|
|
76
|
+
},
|
|
77
|
+
"dependencies": {
|
|
78
|
+
"@clack/prompts": "^1.0.0",
|
|
79
|
+
"cac": "^6.7.14",
|
|
80
|
+
"cli-truncate": "^4.0.0",
|
|
81
|
+
"execa": "^9.6.1",
|
|
82
|
+
"fast-glob": "^3.3.2",
|
|
83
|
+
"log-update": "^7.0.2",
|
|
84
|
+
"picocolors": "^1.1.1",
|
|
85
|
+
"picomatch": "^4.0.3",
|
|
86
|
+
"zod": "^4.3.6"
|
|
87
|
+
},
|
|
88
|
+
"devDependencies": {
|
|
89
|
+
"@biomejs/biome": "^2.3.14",
|
|
90
|
+
"@size-limit/file": "^12.0.0",
|
|
91
|
+
"@types/node": "^25.2.0",
|
|
92
|
+
"bumpp": "^10.3.2",
|
|
93
|
+
"c8": "^10.1.3",
|
|
94
|
+
"jiti": "^2.5.1",
|
|
95
|
+
"lint-staged": "^16.2.7",
|
|
96
|
+
"simple-git-hooks": "^2.13.1",
|
|
97
|
+
"size-limit": "^12.0.0",
|
|
98
|
+
"tinybench": "^6.0.0",
|
|
99
|
+
"ts-complex": "^1.0.0",
|
|
100
|
+
"typescript": "^5.9.3",
|
|
101
|
+
"unbuild": "^3.6.1"
|
|
102
|
+
},
|
|
103
|
+
"size-limit": [
|
|
104
|
+
{
|
|
105
|
+
"path": "dist/cli.mjs",
|
|
106
|
+
"limit": "10 kB"
|
|
107
|
+
}
|
|
108
|
+
],
|
|
109
|
+
"complexity": {
|
|
110
|
+
"maxCyclomatic": 20,
|
|
111
|
+
"minMaintainability": 60,
|
|
112
|
+
"top": 10
|
|
113
|
+
},
|
|
114
|
+
"simple-git-hooks": {
|
|
115
|
+
"pre-commit": "pnpm lint-staged && pnpm typecheck"
|
|
116
|
+
},
|
|
117
|
+
"lint-staged": {
|
|
118
|
+
"*.{js,ts,cjs,mjs,d.cts,d.mts,jsx,tsx,json,jsonc}": [
|
|
119
|
+
"biome check --write --no-errors-on-unmatched"
|
|
120
|
+
]
|
|
121
|
+
},
|
|
122
|
+
"scripts": {
|
|
123
|
+
"build": "unbuild",
|
|
124
|
+
"dev": "unbuild --stub",
|
|
125
|
+
"lint": "biome check .",
|
|
126
|
+
"release": "pnpm run lint && pnpm run typecheck && bumpp && pnpm publish --access public",
|
|
127
|
+
"test": "pnpm build && node --test",
|
|
128
|
+
"test:coverage": "pnpm build && c8 --include dist --exclude bin --reporter=text node --test",
|
|
129
|
+
"bench": "pnpm build && node scripts/benchmarks/run.mjs",
|
|
130
|
+
"complexity": "node scripts/complexity/run.mjs",
|
|
131
|
+
"schema:build": "node scripts/generate-schema.mjs",
|
|
132
|
+
"size": "size-limit",
|
|
133
|
+
"test:watch": "node --test --watch",
|
|
134
|
+
"typecheck": "tsc --noEmit"
|
|
135
|
+
}
|
|
136
|
+
}
|